├── .flake8 ├── .github └── workflows │ └── ci.yml ├── .gitignore ├── .pre-commit-config.yaml ├── .pylintrc ├── .style.yapf ├── LICENSE ├── README.rst ├── docs ├── Makefile ├── requirements.txt └── source │ ├── _static │ ├── custom.css │ ├── logo.svg │ └── mincepy.svg │ ├── apidoc.rst │ ├── basics.rst │ ├── conf.py │ ├── examples │ ├── quick-start.ipynb │ └── restaurants.ipynb │ └── index.rst ├── pyproject.toml ├── release.sh ├── src └── pyos │ ├── __init__.py │ ├── cli.py │ ├── config.py │ ├── db │ ├── __init__.py │ ├── constants.py │ ├── database.py │ ├── fs.py │ ├── lib.py │ ├── migrations.py │ ├── queries.py │ ├── schema.py │ └── utils.py │ ├── exceptions.py │ ├── fmt.py │ ├── fs │ ├── __init__.py │ ├── nodes.py │ └── utils.py │ ├── glob.py │ ├── lib.py │ ├── os │ ├── __init__.py │ ├── nodb.py │ ├── path.py │ ├── types.py │ └── withdb.py │ ├── pathlib.py │ ├── provides.py │ ├── psh │ ├── __init__.py │ ├── argparse_types.py │ ├── cmds │ │ ├── __init__.py │ │ ├── cat.py │ │ ├── cd.py │ │ ├── connect.py │ │ ├── find.py │ │ ├── history.py │ │ ├── load.py │ │ ├── locate.py │ │ ├── log.py │ │ ├── ls.py │ │ ├── meta.py │ │ ├── mkdir.py │ │ ├── mv.py │ │ ├── oid.py │ │ ├── pwd.py │ │ ├── rm.py │ │ ├── rsync.py │ │ ├── save.py │ │ └── tree.py │ ├── completion.py │ ├── constants.py │ ├── flags.py │ ├── shell.py │ └── utils.py │ ├── psh_lib │ ├── __init__.py │ ├── opts.py │ └── utils.py │ ├── pyos.py │ ├── representers.py │ ├── results.py │ ├── utils.py │ └── version.py └── test ├── __init__.py ├── conftest.py ├── db ├── test_db.py └── test_fs.py ├── fs ├── __init__.py ├── test_dir_nodes.py ├── test_nodes.py ├── test_path.py └── test_utils.py ├── os ├── test_os.py ├── test_os_path.py └── test_os_path_python.py ├── pathlib ├── test_dirs.py └── test_path.py ├── psh ├── __init__.py ├── conftest.py ├── test_cat.py ├── test_cd.py ├── test_completion.py ├── test_find.py ├── test_load.py ├── test_locate.py ├── test_ls.py ├── test_meta.py ├── test_mkdir.py ├── test_mv.py ├── 
test_oid.py ├── test_piping.py ├── test_rm.py ├── test_rsync.py ├── test_save.py └── test_tree.py ├── psh_lib ├── test_opts.py └── test_results.py ├── support.py ├── test_glob.py └── utils.py /.flake8: -------------------------------------------------------------------------------- 1 | [flake8] 2 | ignore = E203, E266, E501, W503, F403, F401, E704, F821, F722, W504 3 | max-line-length = 100 4 | max-complexity = 18 5 | select = B,C,E,F,W,T4,B9 6 | exclude = 7 | .git 8 | __pycache__ 9 | -------------------------------------------------------------------------------- /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | name: continuous-integration 2 | 3 | on: [push, pull_request] 4 | 5 | 6 | jobs: 7 | 8 | pre-commit: 9 | 10 | runs-on: ubuntu-latest 11 | timeout-minutes: 30 12 | 13 | steps: 14 | - uses: actions/checkout@v2 15 | 16 | - name: Set up Python 3.11 17 | uses: actions/setup-python@v2 18 | with: 19 | python-version: 3.11 20 | 21 | - name: Install python dependencies 22 | run: | 23 | pip install -e .[dev,gui] 24 | pip freeze 25 | - name: Run pre-commit 26 | run: 27 | pre-commit run --all-files || ( git status --short ; git diff ; exit 1 ) 28 | 29 | 30 | tests: 31 | runs-on: ubuntu-latest 32 | 33 | strategy: 34 | fail-fast: false 35 | matrix: 36 | python-version: ['3.9', '3.10', '3.11', '3.12'] 37 | 38 | steps: 39 | - uses: actions/checkout@v2 40 | 41 | - name: Set up Python ${{ matrix.python-version }} 42 | uses: actions/setup-python@v2 43 | with: 44 | python-version: ${{ matrix.python-version }} 45 | 46 | - name: Install python dependencies 47 | run: pip install -e .[dev,gui] 48 | 49 | - name: Create MongoDB Docker container 50 | id: build_mongo_docker 51 | uses: DigiPie/mongo-action@v1.0.1 52 | with: 53 | port: 27017 54 | 55 | - name: Run pytest 56 | run: pytest --cov=pyos -sv -p no:nb_regression test 57 | 58 | - name: Create xml coverage 59 | run: coverage xml 60 | 61 | - name: Upload coverage to 
Codecov 62 | if: github.repository == 'muhrin/pyos' 63 | uses: codecov/codecov-action@v1 64 | with: 65 | file: ./coverage.xml 66 | name: pyos 67 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | .pytest_cache 6 | 7 | # C extensions 8 | *.so 9 | 10 | # Distribution / packaging 11 | .Python 12 | env/ 13 | build/ 14 | develop-eggs/ 15 | dist/ 16 | downloads/ 17 | eggs/ 18 | .eggs/ 19 | lib/ 20 | lib64/ 21 | parts/ 22 | sdist/ 23 | var/ 24 | *.egg-info/ 25 | .installed.cfg 26 | *.egg 27 | 28 | # PyInstaller 29 | # Usually these files are written by a python script from a template 30 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 31 | *.manifest 32 | *.spec 33 | 34 | # Installer logs 35 | pip-log.txt 36 | pip-delete-this-directory.txt 37 | 38 | # Unit test / coverage reports 39 | htmlcov/ 40 | .tox/ 41 | .coverage 42 | .coverage.* 43 | .cache 44 | nosetests.xml 45 | coverage.xml 46 | *,cover 47 | .hypothesis/ 48 | 49 | # Translations 50 | *.mo 51 | *.pot 52 | 53 | # Django stuff: 54 | *.log 55 | 56 | # Sphinx documentation 57 | docs/_build/ 58 | 59 | # PyBuilder 60 | target/ 61 | 62 | #Ipython Notebook 63 | .ipynb_checkpoints 64 | 65 | 66 | # JetBrains IDE stuff 67 | .idea/ 68 | 69 | # Virtual environment directories 70 | /venv*/ 71 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | repos: 2 | - repo: https://github.com/pre-commit/pre-commit-hooks 3 | rev: v5.0.0 4 | hooks: 5 | - id: end-of-file-fixer 6 | - id: fix-encoding-pragma 7 | args: [ --remove ] 8 | - id: mixed-line-ending 9 | - id: trailing-whitespace 10 | 11 | - repo: https://github.com/ikamensh/flynt/ 12 | rev: '1.0.1' 
13 | hooks: 14 | - id: flynt 15 | 16 | - repo: https://github.com/psf/black 17 | rev: 25.1.0 18 | hooks: 19 | - id: black 20 | exclude: (.*)/migrations 21 | 22 | - repo: https://github.com/pycqa/flake8 23 | rev: 7.2.0 24 | hooks: 25 | - id: flake8 26 | 27 | - repo: https://github.com/pycqa/isort 28 | rev: '6.0.1' 29 | hooks: 30 | - id: isort 31 | 32 | - repo: https://github.com/PyCQA/bandit 33 | rev: 1.8.3 34 | hooks: 35 | - id: bandit 36 | args: [ "-c", "pyproject.toml" ] 37 | additional_dependencies: [ '.[toml]' ] 38 | 39 | - repo: https://github.com/PyCQA/pylint 40 | # Configuration help can be found here: 41 | # https://pylint.pycqa.org/en/latest/user_guide/installation/pre-commit-integration.html 42 | rev: v3.3.6 43 | hooks: 44 | - id: pylint 45 | alias: pylint-with-spelling 46 | stages: [ manual ] 47 | language: system 48 | types: [ python ] 49 | require_serial: true 50 | exclude: 51 | (?x)^( 52 | docs/.*| 53 | test/.* 54 | )$ 55 | 56 | - repo: https://github.com/commitizen-tools/commitizen 57 | rev: v4.4.1 58 | hooks: 59 | - id: commitizen 60 | stages: [ commit-msg ] 61 | 62 | - repo: https://github.com/srstevenson/nb-clean 63 | rev: 4.0.1 64 | hooks: 65 | - id: nb-clean 66 | -------------------------------------------------------------------------------- /.style.yapf: -------------------------------------------------------------------------------- 1 | [style] 2 | based_on_style = google 3 | column_limit = 100 4 | -------------------------------------------------------------------------------- /README.rst: -------------------------------------------------------------------------------- 1 | .. _documentation: https://pyos.readthedocs.io/en/latest/ 2 | 3 | pyOS 4 | ==== 5 | 6 | .. image:: https://codecov.io/gh/muhrin/pyos/branch/develop/graph/badge.svg 7 | :target: https://codecov.io/gh/muhrin/pyos 8 | :alt: Coverage 9 | 10 | .. 
image:: https://travis-ci.com/muhrin/pyos.svg?branch=master 11 | :target: https://travis-ci.com/github/muhrin/pyos 12 | :alt: Travis CI 13 | 14 | .. image:: https://img.shields.io/pypi/v/pyos.svg 15 | :target: https://pypi.python.org/pypi/pyos/ 16 | :alt: Latest Version 17 | 18 | .. image:: https://img.shields.io/pypi/wheel/pyos.svg 19 | :target: https://pypi.python.org/pypi/pyos/ 20 | 21 | .. image:: https://img.shields.io/pypi/pyversions/pyos.svg 22 | :target: https://pypi.python.org/pypi/pyos/ 23 | 24 | .. image:: https://img.shields.io/pypi/l/pyos.svg 25 | :target: https://pypi.python.org/pypi/pyos/ 26 | 27 | A fresh way to interact with your python objects as though they were files on your filesystem. 28 | 29 | Installation 30 | ------------ 31 | 32 | As easy as: 33 | 34 | 1. Install MongoDB 35 | 36 | Ubuntu: 37 | 38 | 39 | .. code-block:: shell 40 | 41 | sudo apt install mongodb 42 | 43 | 2. Install pyos: 44 | 45 | .. code-block:: shell 46 | 47 | pip install pyos 48 | 49 | 3. Jump in to the shell: 50 | 51 | .. code-block:: shell 52 | 53 | > ipython 54 | 55 | In [1]: from pyos.pyos import * 56 | In [2]: ls() 57 | 58 | 59 | From here you can `save()` objects, use familiar linux commands (`ls()`, `mv()`, `find()`, etc) and a whole lot more. Head over to the documentation_ to find out how. 60 | -------------------------------------------------------------------------------- /docs/Makefile: -------------------------------------------------------------------------------- 1 | # Minimal makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line. 5 | SPHINXOPTS = 6 | SPHINXBUILD = sphinx-build 7 | SPHINXPROJ = pyos 8 | SOURCEDIR = source 9 | BUILDDIR = build 10 | 11 | # Put it first so that "make" without argument is like "make help". 
12 | help: 13 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 14 | 15 | .PHONY: help Makefile 16 | 17 | # Catch-all target: route all unknown targets to Sphinx using the new 18 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). 19 | %: Makefile 20 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 21 | -------------------------------------------------------------------------------- /docs/requirements.txt: -------------------------------------------------------------------------------- 1 | # ONLY FOR ReadTheDocs 2 | .[dev] 3 | autodoc 4 | nbsphinx 5 | -------------------------------------------------------------------------------- /docs/source/_static/custom.css: -------------------------------------------------------------------------------- 1 | @import url('https://fonts.googleapis.com/css?family=Roboto|Roboto+Condensed|Roboto+Mono|Roboto+Slab'); 2 | 3 | h1.logo { 4 | text-align: center !important; 5 | } 6 | -------------------------------------------------------------------------------- /docs/source/apidoc.rst: -------------------------------------------------------------------------------- 1 | API Reference 2 | ============= 3 | 4 | 5 | ``pyos`` 6 | -------- 7 | 8 | 9 | .. automodule:: pyos 10 | :members: 11 | 12 | 13 | ``pyos.db`` 14 | ----------- 15 | 16 | 17 | .. automodule:: pyos.db 18 | :members: 19 | 20 | 21 | ``pyos.fs`` 22 | ----------- 23 | 24 | 25 | .. automodule:: pyos.fs 26 | :members: 27 | 28 | 29 | ``pyos.os`` 30 | ----------- 31 | 32 | 33 | .. automodule:: pyos.os 34 | :members: 35 | 36 | 37 | 38 | ``pyos.psh`` 39 | ------------ 40 | 41 | 42 | .. automodule:: pyos.psh 43 | :members: 44 | 45 | 46 | ``pyos.psh_lib`` 47 | ---------------- 48 | 49 | 50 | .. 
automodule:: pyos.psh_lib 51 | :members: 52 | -------------------------------------------------------------------------------- /docs/source/basics.rst: -------------------------------------------------------------------------------- 1 | Get to know pyOS 2 | ++++++++++++++++ 3 | 4 | The object system 5 | ================= 6 | 7 | The object system structure is designed to emulate many of the features familiar in disk based filesystems. Python objects are stored in directories, with the root of the filesystem starting at '``/``'. 8 | 9 | Objects 10 | ------- 11 | 12 | Objects emulate files as commonly found on disk filesystems. Some details: 13 | 14 | - Every object has a name and exists in a directory. 15 | - Object names within a directory are unique. 16 | - Object names can be any valid string, however it is advantageous to use valid Python variable names as they can then be used in tab-completion helpers. 17 | 18 | 19 | Directories 20 | ----------- 21 | 22 | Directories in pyOS provide a familiar way to organise your objects, just as you would in your filesystem. 23 | 24 | 25 | Design decisions 26 | ---------------- 27 | 28 | The guiding principle was to produce something as familiar as possible, but data integrity, performance and simplicity were prioritised when they would conflict with familiarity. Performance considerations were prioritised in the following order: 29 | 30 | 1. Querying. Finding objects (by state or metadata) should be as fast as possible, including queries involving directory starting points e.g., find all objects with some metadata in the directory ``/home/`` and below. 31 | 2. Insertions. Inserting objects, especially in batches, should be very fast. 32 | 3. Deletions. Deleting objects can be somewhat slow compared to other operations as it is expected to be performed less frequently. 33 | 34 | These help to explain the fact that a directory and an object can have the same name in the same directory.
The current path of an object is stored in an object's metadata (as a directory and an object name) and there is no explicit representation of a directory (hence why an empty directory cannot exist). 35 | To understand how this results from the above goals, consider the situation of having explicit directory objects. 36 | 37 | To save a new object in a directory would require loading the directory object, adding our object to it, and re-saving it. 38 | In the meantime, if another client is also saving to that directory we would be blocked until they were done. 39 | This incurs a performance hit when inserting, going against goal 2) above, and makes the code more difficult as an explicit locking mechanism must be put in place. 40 | 41 | Ah, but "how can we prevent two objects with the same name existing in the same folder then?", you ask. Well that's done by having a simple unique index on the metadata, ensuring that no two objects can share the same directory and name. 42 | This metadata-based structure also comes with significant query performance benefits because a query looking for data related to particular paths can be performed by making one request to the database that puts a condition on the directory key instead of recursively having to look for a folder, and then querying any subfolders, and their subfolders, etc. all as separate queries. 43 | -------------------------------------------------------------------------------- /docs/source/conf.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # 3 | # pyos documentation build configuration file, created by 4 | # sphinx-quickstart on Fri Mar 31 17:03:20 2017. 5 | # 6 | # This file is execfile()d with the current directory set to its 7 | # containing dir. 8 | # 9 | # Note that not all possible configuration values are present in this 10 | # autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.

from importlib.machinery import SourceFileLoader

# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
import os
import sys

# NOTE: __file__ must not be quoted — os.path.dirname("__file__") is the
# dirname of a literal string (always ""), which silently resolved to the
# current working directory instead of this file's directory.
sys.path.insert(0, os.path.abspath(os.path.dirname(__file__)))

# Load the package's version module straight from the sources.  The project
# uses a src/ layout (the tree shows src/pyos/version.py), so relative to
# docs/source/ the file lives at ../../src/pyos/version.py — the previous
# "../../pyos/version.py" pointed at a non-existent path.
module = SourceFileLoader(
    "version",
    os.path.join(
        os.path.dirname(os.path.abspath(__file__)), "..", "..", "src", "pyos", "version.py"
    ),
).load_module()

autoclass_content = "both"

# -- General configuration ------------------------------------------------

# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'

# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
    "sphinx.ext.autodoc",
    "sphinx.ext.doctest",
    "sphinx.ext.coverage",
    "sphinx.ext.viewcode",
    "sphinx.ext.autosummary",
    "nbsphinx",
]

# Add any paths that contain templates here, relative to this directory.
templates_path = ["_templates"]

# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = ".rst"

# The master toctree document.
master_doc = "index"

# General information about the project.
64 | project = "pyOS" 65 | copyright = "2020, Martin Uhrin" 66 | author = "Martin Uhrin" 67 | 68 | # The version info for the project you're documenting, acts as replacement for 69 | # |version| and |release|, also used in various other places throughout the 70 | # built documents. 71 | # 72 | # The short X.Y version. 73 | version = ".".join(map(str, module.version_info[:-1])) 74 | # The full version, including alpha/beta/rc tags. 75 | release = ".".join(map(str, module.version_info)) 76 | 77 | # The language for content autogenerated by Sphinx. Refer to documentation 78 | # for a list of supported languages. 79 | # 80 | # This is also used if you do content translation via gettext catalogs. 81 | # Usually you set "language" from the command line for these cases. 82 | language = None 83 | 84 | # List of patterns, relative to source directory, that match files and 85 | # directories to ignore when looking for source files. 86 | # This patterns also effect to html_static_path and html_extra_path 87 | exclude_patterns = [] 88 | 89 | # The name of the Pygments (syntax highlighting) style to use. 90 | pygments_style = "sphinx" 91 | 92 | # If true, `todo` and `todoList` produce output, else they produce nothing. 93 | todo_include_todos = False 94 | 95 | # -- Options for HTML output ---------------------------------------------- 96 | 97 | # The theme to use for HTML and HTML Help pages. See the documentation for 98 | # a list of builtin themes. 99 | # 100 | html_theme = "alabaster" 101 | 102 | # Theme options are theme-specific and customize the look and feel of a theme 103 | # further. For a list of options available for each theme, see the 104 | # documentation. 
105 | # 106 | # html_theme_options = {} 107 | html_theme_options = { 108 | "analytics_id": "UA-17296547-3", 109 | "codecov_button": True, 110 | "description": "Python object storage with versioning made simple", 111 | "github_button": True, 112 | "github_repo": "pyos", 113 | "github_type": "star", 114 | "github_user": "muhrin", 115 | "travis_button": True, 116 | "logo": "logo.svg", 117 | "logo_name": True, 118 | } 119 | 120 | # Add any paths that contain custom static files (such as style sheets) here, 121 | # relative to this directory. They are copied after the builtin static files, 122 | # so a file named "default.css" will overwrite the builtin "default.css". 123 | html_static_path = ["_static"] 124 | 125 | # -- Options for HTMLHelp output ------------------------------------------ 126 | 127 | # Output file base name for HTML help builder. 128 | htmlhelp_basename = "pyosdoc" 129 | 130 | # -- Options for LaTeX output --------------------------------------------- 131 | 132 | latex_elements = { 133 | # The paper size ('letterpaper' or 'a4paper'). 134 | # 135 | # 'papersize': 'a4paper', 136 | # The font size ('10pt', '11pt' or '12pt'). 137 | # 138 | # 'pointsize': '12pt', 139 | # Additional stuff for the LaTeX preamble. 140 | # 141 | # 'preamble': '', 142 | # Latex figure (float) alignment 143 | # 144 | # 'figure_align': 'htbp', 145 | } 146 | 147 | # Grouping the document tree into LaTeX files. List of tuples 148 | # (source start file, target name, title, 149 | # author, documentclass [howto, manual, or own class]). 150 | latex_documents = [ 151 | (master_doc, "pyos.tex", "pyOS Documentation", "Martin Uhrin", "manual"), 152 | ] 153 | 154 | # -- Options for manual page output --------------------------------------- 155 | 156 | # One entry per manual page. List of tuples 157 | # (source start file, name, description, authors, manual section). 
158 | man_pages = [(master_doc, "pyos", "pyOS Documentation", [author], 1)] 159 | 160 | # -- Options for Texinfo output ------------------------------------------- 161 | 162 | # Grouping the document tree into Texinfo files. List of tuples 163 | # (source start file, target name, title, author, 164 | # dir menu entry, description, category) 165 | texinfo_documents = [ 166 | ( 167 | master_doc, 168 | "pyos", 169 | "pyOS Documentation", 170 | author, 171 | "pyos", 172 | "One line description of project.", 173 | "Miscellaneous", 174 | ), 175 | ] 176 | 177 | # -- Options for Epub output ---------------------------------------------- 178 | 179 | # Bibliographic Dublin Core info. 180 | epub_title = project 181 | epub_author = author 182 | epub_publisher = author 183 | epub_copyright = copyright 184 | 185 | # The unique identifier of the text. This can be a ISBN number 186 | # or the project homepage. 187 | # 188 | # epub_identifier = '' 189 | 190 | # A unique identification for the text. 191 | # 192 | # epub_uid = '' 193 | 194 | # A list of files that should not be packed into the epub file. 195 | epub_exclude_files = ["search.html"] 196 | 197 | # 198 | # html_logo = 'logo.svg' 199 | # html_favicon = 'icon.png' 200 | # 201 | html_sidebars = { 202 | "**": [ 203 | "about.html", 204 | "navigation.html", 205 | "searchbox.html", 206 | ] 207 | } 208 | 209 | autosummary_generate = True 210 | -------------------------------------------------------------------------------- /docs/source/examples/quick-start.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "Quick Start\n", 8 | "===========\n", 9 | "\n", 10 | "This is a quick introduction to PyOS, see [the restaurants example](restaurants.ipynb) for a more complete example." 
11 | ] 12 | }, 13 | { 14 | "cell_type": "code", 15 | "execution_count": null, 16 | "metadata": {}, 17 | "outputs": [], 18 | "source": [ 19 | "from pyos.pyos import *\n", 20 | "from mincepy.testing import Car" 21 | ] 22 | }, 23 | { 24 | "cell_type": "markdown", 25 | "metadata": {}, 26 | "source": [ 27 | "Our ``Car`` class has two members, ``make`` and ``colour``, so let's save a couple in the current directory:" 28 | ] 29 | }, 30 | { 31 | "cell_type": "code", 32 | "execution_count": null, 33 | "metadata": {}, 34 | "outputs": [], 35 | "source": [ 36 | "pwd()" 37 | ] 38 | }, 39 | { 40 | "cell_type": "code", 41 | "execution_count": null, 42 | "metadata": {}, 43 | "outputs": [], 44 | "source": [ 45 | "ls()" 46 | ] 47 | }, 48 | { 49 | "cell_type": "code", 50 | "execution_count": null, 51 | "metadata": {}, 52 | "outputs": [], 53 | "source": [ 54 | "ferrari = Car('ferrari', 'red')\n", 55 | "save(ferrari, 'ferrari')\n", 56 | "skoda = Car('skoda', 'green')\n", 57 | "save(skoda, 'skoda')\n", 58 | "ls -l ()" 59 | ] 60 | }, 61 | { 62 | "cell_type": "markdown", 63 | "metadata": {}, 64 | "source": [ 65 | "Let's have a look at the cars:" 66 | ] 67 | }, 68 | { 69 | "cell_type": "code", 70 | "execution_count": null, 71 | "metadata": {}, 72 | "outputs": [], 73 | "source": [ 74 | "ls | cat" 75 | ] 76 | }, 77 | { 78 | "cell_type": "markdown", 79 | "metadata": {}, 80 | "source": [ 81 | "We see some additional properties of the objects used to store them in the database but our colours and makes are there.\n", 82 | "Now, let's move them to their own folder..." 
83 | ] 84 | }, 85 | { 86 | "cell_type": "code", 87 | "execution_count": null, 88 | "metadata": {}, 89 | "outputs": [], 90 | "source": [ 91 | "mv('ferrari', 'skoda', 'garage/')\n", 92 | "ls()" 93 | ] 94 | }, 95 | { 96 | "cell_type": "markdown", 97 | "metadata": {}, 98 | "source": [ 99 | "...and set some metadata" 100 | ] 101 | }, 102 | { 103 | "cell_type": "code", 104 | "execution_count": null, 105 | "metadata": {}, 106 | "outputs": [], 107 | "source": [ 108 | "meta -s ('garage/ferrari', reg='VD394') # '-s' for set\n", 109 | "meta -s ('garage/skoda', reg='N317')\n", 110 | "meta('garage/ferrari') # This gets our metadata" 111 | ] 112 | }, 113 | { 114 | "cell_type": "markdown", 115 | "metadata": {}, 116 | "source": [ 117 | "The metadata can be used to search:" 118 | ] 119 | }, 120 | { 121 | "cell_type": "code", 122 | "execution_count": null, 123 | "metadata": {}, 124 | "outputs": [], 125 | "source": [ 126 | "find(meta=dict(reg='VD394')) | locate" 127 | ] 128 | }, 129 | { 130 | "cell_type": "markdown", 131 | "metadata": {}, 132 | "source": [ 133 | "Finally, let's clean up." 
134 | ] 135 | }, 136 | { 137 | "cell_type": "code", 138 | "execution_count": null, 139 | "metadata": {}, 140 | "outputs": [], 141 | "source": [ 142 | " rm -r ('garage/')\n", 143 | " ls()" 144 | ] 145 | } 146 | ], 147 | "metadata": { 148 | "kernelspec": { 149 | "display_name": "Python 3", 150 | "language": "python", 151 | "name": "python3" 152 | }, 153 | "language_info": { 154 | "codemirror_mode": { 155 | "name": "ipython", 156 | "version": 2 157 | }, 158 | "file_extension": ".py", 159 | "mimetype": "text/x-python", 160 | "name": "python", 161 | "nbconvert_exporter": "python", 162 | "pygments_lexer": "ipython2" 163 | }, 164 | "pycharm": { 165 | "stem_cell": { 166 | "cell_type": "raw", 167 | "metadata": { 168 | "collapsed": false 169 | }, 170 | "source": [] 171 | } 172 | } 173 | }, 174 | "nbformat": 4, 175 | "nbformat_minor": 0 176 | } 177 | -------------------------------------------------------------------------------- /docs/source/examples/restaurants.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "Restaurants example\n", 8 | "===================\n", 9 | "\n", 10 | "This example takes you through some of the basics of PyOS and introduces you to some of the commands, syntax and \n", 11 | "concepts. So let's dive in, and enjoy! 
\n" 12 | ] 13 | }, 14 | { 15 | "cell_type": "code", 16 | "execution_count": null, 17 | "metadata": {}, 18 | "outputs": [], 19 | "source": [ 20 | "from pyos.pyos import *" 21 | ] 22 | }, 23 | { 24 | "cell_type": "code", 25 | "execution_count": null, 26 | "metadata": {}, 27 | "outputs": [], 28 | "source": [ 29 | "pwd()" 30 | ] 31 | }, 32 | { 33 | "cell_type": "code", 34 | "execution_count": null, 35 | "metadata": {}, 36 | "outputs": [], 37 | "source": [ 38 | "ls()" 39 | ] 40 | }, 41 | { 42 | "cell_type": "markdown", 43 | "metadata": {}, 44 | "source": [ 45 | "Ok, so we're in the `martin` folder but there's nothing in it. Let's download some data and store it in dictionaries." 46 | ] 47 | }, 48 | { 49 | "cell_type": "code", 50 | "execution_count": null, 51 | "metadata": {}, 52 | "outputs": [], 53 | "source": [ 54 | "import mincepy\n", 55 | "import urllib.request\n", 56 | "import json\n", 57 | "\n", 58 | "ids = []\n", 59 | "with urllib.request.urlopen('https://raw.githubusercontent.com/ozlerhakan/mongodb-json-files/master/datasets/restaurant.json') as url: \n", 60 | " for line, _ in zip(url, range(399)): \n", 61 | " data = json.loads(line.decode()) \n", 62 | " data.pop('_id') \n", 63 | " ids.append(mincepy.Dict(data).save())" 64 | ] 65 | }, 66 | { 67 | "cell_type": "code", 68 | "execution_count": null, 69 | "metadata": {}, 70 | "outputs": [], 71 | "source": [ 72 | "ls()" 73 | ] 74 | }, 75 | { 76 | "cell_type": "markdown", 77 | "metadata": {}, 78 | "source": [ 79 | "So what have we got? Let's ``cat`` a couple of our dictionaries to see:" 80 | ] 81 | }, 82 | { 83 | "cell_type": "code", 84 | "execution_count": null, 85 | "metadata": {}, 86 | "outputs": [], 87 | "source": [ 88 | "cat(ids[0], ids[1])" 89 | ] 90 | }, 91 | { 92 | "cell_type": "markdown", 93 | "metadata": {}, 94 | "source": [ 95 | "Wait a minute. What's with all the boxes?\n", 96 | "\n", 97 | "Pretty, isn't it? 
That's just the default way PyOS prints your objects, you can see that they're really just dicts by\n", 98 | "using:" 99 | ] 100 | }, 101 | { 102 | "cell_type": "code", 103 | "execution_count": null, 104 | "metadata": {}, 105 | "outputs": [], 106 | "source": [ 107 | "cat(ids[0], ids[1], representer=str)" 108 | ] 109 | }, 110 | { 111 | "cell_type": "markdown", 112 | "metadata": {}, 113 | "source": [ 114 | "Now let's give them meaningful names, converting spaces and forward slashes" 115 | ] 116 | }, 117 | { 118 | "cell_type": "code", 119 | "execution_count": null, 120 | "metadata": {}, 121 | "outputs": [], 122 | "source": [ 123 | "for resto in ls | load: \n", 124 | " mv(resto, resto['name'].replace(' ', '_').replace('/', '_')) \n", 125 | " \n", 126 | "ls()" 127 | ] 128 | }, 129 | { 130 | "cell_type": "markdown", 131 | "metadata": {}, 132 | "source": [ 133 | "The search functionality\n", 134 | "------------------------\n", 135 | "\n", 136 | "In the mood for curry in Edinburgh?" 137 | ] 138 | }, 139 | { 140 | "cell_type": "code", 141 | "execution_count": null, 142 | "metadata": {}, 143 | "outputs": [], 144 | "source": [ 145 | "cat(find(state={'type_of_food': 'Curry', 'address line 2': 'Edinburgh'}))" 146 | ] 147 | }, 148 | { 149 | "cell_type": "markdown", 150 | "metadata": {}, 151 | "source": [ 152 | "Let's organise our resto's a little more into folders" 153 | ] 154 | }, 155 | { 156 | "cell_type": "code", 157 | "execution_count": null, 158 | "metadata": {}, 159 | "outputs": [], 160 | "source": [ 161 | "for resto in ls | load:\n", 162 | " mv(resto, resto['type_of_food'] + '/') # All directories in PyOS end with a '/'\n", 163 | " \n", 164 | "ls()" 165 | ] 166 | }, 167 | { 168 | "cell_type": "markdown", 169 | "metadata": {}, 170 | "source": [ 171 | "Beautiful, let's just check:" 172 | ] 173 | }, 174 | { 175 | "cell_type": "code", 176 | "execution_count": null, 177 | "metadata": {}, 178 | "outputs": [], 179 | "source": [ 180 | "ls('Pizza/')" 181 | ] 182 | }, 183 | { 184 | 
"cell_type": "markdown", 185 | "metadata": {}, 186 | "source": [ 187 | "You know what, I'm vegetarian anyway - let's delete the chicken joints" 188 | ] 189 | }, 190 | { 191 | "cell_type": "code", 192 | "execution_count": null, 193 | "metadata": {}, 194 | "outputs": [], 195 | "source": [ 196 | "rm('Chicken/')" 197 | ] 198 | }, 199 | { 200 | "cell_type": "markdown", 201 | "metadata": {}, 202 | "source": [ 203 | "Whoops, I forgot the -r" 204 | ] 205 | }, 206 | { 207 | "cell_type": "code", 208 | "execution_count": null, 209 | "metadata": {}, 210 | "outputs": [], 211 | "source": [ 212 | "rm -r ('Chicken/')\n", 213 | "ls()" 214 | ] 215 | }, 216 | { 217 | "cell_type": "markdown", 218 | "metadata": {}, 219 | "source": [ 220 | "Using metadata\n", 221 | "--------------\n", 222 | "\n", 223 | "Let's add my personal ratings as metadata" 224 | ] 225 | }, 226 | { 227 | "cell_type": "code", 228 | "execution_count": null, 229 | "metadata": {}, 230 | "outputs": [], 231 | "source": [ 232 | "meta -u ('Curry/Ahmeds_Tandoori', martins_rating=5.5) # -u for 'update'\n", 233 | "meta -u ('Curry/Abida', martins_rating=6)\n", 234 | "meta -u ('Curry/Abduls', martins_rating=4) \n", 235 | "\n", 236 | "# Let's have a look\n", 237 | "meta('Curry/Ahmeds_Tandoori')" 238 | ] 239 | }, 240 | { 241 | "cell_type": "markdown", 242 | "metadata": {}, 243 | "source": [ 244 | "And now let's find the good ones" 245 | ] 246 | }, 247 | { 248 | "cell_type": "code", 249 | "execution_count": null, 250 | "metadata": {}, 251 | "outputs": [], 252 | "source": [ 253 | "find(meta=dict(martins_rating=queries.gt(5)))\n" 254 | ] 255 | }, 256 | { 257 | "cell_type": "markdown", 258 | "metadata": {}, 259 | "source": [ 260 | "You can also 'pipe' results to another callable:" 261 | ] 262 | }, 263 | { 264 | "cell_type": "code", 265 | "execution_count": null, 266 | "metadata": {}, 267 | "outputs": [], 268 | "source": [ 269 | "find(meta=dict(martins_rating=queries.gt(5))) | len\n" 270 | ] 271 | } 272 | ], 273 | "metadata": { 274 | 
"kernelspec": { 275 | "display_name": "PyCharm (pyos)", 276 | "language": "python", 277 | "name": "pycharm-3c4673b5" 278 | }, 279 | "language_info": { 280 | "codemirror_mode": { 281 | "name": "ipython", 282 | "version": 3 283 | }, 284 | "file_extension": ".py", 285 | "mimetype": "text/x-python", 286 | "name": "python", 287 | "nbconvert_exporter": "python", 288 | "pygments_lexer": "ipython3" 289 | }, 290 | "pycharm": { 291 | "stem_cell": { 292 | "cell_type": "raw", 293 | "metadata": { 294 | "collapsed": false 295 | }, 296 | "source": [] 297 | } 298 | } 299 | }, 300 | "nbformat": 4, 301 | "nbformat_minor": 1 302 | } 303 | -------------------------------------------------------------------------------- /docs/source/index.rst: -------------------------------------------------------------------------------- 1 | .. pyos documentation master file, created by 2 | sphinx-quickstart on Fri Mar 31 17:03:20 2017. 3 | You can adapt this file completely to your liking, but it should at least 4 | contain the root `toctree` directive. 5 | 6 | .. _pyOS: https://github.com/muhrin/pyos 7 | .. _Django: https://www.djangoproject.com/ 8 | .. _SQLAlchemy: https://www.sqlalchemy.org/ 9 | 10 | 11 | 12 | Welcome to pyOS's documentation! 13 | ================================ 14 | 15 | .. image:: https://codecov.io/gh/muhrin/pyos/branch/develop/graph/badge.svg 16 | :target: https://codecov.io/gh/muhrin/pyos 17 | :alt: Coveralls 18 | 19 | .. image:: https://travis-ci.org/muhrin/pyos.svg 20 | :target: https://travis-ci.org/muhrin/pyos 21 | :alt: Travis CI 22 | 23 | .. image:: https://img.shields.io/pypi/v/pyos.svg 24 | :target: https://pypi.python.org/pypi/pyos/ 25 | :alt: Latest Version 26 | 27 | .. image:: https://img.shields.io/pypi/wheel/pyos.svg 28 | :target: https://pypi.python.org/pypi/pyos/ 29 | 30 | .. image:: https://img.shields.io/pypi/pyversions/pyos.svg 31 | :target: https://pypi.python.org/pypi/pyos/ 32 | 33 | .. 
image:: https://img.shields.io/pypi/l/pyos.svg 34 | :target: https://pypi.python.org/pypi/pyos/ 35 | 36 | 37 | `pyOS`_: A fresh way to interact with your python objects as though they were files on your filesystem. 38 | 39 | 40 | Features 41 | ++++++++ 42 | 43 | * Familiar shell like interface: ``ls``, ``mv``, ``cat``, ``rm`` but on your objects! 44 | * Powerful and fast ``find`` facility to locate and organise your python world. 45 | * Version control of all your objects by default. 46 | * Familiar flags from the shell e.g. ``ls (-l)``, ``cp(-n)``. 47 | * Easy pipe-like chaining e.g. ``find(meta=dict(name='martin')) | rm`` 48 | 49 | Think of pyOS as a mixture between an ORM like `Django`_ or `SQLAlchemy`_ but where you can store any class by providing 50 | a simple mapper, from then on you interact with your objects as if they were files in a filesystem. 51 | Many of the familiar \*nix commands are available (``ls``, ``mv``, ``rm``, ``tree``, ``cat``, 52 | ``find``) except they take on a new, more powerful form because you're in a fully fledged python environment! 53 | 54 | 55 | Installation 56 | ++++++++++++ 57 | 58 | Installation with pip: 59 | 60 | .. code-block:: shell 61 | 62 | pip install pyos 63 | 64 | 65 | Installation from git: 66 | 67 | .. code-block:: shell 68 | 69 | # via pip 70 | pip install https://github.com/muhrin/pyos/archive/master.zip 71 | 72 | # manually 73 | git clone https://github.com/muhrin/pyos.git 74 | cd pyos 75 | python setup.py install 76 | 77 | 78 | Development 79 | +++++++++++ 80 | 81 | Clone the project: 82 | 83 | .. code-block:: shell 84 | 85 | git clone https://github.com/muhrin/pyos.git 86 | cd pyos 87 | 88 | 89 | Create a new virtualenv for `pyOS`_: 90 | 91 | .. code-block:: shell 92 | 93 | virtualenv -p python3 pyos 94 | 95 | Install all requirements for `pyOS`_: 96 | 97 | .. code-block:: shell 98 | 99 | env/bin/pip install -e '.[dev]' 100 | 101 | Table Of Contents 102 | +++++++++++++++++ 103 | 104 | .. 
toctree:: 105 | :glob: 106 | :maxdepth: 3 107 | 108 | basics 109 | examples/quick-start.ipynb 110 | examples/restaurants.ipynb 111 | apidoc 112 | 113 | 114 | Versioning 115 | ++++++++++ 116 | 117 | This software follows `Semantic Versioning`_ 118 | 119 | 120 | .. _Semantic Versioning: http://semver.org/ 121 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [build-system] 2 | requires = ['flit_core >=3.9,<4'] 3 | build-backend = 'flit_core.buildapi' 4 | 5 | [project] 6 | name = "pyos" 7 | dynamic = ["version"] 8 | description = "Browse python objects as if they were files on disk" 9 | readme = "README.rst" 10 | authors = [ 11 | {name = "Martin Uhrin", email = "martin.uhrin.10@ucl.ac.uk"} 12 | ] 13 | license = {file = "LICENSE"} 14 | classifiers = [ 15 | 'Development Status :: 4 - Beta', 16 | 'License :: OSI Approved :: GNU Lesser General Public License v3 or later (LGPLv3+)', 17 | 'Programming Language :: Python', 18 | 'Programming Language :: Python :: 3.9', 19 | 'Programming Language :: Python :: 3.10', 20 | 'Programming Language :: Python :: 3.11', 21 | 'Programming Language :: Python :: 3.12', 22 | 'Programming Language :: Python :: 3.13', 23 | ] 24 | keywords = ["database", "schemaless", "nosql", "orm", "object-store", "concurrent", "optimistic-locking"] 25 | requires-python = ">=3.9" 26 | dependencies = [ 27 | "anytree", 28 | "cmd2 ~= 1.3.2", 29 | "columnize", 30 | "mincepy>=0.16.1, <0.18", 31 | "click", 32 | "ipython", 33 | "pandas", 34 | "pymongo", 35 | "pyprnt", 36 | "pytray >= 0.3.0", 37 | "stevedore", 38 | "tqdm", 39 | "yarl" 40 | ] 41 | 42 | [project.optional-dependencies] 43 | gui = ["mincepy[gui]"] 44 | dev = [ 45 | "cmd2-ext-test", 46 | "ipython", 47 | "jupyter-sphinx", 48 | "nbsphinx", 49 | "pip", 50 | "pytest>4", 51 | "pytest-cov", 52 | "pre-commit", 53 | "sphinx", 54 | "sphinx-autobuild", 55 | "yapf", 56 | "pylint", 
57 | "twine" 58 | ] 59 | 60 | [project.urls] 61 | Documentation = "https://pyos.readthedocs.org/" 62 | Source = "https://github.com/muhrin/pyos/" 63 | 64 | [project.scripts] 65 | pyos = "pyos.cli:pyos" 66 | psh = "pyos.cli:psh_" 67 | 68 | [project.entry-points."mincepy.plugins.types"] 69 | pyos_types = "pyos.provides:get_types" 70 | 71 | [tool.flit.module] 72 | name = 'pyos' 73 | 74 | [tool.flit.sdist] 75 | exclude = [ 76 | '.github/', 77 | 'docs/', 78 | 'examples/', 79 | 'test/', 80 | ] 81 | 82 | [tool.flynt] 83 | line-length = 100 84 | fail-on-change = true 85 | 86 | [tool.isort] 87 | profile = "black" 88 | force_sort_within_sections = true 89 | include_trailing_comma = true 90 | line_length = 100 91 | multi_line_output = 3 92 | 93 | [tool.pylint.MASTER] 94 | ignore-paths = '^src/reax/lightning/.*$' # These are copy-paste from lightning, so don't lint 95 | 96 | [tool.pylint.format] 97 | max-line-length = 100 98 | 99 | [tool.pylint.messages_control] 100 | disable = [ 101 | # Unfortunately jaxtyping decorator creates a function that seems to mistakenly be identified as 102 | # not returning anything, so we have to disable the error below for now 103 | 'assignment-from-no-return', 104 | 'duplicate-code', 105 | 'import-outside-toplevel', 106 | 'missing-docstring', 107 | 'locally-disabled', 108 | 'too-few-public-methods', 109 | 'too-many-arguments', 110 | 'too-many-instance-attributes', 111 | 'use-dict-literal', 112 | 'fixme', 113 | ] 114 | 115 | [tool.pylint.design] 116 | max-locals = 20 117 | max-branches = 14 118 | max-returns = 8 119 | max-positional-arguments = 8 120 | 121 | [tool.bandit] 122 | exclude_dirs = ["test/"] 123 | skips = ["B101"] 124 | 125 | [tool.black] 126 | line-length = 100 127 | 128 | [tool.docformatter] 129 | wrap-summaries = 100 130 | wrap-descriptions = 100 131 | -------------------------------------------------------------------------------- /release.sh: -------------------------------------------------------------------------------- 1 | 2 | 
# Release script for pyos: bumps the version file, tags, merges the release
# into master and the working branch, then publishes to PyPI.
#
# Usage: ./release.sh <version>   (e.g. ./release.sh 0.8.2)

set -e

PACKAGE="pyos"
REMOTE="muhrin"
# The package lives under src/ (flit src-layout), not at the repository root
VERSION_FILE=src/${PACKAGE}/version.py

version=$1
if [ -z "$version" ]; then
    echo "Usage: $0 <version>" >&2
    exit 1
fi

while true; do
    read -p "Release version ${version}? " yn
    case $yn in
        [Yy]* ) break;;
        [Nn]* ) exit;;
        * ) echo "Please answer yes or no.";;
    esac
done

# Turn '1.2.3' into the python tuple '(1, 2, 3)' and write it into version.py
ver_info=$(python -c "print(tuple(int(entry) for entry in '$version'.split('.')))")
sed -i "/^version_info/c version_info = ${ver_info}" "$VERSION_FILE"

current_branch=$(git rev-parse --abbrev-ref HEAD)

tag="v${version}"
relbranch="release-${version}"

echo "Releasing version $version"

git checkout -b "$relbranch"
git add "$VERSION_FILE"
git commit --no-verify -m "Release ${version}"

git tag -a "$tag" -m "Version $version"

# Merge into master
git checkout master
git merge "$relbranch"

# And back into the working branch (usually develop)
git checkout "$current_branch"
git merge "$relbranch"

git branch -d "$relbranch"

# Push everything
git push --tags $REMOTE master "$current_branch"

# Release on pypi. The project is built with flit (see pyproject.toml
# [build-system]) and there is no setup.py in this repository, so use the
# standard PEP 517 frontend instead of the old setup.py commands.
rm -rf dist build
python -m build
twine upload dist/*
# pylint: disable=wrong-import-position
# Order is important here, first we list all the 'base' modules, then the rest
from . import (
    config,
    db,
    exceptions,
    fmt,
    fs,
    lib,
    os,
    pathlib,
    psh,
    psh_lib,
    representers,
    results,
    version,
)
from .db import connect
from .exceptions import *  # pylint: disable=redefined-builtin
from .lib import *
from .pathlib import Path, PurePath, working_path
from .version import *
from .version import __version__

# Names re-exported at the package level, grouped by why they are exported
_MODULES = "os", "config", "db", "fmt", "fs", "pathlib", "psh_lib", "psh"
_DEPRECATED = ("working_path",)
# NOTE: a missing comma here previously produced the implicitly-concatenated
# string "connectresults", so neither 'connect' nor 'results' was exported
_ADDITIONAL = ("PurePath", "Path", "__version__", "connect", "results", "representers")

__all__ = version.__all__ + lib.__all__ + exceptions.__all__ + _MODULES + _DEPRECATED + _ADDITIONAL
@pyos.command()
def shell():
    """Start the interactive pyos shell.

    Blocks until the user exits the shell, then terminates the process with
    the shell's return code.
    """
    # NOTE(review): the comment previously here (copied from psh_) said args
    # must be cleared so cmd2 does not pick them up, but nothing in this
    # function clears sys.argv -- confirm whether cmd2 mis-parses the CLI
    # arguments when the shell is launched through this entry point
    app = psh.PyosShell()
    sys.exit(app.cmdloop())
    def __init__(self, historian: mincepy.Historian, cwd: Optional[fs.Path] = None):
        """Start a new session.

        :param historian: the mincepy historian this session wraps; the session
            registers itself as an archive listener so it can react to object
            writes (see ``on_bulk_write``)
        :param cwd: the initial current working directory; when omitted the
            user's home directory is used and, if that does not exist, the
            filesystem root
        """
        self._historian = historian

        self._cwd = None
        if cwd:
            self.set_cwd(cwd)
        else:
            try:
                # Default working directory for a session is simply a folder in root with the user's name
                self.set_cwd(_get_homedir())
            except ValueError:
                # The home directory does not exist - fall back to the root
                self.set_cwd(fs.ROOT_PATH)

        # Subscribe to archive write notifications for the lifetime of the
        # session (removed again in close())
        historian.archive.add_archive_listener(self)
def connect(uri: str = "", use_globally=True) -> mincepy.Historian:
    """Connect to the database at the given URI and initialise it for pyOS.

    :param uri: the connection URI; the URL fragment selects the database
        name and defaults to 'pyos' when absent (presumably interpreted by
        mincepy -- confirm against mincepy.connect)
    :param use_globally: if True, the resulting historian/session becomes the
        process-wide default used by the rest of pyOS
    :return: the connected historian
    """
    parsed = parse.urlparse(uri)
    if parsed.fragment == "":
        # No database name given - fall back to the default ('pyos')
        parsed = parsed._replace(fragment=DEFAULT_DB)

    historian = mincepy.connect(parsed, use_globally=use_globally)
    init(historian, use_globally)

    return historian
def reset():
    """Close the global pyOS session (if any).

    After this call a fresh ``connect()``/``init()`` is required before pyOS
    can be used again.
    """
    global _GLOBAL_SESSION  # pylint: disable=global-statement
    if _GLOBAL_SESSION is not None:
        _GLOBAL_SESSION.close()
        # Drop the reference: Session.close() deletes its internal state, so
        # the closed session must not be handed out again by get_session() or
        # get_historian() (previously the stale reference was kept, causing
        # AttributeError on later use instead of a clear "not initialised"
        # error)
        _GLOBAL_SESSION = None
Make sure meta indices are there.""" 13 | 14 | # Make sure the indexes are there 15 | historian.meta.create_index( 16 | [(config.NAME_KEY, mincepy.ASCENDING), (config.DIR_KEY, mincepy.ASCENDING)], 17 | unique=True, 18 | where_exist=True, 19 | ) 20 | historian.meta.create_index(config.NAME_KEY, unique=False, where_exist=True) 21 | historian.meta.create_index(config.DIR_KEY, unique=False, where_exist=True) 22 | 23 | 24 | def add_pyos_collections(historian: mincepy.Historian): 25 | """ 26 | Version 1. 27 | 28 | Migrates from using metadata to store filesystem information to a dedicated MongoDB collection 29 | """ 30 | from . import fs 31 | 32 | archive: mincepy.mongo.MongoArchive = historian.archive 33 | db = archive.database # pylint: disable=invalid-name 34 | 35 | schema_version = getattr(archive, "schema_version", 0) 36 | 37 | if schema_version >= 2: 38 | meta_entries = archive.data_collection.aggregate( 39 | [{"$match": {"meta": {"$exists": True}}}, {"$replaceRoot": {"newRoot": "$meta"}}] 40 | ) 41 | else: 42 | metas = db[archive.META_COLLECTION] 43 | meta_entries = metas.find({config.DIR_KEY: {"$exists": True}}) 44 | 45 | # Find all the objects that have a directory key 46 | root = fs.FilesystemBuilder(is_root=True) 47 | for meta in meta_entries: 48 | obj_id = meta["_id"] 49 | directory = root 50 | for entry in meta[config.DIR_KEY].split("/")[1:-1]: 51 | directory = directory[entry] 52 | directory.add_obj(meta.get(config.NAME_KEY, str(obj_id)), obj_id) 53 | 54 | records = root.create_edge_records() 55 | 56 | fs_collection = db[constants.FILESYSTEM_COLLECTION] 57 | fs_collection.create_index(fs.Schema.PARENT, unique=False) 58 | # Create a joint, unique, index on the source and name meaning that there cannot be two entries 59 | # with the same name in any directory 60 | fs_collection.create_index( 61 | [(fs.Schema.PARENT, mincepy.ASCENDING), (fs.Schema.NAME, mincepy.ASCENDING)], unique=True 62 | ) 63 | 64 | fs_collection.replace_one({"_id": fs.ROOT_ID}, fs.ROOT, 
def and_(*conditions):
    """Combine the given query conditions with a logical AND.

    :param conditions: the individual query condition dictionaries
    :return: a single condition unchanged, or a Mongo ``$and`` query combining
        them all
    """
    if len(conditions) == 1:
        # Bug fix: previously returned the whole tuple instead of the single
        # condition itself (cf. `or_` and `in_`, which return the first element)
        return conditions[0]

    return {"$and": list(conditions)}
def get_db_version(database: pymongo.database.Database):
    """Get the version number of the database schema.

    :param database: the pymongo database to inspect
    :return: the stored schema version, or 0 if the pyos settings collection
        or settings document has not been created yet
    """
    if constants.PYOS_COLLECTION not in database.list_collection_names():
        return 0

    coll = database[constants.PYOS_COLLECTION]
    settings = coll.find_one({"_id": "settings"})
    if settings is None:
        # The collection exists but the settings document was never written;
        # previously this path raised AttributeError (None.get(...))
        return 0

    return settings.get(constants.SETTINGS_VERSION, 0)
def ensure_up_to_date(historian: mincepy.Historian):
    """Ensure that the database schema is up-to-date, applying any outstanding
    migrations if it is not.

    :param historian: the historian whose backing database should be migrated
    :return: True if one or more migrations were applied, False otherwise
    """
    archive: mincepy.mongo.MongoArchive = historian.archive
    database = archive.database
    current = get_db_version(database)

    # Already at (or beyond) the version this source code knows about
    if not current < get_source_version():
        return False

    # Apply each outstanding migration, persisting the version after every
    # step so progress is recorded as we go
    for new_version, migration in enumerate(migrations.MIGRATIONS[current:], start=current + 1):
        migration(historian)
        set_db_version(database, new_version)

    return True
@deprecation.deprecated(
    deprecated_in="0.8.0",
    removed_in="0.9.0",
    current_version=version.__version__,
    details="No longer use metadata to store filesystem location",
)
def path_from_meta_entry(obj_id, meta: dict) -> Optional[str]:
    """Reconstruct an object's virtual filesystem path from its metadata entry.

    :param obj_id: the object's id, used as the filename when no name is stored
    :param meta: the metadata dictionary, possibly containing directory/name keys
    :return: the reconstructed path string
    """
    parts = []
    if config.DIR_KEY in meta:
        # Assumes the stored directory string carries its trailing separator,
        # since the parts are joined with no separator -- TODO confirm
        parts.append(meta[config.DIR_KEY])

    # Fall back to the stringified object id when no name was stored
    if config.NAME_KEY in meta:
        parts.append(meta[config.NAME_KEY])
    else:
        parts.append(str(obj_id))

    # Note: `parts` can never be empty here (a name or id is always appended),
    # so the unreachable `return None` branch has been removed; the Optional
    # annotation is kept for interface compatibility
    return "".join(parts)
"""Exception types raised by pyOS.

Several of these deliberately shadow built-in exception names (hence the
pylint disables) so that filesystem-style errors raised by pyOS can be caught
separately from their built-in counterparts.
"""

# Bug fix: FileExistsError was defined below but missing from __all__, so
# `from .exceptions import *` (and the package-level __all__ built from
# exceptions.__all__) silently dropped it
__all__ = "PyOSError", "IsADirectoryError", "NotADirectoryError", "FileNotFoundError", "FileExistsError"

# pylint: disable=redefined-builtin


class PyOSError(Exception):
    """Raised when there is a PyOS exception"""


class IsADirectoryError(PyOSError):  # pylint: disable=redefined-builtin
    """Raise when a file is expected but a directory was passed"""


class NotADirectoryError(PyOSError):  # pylint: disable=redefined-builtin
    """Raise when a directory is expected but a file was passed"""


class FileNotFoundError(PyOSError):  # pylint: disable=redefined-builtin
    """A file was not found"""


class FileExistsError(PyOSError):  # pylint: disable=redefined-builtin
    """Raised when trying to create a file or directory which already exists"""

    def __init__(self, *args, existing_entry_id=None, path=None):
        """
        :param existing_entry_id: the filesystem id of the entry already occupying the path
        :param path: the path that already exists
        """
        super().__init__(*args)
        self.entry_id = existing_entry_id
        self.path = path
def pretty_datetime(value) -> str:
    """Render *value* for human-readable display.

    Types are rendered via :func:`pretty_type_string`; datetimes from the
    current year show month/day and time, older ones show month/day and year;
    anything else falls back to ``str``.
    """
    if isinstance(value, type):
        return pretty_type_string(value)

    if not isinstance(value, datetime.datetime):
        return str(value)

    # Omit the year for timestamps from this year (show the time instead)
    this_year = datetime.datetime.now().year
    pattern = "%b %d %H:%M" if value.year == this_year else "%b %d %Y"
    return value.strftime(pattern)
# pylint: disable=redefined-builtin
def find(
    *starting_point,
    meta: Optional[dict] = None,
    state: Optional[dict] = None,
    type=None,
    obj_filter: Optional[mincepy.Expr] = None,
    mindepth=0,
    maxdepth=-1,
    historian: Optional[mincepy.Historian] = None,
) -> nodes.FrozenResultsNode:
    """
    Find objects matching the given criteria

    :param starting_point: the starting points for the search, if not supplied defaults to '/'
    :param meta: filter criteria for the metadata
    :param state: filter criteria for the object's saved state
    :param type: restrict the search to this type (can be a tuple of types)
    :param obj_filter: an additional mincepy expression used to filter the matched objects
    :param mindepth: the minimum depth from the starting point(s) to search in
    :param maxdepth: the maximum depth from the starting point(s) to search in
    :param historian: the Historian to use
    :return: results node
    """
    if not starting_point:
        starting_point = (os.getcwd(),)

    # Copy so the caller's dictionaries are never mutated downstream
    meta = (meta or {}).copy()
    state = (state or {}).copy()

    expr = mincepy.Empty()
    if obj_filter:
        expr = obj_filter
    if state:
        # Flatten the nested state filter into a mincepy query expression
        expr &= mincepy.build_expr(mincepy.frontend.flatten_filter("state", state)[0])

    def yield_results():
        # Lazily walk each starting point, converting matched filesystem
        # entries into ObjectNodes carrying their virtual path
        for start in starting_point:
            for matching in _iter_matching(
                start,
                obj_filter=expr,
                obj_type=type,
                meta_filter=meta,
                mindepth=mindepth,
                maxdepth=maxdepth,
                historian=historian,
            ):
                descendent_path = db.fs.Entry.path(matching)
                # Use a distinct name: previously this rebound the loop variable
                obj_path = os.withdb.from_fs_path(descendent_path)
                yield nodes.ObjectNode(db.fs.Entry.id(matching), obj_path)

    results = nodes.FrozenResultsNode(results_.CachingResults(yield_results()))
    results.show("relpath", mode=nodes.SINGLE_COLUMN_VIEW)
    return results
def _iter_descendents(
    dir_fsid,
    start_path,
    obj_filter=None,
    obj_type=None,
    meta_filter=None,
    mindepth=0,
    maxdepth=-1,
    historian: mincepy.Historian = None,
):
    """Yield object entries found below the directory with filesystem id ``dir_fsid``.

    Only entries that are objects (not directories) are yielded, restricted to those at
    depth >= ``mindepth`` relative to the starting directory.  ``maxdepth == -1`` means
    unlimited depth.

    :param dir_fsid: the filesystem id of the directory to descend from
    :param start_path: the path of the starting directory, passed through so yielded
        entries carry path information
    :param obj_filter: optional mincepy expression to filter objects
    :param obj_type: optional type restriction for the matched objects
    :param meta_filter: optional metadata filter dictionary
    :param historian: the historian to use, defaults to the global one
    """
    # Find the filesystem entry we're looking for
    historian = historian or db.get_historian()

    objects_iter = db.fs.iter_descendents(
        dir_fsid,
        type=db.fs.Schema.TYPE_OBJ,  # Only interested in objects
        obj_filter=obj_filter,
        obj_type=obj_type,
        meta_filter=meta_filter,
        # The database layer uses None for "no limit" rather than -1
        max_depth=maxdepth if maxdepth != -1 else None,
        path=start_path,
        historian=historian,
    )

    # The minimum depth cannot be pushed into the query, so filter client-side
    for descendent in objects_iter:
        depth = db.fs.Entry.depth(descendent)
        if depth >= mindepth:
            yield descendent
15 | 16 | The pattern may contain simple shell-style wildcards a la 17 | fnmatch. Unlike fnmatch, filenames starting with a 18 | dot are special cases that are not matched by '*' and '?' 19 | patterns by default. 20 | 21 | If `include_hidden` is true, the patterns '*', '?', '**' will match hidden 22 | directories. 23 | 24 | If `recursive` is true, the pattern '**' will match any files and 25 | zero or more directories and subdirectories. 26 | """ 27 | return list( 28 | iglob( 29 | pathname, 30 | root_dir=root_dir, 31 | recursive=recursive, 32 | include_hidden=include_hidden, 33 | ) 34 | ) 35 | 36 | 37 | def iglob(pathname, *, root_dir=None, recursive=False, include_hidden=False): 38 | """Return an iterator which yields the paths matching a pathname pattern. 39 | 40 | The pattern may contain simple shell-style wildcards a la 41 | fnmatch. However, unlike fnmatch, filenames starting with a 42 | dot are special cases that are not matched by '*' and '?' 43 | patterns. 44 | 45 | If recursive is true, the pattern '**' will match any files and 46 | zero or more directories and subdirectories. 
def _iglob(pathname, root_dir, recursive, dironly, include_hidden=False):
    """Recursive workhorse behind :func:`iglob`.

    Splits ``pathname`` into ``dirname``/``basename`` and dispatches to the
    appropriate matcher, recursing on ``dirname`` when it also contains magic
    characters.  ``dironly`` restricts the yield to directories (used when this
    level is itself the dirname of an outer pattern).
    """
    # pylint: disable=too-many-branches

    dirname, basename = os.path.split(pathname)
    if not has_magic(pathname):
        # Literal path: just test for existence rather than listing anything
        assert not dironly
        if basename:
            if _lexists(_join(root_dir, pathname)):
                yield pathname
        else:
            # Patterns ending with a slash should match only directories
            if _isdir(_join(root_dir, dirname)):
                yield pathname
        return
    if not dirname:
        # Pattern is a bare basename: match directly inside root_dir
        if recursive and _isrecursive(basename):
            yield from _glob2(root_dir, basename, dironly, include_hidden=include_hidden)
        else:
            yield from _glob1(root_dir, basename, dironly, include_hidden=include_hidden)
        return
    # `os.path.split()` returns the argument itself as a dirname if it is a
    # drive or UNC path. Prevent an infinite recursion if a drive or UNC path
    # contains magic characters (i.e. r'\\?\C:').
    if dirname != pathname and has_magic(dirname):
        # The directory part itself needs globbing; recurse with dironly=True
        dirs = _iglob(dirname, root_dir, recursive, True, include_hidden=include_hidden)
    else:
        dirs = [dirname]
    # Choose the matcher for the basename: recursive '**', wildcard, or literal
    if has_magic(basename):
        if recursive and _isrecursive(basename):
            glob_in_dir = _glob2
        else:
            glob_in_dir = _glob1
    else:
        glob_in_dir = _glob0
    for dirname in dirs:
        for name in glob_in_dir(
            _join(root_dir, dirname), basename, dironly, include_hidden=include_hidden
        ):
            yield os.path.join(dirname, name)
def _glob1(dirname, pattern, dironly, include_hidden=False):
    """Non-recursively match ``pattern`` against the entries of ``dirname``.

    Returns a list of basenames.  Hidden entries (leading dot) are excluded
    unless ``include_hidden`` is set or the pattern itself targets hidden names.
    """
    candidates = _listdir(dirname, dironly)
    # Only filter out dot-files when hidden entries were not requested and the
    # pattern does not itself start with a dot
    if not include_hidden and not _ishidden(pattern):
        candidates = (entry for entry in candidates if not _ishidden(entry))
    return fnmatch.filter(candidates, pattern)
# Patterns recognising any glob metacharacter: '*', '?' or '['
magic_check = re.compile("([*?[])")
magic_check_bytes = re.compile(b"([*?[])")


def has_magic(string):
    """Return ``True`` when ``string`` (str or bytes) contains a glob wildcard."""
    checker = magic_check_bytes if isinstance(string, bytes) else magic_check
    return checker.search(string) is not None
# 'connect' was defined below but missing from __all__, so star-imports silently
# omitted it; it is now exported alongside init/reset.
__all__ = "connect", "init", "reset"


def connect(uri: str = "") -> mincepy.Historian:
    """Connect to the archive at ``uri`` and return the historian.

    :param uri: the URI of the archive; an empty string uses the configured default
    """
    return db.connect(uri)


def init() -> mincepy.Historian:
    """Initialise the database layer and return the global historian."""
    return db.init()


def reset():
    """Reset the database layer, dropping the current connection."""
    db.reset()
import path, types 7 | from .nodb import DirEntry, curdir, fsdecode, fsencode, fspath, pardir, sep 8 | from .types import * 9 | from .withdb import chdir, getcwd, isdir, listdir, makedirs, remove, rename, scandir, unlink 10 | 11 | _ADDITIONAL = ( 12 | "path", 13 | "getcwd", 14 | "chdir", 15 | "fspath", 16 | "listdir", 17 | "remove", 18 | "sep", 19 | "unlink", 20 | "curdir", 21 | "pardir", 22 | "rename", 23 | "scandir", 24 | "DirEntry", 25 | "isdir", 26 | "makedirs", 27 | "fsencode", 28 | "fsdecode", 29 | ) 30 | 31 | __all__ = types.__all__ + _ADDITIONAL # pylint: disable=undefined-variable 32 | -------------------------------------------------------------------------------- /src/pyos/os/nodb.py: -------------------------------------------------------------------------------- 1 | """Methods and classes that do not need interaction with the database and are therefore safe to use from modules 2 | that need access to pyos.db without causing a cyclic dependency.""" 3 | 4 | import posixpath 5 | from typing import Union 6 | 7 | from . import types 8 | 9 | # pylint: disable=invalid-name 10 | 11 | sep = "/" # The path separator pylint: disable=invalid-name 12 | curdir = "." # Used to refer to the current directory 13 | pardir = ".." 
def check_arg_types(funcname, *args):
    """Raise ``TypeError`` if any of ``args`` is not a ``str``.

    :param funcname: name of the calling function, used in the error message
    :param args: the values to validate

    NOTE(review): the message mentions bytes/os.PathLike but only ``str`` is
    accepted here — kept as-is to preserve existing behavior.
    """
    for arg in args:
        if not isinstance(arg, str):
            raise TypeError(
                # BUG FIX: the second literal lacked the 'f' prefix, so the
                # offending type name was printed verbatim as '{arg.__class__...}'
                f"{funcname}() argument must be str, bytes, or "
                f"os.PathLike object, not {arg.__class__.__name__}"
            ) from None
def fsencode(filename: types.PathSpec) -> bytes:
    """Return ``filename`` as ``bytes``, UTF-8 encoding a ``str`` representation."""
    fs_repr = fspath(filename)
    if isinstance(fs_repr, bytes):
        return fs_repr
    return fs_repr.encode(_ENCODING)


def fsdecode(filename: types.PathSpec) -> str:
    """Return ``filename`` as ``str``, UTF-8 decoding a ``bytes`` representation."""
    fs_repr = fspath(filename)  # Also performs type-checking of `filename`
    if isinstance(fs_repr, str):
        return fs_repr
    return fs_repr.decode(_ENCODING)
class PurePath(os.PathLike):
    """A path in PyOS. Where possible the convention follows that of a PurePosixPath in pathlib.

    This is a 'pure' path in a similar sense to pathlib.PurePath in that it does not interact with
    the database at all.
    """

    __slots__ = ("_path",)

    def __init__(self, path: os.PathSpec = "."):
        super().__init__()
        # Normalise immediately so equality/hashing see a canonical form
        self._path = os.path.normpath(path)

    @property
    def parts(self) -> tuple[str, ...]:
        """The path components, delegated to a posix pure path."""
        return pathlib.PurePosixPath(self._path).parts

    @property
    def parent(self):
        """The logical parent of this path."""
        return self.__class__(os.path.dirname(self._path))

    @property
    def parents(self) -> Sequence:
        """Get a sequence of paths that are the parents of this path"""
        current = self.parent
        parents = [current]
        # Walk upwards until the parent stops changing (i.e. we reached the root)
        while current.parent != parents[-1]:
            parents.append(current.parent)
            current = parents[-1]
        return parents

    @property
    def name(self):
        """The final component of the path."""
        return os.nodb.basename(self)

    @property
    def root(self) -> str:
        """The root of the virtual filesystem, always '/'."""
        return "/"

    def __fspath__(self):
        # PathLike protocol: the plain string representation
        return self._path

    def __hash__(self):
        return self._path.__hash__()

    def __eq__(self, other):
        if not isinstance(other, PurePath):
            return False

        return self._path == other._path

    def __repr__(self):
        return f"{self.__class__.__name__}('{self._path}')"

    def __str__(self):
        return self._path

    def __truediv__(self, other):
        """Join with ``other`` using the '/' operator, as in pathlib."""
        if not isinstance(other, PurePath):
            if not isinstance(other, str):
                raise TypeError(f"Cannot join a path with a '{type(other)}'")

        # An absolute operand replaces this path entirely
        if os.path.isabs(other):
            return self.__class__(other)

        return self.__class__(os.path.join(str(self), str(other)))

    def is_file_path(self) -> bool:
        """Returns True if this path specifies a file i.e. does not end with a training '/'"""
        return not self.is_dir_path()

    def is_dir_path(self) -> bool:
        """Returns True if this path specified a directory i.e. ends with a trailing '/'"""
        return self._path.endswith(os.sep)

    def is_absolute(self) -> bool:
        """Return True if the path starts at the filesystem root."""
        return os.path.isabs(self._path)

    @deprecation.deprecated(
        deprecated_in="0.8.0",
        removed_in="0.9.0",
        current_version=version.__version__,
        details="We no longer make a distinction between a file path and dir path",
    )
    def to_dir(self) -> "PurePath":
        """If this path is a file path then a directory with the same name will be
        returned. Otherwise, this path will be returned"""
        return self

    @deprecation.deprecated(
        deprecated_in="0.8.0",
        removed_in="0.9.0",
        current_version=version.__version__,
        details="We no longer make a distinction between a file path and dir path",
    )
    def to_file(self):
        """If this path is a directory then a file path with the same name will be returned.
        Otherwise, this path will be returned"""
        return self

    def joinpath(self, *other):
        """Join this path with each of ``other`` in turn, returning a new path."""
        return self.__class__(os.path.join(self, *other))
class Path(PurePath, mincepy.SimpleSavable):
    """A path in Pyos. Where possible the convention follows that of a PurePosixPath in pathlib.
    The one major exception is that folders are represented with an explicit trailing '/' and
    anything else is a file."""

    # Savable attributes persisted by mincepy
    ATTRS = ("_path",)
    TYPE_ID = uuid.UUID("5eac541e-848c-43aa-818d-50cf8a2b8507")

    def is_file(self) -> bool:
        """Returns True if this path is a file path and exists"""
        return os.path.isfile(self)

    def is_dir(self) -> bool:
        """Returns True if this path is a directory path and exists"""
        return os.isdir(self)

    def exists(self) -> bool:
        """Test whether a path point exists"""
        return os.path.exists(self)

    def unlink(self, missing_ok=False):
        """Remove this path.

        :param missing_ok: when True, a non-existent path is silently ignored
        :raises exceptions.FileNotFoundError: if the path is missing and ``missing_ok`` is False
        """
        if not self.exists():
            if missing_ok:
                return
            raise exceptions.FileNotFoundError(f"Can't delete '{self}', it does not exist")

        os.unlink(self)

    def iterdir(self) -> Iterable["Path"]:
        """
        When the path points to a directory, yield path objects of the directory contents:

        >>>
        >>> p = Path('docs')
        >>> for child in p.iterdir(): child
        ...
        Path('docs/conf')
        Path('docs/readme')
        Path('docs/index.rst')
        Path('docs/_build')
        Path('docs/_static')

        :raises exceptions.FileNotFoundError: if the path does not exist
        :raises exceptions.NotADirectoryError: if the path exists but is not a directory
        """
        # BUG FIX: existence must be checked first.  Previously is_dir() was
        # tested first, so a missing path raised NotADirectoryError and the
        # FileNotFoundError branch was unreachable.
        if not self.exists():
            raise exceptions.FileNotFoundError(f"No such directory: '{os.path.relpath(self)}'")
        if not self.is_dir():
            raise exceptions.NotADirectoryError(f"Not a directory: {os.path.relpath(self)}")

        node = fs.DirectoryNode(self)
        node.expand(1)
        for child in node.children:
            yield child.abspath

    def rename(self, target: os.PathSpec) -> "Path":
        """Rename this path to ``target`` and return the new, resolved, path."""
        target = Path(target).resolve()
        os.rename(self._path, target)
        return target

    def resolve(self) -> Union[PurePath, "Path"]:
        """Make the path absolute eliminating any . and .. that occur in the path"""
        path = os.path.abspath(os.path.relpath(self))
        if path == self._path:
            # Avoid constructing a new one as this is currently a little slow
            return self
        return self.__class__(path)
def parse_query(value: str) -> dict:
    """Parse a single ``name<op>value`` condition string into a query dictionary.

    The first operator from OPERATORS found in ``value`` is used to split it; the
    right-hand side is interpreted as a python literal (e.g. None, False, 3, 2.6)
    where possible, otherwise kept as a string.

    :raises ValueError: if no known operator is present in ``value``
    """
    for oper, func in OPERATORS.items():
        if oper in value:
            left, right = value.split(oper, maxsplit=1)

            # Try getting python types from the right hand side (e.g. None, False, 3, 2.6, etc)
            try:
                right = ast.literal_eval(right.rstrip())
            except (ValueError, SyntaxError):
                # BUG FIX: literal_eval raises SyntaxError for unparsable input
                # (e.g. 'foo bar'); previously only ValueError was caught, so
                # such input crashed instead of being treated as a plain string.
                pass

            return func(left.strip(), right)

    raise ValueError(f"Unknown condition: {value}")
import connect 5 | from .cat import cat 6 | from .cd import cd 7 | from .find import find 8 | from .history import history 9 | from .load import load 10 | from .locate import locate 11 | from .log import Log 12 | from .ls import ls 13 | from .meta import meta 14 | from .mkdir import mkdir 15 | from .mv import mv 16 | from .oid import oid 17 | from .pwd import pwd 18 | from .rm import rm 19 | from .rsync import rsync 20 | from .save import save 21 | from .tree import tree 22 | 23 | __all__ = ( 24 | "cat", 25 | "cd", 26 | "find", 27 | "history", 28 | "load", 29 | "ls", 30 | "locate", 31 | "meta", 32 | "mv", 33 | "mkdir", 34 | "oid", 35 | "pwd", 36 | "rm", 37 | "rsync", 38 | "save", 39 | "tree", 40 | "Log", 41 | ) 42 | -------------------------------------------------------------------------------- /src/pyos/psh/cmds/cat.py: -------------------------------------------------------------------------------- 1 | import argparse 2 | import logging 3 | import re 4 | import sys 5 | 6 | import cmd2 7 | 8 | from . import ls 9 | from .. import completion, flags 10 | from ... import db, fs, pathlib, psh_lib, representers 11 | from ... import results as results_ 12 | 13 | _LOGGER = logging.getLogger(__name__) 14 | 15 | 16 | @psh_lib.command() 17 | def cat(*obj_or_ids, representer=None): 18 | """Convert the contents of objects into strings. 19 | A representer can optionally be passed in which should take the passed object and convert it to 20 | a string. 
class FstringRepresenter:
    """Representer that renders an object through a user-supplied f-string template.

    Every ``{...}`` placeholder is rewritten to ``{obj...}`` so that, when
    formatted, it is evaluated against the passed object bound as ``obj``.
    """

    REGEXP = re.compile(r"{(\S*)}")

    def __init__(self, fstring):
        # Create the adapted f-string with 'obj' representing the passed object
        adapted, _num_subs = self.REGEXP.subn("{obj\\1}", fstring)
        self._fstring = adapted

    def __call__(self, obj) -> str:
        return self._fstring.format(obj=obj)
-f {colour} prints " 76 | "obj.colour for each object", 77 | ) 78 | ls_parser.add_argument("path", nargs="*", type=str, completer_method=completion.file_completer) 79 | 80 | @cmd2.with_argparser(ls_parser) 81 | def do_cat(self, args): 82 | if not args.path: 83 | # Read from standard in 84 | _LOGGER.debug("cat: getting input from stdin") 85 | try: 86 | args.path = [line.rstrip() for line in sys.stdin.readlines()] 87 | except Exception: 88 | _LOGGER.exception("Exception trying to readlines") 89 | raise 90 | _LOGGER.debug("cat: got input' %s' from stdin", args.path) 91 | 92 | representer = None 93 | if args.fstring: 94 | representer = FstringRepresenter(args.fstring) 95 | 96 | results = cat(*args.path, representer=representer) 97 | 98 | if isinstance(results, results_.ResultsString): 99 | print(results) 100 | else: 101 | for entry in results: 102 | print(entry) 103 | -------------------------------------------------------------------------------- /src/pyos/psh/cmds/cd.py: -------------------------------------------------------------------------------- 1 | """Change directory command""" 2 | 3 | import argparse 4 | 5 | import cmd2 6 | 7 | from .. import completion 8 | from ... 
@psh_lib.command()
def cd(path: os.PathSpec):  # pylint: disable=invalid-name
    """Change the current working directory"""
    # Expand a leading '~' first, then collapse redundant separators and dots
    target = os.path.normpath(os.path.expanduser(path))
    try:
        os.chdir(target)
    except exceptions.PyOSError as exc:
        # Mirror shell behaviour: report the problem instead of raising
        print(exc)
@psh_lib.command()
def find(
    *starting_point: os.PathLike,
    meta: dict = None,
    state: dict = None,
    type=None,  # pylint: disable=redefined-builtin
    mindepth=0,
    maxdepth=-1,
) -> fs.ResultsNode:
    """Find objects matching the given criteria, optionally starting from one or more paths


    :param starting_point: one or more paths to start the search from
    :param meta: a dictionary for the metadata to match
    :param state: a dictionary for the record state to match
    :param type: restrict the match to objects of this type
    :param mindepth: the minimum depth to search at relative to the start point(s)
    :param maxdepth: the maximum depth to search at relative to the start point(s)
    """
    # Strip any command options mixed into the positional arguments
    _opts, start_points = psh_lib.opts.separate_opts(*starting_point)
    # Default to searching from the current working directory
    start_points = start_points or (pathlib.Path(),)

    return fs.find(
        *start_points,
        meta=meta,
        state=state,
        type=type,
        mindepth=mindepth,
        maxdepth=maxdepth,
    )
" "(-1 means no maximum)", 77 | ) 78 | parser.add_argument( 79 | "--mindepth", 80 | dest="mindepth", 81 | type=int, 82 | default=0, 83 | help="minimum depth to start search at relative to starting point(s)", 84 | ) 85 | parser.add_argument( 86 | "state", 87 | type=argparse_types.parse_query, 88 | nargs="*", 89 | help="a constraint to apply to the state of the object", 90 | ) 91 | 92 | @cmd2.with_argparser(parser) 93 | def do_find(self, args): 94 | meta = None 95 | state = None 96 | if args.meta: 97 | meta = qops.and_(*args.meta) 98 | if args.state: 99 | state = qops.and_(*args.state) 100 | 101 | res = fs.find( 102 | *args.paths, 103 | type=args.type, 104 | meta=meta, 105 | state=state, 106 | mindepth=args.mindepth, 107 | maxdepth=args.maxdepth, 108 | ) 109 | 110 | res.__stream_out__(self._cmd.stdout) 111 | -------------------------------------------------------------------------------- /src/pyos/psh/cmds/history.py: -------------------------------------------------------------------------------- 1 | """The history command""" 2 | 3 | from . import cat 4 | from ... import db, psh_lib 5 | 6 | 7 | @psh_lib.command() 8 | def history(obj): 9 | hist = db.get_historian() 10 | for entry in hist.history(obj): 11 | cat.cat(entry.obj) 12 | -------------------------------------------------------------------------------- /src/pyos/psh/cmds/load.py: -------------------------------------------------------------------------------- 1 | from typing import Any, Iterable, Union 2 | 3 | from . import ls 4 | from ... 
@psh_lib.command()
def load(*obj_or_ids) -> Union[Iterable[Any], Any]:
    """Load one or more objects

    Entries that are valid object ids are loaded directly from the database; anything
    else (e.g. a path or filesystem node) is resolved via ``ls``.  Failures are placed
    in the result list as the exception instance.  A single result is returned bare,
    otherwise a list is returned.
    """
    _options, args = psh_lib.separate_opts(*obj_or_ids)
    if not args:
        return None

    # First load any by object id directly
    loaded = []
    rest = []
    hist = db.get_historian()
    for entry in args:
        if hist.is_obj_id(entry):
            try:
                loaded.append(hist.load(entry))
            except Exception as exc:  # pylint: disable=broad-except
                loaded.append(exc)
        else:
            rest.append(entry)

    # The remaining arguments are passed to ls which will try to find the corresponding
    # objects.  Only call ls when there is something left: calling it with no arguments
    # would list (and therefore load) the entire current directory.
    if rest:
        to_load = ls.ls(*rest)  # pylint: disable=no-value-for-parameter
        for node in to_load:
            try:
                loaded.append(node.obj)
            except Exception as exc:  # pylint: disable=broad-except
                loaded.append(exc)

    # Unwrap a lone result.  (Was `len(to_load) == 1`, which mis-counted whenever
    # object ids and paths were mixed, e.g. returning loaded[0] despite two results.)
    if len(loaded) == 1:
        return loaded[0]

    return loaded
@psh_lib.command()
def locate(
    *obj_or_ids,
) -> Optional[Union[pathlib.Path, Sequence[pathlib.Path]]]:
    """Locate the directory of one or more objects"""
    if not obj_or_ids:
        return None

    hist = db.get_historian()
    obj_ids = tuple(hist.to_obj_id(entry) for entry in obj_or_ids)

    # Translate database filesystem paths into user-facing absolute paths
    paths = tuple(
        pathlib.Path(os.withdb.from_fs_path(fs_path))
        for fs_path in db.fs.get_paths(*obj_ids, historian=hist)
    )
    results = results_.CachingResults(iter(paths), representer=str)

    # A lone query with a lone answer is returned bare
    if len(obj_or_ids) == 1 and len(results) == 1:
        return results[0]

    return results
@psh_lib.command(pass_options=True)
@psh_lib.flag(flags.l, help="use a long listing format")
@psh_lib.flag(flags.d, help="list directories themselves, not their contents")
@psh_lib.flag(flags.p, help="print the str() value of each object")
@psh_lib.flag(
    psh_lib.Option(1),
    help="list one file per line. This will avoid waiting for all results to be loaded before printing",
)
def ls(options, *args) -> fs.ContainerNode:  # pylint: disable=invalid-name, too-many-branches
    """List the contents of a directory

    Flags:
        -l  long (table) listing format
        -d  list directories themselves, not their contents
        -p  additionally show str() of each object (only takes effect with -l)
        -1  one entry per line, streamed as results load (ignored when -l is given)

    :param options: the parsed command options (flags are consumed via options.pop)
    :param args: paths/nodes to list; defaults to the current working directory
    :return: the container node holding the listed entries
    """
    rest = args
    # Convert raw arguments (strings, paths, nodes, ...) into filesystem entries;
    # unparseable entries come back as Exception instances
    parsed = psh_lib.parse_fs_entry(*rest)

    results = fs.ResultsNode()
    if rest:
        for entry in parsed:
            if isinstance(entry, Exception):
                raise entry

            try:
                results.append(fs.to_node(entry))
            except ValueError as exc:
                # Entry could not be turned into a node - log and carry on
                logger.info(str(exc))
    else:
        # No arguments: list the current working directory
        results.append(fs.to_node(pathlib.Path()))

    # Without -d, expand directories to show their contents.  Note: flags.l is
    # only *checked* here (not popped) because it is consumed further down.
    if not options.pop(flags.d):
        for entry in results:
            if isinstance(entry, fs.DirectoryNode):
                entry.expand(populate_objects=flags.l in options)

    if len(results) == 1 and isinstance(results[0], fs.DirectoryNode):
        # We just have a single directory
        results = results[0]

    # Display-mode selection; the elif chain means -1 has no effect when -l is set
    if options.pop(flags.l):
        properties = ["loaded", "type", "version", "mtime", "name"]
        if options.pop(flags.p):
            properties.append("str")
        results.show(*properties, mode=fs.TABLE_VIEW)
    elif options.pop(1):
        results.show(mode=fs.SINGLE_COLUMN_VIEW)
    else:
        results.show(mode=fs.LIST_VIEW)

    return results
@psh_lib.command(pass_options=True)
@psh_lib.flag(flags.s, "Set the metadata")
@psh_lib.flag(flags.u, "Update the metadata")
def meta(options, *obj_or_ids, **updates):  # pylint: disable=too-many-return-statements
    """Get, set or update the metadata on one or more objects

    Without -s/-u the current metadata is returned; with -u the keyword arguments are
    merged into the existing metadata; with -s they replace it.

    :param options: parsed command options (flags consumed via options.pop)
    :param obj_or_ids: objects, object ids, paths or filesystem nodes
    :param updates: key/value pairs to set or update (requires -s or -u)
    """
    # pylint: disable=too-many-branches
    if not obj_or_ids:
        return None

    hist = db.get_historian()
    obj_ids, rest = psh_lib.gather_obj_ids(obj_or_ids, hist)

    # Assume that anything left is something like a path or filesystem node.
    # NOTE(review): `ls` comes from `from . import ls`; this call relies on the
    # package exposing the command callable under that name - confirm.
    if rest:
        to_update = ls(-flags.d, *rest)
        for node in to_update:
            if isinstance(node, fs.ObjectNode):
                obj_ids.append(node.obj_id)
            else:
                print(f"Can't set metadata on '{node}'")

    if options.pop(flags.u, False):
        # In 'update' mode
        if updates:
            db.lib.update_meta(*obj_ids, meta=updates)

    elif options.pop(flags.s, False):
        # In 'setting' mode
        if updates:
            db.lib.set_meta(*obj_ids, meta=updates)

    else:
        # In 'getting' mode
        if updates:
            # Use the module logger for consistency (was the root logger via
            # `logging.warning`, which bypassed this module's configuration)
            logger.warning("Keywords supplied to meta without -s/-u flags: %s", updates)

        if len(obj_ids) == 1:
            # Special case for a single parameter
            metadata = db.lib.get_meta(obj_ids[0])
            if metadata is None:
                return None
            return results_.ResultsDict(metadata)

        return results_.CachingResults(db.lib.find_meta(obj_ids=obj_ids))

    return None
@psh_lib.command(pass_options=True)
@psh_lib.flag(flags.p, "no error if existing, make parent directories as needed")
def mkdir(options, directory: os.PathSpec):
    """make directories"""
    # -p: tolerate pre-existing directories and create parents as needed
    make_parents = options.pop(flags.p)
    if make_parents:
        os.makedirs(directory, exists_ok=True)
    else:
        os.makedirs(directory)
@psh_lib.command(pass_options=True)
@psh_lib.flag(flags.f, help="force - do not prompt before overwriting")
def mv(options, *args):  # pylint: disable=invalid-name
    """Take one or more files or directories with the final parameter being interpreted as
    destination

    mv has the following syntax, with the indicated outcomes:

    Files as input:
        mv ('a', 'b/') - move file 'a' into folder 'b/'
        mv ('a', 'b') - rename file a to b

    Folders as input:
        mv ('a/', 'b/') - move folder 'a/' into folder 'b/' becoming 'b/a/'
        mv ('a/', 'b') - rename folder 'a/' to be called 'b' having path 'b/'

    Multiple inputs:
        mv (*args, 'd') - move all supplied *args to 'd/'
        mv (*args, 'd/') - move all supplied *args to 'd/'

    Existing files are only overwritten when the force flag (-f) is given.
    """
    if len(args) < 2:
        raise ValueError("mv: missing destination")

    args = list(args)
    dest = pathlib.Path(args.pop())

    # Resolve the sources without expanding directories (-d)
    to_move = ls(-flags.d, *args)

    if dest.is_dir():
        dest = dest.resolve()
        # Only overwrite when -f was supplied.  (Was `not options.pop(flags.n)`:
        # flags.n is never declared for this command, so overwrite was
        # unconditionally True - contradicting both the -f flag and the docstring.)
        to_move.move(dest, overwrite=options.pop(flags.f))
    else:
        # Renaming
        if dest.exists():
            # -f skips the interactive confirmation before clobbering
            if -flags.f in options or click.confirm(f"Overwrite '{dest}'?"):
                dest.unlink()
            else:
                return

        to_move[0].rename(dest.name)
@psh_lib.command()
def oid(*args):
    """Get the object id for one or more live objects"""
    if not args:
        return None

    cached = results_.CachingResults(db.get_oid(*args), representer=str)

    # Unwrap a lone result so callers get the id itself rather than a container
    return cached[0] if len(cached) == 1 else cached
@psh_lib.command(pass_options=True)
@psh_lib.flag(flags.p, help="show progress bar")
@psh_lib.flag(flags.r, help="remove directories and their contents recursively")
def rm(options, *obj_or_ids):  # pylint: disable=invalid-name
    """Remove objects

    Arguments that are object ids are deleted directly from the database; the rest are
    treated as paths/filesystem nodes.  Directories are only removed when -r is given.
    """
    if not obj_or_ids:
        return

    hist = db.get_historian()
    obj_ids, rest = psh_lib.gather_obj_ids(obj_or_ids, hist)

    # Delete the ids in one transaction
    with hist.transaction():
        for obj_id in obj_ids:
            hist.delete(obj_id)

    # Assume that anything left is something like a path or filesystem node.
    # Guard against an empty `rest` (consistent with meta()): calling ls() with no
    # arguments lists the current working directory, so e.g. `rm -r <objid>` would
    # have recursively deleted the entire CWD contents.
    if rest:
        to_delete = ls(-flags.d, *rest)
        recursive = options.pop(flags.r)
        if not recursive:
            # Without -r, report and drop directories instead of deleting them
            to_delete = _remove_directories(to_delete)

        if to_delete:
            if options.pop(flags.p):
                to_delete = tqdm.tqdm(to_delete, desc="rm")

            for node in to_delete:
                node.delete()
27 | Remote paths should be specified with as a URI string containing the 28 | remote server path, database name followed by the path e.g.: 29 | 30 | mongodb://localhost/my_db/my_folder/my_sub_folder/ 31 | 32 | rsync uses similar path syntax to mv, e.g.: 33 | 34 | Files as input: 35 | rsync ('a', 'mongodb://localhost/db/b/') - copy file 'a' into folder 'b/' on localhost 36 | rsync ('a', 'mongodb://localhost/db/b') - copy file a to b on localhost 37 | 38 | Folders as input: 39 | rsync ('a/', 'mongodb://localhost/db/b/') - copy folder 'a/' into folder 'b/' on localhost 40 | becoming 'b/a/' 41 | rsync ('a/', 'mongodb://localhost/db/b') - copy folder 'a/' to be called 'b' on localhost 42 | having path 'b/' 43 | 44 | Multiple inputs: 45 | rsync (*src, 'mongodb://localhost/db/d') - copy all supplied *src paths to 'd/' on localhost 46 | rsync (*src, 'mongodb://localhost/db/d/') - same as above 47 | 48 | For now, files will not be overwritten. 49 | """ 50 | if len(args) < 2: 51 | raise ValueError("rsync: missing destination") 52 | 53 | args = list(args) 54 | dest = args.pop() 55 | 56 | src_url, src_paths = _get_sources(*args) 57 | dest_url, dest_path = _parse_location(dest) 58 | try: 59 | # 1. Set up the source 60 | if src_url: 61 | src = db.connect(src_url, use_globally=False) 62 | else: 63 | # We are the source database 64 | src = db.get_historian() 65 | 66 | # 2. Set up the destination 67 | if dest_url: 68 | dest = db.connect(dest_url, use_globally=False) 69 | else: 70 | # We are the destination database 71 | dest = db.get_historian() 72 | except pymongo.errors.OperationFailure as exc: 73 | print( 74 | f"Error trying to connect with src '{src_url} {src_paths}', and dest '{dest_url} {dest_path}'" 75 | ) 76 | print(exc) 77 | return 1 78 | else: 79 | # 3. 
def _sync_objects(
    src: mincepy.Historian,
    src_path: str,
    dest: mincepy.Historian,
    dest_path: str,
    history=False,
    meta=None,
    progress_cb: Callable = None,
):
    """Synchronise objects from a given source at the given path, to the destination at the given path

    :param src: the historian to copy objects from
    :param src_path: the (pyos) path at the source whose objects are synced
    :param dest: the historian to copy objects to
    :param dest_path: the path at the destination under which objects are placed
    :param history: if True, sync snapshots (full history) rather than live objects only
    :param meta: META_UPDATE/META_OVERWRITE to also sync metadata, or None to skip it
    :param progress_cb: optional callable invoked after each merged batch
    :return: a tuple of (merge result, dict mapping obj_id -> new destination path)
    """
    # Get the object ids at the source path
    path = os.path.abspath(src_path)
    obj_ids = set(entry.obj_id for entry in fs.find(path, historian=src).objects)  # DB HIT

    # Choose the collection to sync from (either history or live objects)
    src_collection = src.snapshots if history else src.objects

    sync_set = src_collection.find(mincepy.DataRecord.obj_id.in_(*obj_ids))
    # Accumulates obj_id -> destination path across all merged batches
    merged_paths = {}

    def batch_merged(progress, result):
        """Callback called when a batch is merged"""
        # Now we have to put the newly transferred objects into the correct path
        # first get the paths for each of the merged objects
        all_obj_ids = set(sid.obj_id for sid in result.all)

        # 1. Get all paths
        paths = dict(db.get_paths(*all_obj_ids, historian=src))

        # 2. Create the new abspaths by re-rooting each source path under dest_path
        new_paths = {
            obj_id: os.path.abspath(
                os.path.join(dest_path, os.path.relpath(objpath, start=src_path))
            )
            for obj_id, objpath in paths.items()
        }

        # 3. Get the set of folders we need to have in place
        dirs = set(os.path.dirname(objpath) for objpath in new_paths.values())

        # 4. Create the directories
        for objdir in dirs:
            db.fs.make_dirs(os.withdb.to_fs_path(objdir), exists_ok=True, historian=dest)

        # 5. Set the paths
        for obj_id, path in new_paths.items():
            db.fs.set_obj_path(obj_id, os.withdb.to_fs_path(path), historian=dest)

        # 6. Copy over metadata
        # Dictionary to store the metadatas we need to set at the dest
        dest_metas = collections.defaultdict(dict)

        # Get all the metadata at the source
        src_metas = dict(src.meta.find({}, obj_id=all_obj_ids))

        if meta is not None:
            # We're being asked to merge metadata at the source so update our dictionary
            dest_metas.update(src_metas)

        # Now update all the metadata dictionaries.  When meta is None, dest_metas
        # stays empty and no metadata is written at the destination.
        if dest_metas:
            if meta == META_UPDATE:
                dest.meta.update_many(dest_metas)
            else:
                # META_OVERWRITE (or any other non-None value) replaces wholesale
                dest.meta.set_many(dest_metas)

        if progress_cb is not None:
            progress_cb(progress, result)

        merged_paths.update(new_paths)

    # merge() drives batch_merged per batch; merged_paths is fully populated on return
    return (
        dest.merge(sync_set, progress_callback=batch_merged, batch_size=256),
        merged_paths,
    )
which is the database name 206 | path_parts = url.path.split("/") 207 | url = url.with_path("".join(path_parts[:2])).with_query(url.query) 208 | fs_path = "/" + "/".join(path_parts[2:]) # The filesystem path 209 | 210 | # Get rid of filesystem path part 211 | return str(url), fs_path 212 | 213 | 214 | class Rsync(cmd2.CommandSet): 215 | parser = argparse.ArgumentParser() 216 | parser.add_argument( 217 | "--history", 218 | action="store_true", 219 | default=False, 220 | help="synchronise object history as well as current versions", 221 | ) 222 | parser.add_argument( 223 | "--progress", 224 | action="store_true", 225 | default=False, 226 | help="show progress during transfer", 227 | ) 228 | parser.add_argument( 229 | "--meta", 230 | choices=[META_UPDATE, META_OVERWRITE], 231 | help="""Synchronise metadata as well as objects. 232 | 233 | Options are: 234 | update - perform a dictionary update with any existing metadata 235 | overwrite - replace any existing metadata with that from SRC 236 | """, 237 | ) 238 | parser.add_argument("path", nargs="*", type=str, completer_method=completion.path_complete) 239 | 240 | @cmd2.with_argparser(parser) 241 | def do_rsync(self, args): 242 | command = rsync 243 | progress = args.progress 244 | history = args.history 245 | meta = args.meta 246 | try: 247 | print(command(*args.path, progress=progress, history=history, meta=meta)) 248 | except ValueError as exc: 249 | self._cmd.perror(str(exc)) 250 | -------------------------------------------------------------------------------- /src/pyos/psh/cmds/save.py: -------------------------------------------------------------------------------- 1 | """The save command""" 2 | 3 | from .. import flags 4 | from ... 
@psh_lib.command(pass_options=True)
@psh_lib.flag(flags.f, help="Force - overwrite files with the same name")
def save(options, *args):
    """Save one or more objects

    When the final argument is a string or path it is treated as the destination for
    everything that precedes it.
    """
    objs = args
    dest = None

    if len(objs) > 1 and isinstance(objs[-1], (str, pathlib.PurePath)):
        # The trailing argument is the destination
        dest = pathlib.Path(objs[-1]).resolve()
        objs = objs[:-1]

    saved = db.save_many([(obj, dest) for obj in objs], overwrite=flags.f in options)

    # A single object yields a single result
    return saved[0] if len(objs) == 1 else saved
class PathCompletion(os.PathLike):
    """This class enables tab-completion for paths. Passed a path, this class will list all the
    files and directories below it when __dir__ is called (i.e. tab in, e.g., ipython).
    """

    def __init__(self, path="."):
        # Stored as a pyos path; all child lookups are relative to it
        self._path = pathlib.Path(path)

    def __repr__(self):
        return f"{self.__class__.__name__}('{self._path}')"

    def __fspath__(self):
        return str(self._path)

    def __dir__(self) -> Iterable[str]:
        """Yield the names of children that are usable as attribute completions."""
        try:
            for path in self._path.iterdir():
                name = path.name

                # Skip strings that aren't python identifiers, they could never be
                # reached via attribute access anyway
                if name.isidentifier():
                    yield name

        except (exceptions.FileNotFoundError, exceptions.NotADirectoryError):
            # Missing or non-directory path: simply no completions
            pass

    def _ipython_key_completions_(self):
        """This allows getitem style (["...]) style completion"""
        try:
            for path in self._path.iterdir():
                yield path.name

        except (exceptions.FileNotFoundError, exceptions.NotADirectoryError):
            pass

    def _child(self, item: str) -> "PathCompletion":
        """Return a completion object for the given child, raising if it doesn't exist.

        Shared implementation for attribute and subscript access.
        """
        path = self._path / item
        if path.exists():
            return PathCompletion(path)

        raise FileNotFoundError(f"Path does not exist: '{item}'")

    def __getattr__(self, item: str) -> "PathCompletion":
        """Attribute access for paths"""
        return self._child(item)

    def __getitem__(self, item) -> "PathCompletion":
        """Square bracket notation. Allows arbitrary strings which is needed if the path
        name isn't a valid python variable"""
        return self._child(item)
def path_complete(
    app: shell.PyosShell,
    text: str,
    _line: str,
    _begidx: int,
    _endidx: int,
    *,
    path_filter: Optional[Callable[[str], bool]] = None,
) -> List[str]:
    """Performs completion of local file system paths

    :param app: The pyos base shell
    :param text: the string prefix we are attempting to match (all matches must begin with it)
    :param _line: the current input line with leading whitespace removed
    :param _begidx: the beginning index of the prefix text
    :param _endidx: the ending index of the prefix text
    :param path_filter: optional filter function that determines if a path belongs in the
        results this function takes a path as its argument and returns True if the path should
        be kept in the results
    :return: a list of possible tab completions
    """
    if not text:
        # Nothing typed yet: match everything in the CWD
        search_str = "*"
    elif "*" in text or "?" in text:
        # Purposely don't match any path containing wildcards
        return []
    else:
        search_str = text + "*"

    # Needed for proper quoting of paths with spaces
    app.matches_delimited = True

    # Find all matching path completions, then drop those the filter rejects
    matches = glob.glob(search_str)
    if path_filter is not None:
        matches = [match for match in matches if path_filter(match)]

    if len(matches) == 1 and os.isdir(matches[0]):
        # Single directory match: keep the cursor right after it (no trailing
        # space or closing quote) so the user can keep completing inside it
        app.allow_appended_space = False
        app.allow_closing_quote = False

    # Sort the matches before any trailing slashes are added
    matches.sort(key=app.default_sort_key)
    app.matches_sorted = True

    return matches


def file_completer(
    app: shell.PyosShell, text: str, line: str, begidx: int, endidx: int
) -> List[str]:
    """Path completion restricted to files."""
    return path_complete(app, text, line, begidx, endidx, path_filter=os.path.isfile)


def dir_completer(
    app: shell.PyosShell, text: str, line: str, begidx: int, endidx: int
) -> List[str]:
    """Path completion restricted to directories."""
    return path_complete(app, text, line, begidx, endidx, path_filter=os.path.isdir)
__all__ = "f", "l", "L", "n", "p", "d", "u", "r", "s", "v"

# pylint: disable=invalid-name
# flake8: noqa: E741

# Define the standard single-letter flags understood by psh commands
d, f, l, L, n, p, r, s, u, v = (psh_lib.Option(letter) for letter in "dflLnprsuv")
@functools.singledispatch
def parse_arg(arg) -> Sequence:
    """Parse a single argument into a sequence of filesystem entries.

    Dispatches on the argument type; unsupported types raise ``TypeError``.
    """
    raise TypeError(f"Unknown type '{arg.__class__.__name__}'")


@parse_arg.register(fs.ObjectNode)
def _(arg: fs.ObjectNode):
    # Copied so the caller's node is never mutated
    return [copy.copy(arg)]


@parse_arg.register(fs.DirectoryNode)
def _(arg: fs.DirectoryNode):
    return [copy.copy(arg)]


@parse_arg.register(fs.ResultsNode)
def _(arg: fs.ResultsNode):
    # A results node is flattened into its children
    return parse_fs_entry(*arg.children)


@parse_arg.register(os.PathLike)
def _(arg: os.PathLike):
    return [arg]


@parse_arg.register(str)
def _(arg: str):
    if glob.has_magic(arg):
        # Expand wildcard patterns into concrete paths
        return tuple(map(pathlib.PurePath, glob.glob(arg)))

    # A plain string is assumed to be a single path.  (The previous
    # isinstance(arg, str) re-check and trailing raise were unreachable here:
    # this handler is only ever dispatched for strings.)
    return (pathlib.PurePath(arg),)


def parse_fs_entry(*args) -> Sequence:
    """Parse objects that can be interpreted as filesystem entries. This can be a path, or a
    filesystem node."""
    parsed = []
    for arg in args:
        parsed.extend(parse_arg(arg))

    return parsed


def gather_obj_ids(entries, historian: mincepy.Historian) -> tuple[list, list]:
    """Split ``entries`` into object ids and everything else.

    :param entries: the entries to classify
    :param historian: used to resolve each entry to an object id where possible
    :return: a ``(obj_ids, rest)`` tuple; entries the historian cannot resolve
        are returned unchanged in ``rest``
    """
    obj_ids = []
    rest = []
    for entry in entries:
        obj_id = historian.to_obj_id(entry)
        if obj_id is not None:
            obj_ids.append(obj_id)
        else:
            rest.append(entry)

    return obj_ids, rest
# The representer set via set_default(); None means "auto-detect"
_DEFAULT_REPRESENTER = None


def get_default():
    """Get the default representer.

    When no representer has been set explicitly this prefers the pyprnt-based
    :func:`prnt` representer if pyprnt is importable, falling back to
    :func:`dict_representer`.  (Reading the module global needs no ``global``
    declaration, so the previous one was removed.)
    """
    if _DEFAULT_REPRESENTER is None:
        try:
            # We try importing here just in case the user has since installed pyprnt
            import pyprnt as _  # pylint: disable=import-outside-toplevel

            del _
            return prnt
        except ImportError:
            return dict_representer

    return _DEFAULT_REPRESENTER


def set_default(representer):
    """Set the representer returned by :func:`get_default`."""
    global _DEFAULT_REPRESENTER  # pylint: disable=global-statement
    _DEFAULT_REPRESENTER = representer


def dict_representer(obj):
    """Represent ``obj`` as a pretty-printed dict of its public properties.

    ``mincepy.File`` objects are represented by their text content.  Any
    exception raised during formatting is returned as its string form rather
    than propagated: representers must never fail.
    """
    try:
        if isinstance(obj, mincepy.File):
            return obj.read_text()

        return pprint.pformat(fmt.obj_dict(obj))
    except Exception as exc:  # pylint: disable=broad-except
        return str(exc)
def to_simple_repr(obj):
    """Convert an object into primitive types i.e. str, list, dict, etc"""
    # Files are represented by their textual content
    if isinstance(obj, mincepy.File):
        return obj.read_text()

    # Sequences are already "simple" and pass through untouched
    if isinstance(obj, Sequence):  # pylint: disable=isinstance-second-argument-not-valid-type
        return obj

    # Exceptions become a short "Type: message" string
    if isinstance(obj, Exception):
        return f"{type(obj).__name__}: {obj}"

    # Anything else is reduced to a dictionary of its public properties
    return fmt.obj_dict(obj)
class BaseResults:
    """Base class for result objects returned by psh commands."""

    def __or__(self, other: Callable):
        """Pipe support: ``results | fn`` calls ``fn(results)``."""
        if not isinstance(
            other, Callable
        ):  # pylint: disable=isinstance-second-argument-not-valid-type
            raise ValueError(f"'{other}' is not callable")

        return other(self)

    def __stream_out__(self, stream: TextIO):
        """Stream the string representation of this object to the output stream"""
        stream.write(self.__str__())


class CachingResults(collections.abc.Sequence, BaseResults):
    """A helper that takes an iterator and wraps it, caching the results as a sequence."""

    def __init__(self, iterator: Iterator, representer: Callable = None):
        """
        Create a caching results sequence. If no representer is supplied the default will be used.

        :param iterator: the iterable to cache results of
        :param representer: the representer to use, if None the current default will be used.
        """
        super().__init__()
        if not isinstance(iterator, Iterator):
            raise TypeError(f"Expected Iterator, got {iterator.__class__.__name__}")

        self._iterator = iterator
        self._representer = representer or representers.get_default()
        self._cache = []

    def __getitem__(self, item):
        if isinstance(item, slice):
            stop = item.stop
            # A None or negative stop can only be resolved once the full
            # length is known, so exhaust the iterator in those cases
            self._ensure_cache(-1 if stop is None or stop < 0 else stop - 1)
        else:
            # Likewise, a negative index is relative to the full length
            self._ensure_cache(-1 if item < 0 else item)
        return self._cache[item]

    def __iter__(self):
        return self._iter_generator()

    def __len__(self):
        self._ensure_cache()
        return len(self._cache)

    def __repr__(self):
        return "\n".join([self._representer(item) for item in self])

    def _iter_generator(self, at_end=False):
        """Yield cached items, then continue consuming the wrapped iterator.

        :param at_end: start from the end of the current cache instead of the beginning
        """
        idx = 0 if not at_end else len(self._cache)
        while True:
            if idx >= len(self._cache):
                # Cache exhausted, try the iterator
                if self._iterator is None:
                    return

                try:
                    self._cache.append(next(self._iterator))
                except StopIteration:
                    # Mark the iterator as fully consumed
                    self._iterator = None
                    return

            yield self._cache[idx]
            idx += 1

    def _ensure_cache(self, max_index=-1):
        """Fill up the cache up to and including ``max_index``.

        Any negative ``max_index`` fills the cache entirely.  (Previously only
        exactly -1 did; other negative values were compared directly against the
        growing positive index, stopping the fill after a single item and making
        e.g. ``results[:-2]`` or ``results[-2]`` return wrong data.)
        """
        if self._iterator is None or (0 <= max_index < len(self._cache)):
            return

        fill_all = max_index < 0
        idx = len(self._cache)
        self_iter = self._iter_generator(at_end=True)
        while True:
            try:
                next(self_iter)
                idx += 1
                if not fill_all and idx > max_index:
                    return
            except StopIteration:
                return
-------------------------------------------------------------------------------- 1 | # pylint: disable=undefined-all-variable 2 | __all__ = ("__version__",) 3 | 4 | author_info = (("Martin Uhrin", "martin.uhrin.10@ucl.ac.uk"),) 5 | version_info = (0, 8, 0) 6 | 7 | __author__ = ", ".join(f"{info[0]} <{info[1]}>" for info in author_info) 8 | __version__ = ".".join(map(str, version_info)) 9 | 10 | 11 | def _strip_first_and_last_lines(text: str) -> str: 12 | return "\n".join(text.split("\n")[1:-1]) 13 | 14 | 15 | LOGO = _strip_first_and_last_lines( 16 | r""" 17 | ____ ____ 18 | / __ \/ __ / 19 | ____ __ __/ / / / /_ 20 | / __ \/ / / / / / /\__ \ 21 | / /_/ / /_/ / /_/ /___/ / 22 | / .___/\__, /\____//____/ 23 | /_/ /____/ 24 | """ 25 | ) 26 | 27 | BANNER = _strip_first_and_last_lines( 28 | rf""" 29 | ____ ____ 30 | / __ \/ __ / 31 | ____ __ __/ / / / /_ 32 | / __ \/ / / / / / /\__ \ 33 | / /_/ / /_/ / /_/ /___/ / 34 | / .___/\__, /\____//____/ 35 | /_/ /____/ v{__version__} 36 | """ 37 | ) 38 | -------------------------------------------------------------------------------- /test/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/muhrin/pyos/800c021aa121415b97ac23c5aea0201ddd503224/test/__init__.py -------------------------------------------------------------------------------- /test/conftest.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | from mincepy.testing import historian, mongodb_archive # pylint: disable=unused-import 4 | import pytest 5 | 6 | import pyos 7 | import pyos.db 8 | 9 | from . 
def test_overwrite(historian: mincepy.Historian):
    """Saving to an existing name must fail unless overwrite=True, and
    overwriting must remove the original object."""
    original = Car()
    original_id = db.save_one(original, "car")
    del original

    # Saving another object under the same name is refused by default
    with pytest.raises(pyos.exceptions.FileExistsError):
        db.save_one(Car(), "car")

    # ...but succeeds when overwriting is explicitly requested
    replacement = Car()
    db.save_one(replacement, "car", overwrite=True)
    del replacement

    # The overwritten object must be gone from the historian
    with pytest.raises(mincepy.NotFound):
        historian.load(original_id)
def test_delete_outside_pyos(archive_uri):
    """Deleting an object through an independent mincepy connection (i.e.
    outside of pyos) must also remove its filesystem entry."""
    car_id = db.save_one(mincepy.testing.Car(), "my_car")
    assert pos.withdb.exists("my_car")

    # Delete via a separate connection that bypasses pyos entirely
    external_historian = mincepy.connect(archive_uri, use_globally=False)
    external_historian.delete(car_id)

    assert not pos.withdb.exists("my_car")
def test_iter_descendents():
    """Check that ``fs.iter_descendents`` walks the whole subtree below an
    entry and that object filtering works."""
    start_dir = pos.getcwd()

    pos.makedirs("a/b/c")
    # The relative chdir calls accumulate, descending into a, then a/b, then
    # a/b/c, saving one Car and one Person at each level
    for dirname in ("a", "b", "c"):
        pos.chdir(dirname)

        mincepy.testing.Car(colour="black").save()
        mincepy.testing.Person("mark", 23).save()

    root_id = fs.Entry.id(fs.find_entry(pos.withdb.to_fs_path(start_dir)))
    descendents = tuple(fs.iter_descendents(root_id))
    # 3 directories + 6 saved objects = 9 descendents in total
    assert len(descendents) == 9

    # Now check that filtering works
    descendents = tuple(
        fs.iter_descendents(root_id, obj_filter=mincepy.testing.Car.colour == "black")
    )
    # presumably the 3 black cars plus the 3 directories (directories appear to
    # be kept regardless of obj_filter) — TODO confirm
    assert len(descendents) == 6
def test_dir_in_directory():
    """A directory and the objects inside it must show up as members of an
    expanded parent directory node."""
    home = psh.pwd()
    book_path = pyos.pathlib.Path("address_book/").resolve()
    pyos.os.makedirs(book_path)
    with pyos.pathlib.working_path(book_path):
        person_id = psh.save(Person("martin", 34), "martin")

    # Fully expand home so nested entries become visible
    home_node = pyos.fs.DirectoryNode(home)
    home_node.expand(depth=-1)

    assert pyos.pathlib.Path("address_book/") in home_node
    assert pyos.pathlib.Path("address_book/martin") in home_node

    book_node = pyos.fs.DirectoryNode(home_node.abspath / "address_book/")
    book_node.expand(1)  # Have to expand so it finds internal objects
    assert person_id in book_node
def test_move_dir(historian: mincepy.Historian):
    """Test moving a directory node"""
    pyos.os.makedirs("address_book/")
    pyos.os.makedirs("sub/")

    source = pyos.pathlib.Path("address_book/").resolve()
    with pyos.pathlib.working_path(source):
        martin_id = psh.save(Person("martin", 34), "martin")

    # Move the whole directory under sub/
    dir_node = pyos.fs.DirectoryNode(source)
    dir_node.move("sub/")
    assert dir_node.abspath == pyos.Path("sub/address_book/").resolve()

    # The contained object must have moved with it
    dir_node.expand(1)
    assert dir_node.children[0].obj_id == martin_id
def test_results_slicing():
    """Slicing a directory/results node yields a ResultsNode that keeps view settings."""
    num_persons = 10

    for _ in range(num_persons):
        Person("test", 30).save()

    cwd = pyos.fs.DirectoryNode("./")
    cwd.expand()
    assert len(cwd) == num_persons

    # Slicing a directory node produces a results node
    first_n = cwd[:5]
    assert len(first_n) == 5
    assert isinstance(first_n, pyos.fs.ResultsNode)

    # Slicing a results node produces another results node
    first_n.show("creator", "name", "ctime", mode=pyos.fs.TABLE_VIEW)
    first_2 = first_n[:2]
    assert len(first_2) == 2
    # Fix: the original re-asserted `first_n` here, so the type of the new
    # slice was never actually checked
    assert isinstance(first_2, pyos.fs.ResultsNode)
    # Make sure the view settings were copied over to the slice
    assert first_2.view_mode == first_n.view_mode
    assert first_2.showing == first_n.showing
def test_container_show():
    """Each showable property appears in the string form of an `ls` listing."""
    name = "bart"

    # Save a person using their name as the filename so the listing has one entry
    person = Person(name, 34)
    pyos.db.save_one(person, person.name)

    res: pyos.fs.ContainerNode = pyos.psh.ls()  # pylint: disable=no-value-for-parameter

    # Map each property that can be shown to a substring expected in the output
    expectations = (
        ("loaded", "*"),
        ("type", "Person"),
        ("version", "0"),
        ("name", name),
        ("relpath", name),
        ("abspath", str(pyos.pathlib.Path() / name)),
    )
    for prop, fragment in expectations:
        res.show(prop)
        assert fragment in str(res)
def test_parents_dir():
    """Test that parents works for directories"""
    path = pyos.pathlib.Path("tmp/test/sub/a/")
    assert path.parents == [
        pyos.pathlib.Path("tmp/test/sub/"),
        pyos.pathlib.Path("tmp/test/"),
        pyos.pathlib.Path("tmp/"),
        pyos.pathlib.Path("./"),
    ]

    # Test absolute
    # Fix: use a trailing slash so this case actually exercises a *directory*
    # path (the original reused the file form "/tmp/test/sub/a", which merely
    # duplicated the absolute case of test_parents_file)
    path = pyos.pathlib.Path("/tmp/test/sub/a/")
    assert path.parents == [
        pyos.pathlib.Path("/tmp/test/sub/"),
        pyos.pathlib.Path("/tmp/test/"),
        pyos.pathlib.Path("/tmp/"),
        pyos.pathlib.Path("/"),
    ]
"path2/")) == 2 24 | assert len(fs.find("path1/", "path2/", type=testing.Car)) == 2 25 | 26 | 27 | def test_find_from_meta(): 28 | ferrari = testing.Car(make="ferrari") 29 | skoda = testing.Car(make="skoda") 30 | 31 | pyos.db.save_one(ferrari, meta=dict(num_keys=1)) 32 | pyos.db.save_one(skoda, meta=dict(num_keys=2)) 33 | 34 | res = fs.find(meta=dict(num_keys=2)) 35 | assert len(res) == 1 36 | assert res[0].obj is skoda 37 | 38 | 39 | def test_find_from_attribute(): 40 | ferrari = testing.Car(make="ferrari") 41 | skoda = testing.Car(make="skoda") 42 | pyos.db.save_many((ferrari, skoda)) 43 | 44 | res = fs.find(obj_filter=testing.Car.make == "skoda") 45 | assert len(res) == 1 46 | assert res[0].obj is skoda 47 | -------------------------------------------------------------------------------- /test/os/test_os.py: -------------------------------------------------------------------------------- 1 | from mincepy import testing 2 | import pytest 3 | 4 | from pyos import db, os 5 | import pyos.exceptions 6 | 7 | 8 | def test_rename(): 9 | os.makedirs("/home/sub/subsub/") 10 | os.makedirs("/home/new/") 11 | 12 | car = testing.Car() 13 | car_path = "/home/sub/subsub/my_car" 14 | db.save_one(car, car_path) 15 | 16 | car_path = db.get_path(car) 17 | assert car_path == "/home/sub/subsub/my_car" 18 | os.rename(car_path, "/home/new/your_car") 19 | assert db.get_path(car) == "/home/new/your_car" 20 | 21 | os.rename("/home/new/your_car", "their_car") 22 | assert db.get_path(car) == os.path.abspath("their_car") 23 | 24 | 25 | def test_rename_to_existing(): 26 | car1 = testing.Car() 27 | car2 = testing.Car() 28 | db.save_many([(car1, "car1"), (car2, "car2")]) 29 | assert set(os.listdir()) == {"car1", "car2"} 30 | 31 | with pytest.raises(pyos.exceptions.FileExistsError): 32 | os.rename("car2", "car1") 33 | assert set(os.listdir()) == {"car1", "car2"} 34 | 35 | 36 | def test_listdir(): 37 | os.makedirs("/a/b/") 38 | db.save_one(testing.Car(), "/a/acar") 39 | db.save_one(testing.Car(), 
"/a/b/bcar") 40 | 41 | assert sorted(os.listdir("/")) == ["a"] 42 | assert sorted(os.listdir("/a/")) == ["acar", "b"] 43 | assert sorted(os.listdir("/a/b/")) == ["bcar"] 44 | 45 | with pytest.raises(pyos.exceptions.NotADirectoryError): 46 | os.listdir("/a/acar") 47 | with pytest.raises(pyos.exceptions.FileNotFoundError): 48 | os.listdir("/c/") 49 | -------------------------------------------------------------------------------- /test/os/test_os_path.py: -------------------------------------------------------------------------------- 1 | import re 2 | 3 | from mincepy.testing import Car 4 | 5 | import pyos 6 | from pyos import os 7 | 8 | 9 | def test_normpath_curdir(): 10 | """Using normpath(curdir), i.e. '.' should normalise to './'""" 11 | assert pyos.os.path.normpath(".") == "." 12 | 13 | 14 | def test_exists(): 15 | """Check some properties of the exists function""" 16 | car = Car() 17 | car_path = "/home/sub/subsub/my_car" 18 | os.makedirs("/home/sub/subsub/") 19 | pyos.db.save_one(car, car_path) 20 | 21 | # Make sure the car and all subdirectories exist 22 | # parts = pyos.os.path.split(car_path) 23 | parts = re.findall(r"(\w*/|\w+$)", car_path) 24 | for idx in range(1, len(parts)): 25 | assert pyos.os.path.exists("".join(parts[:idx])) 26 | 27 | 28 | def test_normpath_parent_dir(): 29 | assert os.path.normpath("a/b/../") == "a" 30 | assert os.path.normpath("a/b/..") == "a" 31 | 32 | 33 | def test_normpah_current_dir(): 34 | assert os.path.normpath("a/b/./") == "a/b" 35 | assert os.path.normpath("a/b/.") == "a/b" 36 | -------------------------------------------------------------------------------- /test/os/test_os_path_python.py: -------------------------------------------------------------------------------- 1 | """Tests taken from cPython's path tests: 2 | https://github.com/python/cpython/blob/master/Lib/test/test_path.py 3 | 4 | Where I've changed a check I've kept the original to highlight differences beween pyos and python. 
5 | """ 6 | 7 | import pytest 8 | 9 | from pyos import db, os 10 | import pyos.config 11 | from pyos.os import path 12 | 13 | 14 | def test_join(): 15 | assert path.join("/foo", "bar", "/bar", "baz") == "/bar/baz" 16 | assert path.join("/foo", "bar", "baz") == "/foo/bar/baz" 17 | assert path.join("/foo/", "bar/", "baz/") == "/foo/bar/baz/" 18 | 19 | 20 | def test_split(): 21 | assert path.split("/foo/bar") == ("/foo", "bar") 22 | 23 | assert path.split("/") == ("/", "") 24 | assert path.split("foo") == ("", "foo") 25 | assert path.split("////foo") == ("////", "foo") 26 | 27 | assert path.split("//foo//bar") == ("//foo", "bar") 28 | 29 | 30 | def test_basename(): 31 | assert path.basename("/foo/bar") == "bar" 32 | assert path.basename("/") == "" 33 | assert path.basename("foo") == "foo" 34 | assert path.basename("////foo") == "foo" 35 | assert path.basename("//foo//bar") == "bar" 36 | 37 | 38 | def test_dirname(): 39 | assert path.dirname("/foo/bar") == "/foo" 40 | 41 | assert path.dirname("/") == "/" 42 | assert path.dirname("foo") == "" 43 | assert path.dirname("////foo") == "////" 44 | 45 | assert path.dirname("//foo//bar") == "//foo" 46 | 47 | 48 | def test_expanduser(): 49 | homedir = db.homedir() 50 | assert path.expanduser("foo") == "foo" 51 | assert path.expanduser("~") == homedir 52 | assert path.expanduser("~/") == homedir 53 | assert path.expanduser("~/foo") == f"{homedir}foo" 54 | assert path.expanduser("~root") == "/root" 55 | 56 | 57 | def test_normpath(): 58 | assert path.normpath("") == "." 
def test_relpath():
    """Exercise `path.relpath` for relative/absolute inputs, files and directories.

    Mirrors CPython's posixpath relpath tests; the trailing-slash block checks
    that pyos directory paths relativize the same way as the file forms.
    """
    # Change the CWD
    os.makedirs("/home/user/bar/")

    pyos.os.chdir("/home/user/bar/")
    assert os.getcwd() == r"/home/user/bar"

    # Name of the current directory, used to build the expected "../<curdir>/..." results
    curdir = os.path.split(os.getcwd())[-1]
    # An empty path is invalid input
    with pytest.raises(ValueError):
        path.relpath("")

    assert path.relpath("a") == "a"
    assert path.relpath(path.abspath("a")) == "a"
    assert path.relpath("a/b") == "a/b"
    assert path.relpath("../a/b") == "../a/b"

    # Relative to a sibling of the CWD: climb out, then back through curdir
    assert path.relpath("a", "../b") == "../" + curdir + "/a"

    assert path.relpath("a/b", "../c") == "../" + curdir + "/a/b"

    assert path.relpath("a", "b/c") == "../../a"
    assert path.relpath("a", "a") == "."
    assert path.relpath("/foo/bar/bat", "/x/y/z") == "../../../foo/bar/bat"
    assert path.relpath("/foo/bar/bat", "/foo/bar") == "bat"
    assert path.relpath("/foo/bar/bat", "/") == "foo/bar/bat"
    assert path.relpath("/", "/foo/bar/bat") == "../../.."
    assert path.relpath("/foo/bar/bat", "/x") == "../foo/bar/bat"
    assert path.relpath("/x", "/foo/bar/bat") == "../../../x"
    assert path.relpath("/", "/") == "."
    assert path.relpath("/a", "/a") == "."
    assert path.relpath("/a/b", "/a/b") == "."

    # Check directories
    # Same cases again with trailing slashes; results are identical to the file forms
    assert path.relpath("a/") == "a"
    assert path.relpath(path.abspath("a/")) == "a"
    assert path.relpath("a/b/") == "a/b"
    assert path.relpath("../a/b/") == "../a/b"

    assert path.relpath("a/", "../b") == "../" + curdir + "/a"

    assert path.relpath("a/b/", "../c") == "../" + curdir + "/a/b"

    assert path.relpath("a/", "b/c") == "../../a"
    assert path.relpath("a/", "a/") == "."
    assert path.relpath("/foo/bar/bat/", "/x/y/z") == "../../../foo/bar/bat"
    assert path.relpath("/foo/bar/bat/", "/foo/bar") == "bat"
    assert path.relpath("/foo/bar/bat/", "/") == "foo/bar/bat"
    assert path.relpath("/", "/foo/bar/bat/") == "../../.."
    assert path.relpath("/foo/bar/bat/", "/x") == "../foo/bar/bat"
    assert path.relpath("/x/", "/foo/bar/bat") == "../../../x"
    assert path.relpath("/", "/") == "."
    assert path.relpath("/a/", "/a/") == "."
    assert path.relpath("/a/b/", "/a/b/") == "."
def test_iterdir():
    """`iterdir` lists both objects and subdirectories of the current directory."""
    psh.save(Car(), "my_car")
    pyos.os.makedirs("sub/")
    with pathlib.working_path("sub/"):
        psh.save(Car(), "my_sub_car")

    # Collect the names of the entries in the current (home) directory
    names = [entry.name for entry in pathlib.Path().iterdir()]
    assert len(names) == 2
    assert "my_car" in names
    assert "sub" in names
pathlib.PurePath("a/") 30 | assert pathlib.PurePath("a/b/") / pathlib.PurePath("../") == expected_result 31 | assert pathlib.PurePath("a/b/") / pathlib.PurePath("..") == expected_result 32 | 33 | 34 | def test_resolve_current_dir(): 35 | expected_result = pathlib.PurePath("a/b/") 36 | assert pathlib.PurePath("a/b/") / pathlib.PurePath("./") == expected_result 37 | assert pathlib.PurePath("a/b/") / pathlib.PurePath(".") == expected_result 38 | 39 | 40 | def test_path_joining(): 41 | # Check that we can join with a file path and it will be promoted to a directory 42 | result = pathlib.PurePath("/home") / pathlib.PurePath("martin") 43 | assert pyos.os.fspath(result) == "/home/martin" 44 | -------------------------------------------------------------------------------- /test/psh/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/muhrin/pyos/800c021aa121415b97ac23c5aea0201ddd503224/test/psh/__init__.py -------------------------------------------------------------------------------- /test/psh/conftest.py: -------------------------------------------------------------------------------- 1 | import cmd2_ext_test 2 | import pytest 3 | 4 | from pyos import psh 5 | 6 | 7 | class PyosShellTester(cmd2_ext_test.ExternalTestMixin, psh.PyosShell): 8 | """Shell tester""" 9 | 10 | 11 | @pytest.fixture 12 | def pyos_shell(): 13 | app = PyosShellTester() 14 | app.fixture_setup() 15 | 16 | # Make sure that exceptions bubble up 17 | old_pexcept = app.pexcept 18 | 19 | def new_pexcept(*args, **kwargs): 20 | old_pexcept(*args, **kwargs) 21 | raise # pylint: disable=misplaced-bare-raise 22 | 23 | app.pexcept = new_pexcept 24 | 25 | yield app 26 | app.fixture_teardown() 27 | -------------------------------------------------------------------------------- /test/psh/test_cat.py: -------------------------------------------------------------------------------- 1 | import mincepy 2 | import mincepy.testing 3 | 4 | import 
def test_cat_object():
    """`cat` on plain objects returns their default-representer renderings in order."""
    yellow = mincepy.testing.Car("ferrari", "yellow")
    black = mincepy.testing.Car("ferrari", "black")
    psh.save(yellow, black)

    represent = pyos.representers.get_default()

    catted = psh.cat(yellow, black)
    assert len(catted) == 2
    # Output order matches argument order, one rendering per object
    for rendered, car in zip(catted, (yellow, black)):
        assert rendered == represent(car)
def test_reaching_root():
    """Going up past the filesystem root must stop (and stay) at '/'."""
    root = pyos.pathlib.Path("/")
    start = psh.pwd()

    # Climb two levels at a time until we must have reached the root
    for _ in range(len(start.parts) - 1):
        psh.cd("../..")
    assert psh.pwd() == root

    # Going up from the root is a no-op - we remain at '/'
    psh.cd("../..")
    assert psh.pwd() == root
| psh.cd("sub/") 13 | Car().save() 14 | 15 | comp = psh.completion.PathCompletion("/test/") 16 | content = tuple(dir(comp)) 17 | assert len(content) == 2 18 | assert "sub" in content 19 | assert "my_car" in content 20 | 21 | assert isinstance(getattr(comp, "sub"), psh.completion.PathCompletion) 22 | assert isinstance(getattr(comp, "my_car"), psh.completion.PathCompletion) 23 | assert set(comp._ipython_key_completions_()) == { 24 | "sub", 25 | "my_car", 26 | } # pylint: disable=protected-access 27 | 28 | # Check __repr__ 29 | assert psh.completion.PathCompletion.__name__ in repr(comp) 30 | assert pyos.os.fspath(comp) in repr(comp) 31 | 32 | # Check for non-existent paths 33 | assert not list(dir(psh.completion.PathCompletion("/does_not_exist/"))) 34 | -------------------------------------------------------------------------------- /test/psh/test_find.py: -------------------------------------------------------------------------------- 1 | from mincepy.testing import Car, Person 2 | from pytray import obj_load 3 | 4 | from pyos import psh 5 | import pyos.os 6 | 7 | 8 | def fill_with_cars(subdirs: list): 9 | cwd = psh.pwd() 10 | for subdir in subdirs: 11 | pyos.os.makedirs(subdir, exists_ok=True) 12 | psh.cd(subdir) 13 | car = Car() 14 | car.save(meta={"target": True, "mydir": subdir}) 15 | # Now change back to the original directory 16 | psh.cd(cwd) 17 | 18 | 19 | def test_simple_find(): 20 | # Save a car 21 | car = Car() 22 | car.save(meta={"group": "cars"}) 23 | 24 | # Look for it 25 | results = psh.find(meta=dict(group="cars")) 26 | assert len(results) == 1 27 | assert results[0].entry_id == car.obj_id 28 | 29 | # Add another car to the group 30 | car2 = Car() 31 | car2.save(meta={"group": "cars"}) 32 | 33 | # Look for them 34 | results = psh.find(meta=dict(group="cars")) 35 | assert len(results) == 2 36 | assert car.obj_id in results 37 | assert car2.obj_id in results 38 | 39 | 40 | def test_find_paths(): 41 | pyos.os.makedirs("subdir/") 42 | psh.save(Car(), 
"subdir/car_a") 43 | 44 | res = psh.find() 45 | assert len(res) == 1 46 | assert res[0].abspath == pyos.pathlib.Path("subdir/car_a").resolve() 47 | assert "subdir/car_a" in str(res) 48 | 49 | 50 | def test_subdirs_find(): 51 | subdirs = ["./", "a/", "b/", "c/", "d/"] 52 | fill_with_cars(subdirs) 53 | num_subdirs = len(subdirs) 54 | 55 | results = psh.find(meta=dict(target=True)) 56 | assert len(results) == num_subdirs 57 | 58 | # Check mindepth 59 | for idx, _subdir in enumerate(subdirs): 60 | found = psh.find(mindepth=idx + 1) 61 | assert len(found) == num_subdirs - idx 62 | dirs = {psh.meta(node)["mydir"] for node in found} 63 | for check_dir in subdirs[idx:]: 64 | assert check_dir in dirs 65 | 66 | # Now check maxdepth 67 | for idx, _subdir in enumerate(subdirs): 68 | found = psh.find(maxdepth=idx) 69 | assert len(found) == idx 70 | dirs = {psh.meta(node)["mydir"] for node in found} 71 | for check_dir in subdirs[:idx]: 72 | assert check_dir in dirs 73 | 74 | # Now check combinations of mindepth and maxdepth 75 | for min_idx in range(len(subdirs)): 76 | for max_idx in range(min_idx, len(subdirs)): 77 | found = psh.find(mindepth=min_idx, maxdepth=max_idx) 78 | assert len(found) == max_idx - min_idx if min_idx == 0 else (max_idx - min_idx + 1) 79 | dirs = {psh.meta(node)["mydir"] for node in found} 80 | 81 | for check_dir in subdirs[min_idx:max_idx]: 82 | assert check_dir in dirs 83 | 84 | 85 | def test_find_starting_point(): 86 | """Test that find respects the passed starting points""" 87 | subdirs = ["./", "a/", "b/", "c/", "d/"] 88 | fill_with_cars(subdirs) 89 | num_subdirs = len(subdirs) 90 | 91 | for idx, _subdir in enumerate(subdirs): 92 | start_point = "/".join(subdirs[: idx + 1]) 93 | found = psh.find(start_point) 94 | assert len(found) == num_subdirs - idx 95 | dirs = {psh.meta(meta_dict)["mydir"] for meta_dict in found} 96 | 97 | for check_dir in subdirs[idx:]: 98 | assert check_dir in dirs 99 | 100 | 101 | def test_find_by_type_simple(): 102 | car = 
def test_shell_find(pyos_shell):
    """The shell `find` command locates objects by type and metadata."""
    find_cmd = f"find -t {obj_load.full_name(Car)} -m group=cars"

    # A single car in the group is found by the shell command
    first = Car()
    first.save(meta={"group": "cars"})
    res = pyos_shell.app_cmd(find_cmd)
    assert not res.stderr
    assert str(first.obj_id) in res.stdout

    # After adding a second car, both ids appear in the output
    second = Car()
    second.save(meta={"group": "cars"})
    res = pyos_shell.app_cmd(find_cmd)
    assert not res.stderr
    assert str(first.obj_id) in res.stdout
    assert str(second.obj_id) in res.stdout
pyos_shell.app_cmd(f"find -t {obj_load.full_name(Person)}") 169 | assert not res.stderr 170 | lines = res.stdout.split("\n")[:-1] 171 | assert len(lines) == 1 172 | assert str(person.obj_id) in lines[0] 173 | 174 | 175 | def test_shell_find_query_state(pyos_shell): 176 | fiat = Car(make="fiat", colour="white") 177 | subaru = Car(make="subaru", colour="white") 178 | fiat.save() 179 | subaru.save() 180 | 181 | # = 182 | res = pyos_shell.app_cmd(f"find -t {obj_load.full_name(Car)} colour=white") 183 | assert not res.stderr 184 | lines = res.stdout.split("\n")[:-1] 185 | assert len(lines) == 2 186 | assert str(fiat.obj_id) in res.stdout 187 | assert str(subaru.obj_id) in res.stdout 188 | 189 | res = pyos_shell.app_cmd("find make=subaru") 190 | assert not res.stderr 191 | lines = res.stdout.split("\n")[:-1] 192 | assert len(lines) == 1 193 | assert str(subaru.obj_id) in lines[0] 194 | 195 | # != 196 | res = pyos_shell.app_cmd(f"find -t {obj_load.full_name(Car)} make!=fiat") 197 | assert not res.stderr 198 | lines = res.stdout.split("\n")[:-1] 199 | assert len(lines) == 1 200 | assert str(fiat.obj_id) not in res.stdout 201 | assert str(subaru.obj_id) in res.stdout 202 | -------------------------------------------------------------------------------- /test/psh/test_load.py: -------------------------------------------------------------------------------- 1 | from mincepy.testing import Car 2 | 3 | import pyos 4 | from pyos import psh 5 | 6 | 7 | def test_load_single(): 8 | """Test loading a single objects just returns that object and not a list""" 9 | car = Car() 10 | car_id = car.save() 11 | loaded = psh.load(car_id) 12 | assert loaded is car 13 | 14 | 15 | def test_load_from_path(): 16 | car = Car() 17 | psh.save(car, "my_car") 18 | 19 | # Try with string first 20 | assert psh.load("my_car") is car 21 | 22 | # Try with path 23 | assert psh.load(pyos.pathlib.Path("my_car")) is car 24 | 25 | 26 | def test_load_from_obj_id_string(): 27 | car = Car() 28 | car_id = 
def test_locate_multiple():
    """`locate` accepts objects, object ids, and id strings interchangeably."""
    car1 = Car()
    car2 = Car()

    pyos.os.makedirs("garage1/")
    pyos.os.makedirs("garage2/")

    car1.save()
    car2.save()
    path1 = (pyos.Path("garage1/") / str(car1.obj_id)).resolve()
    path2 = (pyos.Path("garage2/") / str(car2.obj_id)).resolve()
    pyos.db.save_many([(car1, path1), (car2, path2)])

    # The same two locations must come back regardless of identifier style
    identifier_pairs = (
        (car1, car2),  # by object
        (car1.obj_id, car2),  # by obj id
        (str(car1.obj_id), car2.obj_id),  # by obj id string
    )
    for first, second in identifier_pairs:
        locations = psh.locate(first, second)
        assert locations[0] == path1
        assert locations[1] == path2
# Disable this here because, e.g., ls() causes the linter to warn because it is
# the decorator that takes care of passing the first argument
# pylint: disable=no-value-for-parameter


def test_ls_basic():
    """ls() reflects objects and directories of the current working directory."""
    pyos.os.makedirs("test/")

    # The new (empty) directory is listed
    assert len(psh.ls()) == 1

    car1 = Car()
    car1.save()
    assert len(psh.ls()) == 2

    # Saving from within a different cwd puts the object there
    psh.cd("test")
    assert len(psh.ls()) == 0

    car = Car()
    car.save()
    assert len(psh.ls()) == 1

    psh.cd("..")
    listing = psh.ls()
    assert len(listing) == 2  # Now there is a directory and a file

    seen = []
    for node in listing:
        if isinstance(node, pyos.fs.DirectoryNode):
            assert node.name == "test"
            seen.append(True)
        elif isinstance(node, pyos.fs.ObjectNode):
            assert node.obj_id == car1.obj_id
            seen.append(True)
    assert len(seen) == 2


def test_ls_path():
    """Test that ls lists the contents of a folder when given a path"""
    car = Car()
    pyos.os.makedirs("a/")
    psh.save(car, "a/")

    home_listing = psh.ls()
    assert len(home_listing) == 1  # Only the directory lives in home
    assert "a" in repr(home_listing)

    dir_listing = psh.ls("a/")
    assert len(dir_listing) == 1
    assert str(car.obj_id) in repr(dir_listing)


def test_ls_dirs():
    """Every created subdirectory shows up exactly once in the listing."""
    remaining = ["a", "b", "c", "d"]
    for name in remaining:
        pyos.os.makedirs(name)
        # A couple of cars per directory keeps the listing realistic
        psh.save(Car(), name)
        psh.save(Car(), name)

    listing = psh.ls()
    assert len(listing) == len(remaining)
    for node in listing:
        assert isinstance(node, pyos.fs.DirectoryNode)
        assert node.name in remaining
        remaining.remove(node.name)
    assert not remaining


def test_ls_minus_d():
    """With -d, ls reports the directory entry itself rather than its contents."""
    # Two cars at the top level and two in the garage
    pyos.os.makedirs("garage/")

    psh.save(Car())
    psh.save(Car())
    psh.save(Car(), "garage/")
    psh.save(Car(), "garage/")

    # Plain ls: the two cars plus the directory
    assert len(psh.ls()) == 3

    # -d: just the current directory entry
    listing = psh.ls(-psh.d)
    assert len(listing) == 1
    assert isinstance(listing[0], nodes.FilesystemNode)
    assert listing[0].abspath == psh.pwd()  # pylint: disable=no-member


def test_ls_lots():
    """ls of home shows each top-level path once, however full it is."""
    paths = ["test/", "b/", "test/b/", "my_dir/", "my_dir/sub/", "test/b/b_sub/"]
    count = len(paths)
    for idx in range(20):
        target = paths[idx % count]
        # NOTE(review): pyos.os.makedirs is called with `exists_ok` here while
        # the stdlib spells it `exist_ok` — confirm against the pyos.os API.
        pyos.os.makedirs(target, exists_ok=True)
        psh.save(Car(), target)
        psh.save(Person("random", 35), target)

    # Now save some in the root
    for _ in range(2):
        Car().save()
        Person("person a", 23).save()

    # 3 top-level paths plus the 4 root objects
    assert len(psh.ls()) == 7


def test_ls_minus_l():
    """-l (long listing) shows only entries of the directory being listed."""
    pyos.os.makedirs("garage/")

    car1_id = psh.save(Car())
    car2_id = psh.save(Car())
    car3_id = psh.save(Car(), "garage/")
    car4_id = psh.save(Car(), "garage/")

    listing = psh.ls(-psh.l)
    assert len(listing) == 3
    shown = repr(listing)
    assert "garage" in shown
    assert str(car1_id) in shown
    assert str(car2_id) in shown
    assert str(car3_id) not in shown
    assert str(car4_id) not in shown

    listing = psh.ls(-psh.l, "garage/")
    assert len(listing) == 2
    shown = repr(listing)
    assert "garage" not in shown
    assert str(car1_id) not in shown
    assert str(car2_id) not in shown
    assert str(car3_id) in shown
    assert str(car4_id) in shown


def test_inexistent():
    """Listing a non-existent path yields an empty result instead of raising."""
    assert len(psh.ls("not_there")) == 0


def test_vanishing_folders():
    """Regression test: folders used to vanish after cd'ing to their parent."""
    pyos.os.makedirs("/test/")

    psh.cd("/test/")
    car_id = psh.save(Car())
    inside = psh.ls()
    assert len(inside) == 1
    assert isinstance(inside[0], fs.nodes.ObjectNode)
    assert inside[0].entry_id == car_id  # pylint: disable=no-member

    root = psh.ls("/")
    assert len(root) == 1
    assert isinstance(root[0], fs.nodes.DirectoryNode)
    assert root[0].abspath == pyos.Path("/test/")  # pylint: disable=no-member
def dict_in(where: dict, what: dict) -> bool:
    """Return True if every key/value pair of ``what`` is present in ``where``."""
    return all(key in where and where[key] == value for key, value in what.items())


def test_meta_basic():
    """-s sets metadata wholesale; -u updates individual keys in place."""
    car = Car()
    car.save()

    psh.meta(-psh.s, car, fast=True, colour="blue")
    assert dict_in(psh.meta(car), {"fast": True, "colour": "blue"})

    psh.meta(-psh.u, car, fast=False)
    assert dict_in(psh.meta(car), {"fast": False, "colour": "blue"})


def test_meta_update_upsert():
    """Update upserts: with no existing metadata, -u simply sets the values."""
    car = Car()
    car.save()
    before = psh.meta(car)  # Original metadata
    psh.meta(-psh.u, car, fast=True, colour="blue")
    after = psh.meta(car)

    assert dict_in(after, {"fast": True, "colour": "blue"})
    assert after != before


def test_meta_many():
    """Setting metadata on several objects in one call applies to all of them."""
    car1 = Car().save()
    car2 = Car().save()

    expected = {"fast": True, "colour": "blue"}
    psh.meta(-psh.s, car1, car2, fast=True, colour="blue")
    assert dict_in(psh.meta(car1), expected)
    assert dict_in(psh.meta(car2), expected)


def test_shell_meta(pyos_shell):
    """The shell `meta` command displays and sets metadata by obj id."""
    car = Car()
    car.save(meta={"reg": 123})

    res = pyos_shell.app_cmd(f"meta {car.obj_id}")
    assert not res.stderr
    assert "reg│123" in res.stdout

    res = pyos_shell.app_cmd(f"meta -s {car.obj_id} reg=456")
    res = pyos_shell.app_cmd(f"meta {car.obj_id}")
    assert not res.stderr
    assert "reg│456" in res.stdout


# pylint: disable=no-value-for-parameter


def test_mkdir_basic():
    """mkdir works with and without trailing slash; -p tolerates existing dirs."""
    psh.mkdir("test/")
    assert pyos.os.isdir("test/")

    psh.mkdir("test2")
    assert pyos.os.isdir("test2")

    psh.mkdir(-psh.p, "test/subtest/subsubtest")
    assert pyos.os.isdir("test/subtest/subsubtest")

    # Creating an existing directory raises...
    with pytest.raises(pyos.exceptions.FileExistsError):
        psh.mkdir("test/")

    # ...unless the -p flag is given
    psh.mkdir(-psh.p, "test/")
def test_mkdir_cmd(pyos_shell):
    """Test the mkdir REPL command"""
    pyos_shell.app_cmd("mkdir test/")
    assert pyos.os.isdir("test/")

    pyos_shell.app_cmd("mkdir test2")
    assert pyos.os.isdir("test2")

    pyos_shell.app_cmd("mkdir -p test/subtest/subsubtest")
    assert pyos.os.isdir("test/subtest/subsubtest")

    # Creating an existing directory raises...
    with pytest.raises(pyos.exceptions.FileExistsError):
        pyos_shell.app_cmd("mkdir test/")

    # ...unless -p is passed
    pyos_shell.app_cmd("mkdir -p test/")


# pylint: disable=no-value-for-parameter


def test_mv_basic():
    """mv relocates an object into a directory, and a directory into another."""
    pyos.os.makedirs("test/")
    pyos.os.makedirs("sub/")

    car = Car()
    car.save()

    # Move the object into the test subdirectory
    psh.mv(str(car.obj_id), "test/")
    assert psh.locate(car) == psh.pwd() / "test" / str(car.obj_id)

    listing = psh.ls("test/")
    assert len(listing) == 1
    assert isinstance(listing[0], nodes.FilesystemNode)
    assert listing[0].entry_id == car.obj_id  # pylint: disable=no-member

    # Now move 'test' itself into a subfolder
    psh.mv("test/", "sub/")
    listing = psh.ls()
    assert len(listing) == 1
    assert isinstance(listing[0], nodes.FilesystemNode)
    assert listing[0].name == "sub"  # pylint: disable=no-member

    listing = psh.ls("sub/")
    assert len(listing) == 1
    assert listing[0].name == "test"


def test_mv_from_str():
    """The source may be given as an obj-id string."""
    pyos.os.makedirs("test/")

    car = Car()
    car.save()

    psh.mv(str(car.obj_id), "test/")
    listing = psh.ls("test/")
    assert len(listing) == 1
    assert isinstance(listing[0], nodes.FilesystemNode)
    assert listing[0].entry_id == car.obj_id  # pylint: disable=no-member


def test_mv_from_path():
    """The source may be given as a pyos path."""
    pyos.os.makedirs("test/")

    car = Car()
    car.save()

    psh.mv(psh.locate(car), "test/")
    listing = psh.ls("test/")
    assert len(listing) == 1
    assert isinstance(listing[0], nodes.FilesystemNode)
    assert listing[0].entry_id == car.obj_id  # pylint: disable=no-member


# Disabled upstream: moving by a raw obj id (not its string form)
# def test_mv_from_obj_id():
#     pyos.os.makedirs('test/')
#
#     car = Car()
#     car.save()
#
#     psh.mv(car.obj_id, 'test/')
#     contents = psh.ls('test/')
#     assert len(contents) == 1
#     assert isinstance(contents[0], nodes.FilesystemNode)
#     assert contents[0].entry_id == car.obj_id  # pylint: disable=no-member


def test_mv_dest_as_path():
    """The destination may be given as a pyos path object."""
    pyos.os.makedirs("test/")

    car = Car()
    car.save()

    psh.mv(str(car.obj_id), pyos.pathlib.Path("test/"))
    listing = psh.ls("test/")
    assert len(listing) == 1
    assert isinstance(listing[0], nodes.FilesystemNode)
    assert listing[0].entry_id == car.obj_id  # pylint: disable=no-member


def test_mv_remote():
    """Test moving an object from one remote path to another"""
    pyos.os.makedirs("/test/path_a/")
    pyos.os.makedirs("/a/different/path/")

    car = Car()
    psh.save(car, "/test/path_a/")
    psh.mv(f"/test/path_a/{car.obj_id}", "/a/different/path/")

    listing = psh.ls("/a/different/path/")
    assert len(listing) == 1
    assert isinstance(listing[0], nodes.FilesystemNode)
    assert listing[0].entry_id == car.obj_id  # pylint: disable=no-member


def test_mv_overwrite():
    """Test that mv overwrites an existing object correctly"""
    car1 = Car()
    psh.save(car1, "my_car")

    car2 = Car()
    car2.save()
    # NOTE(review): the force flag is passed bare here (psh.f) while other tests
    # use the negated form (-psh.f) — confirm both spellings enable force.
    psh.mv(psh.f, str(car2.obj_id), "my_car")


def test_mv_overwrite_prompt(monkeypatch):
    """Moving onto an existing name without force prompts for confirmation."""
    car1 = Car()
    psh.save(car1, "my_car")

    car2 = Car()
    psh.save(car2, "my_car2")

    assert len(psh.ls()) == 2

    # Answer 'N' at the prompt: nothing moves
    monkeypatch.setattr("sys.stdin", io.StringIO("N"))
    psh.mv("my_car2", "my_car")
    assert len(psh.ls()) == 2  # Still 2

    # Answer 'Y': the move overwrites
    monkeypatch.setattr("sys.stdin", io.StringIO("Y"))
    psh.mv("my_car2", "my_car")
    assert len(psh.ls()) == 1


def test_mv_multiple():
    """Several sources can be moved into one destination directory."""
    pyos.os.makedirs("garage/")

    ferrari = Car()
    psh.save(ferrari, "ferrari")
    skoda = Car()
    psh.save(skoda, "skoda")

    assert len(psh.ls()) == 3

    psh.mv("skoda", "ferrari", "garage/")
    assert psh.ls("garage/") | len == 2


def test_mv_rename_directory():
    """mv renames a directory, keeping its contents."""
    pyos.os.makedirs("cars/")

    psh.cd("cars/")
    car_id = Car().save()
    psh.cd("../")

    # Now, rename the directory using mv
    psh.mv("cars/", "new_cars")
    listing = psh.ls("new_cars/")

    assert len(listing) == 1
    assert isinstance(listing[0], nodes.FilesystemNode)
    assert listing[0].entry_id == car_id  # pylint: disable=no-member
def test_oid():
    """oid() maps objects and filenames back to their object ids, in order."""
    car = Car()
    car.save()
    assert cmds.oid(car) == car.obj_id

    # Save some more and see if the order is preserved
    cars = []
    paths = []
    for idx in range(10):
        cars.append(Car())
        paths.append(f"car_{idx}")
    db.save_many(zip(cars, paths))

    # By object
    results = cmds.oid(*cars)
    for car, found_oid in zip(cars, results):
        assert car.obj_id == found_oid
    assert len(cars) == len(results)

    # By path
    results = cmds.oid(*paths)
    for car, found_oid in zip(cars, results):
        assert car.obj_id == found_oid
    assert len(cars) == len(results)


def test_shell_oid(pyos_shell):
    """The shell `oid` command resolves both obj ids and filenames."""
    yellow = Car("ferrari", "yellow")
    yellow.save()

    # By obj id
    res = pyos_shell.app_cmd(f"oid {yellow.obj_id}")
    assert not res.stderr
    assert str(yellow.obj_id) in res.stdout

    pyos.psh.mv(str(yellow.obj_id), "ferrari")

    # By filename
    res = pyos_shell.app_cmd("oid ferrari")
    assert not res.stderr
    assert str(yellow.obj_id) in res.stdout


# --- pyOS shell piping tests ---


class Echo(cmd2.CommandSet):
    """Simple `echo` command used to exercise shell pipes."""

    parser = argparse.ArgumentParser()
    parser.add_argument("-t", dest="tag", type=str, default="anon", help="the tag")
    parser.add_argument("msg", default=None, type=str, nargs="*")

    @cmd2.with_argparser(parser)
    def do_echo(self, args):
        if not args.msg:
            # No positional message: read it from stdin (i.e. from the pipe)
            args.msg = [line.rstrip() for line in sys.stdin.readlines()]
            if not args.msg:
                args.msg.append("")

        print(f"{args.tag}: {args.msg[0]}")


def test_shell_piping(pyos_shell):
    """Commands chained with | feed each other's stdout to stdin."""
    # NOTE(review): `unregister_command_set` looks odd as a way of making `echo`
    # available — confirm this relies on cmd2's auto-registration of CommandSets.
    pyos_shell.unregister_command_set(Echo())
    res = pyos_shell.app_cmd("echo -t 1 hello_from_1 | echo -t 2 | echo -t 3")
    assert not res.stderr
    assert res.stdout == "3: 2: 1: hello_from_1\n"


def test_host_shell_piping(pyos_shell, monkeypatch):
    """Host (!) shell commands can sit at either end of a pipe."""
    # Mock standard input using a file, as pytest effectively disables it
    with tempfile.TemporaryFile() as temp_buffer:
        monkeypatch.setattr("sys.stdin", temp_buffer)

        pyos_shell.do_set("debug true")
        pyos_shell.unregister_command_set(Echo())
        res = pyos_shell.app_cmd("!echo 'shell: hello_from_shell' | echo -t 2 | echo -t 3")
        assert not res.stderr
        assert res.stdout == "3: 2: shell: hello_from_shell\n"

        # Now try a host shell command in the middle of the pipe
        res = pyos_shell.app_cmd("echo -t 1 hello_from_1 | !cat | echo -t 3")
        assert not res.stderr
        assert res.stdout == "3: 1: hello_from_1\n"
# pylint: disable=no-value-for-parameter


def test_rm_basic():
    """Delete by the various identifier"""
    car = Car()
    psh.save(car)
    assert len(psh.ls()) == 1
    psh.rm(car)  # By live object
    assert len(psh.ls()) == 0

    car_id = Car().save()
    assert len(psh.ls()) == 1
    psh.rm(car_id)  # By obj id
    assert len(psh.ls()) == 0

    car_id = Car().save()
    assert len(psh.ls()) == 1
    psh.rm(str(car_id))  # By obj id string
    assert len(psh.ls()) == 0


def test_rm_multiple(historian: mincepy.Historian):  # pylint: disable=unused-argument
    """Several objects can be removed in a single rm call."""
    car1 = Car()
    car2 = Car()
    car3 = Car()

    psh.save(car1, car2, car3)
    assert len(psh.ls()) == 3
    psh.rm(car1, car2)
    assert len(psh.ls()) == 1

    assert not car1.is_saved()
    assert not car2.is_saved()
    assert car3.is_saved()


def test_rm_directory():
    """Removing a directory requires the -r flag."""
    pyos.os.makedirs("/cars/")

    car1 = Car()
    psh.save(car1)
    psh.cd("/cars/")
    car2 = Car()
    car3 = Car()
    psh.save(car2, car3)

    assert len(psh.ls("/cars/")) == 2

    # Without -r the directory and its contents survive
    psh.rm("/cars/")
    assert len(psh.ls("/cars/")) == 2
    assert car1.is_saved()
    assert car2.is_saved()
    assert car3.is_saved()

    # With -r the contents go, but objects outside the directory remain
    psh.rm(-psh.r, "/cars/")
    assert len(psh.ls("/cars/")) == 0
    assert car1.is_saved()
    assert not car2.is_saved()
    assert not car3.is_saved()


def test_rm_objects_with_references():
    """Test deleting objects in a directory that reference each other"""
    pyos.os.makedirs("/cars/garage/")

    num_cars = 10
    psh.cd("/cars/garage/")
    for _ in range(num_cars):
        Car().save()

    psh.cd("/cars/")
    cars = mincepy.RefList()
    cars.extend(psh.load(psh.ls("garage/")))

    psh.save(cars)
    assert len(psh.ls()) == 2  # The RefList plus the 'garage' folder

    psh.cd("/")
    # Recursively delete the folder
    psh.rm - psh.r("/cars/")  # pylint: disable=expression-not-assigned

    assert len(psh.ls("/cars/")) == 0
def ensure_at_path(*objs, path, historian):
    """Assert that every id in ``objs`` is found at ``path`` in ``historian``."""
    at_destination = fs.find(path, historian=historian)
    missing = tuple(obj for obj in objs if obj not in at_destination)
    assert not missing


def get_uri_with_objsys_path(archive_uri: str, path: str):
    """Append an object-system path to an archive URI."""
    relative = path[1:] if path.startswith(os.sep) else path
    return str(yarl.URL(archive_uri) / relative)


def test_rsync_basic(historian, test_utils):
    """rsync pushes local objects to a remote and re-syncs only mutations."""
    car = mince_testing.Car()
    person = mince_testing.Person("martin", 35, car=car)
    historian.save(car, person)

    with test_utils.temporary_historian("test-rsync") as (uri, remote):
        dest_path = "/home"
        dest = get_uri_with_objsys_path(uri, dest_path)
        result = psh.rsync("./", dest)
        assert car.obj_id in result
        assert person.obj_id in result

        ensure_at_path(car.obj_id, person.obj_id, path=dest_path, historian=remote)

        # A mutation should be picked up by the next sync — and nothing else
        person.age = 36
        person.save()
        result = psh.rsync("./", dest)
        assert len(result) == 1
        assert person.obj_id in result


def test_rsync_history(historian, test_utils):
    """Test that the correct behaviour is implemented for syncing object histories."""
    car = mince_testing.Car(colour="red")
    car_id = historian.save(car)
    car.colour = "blue"
    car.save()

    with test_utils.temporary_historian("test-rsync") as (uri, remote):
        dest_path = "/home"
        dest = get_uri_with_objsys_path(uri, dest_path)
        result = psh.rsync("./", dest)
        assert car.obj_id in result
        ensure_at_path(car.obj_id, path=dest_path, historian=remote)

        # Check that, by default, history is not transferred
        snapshots = list(remote.snapshots.records.find(obj_id=car_id))
        assert len(snapshots) == 1
        assert snapshots[0].version == 1  # Only the blue car is expected


def test_shell_rsync(historian, test_utils, pyos_shell):
    """The shell `rsync` command mirrors the API behaviour."""
    car = mince_testing.Car()
    person = mince_testing.Person("martin", 35, car=car)
    car_id, person_id = historian.save(car, person)

    with test_utils.temporary_historian("test-rsync") as (uri, remote):
        dest_path = "/home"
        dest = get_uri_with_objsys_path(uri, dest_path)
        res = pyos_shell.app_cmd(f"rsync ./ {dest}")
        assert not res.stderr
        assert remote.objects.find(obj_id=[car_id, person_id]).count() == 2
        ensure_at_path(car_id, person_id, path=dest_path, historian=remote)

        # A mutation should be synced on the next run
        person.age = 36
        person.save()
        res = pyos_shell.app_cmd(f"rsync ./ {dest}")
        assert not res.stderr
        assert remote.objects.records.find(obj_id=person_id, version=1).count() == 1


def test_rsync_meta(historian, test_utils):
    """Metadata is transferred according to the meta policy (overwrite/update)."""
    local = historian
    car = mince_testing.Car()
    person = mince_testing.Person("martin", 35, car=car)
    car_id, person_id = local.save(car, person)

    car_meta = {"ref": "VD123"}
    person_meta = {"birthplace": "toronto"}
    car.update_meta(car_meta)
    person.update_meta(person_meta)

    with test_utils.temporary_historian("test-rsync") as (uri, remote):
        dest_path = "/home/"
        dest = get_uri_with_objsys_path(uri, dest_path)
        result = psh.rsync("./", dest, meta="overwrite")
        assert car_id in result
        assert person_id in result

        # The metadata should have come across
        assert check_meta_contains(car_meta, remote.meta.get(car.obj_id))
        assert check_meta_contains(person_meta, remote.meta.get(person.obj_id))

        # 'overwrite' replaces the remote metadata wholesale
        car_meta = {"age": 12}
        car.set_meta(car_meta)
        psh.rsync("./", dest, meta="overwrite")
        assert check_meta_contains(car_meta, remote.meta.get(car.obj_id))

        # 'update' merges with what is already there
        combined = car_meta.copy()
        car_meta = {"sold": True}
        combined.update(car_meta)
        car.set_meta(car_meta)
        psh.rsync("./", dest, meta="update")
        assert check_meta_contains(combined, remote.meta.get(car.obj_id))


def check_meta_contains(template: dict, meta: dict):
    """Return True if every key/value pair of ``template`` appears in ``meta``."""
    for key, expected in template.items():
        if key not in meta or meta[key] != expected:
            return False
    return True
def test_save():
    """save() returns the id of the newly saved object."""
    car = Car()
    obj_id = psh.save(car)
    assert obj_id == car.obj_id


def test_save_with_name():
    """Saving with a name creates a correspondingly named entry."""
    car = Car()
    obj_id = psh.save(car, "my_car")
    # Fixed: compare ids by value (==) rather than identity (is). Identity only
    # holds if save() returns the very same ObjectId instance, which is an
    # implementation detail; test_save above already compares with ==.
    assert obj_id == car.obj_id

    results = psh.ls("my_car")
    assert len(results) == 1
    assert results[0].name == "my_car"


def test_save_to_dir():
    """Objects can be saved directly into a directory; saving onto an existing
    file name promotes it to a directory."""
    pyos.os.makedirs("test/")

    for _ in range(10):
        psh.save(Car(), "test/")

    results = psh.ls("test/")
    assert len(results) == 10

    # Now check that save will promote a file to a directory
    psh.save(Car(), "test")
    assert len(psh.ls("test/")) == 11


def test_save_same_name():
    """Saving over an existing name raises unless the force flag is given."""
    car = Car()
    car_id = psh.save(car, "my_car")
    car2 = Car()
    with pytest.raises(pyos.exceptions.FileExistsError):
        # For now this raises but this may change in the future
        psh.save(car2, "my_car")

    # Now test the force flag
    car2_id = psh.save(-psh.f, car2, "my_car")
    assert car_id != car2_id


def test_resave_doesnt_move():
    """Test that saving an object whilst in a new path doesn't automatically move it"""
    pyos.os.makedirs("sub/")

    car = Car()
    car.make = "ferrari"
    home = pyos.Path().resolve()
    car_id = car.save()
    car_loc = home / str(car_id)
    assert psh.locate(car) == car_loc

    pyos.os.chdir("sub/")
    sub = pyos.Path().resolve()
    assert home != sub

    car.make = "fiat"
    psh.save(car)
    assert psh.locate(car) == car_loc
# pylint: disable=no-value-for-parameter


def test_tree_basic():
    """A single saved object yields a single tree entry."""
    Car().save()
    results = psh.tree()
    assert len(results) == 1


def test_tree_depth():
    """The -L flag limits the height of the reported tree."""
    pyos.os.makedirs("sub/sub/")

    Car().save()
    psh.save(Car(), "sub/")
    psh.save(Car(), "sub/sub/")
    psh.save(Car(), "sub/sub/sub")

    # Unlimited: root plus three nested levels
    assert psh.tree().height == 4
    for level in range(4):
        assert (psh.tree - psh.L(level)()).height == level + 1


def test_tree_print():
    """The rendered tree contains the expected box-drawing lines."""
    # NOTE(review): internal spacing reconstructed to the conventional 4-column
    # tree indent — confirm it matches the actual tree renderer output.
    expected_result = """
├── a
│   ├── b
│   │   └── b_car
│   └── a_car
└── some_car
"""

    pyos.os.makedirs("a/b/")
    psh.save(Car(), "some_car")
    psh.save(Car(), "a/a_car")
    psh.save(Car(), "a/b/b_car")

    rendered = str(psh.tree())

    print(rendered)
    for line in expected_result.split("\n"):
        if line:
            assert line in rendered


# --- pyos.psh_lib.opts tests ---


def my_fn():
    """This is my docstring"""


def my_other_fn():
    """My other docstring"""


def test_command_docstring():
    """Command.__call__ inherits the wrapped function's docstring."""
    command = opts.Command(my_fn)
    assert inspect.getdoc(command.__call__) == "This is my docstring"

    other_command = opts.Command(my_other_fn)
    assert inspect.getdoc(other_command.__call__) == "My other docstring"


def test_command_name():
    """Command.name reflects the wrapped function's name."""
    command = opts.Command(my_fn)
    assert command.name == "my_fn"
def test_caching_results():
    """CachingResults requires an iterator and supports indexing and slicing."""
    with pytest.raises(TypeError):
        pyos.results.CachingResults(None)

    # Let's check with some proper data this time
    data = list(range(100))

    # Indexed access, including past the end
    res = pyos.results.CachingResults(iter(data))
    assert res[0] == 0
    assert res[10] == 10
    with pytest.raises(IndexError):
        _ = res[100]

    # Slicing
    res = pyos.results.CachingResults(iter(data))
    assert res[2:5] == [2, 3, 4]

    res = pyos.results.CachingResults(iter(data))
    assert res[:] == data


# --- shared test support helpers ---


def create_empty_file(filename: os.PathSpec):
    """Create and save an empty database file object at ``filename``."""
    historian = db.get_historian()
    file = historian.create_file(os.path.basename(filename))
    db.save_one(file, path=filename)


change_cwd = pathlib.working_path  # pylint: disable=invalid-name
def norm(tempdir, *parts) -> str:
    """Return the normalised path of *parts* joined onto *tempdir*."""
    return os.path.normpath(os.path.join(tempdir, *parts))


def joins(tempdir, *tuples) -> List[str]:
    """Join every tuple of path parts onto *tempdir*, returning a list."""
    result = []
    for segments in tuples:
        result.append(os.path.join(tempdir, *segments))
    return result


def mktemp(tempdir, *parts):
    """Create an empty file at tempdir/parts, creating parent dirs first."""
    target = norm(tempdir, *parts)
    parent, _name = os.path.split(target)
    if not os.path.exists(parent):
        os.makedirs(parent)
    support.create_empty_file(target)


change_cwd = support.change_cwd


@pytest.fixture
def test_dir():
    """Create the standard glob test tree; tear it down after the test."""
    tempdir = "globtest_dir"
    for entry in (
        ("a", "D"),
        ("aab", "F"),
        (".aa", "G"),
        (".bb", "H"),
        ("aaa", "zzzF"),
        ("ZZZ",),
        ("EF",),
        ("a", "bcd", "EF"),
        ("a", "bcd", "efg", "ha"),
    ):
        mktemp(tempdir, *entry)

    yield tempdir
    psh.rm - psh.r(tempdir)  # pylint: disable=expression-not-assigned


def check_glob(tempdir, *parts, **kwargs):
    """Run glob.glob on the pattern under *tempdir*; verify iglob agrees."""
    pattern = parts[0] if len(parts) == 1 else os.path.join(*parts)
    full_pattern = os.path.join(tempdir, pattern)
    found = glob.glob(full_pattern, **kwargs)
    assert len(list(glob.iglob(full_pattern, **kwargs))) == len(found)
    return found


def assertSequencesEqual_noorder(l1, l2):
    """Assert both sequences contain exactly the same items, in any order."""
    first = list(l1)
    second = list(l2)
    assert set(first) == set(second)
    assert sorted(first) == sorted(second)


def test_glob_literal(test_dir):
    eq = assertSequencesEqual_noorder
    do_check = functools.partial(check_glob, test_dir)
    do_norm = functools.partial(norm, test_dir)

    eq(do_check("a"), [do_norm("a")])
    eq(do_check("a", "D"), [do_norm("a", "D")])
    eq(do_check("aab"), [do_norm("aab")])
    eq(do_check("zymurgy"), [])

    # Both entry points must hand back plain strings.
    for found in (check_glob(test_dir, "*"), glob.glob(os.path.join(test_dir, "*"))):
        assert {type(item) for item in found} == {str}


def test_glob_one_directory(test_dir):
    eq = assertSequencesEqual_noorder
    do_norm = functools.partial(norm, test_dir)
    do_check = functools.partial(check_glob, test_dir)

    for pattern, names in (
        ("a*", ["a", "aab", "aaa"]),
        ("*a", ["a", "aaa"]),
        (".*", [".aa", ".bb"]),
        ("?aa", ["aaa"]),
        ("aa?", ["aaa", "aab"]),
        ("aa[ab]", ["aaa", "aab"]),
        ("*q", []),
    ):
        eq(do_check(pattern), map(do_norm, names))


def test_glob_nested_directory(test_dir):
    eq = assertSequencesEqual_noorder
    do_norm = functools.partial(norm, test_dir)
    do_check = functools.partial(check_glob, test_dir)

    eq(do_check("a", "bcd", "E*"), [do_norm("a", "bcd", "EF")])
    eq(do_check("a", "bcd", "*g"), [do_norm("a", "bcd", "efg")])


def test_glob_directory_names(test_dir):
    eq = assertSequencesEqual_noorder
    do_norm = functools.partial(norm, test_dir)
    do_check = functools.partial(check_glob, test_dir)

    eq(do_check("*", "D"), [do_norm("a", "D")])
    eq(do_check("*", "*a"), [])
    eq(do_check("a", "*", "*", "*a"), [do_norm("a", "bcd", "efg", "ha")])
    eq(do_check("?a?", "*F"), [do_norm("aaa", "zzzF"), do_norm("aab", "F")])
def check_escape(arg, expected):
    """Check that glob.escape maps *arg* to *expected*."""
    assert glob.escape(arg) == expected


def test_escape():
    """glob.escape must bracket-escape the special characters *, ? and [."""
    for raw, escaped in (
        ("abc", "abc"),
        ("[", "[[]"),
        ("?", "[?]"),
        ("*", "[*]"),
        ("[[_/*?*/_]]", "[[][[]_/[*][?][*]/_]]"),
        ("/[[_/*?*/_]]/", "/[[][[]_/[*][?][*]/_]]/"),
    ):
        check_escape(raw, escaped)


def rglob(tempdir, *parts, **kwargs):
    """check_glob with recursive globbing switched on."""
    return check_glob(tempdir, *parts, recursive=True, **kwargs)
"**", "*F"), joins(test_dir, *expect)) 201 | eq(rglob(test_dir, "**", "*F", ""), []) 202 | eq(rglob(test_dir, "**", "bcd", "*"), joins(test_dir, ("a", "bcd", "EF"), ("a", "bcd", "efg"))) 203 | eq(rglob(test_dir, "a", "**", "bcd"), joins(test_dir, ("a", "bcd"))) 204 | 205 | with change_cwd(test_dir): 206 | join = os.path.join 207 | eq(glob.glob("**", recursive=True), [join(*i) for i in full]) 208 | eq(glob.glob(join("**", ""), recursive=True), [join(*i) for i in dirs]) 209 | eq(glob.glob(join("**", "*"), recursive=True), [join(*i) for i in full]) 210 | eq( 211 | glob.glob(join(os.curdir, "**"), recursive=True), 212 | [join(os.curdir, "")] + [join(os.curdir, *i) for i in full], 213 | ) 214 | eq( 215 | glob.glob(join(os.curdir, "**", ""), recursive=True), 216 | [join(os.curdir, "")] + [join(os.curdir, *i) for i in dirs], 217 | ) 218 | eq( 219 | glob.glob(join(os.curdir, "**", "*"), recursive=True), 220 | [join(os.curdir, *i) for i in full], 221 | ) 222 | eq(glob.glob(join("**", "zz*F"), recursive=True), [join("aaa", "zzzF")]) 223 | eq(glob.glob("**zz*F", recursive=True), []) 224 | expect = [join("a", "bcd", "EF"), "EF"] 225 | eq(glob.glob(join("**", "EF"), recursive=True), expect) 226 | -------------------------------------------------------------------------------- /test/utils.py: -------------------------------------------------------------------------------- 1 | import contextlib 2 | import os 3 | import random 4 | import string 5 | 6 | import mincepy.testing as mince_testing 7 | 8 | import pyos 9 | 10 | ENV_ARCHIVE_BASE_URI = "MINCEPY_TEST_BASE_URI" 11 | DEFAULT_ARCHIVE_BASE_URI = "mongodb://localhost" 12 | ENV_ARCHIVE_URI = "PYOS_TEST_URI" 13 | DEFAULT_ARCHIVE_URI = "mongodb://localhost/pyos-tests" 14 | 15 | 16 | def get_base_uri() -> str: 17 | return os.environ.get(ENV_ARCHIVE_BASE_URI, DEFAULT_ARCHIVE_BASE_URI) 18 | 19 | 20 | def create_archive_uri(base_uri="", db_name=""): 21 | """Get an archive URI based on the current archive base URI plus the passed 
database name. 22 | 23 | If the database name is missing a random one will be used""" 24 | if not db_name: 25 | letters = string.ascii_lowercase 26 | db_name = "mincepy-" + "".join(random.choice(letters) for _ in range(5)) 27 | base_uri = base_uri or get_base_uri() 28 | return base_uri + "/" + db_name 29 | 30 | 31 | @contextlib.contextmanager 32 | def temporary_historian(db_name: str = ""): 33 | """Create a temporary database using the base archive URI and the given db name or a random one. Yields a tuple 34 | consisting of the archive uri and the historian""" 35 | archive_uri = mince_testing.create_archive_uri(get_base_uri(), db_name) 36 | with mince_testing.temporary_historian(archive_uri) as historian: 37 | pyos.db.init(historian, use_globally=False) 38 | yield archive_uri, historian 39 | --------------------------------------------------------------------------------