├── .github ├── FUNDING.yml ├── PULL_REQUEST_TEMPLATE.md └── workflows │ ├── package.yml │ └── test.yml ├── .gitignore ├── CHANGELOG.md ├── CONTRIBUTING.md ├── CONTRIBUTORS.md ├── LICENSE ├── MANIFEST.in ├── Makefile ├── README.md ├── appveyor.yml ├── docs ├── Makefile ├── editdocs.sh ├── make.bat ├── requirements.txt ├── source │ ├── builtin.rst │ ├── concepts.rst │ ├── conf.py │ ├── contributing.md │ ├── extension.rst │ ├── external.rst │ ├── globbing.rst │ ├── guide.rst │ ├── implementers.rst │ ├── index.rst │ ├── info.rst │ ├── interface.rst │ ├── introduction.rst │ ├── openers.rst │ ├── reference.rst │ ├── reference │ │ ├── appfs.rst │ │ ├── base.rst │ │ ├── compress.rst │ │ ├── copy.rst │ │ ├── enums.rst │ │ ├── errors.rst │ │ ├── filesize.rst │ │ ├── ftpfs.rst │ │ ├── glob.rst │ │ ├── info_objects.rst │ │ ├── memoryfs.rst │ │ ├── mirror.rst │ │ ├── mode.rst │ │ ├── mountfs.rst │ │ ├── move.rst │ │ ├── multifs.rst │ │ ├── opener.rst │ │ ├── osfs.rst │ │ ├── path.rst │ │ ├── permissions.rst │ │ ├── subfs.rst │ │ ├── tarfs.rst │ │ ├── tempfs.rst │ │ ├── tools.rst │ │ ├── tree.rst │ │ ├── walk.rst │ │ ├── wildcard.rst │ │ ├── wrap.rst │ │ ├── wrapfs.rst │ │ └── zipfs.rst │ └── walking.rst └── tree.html ├── examples ├── README.txt ├── count_py.py ├── find_dups.py ├── rm_pyc.py └── upload.py ├── fs ├── __init__.py ├── _bulk.py ├── _fscompat.py ├── _ftp_parse.py ├── _pathcompat.py ├── _repr.py ├── _typing.py ├── _tzcompat.py ├── _url_tools.py ├── _version.py ├── appfs.py ├── base.py ├── compress.py ├── constants.py ├── copy.py ├── enums.py ├── error_tools.py ├── errors.py ├── filesize.py ├── ftpfs.py ├── glob.py ├── info.py ├── iotools.py ├── lrucache.py ├── memoryfs.py ├── mirror.py ├── mode.py ├── mountfs.py ├── move.py ├── multifs.py ├── opener │ ├── __init__.py │ ├── appfs.py │ ├── base.py │ ├── errors.py │ ├── ftpfs.py │ ├── memoryfs.py │ ├── osfs.py │ ├── parse.py │ ├── py.typed │ ├── registry.py │ ├── tarfs.py │ ├── tempfs.py │ └── zipfs.py ├── osfs.py ├── path.py ├── permissions.py ├── py.typed ├── subfs.py ├── tarfs.py ├── tempfs.py ├── test.py ├── time.py ├── tools.py ├── tree.py ├── walk.py ├── wildcard.py ├── wrap.py ├── wrapfs.py └── zipfs.py ├── requirements-readthedocs.txt ├── setup.cfg ├── setup.py └── tests ├── __init__.py ├── mark.py ├── requirements.txt ├── test_appfs.py ├── test_archives.py ├── test_base.py ├── test_bulk.py ├── test_copy.py ├── test_doctest.py ├── test_encoding.py ├── test_enums.py ├── test_error_tools.py ├── test_errors.py ├── test_filesize.py ├── test_fscompat.py ├── test_ftp_parse.py ├── test_ftpfs.py ├── test_glob.py ├── test_imports.py ├── test_info.py ├── test_iotools.py ├── test_lrucache.py ├── test_memoryfs.py ├── test_mirror.py ├── test_mode.py ├── test_mountfs.py ├── test_move.py ├── test_multifs.py ├── test_new_name.py ├── test_opener.py ├── test_osfs.py ├── test_path.py ├── test_permissions.py ├── test_subfs.py ├── test_tarfs.py ├── test_tempfs.py ├── test_time.py ├── test_tools.py ├── test_tree.py ├── test_url_tools.py ├── test_walk.py ├── test_wildcard.py ├── test_wrap.py ├── test_wrapfs.py └── test_zipfs.py /.github/FUNDING.yml: -------------------------------------------------------------------------------- 1 | # These are supported funding model platforms 2 | 3 | github: willmcgugan 4 | ko_fi: willmcgugan 5 | tidelift: "pypi/rich" 6 | -------------------------------------------------------------------------------- /.github/PULL_REQUEST_TEMPLATE.md: -------------------------------------------------------------------------------- 1 | ## 
Type of changes 2 | 3 | 4 | 5 | - Bug fix 6 | - New feature 7 | - Documentation / docstrings 8 | - Tests 9 | - Other 10 | 11 | ## Checklist 12 | 13 | - [ ] I've run the latest [black](https://github.com/ambv/black) with default args on new code. 14 | - [ ] I've updated CHANGELOG.md and CONTRIBUTORS.md where appropriate. 15 | - [ ] I've added tests for new code. 16 | - [ ] I accept that @PyFilesystem/maintainers may be pedantic in the code review. 17 | 18 | ## Description 19 | 20 | Please describe your changes here. If this fixes a bug, please link to the issue, if possible. 21 | -------------------------------------------------------------------------------- /.github/workflows/package.yml: -------------------------------------------------------------------------------- 1 | name: Package 2 | 3 | on: 4 | push: 5 | tags: 6 | - 'v2.*' 7 | 8 | jobs: 9 | 10 | build-wheel: 11 | runs-on: ubuntu-latest 12 | name: Build wheel distribution 13 | steps: 14 | - name: Checkout code 15 | uses: actions/checkout@v2 16 | with: 17 | submodules: true 18 | - name: Setup Python ${{ matrix.python-version }} 19 | uses: actions/setup-python@v2 20 | with: 21 | python-version: ${{ matrix.python-version }} 22 | - name: Update build dependencies 23 | run: python -m pip install -U pip wheel setuptools 24 | - name: Build wheel distribution 25 | run: python setup.py bdist_wheel 26 | - name: Store built wheel 27 | uses: actions/upload-artifact@v2 28 | with: 29 | name: dist 30 | path: dist/* 31 | 32 | build-sdist: 33 | runs-on: ubuntu-latest 34 | name: Build source distribution 35 | steps: 36 | - name: Checkout code 37 | uses: actions/checkout@v2 38 | with: 39 | submodules: true 40 | - name: Set up Python 3.9 41 | uses: actions/setup-python@v2 42 | with: 43 | python-version: 3.9 44 | - name: Update build dependencies 45 | run: python -m pip install -U pip wheel setuptools 46 | - name: Build source distribution 47 | run: python setup.py sdist 48 | - name: Store source distribution 49 | uses: actions/upload-artifact@v2 50 | with: 51 | name: dist 52 | path: dist/* 53 | 54 | test-sdist: 55 | runs-on: ubuntu-latest 56 | name: Test source distribution 57 | needs: 58 | - build-sdist 59 | steps: 60 | - name: Checkout code 61 | uses: actions/checkout@v2 62 | with: 63 | submodules: true 64 | - name: Setup Python 3.9 65 | uses: actions/setup-python@v2 66 | with: 67 | python-version: 3.9 68 | - name: Download source distribution 69 | uses: actions/download-artifact@v2 70 | with: 71 | name: dist 72 | path: dist 73 | - name: Install source distribution 74 | run: python -m pip install dist/fs-*.tar.gz 75 | - name: Remove source code 76 | run: rm -rvd fs 77 | - name: Install test requirements 78 | run: python -m pip install -r tests/requirements.txt 79 | - name: Test installed package 80 | run: python -m unittest discover -vv 81 | 82 | test-wheel: 83 | runs-on: ubuntu-latest 84 | name: Test wheel distribution 85 | needs: 86 | - build-wheel 87 | steps: 88 | - name: Checkout code 89 | uses: actions/checkout@v2 90 | with: 91 | submodules: true 92 | - name: Setup Python 3.9 93 | uses: actions/setup-python@v2 94 | with: 95 | python-version: 3.9 96 | - name: Download wheel distribution 97 | uses: actions/download-artifact@v2 98 | with: 99 | name: dist 100 | path: dist 101 | - name: Install wheel distribution 102 | run: python -m pip install dist/fs-*.whl 103 | - name: Remove source code 104 | run: rm -rvd fs 105 | - name: Install test requirements 106 | run: python -m pip install -r tests/requirements.txt 107 | - name: Test installed package 108 
| run: python -m unittest discover -vv 109 | 110 | upload: 111 | environment: PyPI 112 | runs-on: ubuntu-latest 113 | name: Upload 114 | needs: 115 | - build-sdist 116 | - build-wheel 117 | - test-sdist 118 | - test-wheel 119 | steps: 120 | - name: Download built distributions 121 | uses: actions/download-artifact@v2 122 | with: 123 | name: dist 124 | path: dist 125 | - name: Publish distributions to PyPI 126 | if: startsWith(github.ref, 'refs/tags/v') 127 | uses: pypa/gh-action-pypi-publish@master 128 | with: 129 | user: __token__ 130 | password: ${{ secrets.PYPI_API_TOKEN }} 131 | skip_existing: false 132 | 133 | release: 134 | environment: GitHub Releases 135 | runs-on: ubuntu-latest 136 | if: "startsWith(github.ref, 'refs/tags/v')" 137 | name: Release 138 | needs: upload 139 | steps: 140 | - name: Checkout code 141 | uses: actions/checkout@v1 142 | - name: Release a Changelog 143 | uses: rasmus-saks/release-a-changelog-action@v1.0.1 144 | with: 145 | github-token: '${{ secrets.GITHUB_TOKEN }}' 146 | -------------------------------------------------------------------------------- /.github/workflows/test.yml: -------------------------------------------------------------------------------- 1 | name: Test 2 | 3 | on: 4 | - push 5 | - pull_request 6 | 7 | jobs: 8 | test: 9 | runs-on: ubuntu-latest 10 | timeout-minutes: 10 11 | strategy: 12 | fail-fast: false 13 | matrix: 14 | python-version: 15 | - 2.7 16 | - 3.5 17 | - 3.6 18 | - 3.7 19 | - 3.8 20 | - 3.9 21 | - '3.10' 22 | - pypy-2.7 23 | - pypy-3.6 24 | - pypy-3.7 25 | steps: 26 | - name: Checkout code 27 | uses: actions/checkout@v1 28 | - name: Setup Python ${{ matrix.python-version }} 29 | uses: actions/setup-python@v2 30 | with: 31 | python-version: ${{ matrix.python-version }} 32 | - name: Update pip 33 | run: python -m pip install -U pip wheel setuptools 34 | - name: Install tox 35 | run: python -m pip install tox tox-gh-actions 36 | - name: Test with tox 37 | run: python -m tox 38 | - name: Store partial coverage reports 39 | uses: actions/upload-artifact@v2 40 | with: 41 | name: coverage 42 | path: .coverage.* 43 | 44 | coveralls: 45 | needs: test 46 | runs-on: ubuntu-latest 47 | steps: 48 | - name: Checkout code 49 | uses: actions/checkout@v1 50 | - name: Setup Python 3.10 51 | uses: actions/setup-python@v2 52 | with: 53 | python-version: '3.10' 54 | - name: Install coverage package 55 | run: python -m pip install -U coverage 56 | - name: Download partial coverage reports 57 | uses: actions/download-artifact@v2 58 | with: 59 | name: coverage 60 | - name: Combine coverage 61 | run: python -m coverage combine 62 | - name: Report coverage 63 | run: python -m coverage report 64 | - name: Export coverage to XML 65 | run: python -m coverage xml 66 | - name: Upload coverage statistics to Coveralls 67 | uses: AndreMiras/coveralls-python-action@develop 68 | 69 | lint: 70 | runs-on: ubuntu-latest 71 | strategy: 72 | fail-fast: false 73 | matrix: 74 | linter: 75 | - typecheck 76 | - codestyle 77 | - docstyle 78 | - codeformat 79 | steps: 80 | - name: Checkout code 81 | uses: actions/checkout@v1 82 | - name: Setup Python '3.10' 83 | uses: actions/setup-python@v2 84 | with: 85 | python-version: '3.10' 86 | - name: Update pip 87 | run: python -m pip install -U pip wheel setuptools 88 | - name: Install tox 89 | run: python -m pip install tox tox-gh-actions 90 | - name: Run ${{ matrix.linter }} linter 91 | run: python -m tox -e ${{ matrix.linter }} 92 | -------------------------------------------------------------------------------- /.gitignore: 
-------------------------------------------------------------------------------- 1 | .DS_Store 2 | 3 | # Byte-compiled / optimized / DLL files 4 | __pycache__/ 5 | *.py[cod] 6 | *$py.class 7 | 8 | # C extensions 9 | *.so 10 | 11 | # Distribution / packaging 12 | .Python 13 | env/ 14 | build/ 15 | develop-eggs/ 16 | dist/ 17 | downloads/ 18 | eggs/ 19 | .eggs/ 20 | lib/ 21 | lib64/ 22 | parts/ 23 | sdist/ 24 | var/ 25 | *.egg-info/ 26 | .installed.cfg 27 | *.egg 28 | 29 | # PyInstaller 30 | # Usually these files are written by a python script from a template 31 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 32 | *.manifest 33 | *.spec 34 | 35 | # Installer logs 36 | pip-log.txt 37 | pip-delete-this-directory.txt 38 | 39 | # Unit test / coverage reports 40 | htmlcov/ 41 | .tox/ 42 | .coverage 43 | .coverage.* 44 | .cache 45 | nosetests.xml 46 | coverage.xml 47 | *,cover 48 | .hypothesis/ 49 | 50 | # Translations 51 | *.mo 52 | *.pot 53 | 54 | # Django stuff: 55 | *.log 56 | local_settings.py 57 | 58 | # Flask stuff: 59 | instance/ 60 | .webassets-cache 61 | 62 | # Scrapy stuff: 63 | .scrapy 64 | 65 | # Sphinx documentation 66 | docs/_build/ 67 | 68 | # PyBuilder 69 | target/ 70 | 71 | # IPython Notebook 72 | .ipynb_checkpoints 73 | 74 | # pyenv 75 | .python-version 76 | 77 | # celery beat schedule file 78 | celerybeat-schedule 79 | 80 | # dotenv 81 | .env 82 | 83 | # virtualenv 84 | venv/ 85 | ENV/ 86 | 87 | # Spyder project settings 88 | .spyderproject 89 | 90 | # Rope project settings 91 | .ropeproject 92 | 93 | # PyCharm 94 | .idea/ 95 | 96 | # MyPy cache 97 | .mypy_cache 98 | 99 | .vscode 100 | 101 | -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # Contributing to PyFilesystem 2 | 3 | Pull Requests are very welcome for this project! 4 | 5 | For bug fixes or new features, please file an issue before submitting a pull 6 | request. If the change isn't trivial, it may be best to wait for feedback. 7 | For a quicker response, contact [Will McGugan](mailto:willmcgugan+pyfs@gmail.com) 8 | directly. 9 | 10 | 11 | ## `tox` 12 | 13 | Most of the guidelines that follow can be checked with a particular 14 | [`tox`](https://pypi.org/project/tox/) environment. Having it installed will 15 | help you develop and verify your code locally without having to wait for 16 | our Continuous Integration pipeline to finish. 17 | 18 | 19 | ## Tests 20 | 21 | New code should have unit tests. We strive to have near 100% coverage. 22 | Get in touch, if you need assistance with the tests. You shouldn't refrain 23 | from opening a Pull Request even if all the tests were not added yet, or if 24 | not all of them are passing yet. 25 | 26 | ### Dependencies 27 | 28 | The dependency for running the tests can be found in the `tests/requirements.txt` file. 29 | If you're using `tox`, you won't have to install them manually. 
Otherwise, 30 | they can be installed with `pip`: 31 | ```console 32 | $ pip install -r tests/requirements.txt 33 | ``` 34 | 35 | ### Running (with `tox`) 36 | 37 | Simply run in the repository folder to execute the tests for all available 38 | environments: 39 | ```console 40 | $ tox 41 | ``` 42 | 43 | Since this can take some time, you can use a single environment to run 44 | tests only once, for instance to run tests only with Python 3.9: 45 | ```console 46 | $ tox -e py39 47 | ``` 48 | 49 | ### Running (without `tox`) 50 | 51 | Tests are written using the standard [`unittest`](https://docs.python.org/3/library/unittest.html) 52 | framework. You should be able to run them using the standard library runner: 53 | ```console 54 | $ python -m unittest discover -vv 55 | ``` 56 | 57 | 58 | ## Coding Guidelines 59 | 60 | This project runs on Python2.7 and Python3.X. Python2.7 will be dropped at 61 | some point, but for now, please maintain compatibility. PyFilesystem2 uses 62 | the [`six`](https://pypi.org/project/six/) library to write version-agnostic 63 | Python code. 64 | 65 | ### Style 66 | 67 | The code (including the tests) should follow PEP8. You can check for the 68 | code style with: 69 | ```console 70 | $ tox -e codestyle 71 | ``` 72 | 73 | This will invoke [`flake8`](https://pypi.org/project/flake8/) with some common 74 | plugins such as [`flake8-comprehensions`](https://pypi.org/project/flake8-comprehensions/). 75 | 76 | ### Format 77 | 78 | Please format new code with [black](https://github.com/ambv/black), using the 79 | default settings. You can check whether the code is well-formatted with: 80 | ```console 81 | $ tox -e codeformat 82 | ``` 83 | 84 | ### Type annotations 85 | 86 | The code is typechecked with [`mypy`](https://pypi.org/project/mypy/), and 87 | type annotations written as comments, to stay compatible with Python2. Run 88 | the typechecking with: 89 | ```console 90 | $ tox -e typecheck 91 | ``` 92 | 93 | 94 | ## Documentation 95 | 96 | ### Dependencies 97 | 98 | The documentation is built with [Sphinx](https://pypi.org/project/Sphinx/), 99 | using the [ReadTheDocs](https://pypi.org/project/sphinx-rtd-theme/) theme. 100 | The dependencies are listed in `docs/requirements.txt` and can be installed with 101 | `pip`: 102 | ```console 103 | $ pip install -r docs/requirements.txt 104 | ``` 105 | 106 | ### Building 107 | 108 | Run the following command to build the HTML documentation: 109 | ```console 110 | $ python setup.py build_sphinx 111 | ``` 112 | 113 | The documentation index will be written to the `build/sphinx/html/` 114 | directory. 115 | 116 | ### Style 117 | 118 | The API reference is written in the Python source, using docstrings in 119 | [Google format](https://sphinxcontrib-napoleon.readthedocs.io/en/latest/example_google.html). 
120 | The documentation style can be checked with: 121 | ```console 122 | $ tox -e docstyle 123 | ``` 124 | -------------------------------------------------------------------------------- /CONTRIBUTORS.md: -------------------------------------------------------------------------------- 1 | # Contributors (sorted alphabetically) 2 | 3 | Many thanks to the following developers for contributing to this project: 4 | 5 | - [Adrian Garcia Badaracco](https://github.com/adriangb) 6 | - [Alex Povel](https://github.com/alexpovel) 7 | - [Andreas Tollkötter](https://github.com/atollk) 8 | - [Andrew Scheller](https://github.com/lurch) 9 | - [Andrey Serov](https://github.com/zmej-serow) 10 | - [Ben Lindsay](https://github.com/benlindsay) 11 | - [Bernhard M. Wiedemann](https://github.com/bmwiedemann) 12 | - [@chfw](https://github.com/chfw) 13 | - [Dafna Hirschfeld](https://github.com/kamomil) 14 | - [Diego Argueta](https://github.com/dargueta) 15 | - [Eelke van den Bos](https://github.com/eelkevdbos) 16 | - [Egor Namakonov](https://github.com/fresheed) 17 | - [Felix Yan](https://github.com/felixonmars) 18 | - [@FooBarQuaxx](https://github.com/FooBarQuaxx) 19 | - [Geoff Jukes](https://github.com/geoffjukes) 20 | - [George Macon](https://github.com/gmacon) 21 | - [Giampaolo Cimino](https://github.com/gpcimino) 22 | - [@Hoboneer](https://github.com/Hoboneer) 23 | - [Jen Hagg](https://github.com/jenhagg) 24 | - [Joseph Atkins-Turkish](https://github.com/Spacerat) 25 | - [Joshua Tauberer](https://github.com/JoshData) 26 | - [Justin Charlong](https://github.com/jcharlong) 27 | - [Louis Sautier](https://github.com/sbraz) 28 | - [Martin Durant](https://github.com/martindurant) 29 | - [Martin Larralde](https://github.com/althonos) 30 | - [Masaya Nakamura](https://github.com/mashabow) 31 | - [Matthew Gamble](https://github.com/djmattyg007) 32 | - [Morten Engelhardt Olsen](https://github.com/xoriath) 33 | - [@mrg0029](https://github.com/mrg0029) 34 | - [Nathan Goldbaum](https://github.com/ngoldbaum) 35 | - [Nick Henderson](https://github.com/nwh) 36 | - [Oliver Galvin](https://github.com/odgalvin) 37 | - [Philipp Wiesner](https://github.com/birnbaum) 38 | - [Philippe Ombredanne](https://github.com/pombredanne) 39 | - [Rehan Khwaja](https://github.com/rkhwaja) 40 | - [Silvan Spross](https://github.com/sspross) 41 | - [@sqwishy](https://github.com/sqwishy) 42 | - [Sven Schliesing](https://github.com/muffl0n) 43 | - [Thomas Feldmann](https://github.com/tfeldmann) 44 | - [Tim Gates](https://github.com/timgates42/) 45 | - [@tkossak](https://github.com/tkossak) 46 | - [Todd Levi](https://github.com/televi) 47 | - [Vilius Grigaliūnas](https://github.com/vilius-g) 48 | - [Will McGugan](https://github.com/willmcgugan) 49 | - [Zmej Serow](https://github.com/zmej-serow) 50 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2017-2021 The PyFilesystem2 contributors 4 | Copyright (c) 2016-2019 Will McGugan 5 | 6 | Permission is hereby granted, free of charge, to any person obtaining a copy 7 | of this software and associated documentation files (the "Software"), to deal 8 | in the Software without restriction, including without limitation the rights 9 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 10 | copies of the Software, and to permit persons to whom the Software is 11 | furnished to do so, subject to the following conditions: 12 | 13 | 
The above copyright notice and this permission notice shall be included in all 14 | copies or substantial portions of the Software. 15 | 16 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 17 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 18 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 19 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 20 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 21 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 22 | SOFTWARE. 23 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include CHANGELOG.md 2 | include CONTRIBUTING.md 3 | include CONTRIBUTORS.md 4 | include LICENSE 5 | graft tests 6 | graft docs 7 | global-exclude __pycache__ 8 | global-exclude *.py[co] 9 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | 2 | .PHONY: release 3 | release: cleandist 4 | python3 setup.py sdist bdist_wheel 5 | twine upload dist/*.whl dist/*.tar.gz 6 | 7 | .PHONY: cleandist 8 | cleandist: 9 | rm -f dist/*.whl dist/*.tar.gz 10 | 11 | .PHONY: cleandocs 12 | cleandocs: 13 | $(MAKE) -C docs clean 14 | 15 | .PHONY: clean 16 | clean: cleandist cleandocs 17 | 18 | .PHONY: test 19 | test: 20 | nosetests --with-coverage --cover-package=fs -a "!slow" tests 21 | 22 | .PHONY: slowtest 23 | slowtest: 24 | nosetests --with-coverage --cover-erase --cover-package=fs tests 25 | 26 | .PHONY: testall 27 | testall: 28 | tox 29 | 30 | .PHONY: docs 31 | docs: 32 | $(MAKE) -C docs html 33 | python -c "import os, webbrowser; webbrowser.open('file://' + os.path.abspath('./docs/build/html/index.html'))" 34 | 35 | .PHONY: typecheck 36 | typecheck: 37 | mypy -p fs --config setup.cfg 38 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # PyFilesystem2 2 | 3 | Python's Filesystem abstraction layer. 
4 | 5 | [![PyPI version](https://img.shields.io/pypi/v/fs)](https://pypi.org/project/fs/) 6 | [![PyPI](https://img.shields.io/pypi/pyversions/fs.svg)](https://pypi.org/project/fs/) 7 | [![Downloads](https://pepy.tech/badge/fs/month)](https://pepy.tech/project/fs/) 8 | [![Build Status](https://img.shields.io/github/workflow/status/PyFilesystem/pyfilesystem2/Test/master?logo=github&cacheSeconds=600)](https://github.com/PyFilesystem/pyfilesystem2/actions?query=branch%3Amaster) 9 | [![Windows Build Status](https://img.shields.io/appveyor/build/willmcgugan/pyfilesystem2/master?logo=appveyor&cacheSeconds=600)](https://ci.appveyor.com/project/willmcgugan/pyfilesystem2) 10 | [![Coverage Status](https://img.shields.io/coveralls/github/PyFilesystem/pyfilesystem2/master?cacheSeconds=600)](https://coveralls.io/github/PyFilesystem/pyfilesystem2) 11 | [![Codacy Badge](https://img.shields.io/codacy/grade/30ad6445427349218425d93886ade9ee/master?logo=codacy)](https://www.codacy.com/app/will-mcgugan/pyfilesystem2?utm_source=github.com&utm_medium=referral&utm_content=PyFilesystem/pyfilesystem2&utm_campaign=Badge_Grade) 12 | [![Docs](https://img.shields.io/readthedocs/pyfilesystem2?maxAge=3600)](http://pyfilesystem2.readthedocs.io/en/stable/?badge=stable) 13 | 14 | ## Documentation 15 | 16 | - ~~[Wiki](https://www.pyfilesystem.org)~~ (currently offline) 17 | - [API Documentation](https://pyfilesystem2.readthedocs.io/en/latest/) 18 | - [GitHub Repository](https://github.com/PyFilesystem/pyfilesystem2) 19 | - [Blog](https://www.willmcgugan.com/tag/fs/) 20 | 21 | ## Introduction 22 | 23 | Think of PyFilesystem's `FS` objects as the next logical step to 24 | Python's `file` objects. In the same way that file objects abstract a 25 | single file, FS objects abstract an entire filesystem. 26 | 27 | Let's look at a simple piece of code as an example. The following 28 | function uses the PyFilesystem API to count the number of non-blank 29 | lines of Python code in a directory. It works _recursively_, so it will 30 | find `.py` files in all sub-directories. 31 | 32 | ```python 33 | def count_python_loc(fs): 34 | """Count non-blank lines of Python code.""" 35 | count = 0 36 | for path in fs.walk.files(filter=['*.py']): 37 | with fs.open(path) as python_file: 38 | count += sum(1 for line in python_file if line.strip()) 39 | return count 40 | ``` 41 | 42 | We can call `count_python_loc` as follows: 43 | 44 | ```python 45 | from fs import open_fs 46 | projects_fs = open_fs('~/projects') 47 | print(count_python_loc(projects_fs)) 48 | ``` 49 | 50 | The line `project_fs = open_fs('~/projects')` opens an FS object that 51 | maps to the `projects` directory in your home folder. That object is 52 | used by `count_python_loc` when counting lines of code. 53 | 54 | To count the lines of Python code in a _zip file_, we can make the 55 | following change: 56 | 57 | ```python 58 | projects_fs = open_fs('zip://projects.zip') 59 | ``` 60 | 61 | Or to count the Python lines on an FTP server: 62 | 63 | ```python 64 | projects_fs = open_fs('ftp://ftp.example.org/projects') 65 | ``` 66 | 67 | No changes to `count_python_loc` are necessary, because PyFileystem 68 | provides a simple consistent interface to anything that resembles a 69 | collection of files and directories. Essentially, it allows you to write 70 | code that is independent of where and how the files are physically 71 | stored. 
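Because the function only sees the `FS` interface, it is also easy to exercise in tests. The following is a minimal sketch (the package name and module contents are invented for illustration) that runs `count_python_loc` against an in-memory filesystem, so nothing ever touches the disk:

```python
from fs.memoryfs import MemoryFS

with MemoryFS() as mem_fs:
    # Build a tiny fake project in memory.
    mem_fs.makedirs("pkg")
    mem_fs.writetext("pkg/module.py", "def hello():\n    return 'world'\n")
    print(count_python_loc(mem_fs))  # counts the 2 non-blank lines
```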
72 | 73 | Contrast that with a version that purely uses the standard library: 74 | 75 | ```python 76 | def count_py_loc(path): 77 | count = 0 78 | for root, dirs, files in os.walk(path): 79 | for name in files: 80 | if name.endswith('.py'): 81 | with open(os.path.join(root, name), 'rt') as python_file: 82 | count += sum(1 for line in python_file if line.strip()) 83 | return count 84 | ``` 85 | 86 | This version is similar to the PyFilesystem code above, but would only 87 | work with the OS filesystem. Any other filesystem would require an 88 | entirely different API, and you would likely have to re-implement the 89 | directory walking functionality of `os.walk`. 90 | 91 | ## Credits 92 | 93 | The following developers have contributed code and their time to this projects: 94 | 95 | - [Will McGugan](https://github.com/willmcgugan) 96 | - [Martin Larralde](https://github.com/althonos) 97 | - [Giampaolo Cimino](https://github.com/gpcimino) 98 | - [Geoff Jukes](https://github.com/geoffjukes) 99 | 100 | See [CONTRIBUTORS.md](https://github.com/PyFilesystem/pyfilesystem2/blob/master/CONTRIBUTORS.md) 101 | for a full list of contributors. 102 | 103 | PyFilesystem2 owes a massive debt of gratitude to the following 104 | developers who contributed code and ideas to the original version. 105 | 106 | - Ryan Kelly 107 | - Andrew Scheller 108 | - Ben Timby 109 | 110 | Apologies if I missed anyone, feel free to prompt me if your name is 111 | missing here. 112 | 113 | ## Support 114 | 115 | If commercial support is required, please contact [Will McGugan](mailto:willmcgugan@gmail.com). 116 | -------------------------------------------------------------------------------- /appveyor.yml: -------------------------------------------------------------------------------- 1 | environment: 2 | matrix: 3 | # For Python versions available on Appveyor, see 4 | # https://www.appveyor.com/docs/windows-images-software/#python 5 | # The list here is complete (excluding Python 2.6, which 6 | # isn't covered by this document) at the time of writing. 
7 | 8 | # - PYTHON: "C:\\Python27" 9 | # - PYTHON: "C:\\Python33" 10 | # - PYTHON: "C:\\Python34" 11 | # - PYTHON: "C:\\Python35" 12 | # - PYTHON: "C:\\Python27-x64" 13 | # - PYTHON: "C:\\Python33-x64" 14 | # DISTUTILS_USE_SDK: "1" 15 | # - PYTHON: "C:\\Python34-x64" 16 | # DISTUTILS_USE_SDK: "1" 17 | # - PYTHON: "C:\\Python35-x64" 18 | - PYTHON: "C:\\Python36-x64" 19 | - PYTHON: "C:\\Python37-x64" 20 | 21 | install: 22 | # We need wheel installed to build wheels 23 | - "%PYTHON%\\python.exe -m pip install -U pip wheel setuptools" 24 | - "%PYTHON%\\python.exe -m pip install pytest" 25 | - "%PYTHON%\\python.exe -m pip install -r tests/requirements.txt" 26 | - "%PYTHON%\\python.exe setup.py install" 27 | 28 | build: off 29 | 30 | test_script: 31 | - "%PYTHON%\\python.exe -m pytest" 32 | -------------------------------------------------------------------------------- /docs/editdocs.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | make html 3 | python -c "import os, webbrowser; webbrowser.open('file://' + os.path.abspath('./build/html/index.html'))" 4 | watchmedo shell-command ../ --patterns "*.rst;*.py" --recursive --command="rm -rf build;make html;" 5 | -------------------------------------------------------------------------------- /docs/requirements.txt: -------------------------------------------------------------------------------- 1 | # the bare requirements for building docs 2 | Sphinx ~=3.0 3 | sphinx-rtd-theme ~=0.5.1 4 | recommonmark ~=0.6 5 | -------------------------------------------------------------------------------- /docs/source/builtin.rst: -------------------------------------------------------------------------------- 1 | Builtin Filesystems 2 | =================== 3 | 4 | .. toctree:: 5 | :maxdepth: 3 6 | 7 | reference/appfs.rst 8 | reference/ftpfs.rst 9 | reference/memoryfs.rst 10 | reference/mountfs.rst 11 | reference/multifs.rst 12 | reference/osfs.rst 13 | reference/subfs.rst 14 | reference/tarfs.rst 15 | reference/tempfs.rst 16 | reference/zipfs.rst 17 | -------------------------------------------------------------------------------- /docs/source/concepts.rst: -------------------------------------------------------------------------------- 1 | .. _concepts: 2 | 3 | Concepts 4 | ======== 5 | 6 | The following describes some core concepts when working with 7 | PyFilesystem. If you are skimming this documentation, pay particular 8 | attention to the first section on paths. 9 | 10 | .. _paths: 11 | 12 | Paths 13 | ----- 14 | 15 | With the possible exception of the constructor, all paths in a 16 | filesystem are *PyFilesystem paths*, which have the following 17 | properties: 18 | 19 | * Paths are ``str`` type in Python3, and ``unicode`` in Python2 20 | * Path components are separated by a forward slash (``/``) 21 | * Paths beginning with a ``/`` are *absolute* 22 | * Paths not beginning with a forward slash are *relative* 23 | * A single dot (``.``) means 'current directory' 24 | * A double dot (``..``) means 'previous directory' 25 | 26 | Note that paths used by the FS interface will use this format, but the 27 | constructor may not. Notably the :class:`~fs.osfs.OSFS` constructor which 28 | requires an OS path -- the format of which is platform-dependent. 29 | 30 | .. note:: 31 | There are many helpful functions for working with paths in the 32 | :mod:`~fs.path` module. 
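For instance, here is a small sketch of a few of those helpers (the paths are arbitrary examples)::

    >>> from fs import path
    >>> path.join('foo', 'bar', 'baz.txt')
    'foo/bar/baz.txt'
    >>> path.basename('foo/bar/baz.txt')
    'baz.txt'
    >>> path.splitext('foo/bar.txt')
    ('foo/bar', '.txt')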
33 | 34 | PyFilesystem paths are platform-independent, and will be automatically 35 | converted to the format expected by your operating system -- so you 36 | won't need to make any modifications to your filesystem code to make it 37 | run on other platforms. 38 | 39 | System Paths 40 | ------------ 41 | 42 | Not all Python modules can use file-like objects, especially those which 43 | interface with C libraries. For these situations you will need to 44 | retrieve the *system path*. You can do this with the 45 | :meth:`~fs.base.FS.getsyspath` method which converts a valid path in the 46 | context of the FS object to an absolute path that would be understood by 47 | your OS. 48 | 49 | For example:: 50 | 51 | >>> from fs.osfs import OSFS 52 | >>> home_fs = OSFS('~/') 53 | >>> home_fs.getsyspath('test.txt') 54 | '/home/will/test.txt' 55 | 56 | Not all filesystems map to a system path (for example, files in a 57 | :class:`~fs.memoryfs.MemoryFS` will only ever exists in memory). 58 | 59 | If you call ``getsyspath`` on a filesystem which doesn't map to a system 60 | path, it will raise a :class:`~fs.errors.NoSysPath` exception. If you 61 | prefer a *look before you leap* approach, you can check if a resource 62 | has a system path by calling :meth:`~fs.base.FS.hassyspath` 63 | 64 | 65 | Sandboxing 66 | ---------- 67 | 68 | FS objects are not permitted to work with any files outside of their 69 | *root*. If you attempt to open a file or directory outside the 70 | filesystem instance (with a backref such as ``"../foo.txt"``), a 71 | :class:`~fs.errors.IllegalBackReference` exception will be thrown. This 72 | ensures that any code using a FS object won't be able to read or modify 73 | anything you didn't intend it to, thus limiting the scope of any bugs. 74 | 75 | Unlike your OS, there is no concept of a current working directory in 76 | PyFilesystem. If you want to work with a sub-directory of an FS object, 77 | you can use the :meth:`~fs.base.FS.opendir` method which returns another 78 | FS object representing the contents of that sub-directory. 79 | 80 | For example, consider the following directory structure. The directory 81 | ``foo`` contains two sub-directories; ``bar`` and ``baz``:: 82 | 83 | --foo 84 | |--bar 85 | | |--readme.txt 86 | | `--photo.jpg 87 | `--baz 88 | |--private.txt 89 | `--dontopen.jpg 90 | 91 | We can open the ``foo`` directory with the following code:: 92 | 93 | from fs.osfs import OSFS 94 | foo_fs = OSFS('foo') 95 | 96 | The ``foo_fs`` object can work with any of the contents of ``bar`` and 97 | ``baz``, which may not be desirable if we are passing ``foo_fs`` to a 98 | function that has the potential to delete files. Fortunately we can 99 | isolate a single sub-directory with the :meth:`~fs.base.FS.opendir` 100 | method:: 101 | 102 | bar_fs = foo_fs.opendir('bar') 103 | 104 | This creates a completely new FS object that represents everything in 105 | the ``foo/bar`` directory. The root directory of ``bar_fs`` has been re- 106 | position, so that from ``bar_fs``'s point of view, the readme.txt and 107 | photo.jpg files are in the root:: 108 | 109 | --bar 110 | |--readme.txt 111 | `--photo.jpg 112 | 113 | .. note:: 114 | This *sandboxing* only works if your code uses the filesystem 115 | interface exclusively. It won't prevent code using standard OS level 116 | file manipulation. 117 | 118 | 119 | Errors 120 | ------ 121 | 122 | PyFilesystem converts errors in to a common exception hierarchy. 
This 123 | ensures that error handling code can be written once, regardless of the 124 | filesystem being used. See :mod:`~fs.errors` for details. 125 | -------------------------------------------------------------------------------- /docs/source/contributing.md: -------------------------------------------------------------------------------- 1 | ../../CONTRIBUTING.md -------------------------------------------------------------------------------- /docs/source/extension.rst: -------------------------------------------------------------------------------- 1 | .. _extension: 2 | 3 | Creating an extension 4 | ===================== 5 | 6 | Once a filesystem has been implemented, it can be integrated with other 7 | applications and projects using PyFilesystem. 8 | 9 | 10 | Naming Convention 11 | ----------------- 12 | 13 | For visibility in PyPi, we recommend that your package be prefixed with 14 | ``fs-``. For instance if you have implemented an ``AwesomeFS`` 15 | PyFilesystem class, your packaged could be be named ``fs-awesome`` or 16 | ``fs-awesomefs``. 17 | 18 | 19 | Opener 20 | ------ 21 | 22 | In order for your filesystem to be opened with an :ref:`FS URL ` 23 | you should define an :class:`~fs.opener.base.Opener` class. 24 | 25 | Here's an example taken from an Amazon S3 Filesystem:: 26 | 27 | 28 | """Defines the S3FSOpener.""" 29 | 30 | __all__ = ['S3FSOpener'] 31 | 32 | from fs.opener import Opener 33 | from fs.opener.errors import OpenerError 34 | 35 | from ._s3fs import S3FS 36 | 37 | 38 | class S3FSOpener(Opener): 39 | protocols = ['s3'] 40 | 41 | def open_fs(self, fs_url, parse_result, writeable, create, cwd): 42 | bucket_name, _, dir_path = parse_result.resource.partition('/') 43 | if not bucket_name: 44 | raise OpenerError( 45 | "invalid bucket name in '{}'".format(fs_url) 46 | ) 47 | s3fs = S3FS( 48 | bucket_name, 49 | dir_path=dir_path or '/', 50 | aws_access_key_id=parse_result.username or None, 51 | aws_secret_access_key=parse_result.password or None, 52 | ) 53 | return s3fs 54 | 55 | By convention this would be defined in ``opener.py``. 56 | 57 | 58 | To register the opener you will need to define an `entry point 59 | `_ 60 | in your setup.py. See below for an example. 61 | 62 | 63 | The setup.py file 64 | ----------------- 65 | 66 | Refer to the `setuptools documentation `_ 67 | to see how to write a ``setup.py`` file. There are only a few things that 68 | should be kept in mind when creating a Pyfilesystem2 extension. Make sure that: 69 | 70 | * ``fs`` is in the ``install_requires`` list. You should reference the 71 | version number with the ``~=`` operator which ensures that the install 72 | will get any bugfix releases of PyFilesystem but not any potentially 73 | breaking changes. 74 | * Ìf you created an opener, include it as an ``fs.opener`` entry point, 75 | using the name of the entry point as the protocol to be used. 76 | 77 | Here is an minimal ``setup.py`` for our project: 78 | 79 | .. code:: python 80 | 81 | from setuptools import setup 82 | setup( 83 | name='fs-awesomefs', # Name in PyPi 84 | author="You !", 85 | author_email="your.email@domain.ext", 86 | description="An awesome filesystem for pyfilesystem2 !", 87 | install_requires=[ 88 | "fs~=2.0.5" 89 | ], 90 | entry_points = { 91 | 'fs.opener': [ 92 | 'awe = awesomefs.opener:AwesomeFSOpener', 93 | ] 94 | }, 95 | license="MY LICENSE", 96 | packages=['awesomefs'], 97 | version="X.Y.Z", 98 | ) 99 | 100 | Good Practices 101 | -------------- 102 | 103 | Keep track of your achievements! 
Add the following values to your ``__init__.py``: 104 | 105 | * ``__version__`` The version of the extension (we recommend following 106 | `Semantic Versioning `_), 107 | * ``__author__`` Your name(s). 108 | * ``__author_email__`` Your email(s). 109 | * ``__license__`` The module's license. 110 | 111 | Let us Know 112 | ----------- 113 | 114 | Contact us to add your filesystem to the `PyFilesystem Wiki `_. 115 | 116 | 117 | Live Example 118 | ------------ 119 | 120 | See `fs.sshfs `_ for a functioning 121 | PyFilesystem2 extension implementing a PyFilesystem2 filesystem over 122 | SSH. 123 | -------------------------------------------------------------------------------- /docs/source/external.rst: -------------------------------------------------------------------------------- 1 | External Filesystems 2 | ==================== 3 | 4 | See the following wiki page for a list of filesystems that are not in the core library, including community-contributed filesystems. 5 | 6 | https://www.pyfilesystem.org/page/index-of-filesystems/ 7 | 8 | If you have developed a filesystem that you would like added to the above page, please let us know by opening a `GitHub issue `_. -------------------------------------------------------------------------------- /docs/source/globbing.rst: -------------------------------------------------------------------------------- 1 | .. _globbing: 2 | 3 | Globbing 4 | ======== 5 | 6 | Globbing is the process of matching paths according to the rules used 7 | by the Unix shell. 8 | 9 | Generally speaking, you can think of a glob pattern as a path containing 10 | one or more wildcard patterns, separated by forward slashes. 11 | 12 | 13 | Matching Files and Directories 14 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 15 | 16 | In a glob pattern, a ``*`` matches any text in a filename. A ``?`` 17 | matches any single character. A ``**`` matches any number of subdirectories, 18 | making the glob *recursive*. If the glob pattern ends in a ``/``, it will 19 | only match directory paths, otherwise it will match files and directories. 20 | 21 | .. note:: 22 | A recursive glob requires that PyFilesystem scan a lot of files, 23 | and can potentially be slow for large (or network based) filesystems. 24 | 25 | Here's a summary of glob patterns: 26 | 27 | ``*`` 28 | Matches all files in the current directory. 29 | ``*.py`` 30 | Matches all .py files in the current directory. 31 | ``*.py?`` 32 | Matches all .py files and .pyi, .pyc etc. in the current directory. 33 | ``project/*.py`` 34 | Matches all .py files in a directory called ``project``. 35 | ``*/*.py`` 36 | Matches all .py files in any immediate sub-directory. 37 | ``**/*.py`` 38 | Recursively matches all .py files. 39 | ``**/.git/`` 40 | Recursively matches all ``.git`` directories. 41 | 42 | 43 | Interface 44 | ~~~~~~~~~ 45 | 46 | PyFilesystem supports globbing via the ``glob`` attribute on every FS 47 | instance, which is an instance of :class:`~fs.glob.BoundGlobber`. Here's 48 | how you might use it to find all the Python files in your filesystem:: 49 | 50 | for match in my_fs.glob("**/*.py"): 51 | print(f"{match.path} is {match.info.size} bytes long") 52 | 53 | Calling ``.glob`` with a pattern will return an iterator of 54 | :class:`~fs.glob.GlobMatch` named tuples for each matching file or 55 | directory. A glob match contains two attributes: ``path``, which is the 56 | full path in the filesystem, and ``info``, which is an 57 | :class:`fs.info.Info` info object for the matched resource.
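Here is a self-contained sketch you can paste into a REPL (the file names and contents are made up for the example)::

    from fs.memoryfs import MemoryFS

    my_fs = MemoryFS()
    my_fs.makedirs("project/tests")
    my_fs.writetext("project/app.py", "print('hello')\n")
    my_fs.writetext("project/tests/test_app.py", "assert True\n")
    my_fs.writetext("project/notes.txt", "todo\n")

    # Only the two .py files match the recursive pattern.
    for match in my_fs.glob("**/*.py"):
        print(match.path, match.info.size)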
58 | 59 | 60 | Batch Methods 61 | ~~~~~~~~~~~~~ 62 | 63 | In addition to iterating over the results, you can also call methods on 64 | the :class:`~fs.glob.Globber` which apply to every matched path. 65 | 66 | For instance, here is how you can use glob to remove all ``.pyc`` files 67 | from a project directory:: 68 | 69 | >>> import fs 70 | >>> fs.open_fs('~/projects/my_project').glob('**/*.pyc').remove() 71 | 29 72 | 73 | -------------------------------------------------------------------------------- /docs/source/index.rst: -------------------------------------------------------------------------------- 1 | .. PyFilesystem2 documentation master file, created by 2 | sphinx-quickstart on Tue May 10 16:45:12 2016. 3 | You can adapt this file completely to your liking, but it should at least 4 | contain the root `toctree` directive. 5 | 6 | Welcome to PyFilesystem2's documentation! 7 | ========================================= 8 | 9 | Contents: 10 | 11 | .. toctree:: 12 | :maxdepth: 2 13 | 14 | introduction.rst 15 | guide.rst 16 | 17 | concepts.rst 18 | info.rst 19 | openers.rst 20 | walking.rst 21 | globbing.rst 22 | builtin.rst 23 | implementers.rst 24 | extension.rst 25 | external.rst 26 | interface.rst 27 | reference.rst 28 | contributing.md 29 | 30 | 31 | Indices and tables 32 | ================== 33 | 34 | * :ref:`genindex` 35 | * :ref:`modindex` 36 | * :ref:`search` 37 | -------------------------------------------------------------------------------- /docs/source/interface.rst: -------------------------------------------------------------------------------- 1 | .. _interface: 2 | 3 | PyFilesystem API 4 | ---------------- 5 | 6 | The following is a complete list of methods on PyFilesystem objects. 7 | 8 | * :meth:`~fs.base.FS.appendbytes` Append bytes to a file. 9 | * :meth:`~fs.base.FS.appendtext` Append text to a file. 10 | * :meth:`~fs.base.FS.check` Check if a filesystem is open or raise error. 11 | * :meth:`~fs.base.FS.close` Close the filesystem. 12 | * :meth:`~fs.base.FS.copy` Copy a file to another location. 13 | * :meth:`~fs.base.FS.copydir` Copy a directory to another location. 14 | * :meth:`~fs.base.FS.create` Create or truncate a file. 15 | * :meth:`~fs.base.FS.desc` Get a description of a resource. 16 | * :meth:`~fs.base.FS.download` Copy a file on the filesystem to a file-like object. 17 | * :meth:`~fs.base.FS.exists` Check if a path exists. 18 | * :meth:`~fs.base.FS.filterdir` Iterate resources, filtering by wildcard(s). 19 | * :meth:`~fs.base.FS.getbasic` Get basic info namespace for a resource. 20 | * :meth:`~fs.base.FS.getdetails` Get details info namespace for a resource. 21 | * :meth:`~fs.base.FS.getinfo` Get info regarding a file or directory. 22 | * :meth:`~fs.base.FS.getmeta` Get meta information for a resource. 23 | * :meth:`~fs.base.FS.getmodified` Get the last modified time of a resource. 24 | * :meth:`~fs.base.FS.getospath` Get path with encoding expected by the OS. 25 | * :meth:`~fs.base.FS.getsize` Get the size of a file. 26 | * :meth:`~fs.base.FS.getsyspath` Get the system path of a resource, if one exists. 27 | * :meth:`~fs.base.FS.gettype` Get the type of a resource. 28 | * :meth:`~fs.base.FS.geturl` Get a URL to a resource, if one exists. 29 | * :meth:`~fs.base.FS.hassyspath` Check if a resource maps to the OS filesystem. 30 | * :meth:`~fs.base.FS.hash` Get the hash of a file's contents. 31 | * :meth:`~fs.base.FS.hasurl` Check if a resource has a URL. 32 | * :meth:`~fs.base.FS.isclosed` Check if the filesystem is closed. 
33 | * :meth:`~fs.base.FS.isempty` Check if a directory is empty. 34 | * :meth:`~fs.base.FS.isdir` Check if path maps to a directory. 35 | * :meth:`~fs.base.FS.isfile` Check if path maps to a file. 36 | * :meth:`~fs.base.FS.islink` Check if path is a link. 37 | * :meth:`~fs.base.FS.listdir` Get a list of resources in a directory. 38 | * :meth:`~fs.base.FS.lock` Get a thread lock context manager. 39 | * :meth:`~fs.base.FS.makedir` Make a directory. 40 | * :meth:`~fs.base.FS.makedirs` Make a directory and intermediate directories. 41 | * :meth:`~fs.base.FS.match` Match one or more wildcard patterns against a path. 42 | * :meth:`~fs.base.FS.move` Move a file to another location. 43 | * :meth:`~fs.base.FS.movedir` Move a directory to another location. 44 | * :meth:`~fs.base.FS.open` Open a file on the filesystem. 45 | * :meth:`~fs.base.FS.openbin` Open a binary file. 46 | * :meth:`~fs.base.FS.opendir` Get a filesystem object for a directory. 47 | * :meth:`~fs.base.FS.readbytes` Read file as bytes. 48 | * :meth:`~fs.base.FS.readtext` Read file as text. 49 | * :meth:`~fs.base.FS.remove` Remove a file. 50 | * :meth:`~fs.base.FS.removedir` Remove a directory. 51 | * :meth:`~fs.base.FS.removetree` Recursively remove file and directories. 52 | * :meth:`~fs.base.FS.scandir` Scan files and directories. 53 | * :meth:`~fs.base.FS.setinfo` Set resource information. 54 | * :meth:`~fs.base.FS.settimes` Set modified times for a resource. 55 | * :meth:`~fs.base.FS.touch` Create a file or update times. 56 | * :meth:`~fs.base.FS.tree` Render a tree view of the filesystem. 57 | * :meth:`~fs.base.FS.upload` Copy a binary file to the filesystem. 58 | * :meth:`~fs.base.FS.validatepath` Check a path is valid and return normalized path. 59 | * :meth:`~fs.base.FS.writebytes` Write a file as bytes. 60 | * :meth:`~fs.base.FS.writefile` Write a file-like object to the filesystem. 61 | * :meth:`~fs.base.FS.writetext` Write a file as text. 62 | -------------------------------------------------------------------------------- /docs/source/introduction.rst: -------------------------------------------------------------------------------- 1 | Introduction 2 | ============ 3 | 4 | PyFilesystem is a Python module that provides a common interface to any 5 | filesystem. 6 | 7 | Think of PyFilesystem ``FS`` objects as the next logical step to 8 | Python's ``file`` objects. In the same way that file objects abstract a 9 | single file, FS objects abstract an entire filesystem. 10 | 11 | 12 | Installing 13 | ---------- 14 | 15 | You can install PyFilesystem with ``pip`` as follows:: 16 | 17 | pip install fs 18 | 19 | Or to upgrade to the most recent version:: 20 | 21 | pip install fs --upgrade 22 | 23 | PyFilesystem is also available on conda_:: 24 | 25 | conda install fs -c conda-forge 26 | 27 | Alternatively, if you would like to install from source, you can check 28 | out `the code from Github `_. 29 | 30 | .. _conda: https://conda.io/docs/ -------------------------------------------------------------------------------- /docs/source/openers.rst: -------------------------------------------------------------------------------- 1 | .. _fs-urls: 2 | 3 | FS URLs 4 | ======= 5 | 6 | PyFilesystem can open a filesystem via an *FS URL*, which is similar to a URL you might enter in to a browser. FS URLs are useful if you want to specify a filesystem dynamically, such as in a conf file or from the command line. 
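For example, the following sketch (the ``PROJECTS_FS`` environment variable and its default value are assumptions for illustration) lets the caller decide at runtime whether the application works against a local directory, an archive, or a remote server::

    import os

    from fs import open_fs

    # Any FS URL will do here: "osfs://~/projects", "zip://backup.zip",
    # "ftp://ftp.example.org/pub", "mem://", ...
    fs_url = os.environ.get("PROJECTS_FS", "osfs://~/projects")
    projects_fs = open_fs(fs_url)
    print(projects_fs.listdir("/"))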
7 | 8 | Format 9 | ------ 10 | 11 | FS URLs are formatted in the following way:: 12 | 13 | <protocol>://<username>:<password>@<resource> 14 | 15 | The components are as follows: 16 | 17 | * ``<protocol>`` Identifies the type of filesystem to create, e.g. ``osfs``, ``ftp``. 18 | * ``<username>`` Optional username. 19 | * ``<password>`` Optional password. 20 | * ``<resource>`` A *resource*, which may be a domain, path, or both. 21 | 22 | Here are a few examples:: 23 | 24 | osfs://~/projects 25 | osfs://c://system32 26 | ftp://ftp.example.org/pub 27 | mem:// 28 | ftp://will:daffodil@ftp.example.org/private 29 | 30 | 31 | If ``<protocol>`` is not specified then it is assumed to be an :class:`~fs.osfs.OSFS`, i.e. the following FS URLs are equivalent:: 32 | 33 | osfs://~/projects 34 | ~/projects 35 | 36 | .. note:: 37 | The ``<username>`` and ``<password>`` fields may not contain a colon (``:``) or an ``@`` symbol. If you need these symbols they may be `percent encoded `_. 38 | 39 | 40 | URL Parameters 41 | -------------- 42 | 43 | FS URLs may also be appended with a ``?`` symbol followed by a URL-encoded query string. For example:: 44 | 45 | myprotocol://example.org?key1=value1&key2 46 | 47 | The query string would be decoded as ``{"key1": "value1", "key2": ""}``. 48 | 49 | Query strings are used to provide additional filesystem-specific information when opening. See the filesystem documentation for information on which query string parameters are supported. 50 | 51 | 52 | Opening FS URLs 53 | --------------- 54 | 55 | To open a filesystem with an FS URL, you can use :meth:`~fs.opener.registry.Registry.open_fs`, which may be imported and used as follows:: 56 | 57 | from fs import open_fs 58 | projects_fs = open_fs('osfs://~/projects') 59 | 60 | 61 | Manually registering Openers 62 | ---------------------------- 63 | 64 | The ``fs.opener`` registry uses an entry point to install external openers 65 | (see :ref:`extension`), and it does so once, when you import `fs` for the first 66 | time. In some rare cases, however, entry points are not available (for instance, 67 | when running an embedded interpreter), or extensions are installed *after* 68 | the interpreter has started (for instance in a notebook, see 69 | `PyFilesystem2#485 `_). 70 | 71 | In those situations, a new opener can be installed manually at any time with the 72 | `fs.opener.registry.install` method. For instance, here's how the opener for 73 | the `s3fs `_ extension can be added to 74 | the registry:: 75 | 76 | import fs.opener 77 | from fs_s3fs.opener import S3FSOpener 78 | 79 | fs.opener.registry.install(S3FSOpener) 80 | # fs.open_fs("s3fs://...") should now work 81 | -------------------------------------------------------------------------------- /docs/source/reference.rst: -------------------------------------------------------------------------------- 1 | Reference 2 | ========= 3 | 4 | ..
toctree:: 5 | :maxdepth: 3 6 | 7 | reference/base.rst 8 | reference/compress.rst 9 | reference/copy.rst 10 | reference/enums.rst 11 | reference/errors.rst 12 | reference/glob.rst 13 | reference/info_objects.rst 14 | reference/filesize.rst 15 | reference/mirror.rst 16 | reference/move.rst 17 | reference/mode.rst 18 | reference/opener.rst 19 | reference/path.rst 20 | reference/permissions.rst 21 | reference/tools.rst 22 | reference/tree.rst 23 | reference/walk.rst 24 | reference/wildcard.rst 25 | reference/wrap.rst 26 | reference/wrapfs.rst 27 | -------------------------------------------------------------------------------- /docs/source/reference/appfs.rst: -------------------------------------------------------------------------------- 1 | App Filesystems 2 | =============== 3 | 4 | .. automodule:: fs.appfs 5 | :members: 6 | -------------------------------------------------------------------------------- /docs/source/reference/base.rst: -------------------------------------------------------------------------------- 1 | fs.base 2 | ======= 3 | 4 | .. automodule:: fs.base 5 | :members: FS 6 | -------------------------------------------------------------------------------- /docs/source/reference/compress.rst: -------------------------------------------------------------------------------- 1 | fs.compress 2 | =========== 3 | 4 | .. automodule:: fs.compress 5 | :members: -------------------------------------------------------------------------------- /docs/source/reference/copy.rst: -------------------------------------------------------------------------------- 1 | fs.copy 2 | ======= 3 | 4 | .. automodule:: fs.copy 5 | :members: 6 | -------------------------------------------------------------------------------- /docs/source/reference/enums.rst: -------------------------------------------------------------------------------- 1 | fs.enums 2 | ======== 3 | 4 | .. automodule:: fs.enums 5 | :members: ResourceType, Seek 6 | :member-order: bysource 7 | -------------------------------------------------------------------------------- /docs/source/reference/errors.rst: -------------------------------------------------------------------------------- 1 | fs.errors 2 | ========= 3 | 4 | .. automodule:: fs.errors 5 | :members: 6 | :show-inheritance: 7 | -------------------------------------------------------------------------------- /docs/source/reference/filesize.rst: -------------------------------------------------------------------------------- 1 | fs.filesize 2 | =========== 3 | 4 | .. automodule:: fs.filesize 5 | :members: 6 | -------------------------------------------------------------------------------- /docs/source/reference/ftpfs.rst: -------------------------------------------------------------------------------- 1 | FTP Filesystem 2 | ============== 3 | 4 | .. automodule:: fs.ftpfs 5 | :members: 6 | -------------------------------------------------------------------------------- /docs/source/reference/glob.rst: -------------------------------------------------------------------------------- 1 | fs.glob 2 | ======= 3 | 4 | .. automodule:: fs.glob 5 | :members: 6 | -------------------------------------------------------------------------------- /docs/source/reference/info_objects.rst: -------------------------------------------------------------------------------- 1 | fs.info 2 | ======= 3 | 4 | .. 
automodule:: fs.info 5 | :members: 6 | -------------------------------------------------------------------------------- /docs/source/reference/memoryfs.rst: -------------------------------------------------------------------------------- 1 | Memory Filesystem 2 | ================= 3 | 4 | .. automodule:: fs.memoryfs 5 | :members: 6 | -------------------------------------------------------------------------------- /docs/source/reference/mirror.rst: -------------------------------------------------------------------------------- 1 | fs.mirror 2 | ========= 3 | 4 | .. automodule:: fs.mirror 5 | :members: mirror 6 | -------------------------------------------------------------------------------- /docs/source/reference/mode.rst: -------------------------------------------------------------------------------- 1 | fs.mode 2 | ======= 3 | 4 | .. automodule:: fs.mode 5 | :members: 6 | -------------------------------------------------------------------------------- /docs/source/reference/mountfs.rst: -------------------------------------------------------------------------------- 1 | Mount Filesystem 2 | ================ 3 | 4 | A Mount FS is a *virtual* filesystem which can seamlessly map 5 | sub-directories on to other filesystems. 6 | 7 | For example, lets say we have two filesystems containing config files 8 | and resources respectively:: 9 | 10 | [config_fs] 11 | |-- config.cfg 12 | `-- defaults.cfg 13 | 14 | [resources_fs] 15 | |-- images 16 | | |-- logo.jpg 17 | | `-- photo.jpg 18 | `-- data.dat 19 | 20 | We can combine these filesystems in to a single filesystem with the 21 | following code:: 22 | 23 | from fs.mountfs import MountFS 24 | combined_fs = MountFS() 25 | combined_fs.mount('config', config_fs) 26 | combined_fs.mount('resources', resources_fs) 27 | 28 | This will create a filesystem where paths under ``config/`` map to 29 | ``config_fs``, and paths under ``resources/`` map to ``resources_fs``:: 30 | 31 | [combined_fs] 32 | |-- config 33 | | |-- config.cfg 34 | | `-- defaults.cfg 35 | `-- resources 36 | |-- images 37 | | |-- logo.jpg 38 | | `-- photo.jpg 39 | `-- data.dat 40 | 41 | Now both filesystems may be accessed with the same path structure:: 42 | 43 | print(combined_fs.gettext('/config/defaults.cfg')) 44 | read_jpg(combined_fs.open('/resources/images/logo.jpg', 'rb') 45 | 46 | .. autoclass:: fs.mountfs.MountFS 47 | :members: 48 | 49 | .. autoexception:: fs.mountfs.MountError 50 | :show-inheritance: 51 | -------------------------------------------------------------------------------- /docs/source/reference/move.rst: -------------------------------------------------------------------------------- 1 | fs.move 2 | ======= 3 | 4 | .. automodule:: fs.move 5 | :members: 6 | -------------------------------------------------------------------------------- /docs/source/reference/multifs.rst: -------------------------------------------------------------------------------- 1 | Multi Filesystem 2 | ================ 3 | 4 | A MultiFS is a filesystem composed of a sequence of other filesystems, 5 | where the directory structure of each overlays the previous filesystem 6 | in the sequence. 7 | 8 | One use for such a filesystem would be to selectively override a set of 9 | files, to customize behavior. For example, to create a filesystem that 10 | could be used to *theme* a web application. 
We start with the following 11 | directories:: 12 | 13 | 14 | `-- templates 15 | |-- snippets 16 | | `-- panel.html 17 | |-- index.html 18 | |-- profile.html 19 | `-- base.html 20 | 21 | `-- theme 22 | |-- snippets 23 | | |-- widget.html 24 | | `-- extra.html 25 | |-- index.html 26 | `-- theme.html 27 | 28 | And we want to create a single filesystem that will load a file from 29 | ``templates/`` only if it isn't found in ``theme/``. Here's how we could 30 | do that:: 31 | 32 | 33 | from fs.osfs import OSFS 34 | from fs.multifs import MultiFS 35 | 36 | theme_fs = MultiFS() 37 | theme_fs.add_fs('templates', OSFS('templates')) 38 | theme_fs.add_fs('theme', OSFS('theme')) 39 | 40 | 41 | Now we have a ``theme_fs`` filesystem that presents a single view of both 42 | directories:: 43 | 44 | |-- snippets 45 | | |-- panel.html 46 | | |-- widget.html 47 | | `-- extra.html 48 | |-- index.html 49 | |-- profile.html 50 | |-- base.html 51 | `-- theme.html 52 | 53 | 54 | .. autoclass:: fs.multifs.MultiFS 55 | :members: 56 | -------------------------------------------------------------------------------- /docs/source/reference/opener.rst: -------------------------------------------------------------------------------- 1 | fs.opener 2 | ========= 3 | 4 | Open filesystems from a URL. 5 | 6 | fs.opener.base 7 | -------------- 8 | .. automodule:: fs.opener.base 9 | :members: 10 | 11 | fs.opener.parse 12 | --------------- 13 | .. automodule:: fs.opener.parse 14 | :members: 15 | 16 | fs.opener.registry 17 | ------------------ 18 | .. automodule:: fs.opener.registry 19 | :members: 20 | 21 | fs.opener.errors 22 | ---------------- 23 | .. automodule:: fs.opener.errors 24 | :members: 25 | :show-inheritance: 26 | -------------------------------------------------------------------------------- /docs/source/reference/osfs.rst: -------------------------------------------------------------------------------- 1 | OS Filesystem 2 | ============= 3 | 4 | .. automodule:: fs.osfs 5 | :members: 6 | -------------------------------------------------------------------------------- /docs/source/reference/path.rst: -------------------------------------------------------------------------------- 1 | fs.path 2 | ======= 3 | 4 | .. automodule:: fs.path 5 | :members: 6 | -------------------------------------------------------------------------------- /docs/source/reference/permissions.rst: -------------------------------------------------------------------------------- 1 | fs.permissions 2 | ============== 3 | 4 | .. automodule:: fs.permissions 5 | :members: -------------------------------------------------------------------------------- /docs/source/reference/subfs.rst: -------------------------------------------------------------------------------- 1 | Sub Filesystem 2 | ============== 3 | 4 | .. automodule:: fs.subfs 5 | :members: 6 | :member-order: bysource 7 | -------------------------------------------------------------------------------- /docs/source/reference/tarfs.rst: -------------------------------------------------------------------------------- 1 | Tar Filesystem 2 | ============== 3 | 4 | .. automodule:: fs.tarfs 5 | :members: 6 | :member-order: bysource 7 | -------------------------------------------------------------------------------- /docs/source/reference/tempfs.rst: -------------------------------------------------------------------------------- 1 | Temporary Filesystem 2 | ==================== 3 | 4 | .. 
automodule:: fs.tempfs 5 | :members: 6 | -------------------------------------------------------------------------------- /docs/source/reference/tools.rst: -------------------------------------------------------------------------------- 1 | fs.tools 2 | ======== 3 | 4 | .. automodule:: fs.tools 5 | :members: 6 | -------------------------------------------------------------------------------- /docs/source/reference/tree.rst: -------------------------------------------------------------------------------- 1 | fs.tree 2 | ======= 3 | 4 | .. automodule:: fs.tree 5 | :members: 6 | -------------------------------------------------------------------------------- /docs/source/reference/walk.rst: -------------------------------------------------------------------------------- 1 | fs.walk 2 | ======= 3 | 4 | .. automodule:: fs.walk 5 | :members: 6 | -------------------------------------------------------------------------------- /docs/source/reference/wildcard.rst: -------------------------------------------------------------------------------- 1 | fs.wildcard 2 | =========== 3 | 4 | .. automodule:: fs.wildcard 5 | :members: 6 | -------------------------------------------------------------------------------- /docs/source/reference/wrap.rst: -------------------------------------------------------------------------------- 1 | fs.wrap 2 | ======= 3 | 4 | .. automodule:: fs.wrap 5 | :members: 6 | -------------------------------------------------------------------------------- /docs/source/reference/wrapfs.rst: -------------------------------------------------------------------------------- 1 | fs.wrapfs 2 | ========= 3 | 4 | .. automodule:: fs.wrapfs 5 | :members: 6 | -------------------------------------------------------------------------------- /docs/source/reference/zipfs.rst: -------------------------------------------------------------------------------- 1 | Zip Filesystem 2 | ============== 3 | 4 | .. automodule:: fs.zipfs 5 | :members: 6 | :member-order: bysource 7 | -------------------------------------------------------------------------------- /docs/source/walking.rst: -------------------------------------------------------------------------------- 1 | .. _walking: 2 | 3 | Walking 4 | ======= 5 | 6 | *Walking* a filesystem means recursively visiting a directory and any sub-directories. It is a fairly common requirement for copying, searching etc. 7 | 8 | To walk a filesystem (or directory) you can construct a :class:`~fs.walk.Walker` object and use its methods to do the walking. Here's an example that prints the path to every Python file in your projects directory:: 9 | 10 | >>> from fs import open_fs 11 | >>> from fs.walk import Walker 12 | >>> home_fs = open_fs('~/projects') 13 | >>> walker = Walker(filter=['*.py']) 14 | >>> for path in walker.files(home_fs): 15 | ... print(path) 16 | 17 | Generally speaking, however, you will only need to construct a Walker object if you want to customize some behavior of the walking algorithm. This is because you can access the functionality of a Walker object via the ``walk`` attribute on FS objects. Here's an example:: 18 | 19 | >>> from fs import open_fs 20 | >>> home_fs = open_fs('~/projects') 21 | >>> for path in home_fs.walk.files(filter=['*.py']): 22 | ... print(path) 23 | 24 | Note that the ``files`` method above doesn't require a ``fs`` parameter. This is because the ``walk`` attribute is a property which returns a :class:`~fs.walk.BoundWalker` object, which associates the filesystem with a walker. 
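In other words, ``home_fs.walk`` is simply a :class:`~fs.walk.Walker` that has been pre-bound to ``home_fs``. The following sketch (assuming the same ``~/projects`` directory as above) shows that the explicit and the bound form visit the same files::

    >>> from fs import open_fs
    >>> from fs.walk import Walker
    >>> home_fs = open_fs('~/projects')
    >>> # constructing a Walker explicitly ...
    >>> for path in Walker(filter=['*.py']).files(home_fs):
    ...     print(path)
    >>> # ... is equivalent to using the bound walker
    >>> for path in home_fs.walk.files(filter=['*.py']):
    ...     print(path)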
25 | 26 | Walk Methods 27 | ~~~~~~~~~~~~ 28 | 29 | If you call the ``walk`` attribute on a :class:`~fs.walk.BoundWalker` it will return an iterable of :class:`~fs.walk.Step` named tuples with three values; a path to the directory, a list of :class:`~fs.info.Info` objects for directories, and a list of :class:`~fs.info.Info` objects for the files. Here's an example:: 30 | 31 | for step in home_fs.walk(filter=['*.py']): 32 | print('In dir {}'.format(step.path)) 33 | print('sub-directories: {!r}'.format(step.dirs)) 34 | print('files: {!r}'.format(step.files)) 35 | 36 | .. note :: 37 | Methods of :class:`~fs.walk.BoundWalker` invoke a corresponding method on a :class:`~fs.walk.Walker` object, with the *bound* filesystem. 38 | 39 | The ``walk`` attribute may appear to be a method, but is in fact a callable object. It supports other convenient methods that supply different information from the walk. For instance, :meth:`~fs.walk.BoundWalker.files`, which returns an iterable of file paths. Here's an example:: 40 | 41 | for path in home_fs.walk.files(filter=['*.py']): 42 | print('Python file: {}'.format(path)) 43 | 44 | The complement to ``files`` is :meth:`~fs.walk.BoundWalker.dirs` which returns paths to just the directories (and ignoring the files). Here's an example:: 45 | 46 | for dir_path in home_fs.walk.dirs(): 47 | print("{!r} contains sub-directory {}".format(home_fs, dir_path)) 48 | 49 | The :meth:`~fs.walk.BoundWalker.info` method returns a generator of tuples containing a path and an :class:`~fs.info.Info` object. You can use the ``is_dir`` attribute to know if the path refers to a directory or file. Here's an example:: 50 | 51 | for path, info in home_fs.walk.info(): 52 | if info.is_dir: 53 | print("[dir] {}".format(path)) 54 | else: 55 | print("[file] {}".format(path)) 56 | 57 | Finally, here's a nice example that counts the number of bytes of Python code in your home directory:: 58 | 59 | bytes_of_python = sum( 60 | info.size 61 | for info in home_fs.walk.info(namespaces=['details']) 62 | if not info.is_dir 63 | ) 64 | 65 | 66 | Search Algorithms 67 | ~~~~~~~~~~~~~~~~~ 68 | 69 | There are two general algorithms for searching a directory tree. The first method is `"breadth"`, which yields resources in the top of the directory tree first, before moving on to sub-directories. The second is `"depth"` which yields the most deeply nested resources, and works backwards to the top-most directory. 70 | 71 | Generally speaking, you will only need the a *depth* search if you will be deleting resources as you walk through them. The default *breadth* search is a generally more efficient way of looking through a filesystem. You can specify which method you want with the ``search`` parameter on most ``Walker`` methods. 72 | -------------------------------------------------------------------------------- /examples/README.txt: -------------------------------------------------------------------------------- 1 | This directory contains a number of example command line apps using PyFilesystem. 2 | 3 | They are intended to be a learning aid and not exactly finished products, but all these examples are completely functional. -------------------------------------------------------------------------------- /examples/count_py.py: -------------------------------------------------------------------------------- 1 | """ 2 | Display how much storage is used in your Python files. 
3 | 4 | Usage: 5 | python count_py.py 6 | 7 | """ 8 | 9 | import sys 10 | 11 | from fs import open_fs 12 | from fs.filesize import traditional 13 | 14 | fs_url = sys.argv[1] 15 | count = 0 16 | 17 | with open_fs(fs_url) as fs: 18 | for _path, info in fs.walk.info(filter=["*.py"], namespaces=["details"]): 19 | count += info.size 20 | 21 | print(f'There is {traditional(count)} of Python in "{fs_url}"') 22 | -------------------------------------------------------------------------------- /examples/find_dups.py: -------------------------------------------------------------------------------- 1 | """ 2 | Find paths to files with identical contents. 3 | 4 | Usage: 5 | 6 | python find_dups.py 7 | 8 | """ 9 | 10 | import sys 11 | 12 | from collections import defaultdict 13 | 14 | from fs import open_fs 15 | 16 | hashes = defaultdict(list) 17 | with open_fs(sys.argv[1]) as fs: 18 | for path in fs.walk.files(): 19 | file_hash = fs.hash(path, "md5") 20 | hashes[file_hash].append(path) 21 | 22 | for paths in hashes.values(): 23 | if len(paths) > 1: 24 | for path in paths: 25 | print(path) 26 | print() 27 | 28 | -------------------------------------------------------------------------------- /examples/rm_pyc.py: -------------------------------------------------------------------------------- 1 | """ 2 | Remove all pyc files in a directory. 3 | 4 | Usage: 5 | 6 | python rm_pyc.py 7 | 8 | """ 9 | 10 | import sys 11 | 12 | from fs import open_fs 13 | 14 | with open_fs(sys.argv[1]) as fs: 15 | count = fs.glob("**/*.pyc").remove() 16 | print(f"{count} .pyc files remove") 17 | -------------------------------------------------------------------------------- /examples/upload.py: -------------------------------------------------------------------------------- 1 | """ 2 | Upload a file to a server (or other filesystem) 3 | 4 | Usage: 5 | 6 | python upload.py FILENAME 7 | 8 | example: 9 | 10 | python upload.py foo.txt ftp://example.org/uploads/ 11 | 12 | 13 | """ 14 | 15 | import sys 16 | 17 | import os 18 | 19 | from fs import open_fs 20 | 21 | _, file_path, fs_url = sys.argv 22 | filename = os.path.basename(file_path) 23 | 24 | with open_fs(fs_url) as fs: 25 | if fs.exists(filename): 26 | print("destination exists! aborting.") 27 | else: 28 | with open(file_path, "rb") as bin_file: 29 | fs.upload(filename, bin_file) 30 | print("upload successful!") 31 | -------------------------------------------------------------------------------- /fs/__init__.py: -------------------------------------------------------------------------------- 1 | """Python filesystem abstraction layer. 2 | """ 3 | 4 | __import__("pkg_resources").declare_namespace(__name__) # type: ignore 5 | 6 | from . import path 7 | from ._fscompat import fsdecode, fsencode 8 | from ._version import __version__ 9 | from .enums import ResourceType, Seek 10 | from .opener import open_fs 11 | 12 | __all__ = ["__version__", "ResourceType", "Seek", "open_fs"] 13 | -------------------------------------------------------------------------------- /fs/_bulk.py: -------------------------------------------------------------------------------- 1 | """ 2 | 3 | Implements a thread pool for parallel copying of files. 
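A rough sketch of how the `Copier` defined below is used (the ``src_fs``/``dst_fs`` filesystems and the paths are hypothetical)::

    with Copier(num_workers=4) as copier:
        copier.copy(src_fs, "a.txt", dst_fs, "a.txt")
        copier.copy(src_fs, "b.txt", dst_fs, "b.txt")
    # leaving the block waits for the worker threads to finish;
    # if any individual copy failed, BulkCopyFailed is raised with
    # the collected errors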
4 | 5 | """ 6 | 7 | from __future__ import unicode_literals 8 | 9 | import typing 10 | 11 | import threading 12 | from six.moves.queue import Queue 13 | 14 | from .copy import copy_file_internal, copy_modified_time 15 | from .errors import BulkCopyFailed 16 | from .tools import copy_file_data 17 | 18 | if typing.TYPE_CHECKING: 19 | from typing import IO, List, Optional, Text, Tuple, Type 20 | 21 | from types import TracebackType 22 | 23 | from .base import FS 24 | 25 | 26 | class _Worker(threading.Thread): 27 | """Worker thread that pulls tasks from a queue.""" 28 | 29 | def __init__(self, copier): 30 | # type (Copier) -> None 31 | self.copier = copier 32 | super(_Worker, self).__init__() 33 | self.daemon = True 34 | 35 | def run(self): 36 | # type () -> None 37 | queue = self.copier.queue 38 | while True: 39 | task = queue.get(block=True) 40 | try: 41 | if task is None: 42 | break # Sentinel to exit thread 43 | task() 44 | except Exception as error: 45 | self.copier.add_error(error) 46 | finally: 47 | queue.task_done() 48 | 49 | 50 | class _Task(object): 51 | """Base class for a task.""" 52 | 53 | def __call__(self): 54 | # type: () -> None 55 | """Task implementation.""" 56 | 57 | 58 | class _CopyTask(_Task): 59 | """A callable that copies from one file another.""" 60 | 61 | def __init__(self, src_file, dst_file): 62 | # type: (IO, IO) -> None 63 | self.src_file = src_file 64 | self.dst_file = dst_file 65 | 66 | def __call__(self): 67 | # type: () -> None 68 | try: 69 | copy_file_data(self.src_file, self.dst_file, chunk_size=1024 * 1024) 70 | finally: 71 | try: 72 | self.src_file.close() 73 | finally: 74 | self.dst_file.close() 75 | 76 | 77 | class Copier(object): 78 | """Copy files in worker threads.""" 79 | 80 | def __init__(self, num_workers=4, preserve_time=False): 81 | # type: (int, bool) -> None 82 | if num_workers < 0: 83 | raise ValueError("num_workers must be >= 0") 84 | self.num_workers = num_workers 85 | self.preserve_time = preserve_time 86 | self.all_tasks = [] # type: List[Tuple[FS, Text, FS, Text]] 87 | self.queue = None # type: Optional[Queue[_Task]] 88 | self.workers = [] # type: List[_Worker] 89 | self.errors = [] # type: List[Exception] 90 | self.running = False 91 | 92 | def start(self): 93 | """Start the workers.""" 94 | if self.num_workers: 95 | self.queue = Queue(maxsize=self.num_workers) 96 | self.workers = [_Worker(self) for _ in range(self.num_workers)] 97 | for worker in self.workers: 98 | worker.start() 99 | self.running = True 100 | 101 | def stop(self): 102 | """Stop the workers (will block until they are finished).""" 103 | if self.running and self.num_workers: 104 | # Notify the workers that all tasks have arrived 105 | # and wait for them to finish. 106 | for _worker in self.workers: 107 | self.queue.put(None) 108 | for worker in self.workers: 109 | worker.join() 110 | 111 | # If the "last modified" time is to be preserved, do it now. 
112 | if self.preserve_time: 113 | for args in self.all_tasks: 114 | copy_modified_time(*args) 115 | 116 | # Free up references held by workers 117 | del self.workers[:] 118 | self.queue.join() 119 | self.running = False 120 | 121 | def add_error(self, error): 122 | """Add an exception raised by a task.""" 123 | self.errors.append(error) 124 | 125 | def __enter__(self): 126 | self.start() 127 | return self 128 | 129 | def __exit__( 130 | self, 131 | exc_type, # type: Optional[Type[BaseException]] 132 | exc_value, # type: Optional[BaseException] 133 | traceback, # type: Optional[TracebackType] 134 | ): 135 | self.stop() 136 | if traceback is None and self.errors: 137 | raise BulkCopyFailed(self.errors) 138 | 139 | def copy(self, src_fs, src_path, dst_fs, dst_path, preserve_time=False): 140 | # type: (FS, Text, FS, Text, bool) -> None 141 | """Copy a file from one fs to another.""" 142 | if self.queue is None: 143 | # This should be the most performant for a single-thread 144 | copy_file_internal( 145 | src_fs, src_path, dst_fs, dst_path, preserve_time=self.preserve_time 146 | ) 147 | else: 148 | self.all_tasks.append((src_fs, src_path, dst_fs, dst_path)) 149 | src_file = src_fs.openbin(src_path, "r") 150 | try: 151 | dst_file = dst_fs.openbin(dst_path, "w") 152 | except Exception: 153 | src_file.close() 154 | raise 155 | task = _CopyTask(src_file, dst_file) 156 | self.queue.put(task) 157 | -------------------------------------------------------------------------------- /fs/_fscompat.py: -------------------------------------------------------------------------------- 1 | import six 2 | 3 | try: 4 | from os import fsdecode, fsencode 5 | except ImportError: 6 | from backports.os import fsdecode, fsencode # type: ignore 7 | 8 | try: 9 | from os import fspath 10 | except ImportError: 11 | 12 | def fspath(path): # type: ignore 13 | """Return the path representation of a path-like object. 14 | 15 | If str or bytes is passed in, it is returned unchanged. Otherwise the 16 | os.PathLike interface is used to get the path representation. If the 17 | path representation is not str or bytes, TypeError is raised. If the 18 | provided path is not str, bytes, or os.PathLike, TypeError is raised. 19 | """ 20 | if isinstance(path, (six.text_type, bytes)): 21 | return path 22 | 23 | # Work from the object's type to match method resolution of other magic 24 | # methods. 
25 | path_type = type(path) 26 | try: 27 | path_repr = path_type.__fspath__(path) 28 | except AttributeError: 29 | if hasattr(path_type, "__fspath__"): 30 | raise 31 | else: 32 | raise TypeError( 33 | "expected string type or os.PathLike object, " 34 | "not " + path_type.__name__ 35 | ) 36 | if isinstance(path_repr, (six.text_type, bytes)): 37 | return path_repr 38 | else: 39 | raise TypeError( 40 | "expected {}.__fspath__() to return string type " 41 | "not {}".format(path_type.__name__, type(path_repr).__name__) 42 | ) 43 | -------------------------------------------------------------------------------- /fs/_ftp_parse.py: -------------------------------------------------------------------------------- 1 | from __future__ import absolute_import, print_function, unicode_literals 2 | 3 | import re 4 | import time 5 | import unicodedata 6 | from datetime import datetime 7 | 8 | try: 9 | from datetime import timezone 10 | except ImportError: 11 | from ._tzcompat import timezone # type: ignore 12 | 13 | from .enums import ResourceType 14 | from .permissions import Permissions 15 | 16 | EPOCH_DT = datetime.fromtimestamp(0, timezone.utc) 17 | 18 | 19 | RE_LINUX = re.compile( 20 | r""" 21 | ^ 22 | ([-dlpscbD]) 23 | ([r-][w-][xsS-][r-][w-][xsS-][r-][w-][xtT-][\.\+]?) 24 | \s+? 25 | (\d+) 26 | \s+? 27 | ([A-Za-z0-9][A-Za-z0-9\-\.\_\@]*\$?) 28 | \s+? 29 | ([A-Za-z0-9][A-Za-z0-9\-\.\_\@]*\$?) 30 | \s+? 31 | (\d+) 32 | \s+? 33 | (\w{3}\s+\d{1,2}\s+[\w:]+) 34 | \s+ 35 | (.*?) 36 | $ 37 | """, 38 | re.VERBOSE, 39 | ) 40 | 41 | 42 | RE_WINDOWSNT = re.compile( 43 | r""" 44 | ^ 45 | (?P<modified_date>\S+) 46 | \s+ 47 | (?P<modified_time>\S+(AM|PM)?) 48 | \s+ 49 | (?P<size>(<DIR>|\d+)) 50 | \s+ 51 | (?P<name>.*) 52 | $ 53 | """, 54 | re.VERBOSE, 55 | ) 56 | 57 | 58 | def get_decoders(): 59 | """Return all available FTP LIST line decoders with their matching regexes.""" 60 | decoders = [ 61 | (RE_LINUX, decode_linux), 62 | (RE_WINDOWSNT, decode_windowsnt), 63 | ] 64 | return decoders 65 | 66 | 67 | def parse(lines): 68 | info = [] 69 | for line in lines: 70 | if not line.strip(): 71 | continue 72 | raw_info = parse_line(line) 73 | if raw_info is not None: 74 | info.append(raw_info) 75 | return info 76 | 77 | 78 | def parse_line(line): 79 | for line_re, decode_callable in get_decoders(): 80 | match = line_re.match(line) 81 | if match is not None: 82 | return decode_callable(line, match) 83 | return None 84 | 85 | 86 | def _parse_time(t, formats): 87 | for frmt in formats: 88 | try: 89 | _t = time.strptime(t, frmt) 90 | break 91 | except ValueError: 92 | continue 93 | else: 94 | return None 95 | 96 | year = _t.tm_year if _t.tm_year != 1900 else time.localtime().tm_year 97 | month = _t.tm_mon 98 | day = _t.tm_mday 99 | hour = _t.tm_hour 100 | minutes = _t.tm_min 101 | dt = datetime(year, month, day, hour, minutes, tzinfo=timezone.utc) 102 | 103 | epoch_time = (dt - EPOCH_DT).total_seconds() 104 | return epoch_time 105 | 106 | 107 | def _decode_linux_time(mtime): 108 | return _parse_time(mtime, formats=["%b %d %Y", "%b %d %H:%M"]) 109 | 110 | 111 | def decode_linux(line, match): 112 | ty, perms, links, uid, gid, size, mtime, name = match.groups() 113 | is_link = ty == "l" 114 | is_dir = ty == "d" or is_link 115 | if is_link: 116 | name, _, _link_name = name.partition("->") 117 | name = name.strip() 118 | _link_name = _link_name.strip() 119 | permissions = Permissions.parse(perms) 120 | 121 | mtime_epoch = _decode_linux_time(mtime) 122 | 123 | name = unicodedata.normalize("NFC", name) 124 | 125 | raw_info = { 126 | "basic": {"name": name, "is_dir": is_dir},
127 | "details": { 128 | "size": int(size), 129 | "type": int(ResourceType.directory if is_dir else ResourceType.file), 130 | }, 131 | "access": {"permissions": permissions.dump()}, 132 | "ftp": {"ls": line}, 133 | } 134 | access = raw_info["access"] 135 | details = raw_info["details"] 136 | if mtime_epoch is not None: 137 | details["modified"] = mtime_epoch 138 | 139 | access["user"] = uid 140 | access["group"] = gid 141 | 142 | return raw_info 143 | 144 | 145 | def _decode_windowsnt_time(mtime): 146 | return _parse_time(mtime, formats=["%d-%m-%y %I:%M%p", "%d-%m-%y %H:%M"]) 147 | 148 | 149 | def decode_windowsnt(line, match): 150 | """Decode a Windows NT FTP LIST line. 151 | 152 | Examples: 153 | Decode a directory line:: 154 | 155 | >>> line = "11-02-18 02:12PM <DIR> images" 156 | >>> match = RE_WINDOWSNT.match(line) 157 | >>> pprint(decode_windowsnt(line, match)) 158 | {'basic': {'is_dir': True, 'name': 'images'}, 159 | 'details': {'modified': 1518358320.0, 'type': 1}, 160 | 'ftp': {'ls': '11-02-18 02:12PM <DIR> images'}} 161 | 162 | Decode a file line:: 163 | 164 | >>> line = "11-02-18 03:33PM 9276 logo.gif" 165 | >>> match = RE_WINDOWSNT.match(line) 166 | >>> pprint(decode_windowsnt(line, match)) 167 | {'basic': {'is_dir': False, 'name': 'logo.gif'}, 168 | 'details': {'modified': 1518363180.0, 'size': 9276, 'type': 2}, 169 | 'ftp': {'ls': '11-02-18 03:33PM 9276 logo.gif'}} 170 | 171 | Alternatively, the time might also be present in 24-hour format:: 172 | 173 | >>> line = "11-02-18 15:33 9276 logo.gif" 174 | >>> match = RE_WINDOWSNT.match(line) 175 | >>> decode_windowsnt(line, match)["details"]["modified"] 176 | 1518363180.0 177 | 178 | """ 179 | is_dir = match.group("size") == "<DIR>" 180 | 181 | raw_info = { 182 | "basic": { 183 | "name": match.group("name"), 184 | "is_dir": is_dir, 185 | }, 186 | "details": { 187 | "type": int(ResourceType.directory if is_dir else ResourceType.file), 188 | }, 189 | "ftp": {"ls": line}, 190 | } 191 | 192 | if not is_dir: 193 | raw_info["details"]["size"] = int(match.group("size")) 194 | 195 | modified = _decode_windowsnt_time( 196 | match.group("modified_date") + " " + match.group("modified_time") 197 | ) 198 | if modified is not None: 199 | raw_info["details"]["modified"] = modified 200 | 201 | return raw_info 202 | -------------------------------------------------------------------------------- /fs/_pathcompat.py: -------------------------------------------------------------------------------- 1 | # mypy: ignore-errors 2 | try: 3 | from os.path import commonpath 4 | except ImportError: 5 | # Return the longest common sub-path of the sequence of paths given as input. 6 | # The paths are not normalized before comparing them (this is the 7 | # responsibility of the caller). Any trailing separator is stripped from the 8 | # returned path. 9 | 10 | def commonpath(paths): 11 | """Given a sequence of path names, returns the longest common sub-path.""" 12 | 13 | if not paths: 14 | raise ValueError("commonpath() arg is an empty sequence") 15 | 16 | paths = tuple(paths) 17 | if isinstance(paths[0], bytes): 18 | sep = b"/" 19 | curdir = b"." 20 | else: 21 | sep = "/" 22 | curdir = "."
23 | 24 | split_paths = [path.split(sep) for path in paths] 25 | 26 | try: 27 | (isabs,) = set(p[:1] == sep for p in paths) 28 | except ValueError: 29 | raise ValueError("Can't mix absolute and relative paths") 30 | 31 | split_paths = [[c for c in s if c and c != curdir] for s in split_paths] 32 | s1 = min(split_paths) 33 | s2 = max(split_paths) 34 | common = s1 35 | for i, c in enumerate(s1): 36 | if c != s2[i]: 37 | common = s1[:i] 38 | break 39 | 40 | prefix = sep if isabs else sep[:0] 41 | return prefix + sep.join(common) 42 | -------------------------------------------------------------------------------- /fs/_repr.py: -------------------------------------------------------------------------------- 1 | """Tools to generate __repr__ strings. 2 | """ 3 | 4 | from __future__ import unicode_literals 5 | 6 | import typing 7 | 8 | if typing.TYPE_CHECKING: 9 | from typing import Text, Tuple 10 | 11 | 12 | def make_repr(class_name, *args, **kwargs): 13 | # type: (Text, *object, **Tuple[object, object]) -> Text 14 | """Generate a repr string. 15 | 16 | Positional arguments should be the positional arguments used to 17 | construct the class. Keyword arguments should consist of tuples of 18 | the attribute value and default. If the value is the default, then 19 | it won't be rendered in the output. 20 | 21 | Example: 22 | >>> class MyClass(object): 23 | ... def __init__(self, name=None): 24 | ... self.name = name 25 | ... def __repr__(self): 26 | ... return make_repr('MyClass', 'foo', name=(self.name, None)) 27 | >>> MyClass('Will') 28 | MyClass('foo', name='Will') 29 | >>> MyClass(None) 30 | MyClass('foo') 31 | 32 | """ 33 | arguments = [repr(arg) for arg in args] 34 | arguments.extend( 35 | [ 36 | "{}={!r}".format(name, value) 37 | for name, (value, default) in sorted(kwargs.items()) 38 | if value != default 39 | ] 40 | ) 41 | return "{}({})".format(class_name, ", ".join(arguments)) 42 | -------------------------------------------------------------------------------- /fs/_typing.py: -------------------------------------------------------------------------------- 1 | """ 2 | Typing objects missing from Python3.5.1 3 | 4 | """ 5 | import sys 6 | 7 | import six 8 | 9 | _PY = sys.version_info 10 | 11 | from typing import overload # type: ignore 12 | 13 | if _PY.major == 3 and _PY.minor == 5 and _PY.micro in (0, 1): 14 | 15 | def overload(func): # pragma: no cover # noqa: F811 16 | return func 17 | 18 | 19 | try: 20 | from typing import Text 21 | except ImportError: # pragma: no cover 22 | Text = six.text_type # type: ignore 23 | -------------------------------------------------------------------------------- /fs/_tzcompat.py: -------------------------------------------------------------------------------- 1 | """Compatibility shim for python2's lack of datetime.timezone. 
2 | 3 | This is the example code from the Python 2 documentation: 4 | https://docs.python.org/2.7/library/datetime.html#tzinfo-objects 5 | """ 6 | 7 | from datetime import timedelta, tzinfo 8 | 9 | ZERO = timedelta(0) 10 | 11 | 12 | class UTC(tzinfo): 13 | """UTC""" 14 | 15 | def utcoffset(self, dt): 16 | return ZERO 17 | 18 | def tzname(self, dt): 19 | return "UTC" 20 | 21 | def dst(self, dt): 22 | return ZERO 23 | 24 | 25 | utc = UTC() 26 | 27 | 28 | class timezone: 29 | utc = utc 30 | -------------------------------------------------------------------------------- /fs/_url_tools.py: -------------------------------------------------------------------------------- 1 | import typing 2 | 3 | import platform 4 | import re 5 | import six 6 | 7 | if typing.TYPE_CHECKING: 8 | from typing import Text 9 | 10 | _WINDOWS_PLATFORM = platform.system() == "Windows" 11 | 12 | 13 | def url_quote(path_snippet): 14 | # type: (Text) -> Text 15 | """Quote a URL without quoting the Windows drive letter, if any. 16 | 17 | On Windows, it will separate drive letter and quote Windows 18 | path alone. No magic on Unix-like path, just pythonic 19 | `~urllib.request.pathname2url`. 20 | 21 | Arguments: 22 | path_snippet (str): a file path, relative or absolute. 23 | 24 | """ 25 | if _WINDOWS_PLATFORM and _has_drive_letter(path_snippet): 26 | drive_letter, path = path_snippet.split(":", 1) 27 | if six.PY2: 28 | path = path.encode("utf-8") 29 | path = six.moves.urllib.request.pathname2url(path) 30 | path_snippet = "{}:{}".format(drive_letter, path) 31 | else: 32 | if six.PY2: 33 | path_snippet = path_snippet.encode("utf-8") 34 | path_snippet = six.moves.urllib.request.pathname2url(path_snippet) 35 | return path_snippet 36 | 37 | 38 | def _has_drive_letter(path_snippet): 39 | # type: (Text) -> bool 40 | """Check whether a path contains a drive letter. 41 | 42 | Arguments: 43 | path_snippet (str): a file path, relative or absolute. 44 | 45 | Example: 46 | >>> _has_drive_letter("D:/Data") 47 | True 48 | >>> _has_drive_letter(r"C:\\System32\\ test") 49 | True 50 | >>> _has_drive_letter("/tmp/abc:test") 51 | False 52 | 53 | """ 54 | windows_drive_pattern = ".:[/\\\\].*$" 55 | return re.match(windows_drive_pattern, path_snippet) is not None 56 | -------------------------------------------------------------------------------- /fs/_version.py: -------------------------------------------------------------------------------- 1 | """Version, used in module and setup.py. 2 | """ 3 | __version__ = "2.4.16" 4 | -------------------------------------------------------------------------------- /fs/appfs.py: -------------------------------------------------------------------------------- 1 | """Manage filesystems in platform-specific application directories. 2 | 3 | These classes abstract away the different requirements for user data 4 | across platforms, which vary in their conventions. They are all 5 | subclasses of `~fs.osfs.OSFS`. 
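A short sketch of typical use, for a hypothetical application called ``myapp``::

    from fs.appfs import UserDataFS

    data_fs = UserDataFS("myapp", author="acme", version="1.0")
    data_fs.writetext("state.json", "{}")

    # the same filesystem can also be opened from an FS URL
    from fs import open_fs
    data_fs = open_fs("userdata://myapp:acme:1.0")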
6 | 7 | """ 8 | # Thanks to authors of https://pypi.org/project/appdirs 9 | 10 | # see http://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx 11 | 12 | import typing 13 | 14 | import abc 15 | import six 16 | from appdirs import AppDirs 17 | 18 | from ._repr import make_repr 19 | from .osfs import OSFS 20 | 21 | if typing.TYPE_CHECKING: 22 | from typing import Optional, Text 23 | 24 | 25 | __all__ = [ 26 | "UserDataFS", 27 | "UserConfigFS", 28 | "SiteDataFS", 29 | "SiteConfigFS", 30 | "UserCacheFS", 31 | "UserLogFS", 32 | ] 33 | 34 | 35 | class _CopyInitMeta(abc.ABCMeta): 36 | """A metaclass that performs a hard copy of the `__init__`. 37 | 38 | This is a fix for Sphinx, which is a pain to configure in a way that 39 | it documents the ``__init__`` method of a class when it is inherited. 40 | Copying ``__init__`` makes it think it is not inherited, and let us 41 | share the documentation between all the `_AppFS` subclasses. 42 | 43 | """ 44 | 45 | def __new__(mcls, classname, bases, cls_dict): 46 | cls_dict.setdefault("__init__", bases[0].__init__) 47 | return super(abc.ABCMeta, mcls).__new__(mcls, classname, bases, cls_dict) 48 | 49 | 50 | @six.add_metaclass(_CopyInitMeta) 51 | class _AppFS(OSFS): 52 | """Abstract base class for an app FS.""" 53 | 54 | # FIXME(@althonos): replace by ClassVar[Text] once 55 | # https://github.com/python/mypy/pull/4718 is accepted 56 | # (subclass override will raise errors until then) 57 | app_dir = None # type: Text 58 | 59 | def __init__( 60 | self, 61 | appname, # type: Text 62 | author=None, # type: Optional[Text] 63 | version=None, # type: Optional[Text] 64 | roaming=False, # type: bool 65 | create=True, # type: bool 66 | ): 67 | # type: (...) -> None 68 | """Create a new application-specific filesystem. 69 | 70 | Arguments: 71 | appname (str): The name of the application. 72 | author (str): The name of the author (used on Windows). 73 | version (str): Optional version string, if a unique location 74 | per version of the application is required. 75 | roaming (bool): If `True`, use a *roaming* profile on 76 | Windows. 77 | create (bool): If `True` (the default) the directory 78 | will be created if it does not exist. 79 | 80 | """ 81 | self.app_dirs = AppDirs(appname, author, version, roaming) 82 | self._create = create 83 | super(_AppFS, self).__init__( 84 | getattr(self.app_dirs, self.app_dir), create=create 85 | ) 86 | 87 | def __repr__(self): 88 | # type: () -> Text 89 | return make_repr( 90 | self.__class__.__name__, 91 | self.app_dirs.appname, 92 | author=(self.app_dirs.appauthor, None), 93 | version=(self.app_dirs.version, None), 94 | roaming=(self.app_dirs.roaming, False), 95 | create=(self._create, True), 96 | ) 97 | 98 | def __str__(self): 99 | # type: () -> Text 100 | return "<{} '{}'>".format( 101 | self.__class__.__name__.lower(), self.app_dirs.appname 102 | ) 103 | 104 | 105 | class UserDataFS(_AppFS): 106 | """A filesystem for per-user application data. 107 | 108 | May also be opened with 109 | ``open_fs('userdata://appname:author:version')``. 110 | 111 | """ 112 | 113 | app_dir = "user_data_dir" 114 | 115 | 116 | class UserConfigFS(_AppFS): 117 | """A filesystem for per-user config data. 118 | 119 | May also be opened with 120 | ``open_fs('userconf://appname:author:version')``. 121 | 122 | """ 123 | 124 | app_dir = "user_config_dir" 125 | 126 | 127 | class UserCacheFS(_AppFS): 128 | """A filesystem for per-user application cache data. 129 | 130 | May also be opened with 131 | ``open_fs('usercache://appname:author:version')``. 
132 | 133 | """ 134 | 135 | app_dir = "user_cache_dir" 136 | 137 | 138 | class SiteDataFS(_AppFS): 139 | """A filesystem for application site data. 140 | 141 | May also be opened with 142 | ``open_fs('sitedata://appname:author:version')``. 143 | 144 | """ 145 | 146 | app_dir = "site_data_dir" 147 | 148 | 149 | class SiteConfigFS(_AppFS): 150 | """A filesystem for application config data. 151 | 152 | May also be opened with 153 | ``open_fs('siteconf://appname:author:version')``. 154 | 155 | """ 156 | 157 | app_dir = "site_config_dir" 158 | 159 | 160 | class UserLogFS(_AppFS): 161 | """A filesystem for per-user application log data. 162 | 163 | May also be opened with 164 | ``open_fs('userlog://appname:author:version')``. 165 | 166 | """ 167 | 168 | app_dir = "user_log_dir" 169 | -------------------------------------------------------------------------------- /fs/constants.py: -------------------------------------------------------------------------------- 1 | """Constants used by PyFilesystem. 2 | """ 3 | 4 | import io 5 | 6 | DEFAULT_CHUNK_SIZE = io.DEFAULT_BUFFER_SIZE * 16 7 | """`int`: the size of a single chunk read from or written to a file. 8 | """ 9 | -------------------------------------------------------------------------------- /fs/enums.py: -------------------------------------------------------------------------------- 1 | """Enums used by PyFilesystem. 2 | """ 3 | 4 | from __future__ import absolute_import, unicode_literals 5 | 6 | import os 7 | from enum import IntEnum, unique 8 | 9 | 10 | @unique 11 | class ResourceType(IntEnum): 12 | """Resource Types. 13 | 14 | Positive values are reserved, negative values are implementation 15 | dependent. 16 | 17 | Most filesystems will support only directory(1) and file(2). Other 18 | types exist to identify more exotic resource types supported 19 | by Linux filesystems. 20 | 21 | """ 22 | 23 | #: Unknown resource type, used if the filesystem is unable to 24 | #: tell what the resource is. 25 | unknown = 0 26 | #: A directory. 27 | directory = 1 28 | #: A simple file. 29 | file = 2 30 | #: A character file. 31 | character = 3 32 | #: A block special file. 33 | block_special_file = 4 34 | #: A first in first out file. 35 | fifo = 5 36 | #: A socket. 37 | socket = 6 38 | #: A symlink. 39 | symlink = 7 40 | 41 | 42 | @unique 43 | class Seek(IntEnum): 44 | """Constants used by `io.IOBase.seek`. 45 | 46 | These match `os.SEEK_CUR`, `os.SEEK_END`, and `os.SEEK_SET` 47 | from the standard library. 48 | 49 | """ 50 | 51 | #: Seek from the current file position. 52 | current = os.SEEK_CUR 53 | #: Seek from the end of the file. 54 | end = os.SEEK_END 55 | #: Seek from the start of the file. 56 | set = os.SEEK_SET 57 | -------------------------------------------------------------------------------- /fs/error_tools.py: -------------------------------------------------------------------------------- 1 | """Tools for managing OS errors. 2 | """ 3 | 4 | from __future__ import print_function, unicode_literals 5 | 6 | import sys 7 | import typing 8 | 9 | import errno 10 | import platform 11 | from contextlib import contextmanager 12 | from six import reraise 13 | 14 | from . 
import errors 15 | 16 | if typing.TYPE_CHECKING: 17 | from typing import Iterator, Optional, Text, Type, Union 18 | 19 | from types import TracebackType 20 | 21 | try: 22 | from collections.abc import Mapping 23 | except ImportError: 24 | from collections import Mapping # noqa: E811 25 | 26 | 27 | _WINDOWS_PLATFORM = platform.system() == "Windows" 28 | 29 | 30 | class _ConvertOSErrors(object): 31 | """Context manager to convert OSErrors in to FS Errors.""" 32 | 33 | FILE_ERRORS = { 34 | 64: errors.RemoteConnectionError, # ENONET 35 | errno.EACCES: errors.PermissionDenied, 36 | errno.ENOENT: errors.ResourceNotFound, 37 | errno.EFAULT: errors.ResourceNotFound, 38 | errno.ESRCH: errors.ResourceNotFound, 39 | errno.ENOTEMPTY: errors.DirectoryNotEmpty, 40 | errno.EEXIST: errors.FileExists, 41 | 183: errors.DirectoryExists, 42 | # errno.ENOTDIR: errors.DirectoryExpected, 43 | errno.ENOTDIR: errors.ResourceNotFound, 44 | errno.EISDIR: errors.FileExpected, 45 | errno.EINVAL: errors.FileExpected, 46 | errno.ENOSPC: errors.InsufficientStorage, 47 | errno.EPERM: errors.PermissionDenied, 48 | errno.ENETDOWN: errors.RemoteConnectionError, 49 | errno.ECONNRESET: errors.RemoteConnectionError, 50 | errno.ENAMETOOLONG: errors.PathError, 51 | errno.EOPNOTSUPP: errors.Unsupported, 52 | errno.ENOSYS: errors.Unsupported, 53 | } 54 | 55 | DIR_ERRORS = FILE_ERRORS.copy() 56 | DIR_ERRORS[errno.ENOTDIR] = errors.DirectoryExpected 57 | DIR_ERRORS[errno.EEXIST] = errors.DirectoryExists 58 | DIR_ERRORS[errno.EINVAL] = errors.DirectoryExpected 59 | 60 | if _WINDOWS_PLATFORM: # pragma: no cover 61 | DIR_ERRORS[13] = errors.DirectoryExpected 62 | DIR_ERRORS[267] = errors.DirectoryExpected 63 | FILE_ERRORS[13] = errors.FileExpected 64 | 65 | def __init__(self, opname, path, directory=False): 66 | # type: (Text, Text, bool) -> None 67 | self._opname = opname 68 | self._path = path 69 | self._directory = directory 70 | 71 | def __enter__(self): 72 | # type: () -> _ConvertOSErrors 73 | return self 74 | 75 | def __exit__( 76 | self, 77 | exc_type, # type: Optional[Type[BaseException]] 78 | exc_value, # type: Optional[BaseException] 79 | traceback, # type: Optional[TracebackType] 80 | ): 81 | # type: (...) -> None 82 | os_errors = self.DIR_ERRORS if self._directory else self.FILE_ERRORS 83 | if exc_type and isinstance(exc_value, EnvironmentError): 84 | _errno = exc_value.errno 85 | fserror = os_errors.get(_errno, errors.OperationFailed) 86 | if _errno == errno.EACCES and sys.platform == "win32": 87 | if getattr(exc_value, "args", None) == 32: # pragma: no cover 88 | fserror = errors.ResourceLocked 89 | reraise(fserror, fserror(self._path, exc=exc_value), traceback) 90 | 91 | 92 | # Stops linter complaining about invalid class name 93 | convert_os_errors = _ConvertOSErrors 94 | 95 | 96 | @contextmanager 97 | def unwrap_errors(path_replace): 98 | # type: (Union[Text, Mapping[Text, Text]]) -> Iterator[None] 99 | """Get a context to map OS errors to their `fs.errors` counterpart. 100 | 101 | The context will re-write the paths in resource exceptions to be 102 | in the same context as the wrapped filesystem. 103 | 104 | The only parameter may be the path from the parent, if only one path 105 | is to be unwrapped. Or it may be a dictionary that maps wrapped 106 | paths on to unwrapped paths. 
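    For example, a wrapper that exposes the parent's ``/foo`` directory as
    its own root might use something like the following (the paths and the
    ``parent_fs`` object are purely illustrative)::

        with unwrap_errors({"/foo/test.txt": "/test.txt"}):
            parent_fs.remove("/foo/test.txt")

    If ``remove`` raises `~fs.errors.ResourceNotFound` for ``'/foo/test.txt'``,
    the exception is re-raised with its ``path`` attribute rewritten to
    ``'/test.txt'``.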
107 | 108 | """ 109 | try: 110 | yield 111 | except errors.ResourceError as e: 112 | if hasattr(e, "path"): 113 | if isinstance(path_replace, Mapping): 114 | e.path = path_replace.get(e.path, e.path) 115 | else: 116 | e.path = path_replace 117 | raise 118 | -------------------------------------------------------------------------------- /fs/filesize.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | """Functions for reporting filesizes. 3 | 4 | The functions declared in this module should cover the different 5 | usecases needed to generate a string representation of a file size 6 | using several different units. Since there are many standards regarding 7 | file size units, three different functions have been implemented. 8 | 9 | See Also: 10 | * `Wikipedia: Binary prefix `_ 11 | 12 | """ 13 | 14 | from __future__ import division, unicode_literals 15 | 16 | import typing 17 | 18 | if typing.TYPE_CHECKING: 19 | from typing import Iterable, SupportsInt, Text 20 | 21 | 22 | __all__ = ["traditional", "decimal", "binary"] 23 | 24 | 25 | def _to_str(size, suffixes, base): 26 | # type: (SupportsInt, Iterable[Text], int) -> Text 27 | try: 28 | size = int(size) 29 | except ValueError: 30 | raise TypeError("filesize requires a numeric value, not {!r}".format(size)) 31 | if size == 1: 32 | return "1 byte" 33 | elif size < base: 34 | return "{:,} bytes".format(size) 35 | 36 | # TODO (dargueta): Don't rely on unit or suffix being defined in the loop. 37 | for i, suffix in enumerate(suffixes, 2): # noqa: B007 38 | unit = base**i 39 | if size < unit: 40 | break 41 | return "{:,.1f} {}".format((base * size / unit), suffix) 42 | 43 | 44 | def traditional(size): 45 | # type: (SupportsInt) -> Text 46 | """Convert a filesize in to a string (powers of 1024, JDEC prefixes). 47 | 48 | In this convention, ``1024 B = 1 KB``. 49 | 50 | This is the format that was used to display the size of DVDs 51 | (*700 MB* meaning actually about *734 003 200 bytes*) before 52 | standardisation of IEC units among manufacturers, and still 53 | used by **Windows** to report the storage capacity of hard 54 | drives (*279.4 GB* meaning *279.4 × 1024³ bytes*). 55 | 56 | Arguments: 57 | size (int): A file size. 58 | 59 | Returns: 60 | `str`: A string containing an abbreviated file size and units. 61 | 62 | Example: 63 | >>> fs.filesize.traditional(30000) 64 | '29.3 KB' 65 | 66 | """ 67 | return _to_str(size, ("KB", "MB", "GB", "TB", "PB", "EB", "ZB", "YB"), 1024) 68 | 69 | 70 | def binary(size): 71 | # type: (SupportsInt) -> Text 72 | """Convert a filesize in to a string (powers of 1024, IEC prefixes). 73 | 74 | In this convention, ``1024 B = 1 KiB``. 75 | 76 | This is the format that has gained adoption among manufacturers 77 | to avoid ambiguity regarding size units, since it explicitly states 78 | using a binary base (*KiB = kibi bytes = kilo binary bytes*). 79 | This format is notably being used by the **Linux** kernel (see 80 | ``man 7 units``). 81 | 82 | Arguments: 83 | int (size): A file size. 84 | 85 | Returns: 86 | `str`: A string containing a abbreviated file size and units. 87 | 88 | Example: 89 | >>> fs.filesize.binary(30000) 90 | '29.3 KiB' 91 | 92 | """ 93 | return _to_str(size, ("KiB", "MiB", "GiB", "TiB", "PiB", "EiB", "ZiB", "YiB"), 1024) 94 | 95 | 96 | def decimal(size): 97 | # type: (SupportsInt) -> Text 98 | """Convert a filesize in to a string (powers of 1000, SI prefixes). 99 | 100 | In this convention, ``1000 B = 1 kB``. 
101 | 102 | This is typically the format used to advertise the storage 103 | capacity of USB flash drives and the like (*256 MB* meaning 104 | actually a storage capacity of more than *256 000 000 B*), 105 | or used by **Mac OS X** since v10.6 to report file sizes. 106 | 107 | Arguments: 108 | int (size): A file size. 109 | 110 | Returns: 111 | `str`: A string containing a abbreviated file size and units. 112 | 113 | Example: 114 | >>> fs.filesize.decimal(30000) 115 | '30.0 kB' 116 | 117 | """ 118 | return _to_str(size, ("kB", "MB", "GB", "TB", "PB", "EB", "ZB", "YB"), 1000) 119 | -------------------------------------------------------------------------------- /fs/lrucache.py: -------------------------------------------------------------------------------- 1 | """Least Recently Used cache mapping. 2 | """ 3 | 4 | from __future__ import absolute_import, unicode_literals 5 | 6 | import typing 7 | 8 | from collections import OrderedDict 9 | 10 | _K = typing.TypeVar("_K") 11 | _V = typing.TypeVar("_V") 12 | 13 | 14 | class LRUCache(OrderedDict, typing.Generic[_K, _V]): 15 | """A dictionary-like container that stores a given maximum items. 16 | 17 | If an additional item is added when the LRUCache is full, the least 18 | recently used key is discarded to make room for the new item. 19 | 20 | """ 21 | 22 | def __init__(self, cache_size): 23 | # type: (int) -> None 24 | """Create a new LRUCache with the given size.""" 25 | self.cache_size = cache_size 26 | super(LRUCache, self).__init__() 27 | 28 | def __setitem__(self, key, value): 29 | # type: (_K, _V) -> None 30 | """Store a new views, potentially discarding an old value.""" 31 | if key not in self: 32 | if len(self) >= self.cache_size: 33 | self.popitem(last=False) 34 | OrderedDict.__setitem__(self, key, value) 35 | 36 | def __getitem__(self, key): 37 | # type: (_K) -> _V 38 | """Get the item, but also makes it most recent.""" 39 | _super = typing.cast(OrderedDict, super(LRUCache, self)) 40 | value = _super.__getitem__(key) 41 | _super.__delitem__(key) 42 | _super.__setitem__(key, value) 43 | return value 44 | -------------------------------------------------------------------------------- /fs/mirror.py: -------------------------------------------------------------------------------- 1 | """Function for *mirroring* a filesystem. 2 | 3 | Mirroring will create a copy of a source filesystem on a destination 4 | filesystem. If there are no files on the destination, then mirroring 5 | is simply a straight copy. If there are any files or directories on the 6 | destination they may be deleted or modified to match the source. 7 | 8 | In order to avoid redundant copying of files, `mirror` can compare 9 | timestamps, and only copy files with a newer modified date. This 10 | timestamp comparison is only done if the file sizes are different. 11 | 12 | This scheme will work if you have mirrored a directory previously, and 13 | you would like to copy any changes. Otherwise you should set the 14 | ``copy_if_newer`` parameter to `False` to guarantee an exact copy, at 15 | the expense of potentially copying extra files. 
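A minimal sketch (the source directory and the destination URLs here are hypothetical)::

    from fs.mirror import mirror

    # exact copy, ignoring timestamps
    mirror("~/projects/site", "mem://", copy_if_newer=False)

    # incremental copy to a network filesystem, using worker threads
    mirror("~/projects/site", "ftp://user:password@example.org/backup", workers=4)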
16 | 17 | """ 18 | 19 | from __future__ import print_function, unicode_literals 20 | 21 | import typing 22 | 23 | from ._bulk import Copier 24 | from .copy import copy_file_internal 25 | from .errors import ResourceNotFound 26 | from .opener import manage_fs 27 | from .tools import is_thread_safe 28 | from .walk import Walker 29 | 30 | if typing.TYPE_CHECKING: 31 | from typing import Callable, Optional, Text, Union 32 | 33 | from .base import FS 34 | from .info import Info 35 | 36 | 37 | def _compare(info1, info2): 38 | # type: (Info, Info) -> bool 39 | """Compare two `Info` objects to see if they should be copied. 40 | 41 | Returns: 42 | bool: `True` if the `Info` are different in size or mtime. 43 | 44 | """ 45 | # Check filesize has changed 46 | if info1.size != info2.size: 47 | return True 48 | # Check modified dates 49 | date1 = info1.modified 50 | date2 = info2.modified 51 | return date1 is None or date2 is None or date1 > date2 52 | 53 | 54 | def mirror( 55 | src_fs, # type: Union[FS, Text] 56 | dst_fs, # type: Union[FS, Text] 57 | walker=None, # type: Optional[Walker] 58 | copy_if_newer=True, # type: bool 59 | workers=0, # type: int 60 | preserve_time=False, # type: bool 61 | ): 62 | # type: (...) -> None 63 | """Mirror files / directories from one filesystem to another. 64 | 65 | Mirroring a filesystem will create an exact copy of ``src_fs`` on 66 | ``dst_fs``, by removing any files / directories on the destination 67 | that aren't on the source, and copying files that aren't. 68 | 69 | Arguments: 70 | src_fs (FS or str): Source filesystem (URL or instance). 71 | dst_fs (FS or str): Destination filesystem (URL or instance). 72 | walker (~fs.walk.Walker, optional): An optional walker instance. 73 | copy_if_newer (bool): Only copy newer files (the default). 74 | workers (int): Number of worker threads used 75 | (0 for single threaded). Set to a relatively low number 76 | for network filesystems, 4 would be a good start. 77 | preserve_time (bool): If `True`, try to preserve mtime of the 78 | resources (defaults to `False`). 79 | 80 | """ 81 | 82 | def src(): 83 | return manage_fs(src_fs, writeable=False) 84 | 85 | def dst(): 86 | return manage_fs(dst_fs, create=True) 87 | 88 | with src() as _src_fs, dst() as _dst_fs: 89 | _thread_safe = is_thread_safe(_src_fs, _dst_fs) 90 | with Copier( 91 | num_workers=workers if _thread_safe else 0, preserve_time=preserve_time 92 | ) as copier: 93 | with _src_fs.lock(), _dst_fs.lock(): 94 | _mirror( 95 | _src_fs, 96 | _dst_fs, 97 | walker=walker, 98 | copy_if_newer=copy_if_newer, 99 | copy_file=copier.copy, 100 | preserve_time=preserve_time, 101 | ) 102 | 103 | 104 | def _mirror( 105 | src_fs, # type: FS 106 | dst_fs, # type: FS 107 | walker=None, # type: Optional[Walker] 108 | copy_if_newer=True, # type: bool 109 | copy_file=copy_file_internal, # type: Callable[[FS, str, FS, str, bool], None] 110 | preserve_time=False, # type: bool 111 | ): 112 | # type: (...) 
-> None 113 | walker = walker or Walker() 114 | walk = walker.walk(src_fs, namespaces=["details"]) 115 | for path, dirs, files in walk: 116 | try: 117 | dst = { 118 | info.name: info for info in dst_fs.scandir(path, namespaces=["details"]) 119 | } 120 | except ResourceNotFound: 121 | dst_fs.makedir(path) 122 | dst = {} 123 | 124 | # Copy files 125 | for _file in files: 126 | _path = _file.make_path(path) 127 | dst_file = dst.pop(_file.name, None) 128 | if dst_file is not None: 129 | if dst_file.is_dir: 130 | # Destination is a directory, remove it 131 | dst_fs.removetree(_path) 132 | else: 133 | # Compare file info 134 | if copy_if_newer and not _compare(_file, dst_file): 135 | continue 136 | copy_file(src_fs, _path, dst_fs, _path, preserve_time) 137 | 138 | # Make directories 139 | for _dir in dirs: 140 | _path = _dir.make_path(path) 141 | dst_dir = dst.pop(_dir.name, None) 142 | if dst_dir is not None: 143 | # Directory name exists on dst 144 | if not dst_dir.is_dir: 145 | # Not a directory, so remove it 146 | dst_fs.remove(_path) 147 | else: 148 | # Make the directory in dst 149 | dst_fs.makedir(_path, recreate=True) 150 | 151 | # Remove any remaining resources 152 | while dst: 153 | _, info = dst.popitem() 154 | _path = info.make_path(path) 155 | if info.is_dir: 156 | dst_fs.removetree(_path) 157 | else: 158 | dst_fs.remove(_path) 159 | -------------------------------------------------------------------------------- /fs/opener/__init__.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | """Open filesystems from a URL. 3 | """ 4 | 5 | # Declare fs.opener as a namespace package 6 | __import__("pkg_resources").declare_namespace(__name__) # type: ignore 7 | 8 | # Import opener modules so that `registry.install` if called on each opener 9 | from . import appfs, ftpfs, memoryfs, osfs, tarfs, tempfs, zipfs 10 | 11 | # Import objects into fs.opener namespace 12 | from .base import Opener 13 | from .parse import parse_fs_url as parse 14 | from .registry import registry 15 | 16 | # Alias functions defined as Registry methods 17 | open_fs = registry.open_fs 18 | open = registry.open 19 | manage_fs = registry.manage_fs 20 | 21 | # __all__ with aliases and classes 22 | __all__ = ["registry", "Opener", "open_fs", "open", "manage_fs", "parse"] 23 | -------------------------------------------------------------------------------- /fs/opener/appfs.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | """``AppFS`` opener definition. 3 | """ 4 | 5 | from __future__ import absolute_import, print_function, unicode_literals 6 | 7 | import typing 8 | 9 | from .base import Opener 10 | from .errors import OpenerError 11 | from .registry import registry 12 | 13 | if typing.TYPE_CHECKING: 14 | from typing import Text, Union 15 | 16 | from ..appfs import _AppFS 17 | from ..subfs import SubFS 18 | from .parse import ParseResult 19 | 20 | 21 | @registry.install 22 | class AppFSOpener(Opener): 23 | """``AppFS`` opener.""" 24 | 25 | protocols = ["userdata", "userconf", "sitedata", "siteconf", "usercache", "userlog"] 26 | _protocol_mapping = None 27 | 28 | def open_fs( 29 | self, 30 | fs_url, # type: Text 31 | parse_result, # type: ParseResult 32 | writeable, # type: bool 33 | create, # type: bool 34 | cwd, # type: Text 35 | ): 36 | # type: (...) -> Union[_AppFS, SubFS[_AppFS]] 37 | 38 | from .. 
import appfs 39 | from ..subfs import ClosingSubFS 40 | 41 | if self._protocol_mapping is None: 42 | self._protocol_mapping = { 43 | "userdata": appfs.UserDataFS, 44 | "userconf": appfs.UserConfigFS, 45 | "sitedata": appfs.SiteDataFS, 46 | "siteconf": appfs.SiteConfigFS, 47 | "usercache": appfs.UserCacheFS, 48 | "userlog": appfs.UserLogFS, 49 | } 50 | 51 | fs_class = self._protocol_mapping[parse_result.protocol] 52 | resource, delim, path = parse_result.resource.partition("/") 53 | tokens = resource.split(":", 3) 54 | if len(tokens) == 2: 55 | appname, author = tokens 56 | version = None 57 | elif len(tokens) == 3: 58 | appname, author, version = tokens 59 | else: 60 | raise OpenerError( 61 | "resource should be <appname>:<author> " 62 | "or <appname>:<author>:<version>" 63 | ) 64 | 65 | app_fs = fs_class(appname, author=author, version=version, create=create) 66 | 67 | if delim: 68 | if create: 69 | app_fs.makedir(path, recreate=True) 70 | return app_fs.opendir(path, factory=ClosingSubFS) 71 | 72 | return app_fs 73 | -------------------------------------------------------------------------------- /fs/opener/base.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | """`Opener` abstract base class. 3 | """ 4 | 5 | import typing 6 | 7 | import abc 8 | import six 9 | 10 | if typing.TYPE_CHECKING: 11 | from typing import List, Text 12 | 13 | from ..base import FS 14 | from .parse import ParseResult 15 | 16 | 17 | @six.add_metaclass(abc.ABCMeta) 18 | class Opener(object): 19 | """The base class for filesystem openers. 20 | 21 | An opener is responsible for opening a filesystem for a given 22 | protocol. 23 | 24 | """ 25 | 26 | protocols = [] # type: List[Text] 27 | 28 | def __repr__(self): 29 | # type: () -> Text 30 | return "<opener {!r}>".format(self.protocols) 31 | 32 | @abc.abstractmethod 33 | def open_fs( 34 | self, 35 | fs_url, # type: Text 36 | parse_result, # type: ParseResult 37 | writeable, # type: bool 38 | create, # type: bool 39 | cwd, # type: Text 40 | ): 41 | # type: (...) -> FS 42 | """Open a filesystem object from a FS URL. 43 | 44 | Arguments: 45 | fs_url (str): A filesystem URL. 46 | parse_result (~fs.opener.parse.ParseResult): A parsed 47 | filesystem URL. 48 | writeable (bool): `True` if the filesystem must be writable. 49 | create (bool): `True` if the filesystem should be created 50 | if it does not exist. 51 | cwd (str): The current working directory (generally only 52 | relevant for OS filesystems). 53 | 54 | Raises: 55 | fs.opener.errors.OpenerError: If a filesystem could not 56 | be opened for any reason. 57 | 58 | Returns: 59 | `~fs.base.FS`: A filesystem instance. 60 | 61 | """ 62 | -------------------------------------------------------------------------------- /fs/opener/errors.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | """Errors raised when attempting to open a filesystem.
3 | """ 4 | 5 | 6 | class ParseError(ValueError): 7 | """Attempt to parse an invalid FS URL.""" 8 | 9 | 10 | class OpenerError(Exception): 11 | """Base exception for opener related errors.""" 12 | 13 | 14 | class UnsupportedProtocol(OpenerError): 15 | """No opener found for the given protocol.""" 16 | 17 | 18 | class EntryPointError(OpenerError): 19 | """An entry point could not be loaded.""" 20 | 21 | 22 | class NotWriteable(OpenerError): 23 | """A writable FS could not be created.""" 24 | -------------------------------------------------------------------------------- /fs/opener/ftpfs.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | """`FTPFS` opener definition. 3 | """ 4 | 5 | from __future__ import absolute_import, print_function, unicode_literals 6 | 7 | import typing 8 | 9 | from ..errors import CreateFailed 10 | from .base import Opener 11 | from .registry import registry 12 | 13 | if typing.TYPE_CHECKING: 14 | from typing import Text, Union 15 | 16 | from ..ftpfs import FTPFS # noqa: F401 17 | from ..subfs import SubFS 18 | from .parse import ParseResult 19 | 20 | 21 | @registry.install 22 | class FTPOpener(Opener): 23 | """`FTPFS` opener.""" 24 | 25 | protocols = ["ftp", "ftps"] 26 | 27 | @CreateFailed.catch_all 28 | def open_fs( 29 | self, 30 | fs_url, # type: Text 31 | parse_result, # type: ParseResult 32 | writeable, # type: bool 33 | create, # type: bool 34 | cwd, # type: Text 35 | ): 36 | # type: (...) -> Union[FTPFS, SubFS[FTPFS]] 37 | from ..ftpfs import FTPFS 38 | from ..subfs import ClosingSubFS 39 | 40 | ftp_host, _, dir_path = parse_result.resource.partition("/") 41 | ftp_host, _, ftp_port = ftp_host.partition(":") 42 | ftp_port = int(ftp_port) if ftp_port.isdigit() else 21 43 | ftp_fs = FTPFS( 44 | ftp_host, 45 | port=ftp_port, 46 | user=parse_result.username, 47 | passwd=parse_result.password, 48 | proxy=parse_result.params.get("proxy"), 49 | timeout=int(parse_result.params.get("timeout", "10")), 50 | tls=bool(parse_result.protocol == "ftps"), 51 | ) 52 | if dir_path: 53 | if create: 54 | ftp_fs.makedirs(dir_path, recreate=True) 55 | return ftp_fs.opendir(dir_path, factory=ClosingSubFS) 56 | else: 57 | return ftp_fs 58 | -------------------------------------------------------------------------------- /fs/opener/memoryfs.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | """`MemoryFS` opener definition. 3 | """ 4 | 5 | from __future__ import absolute_import, print_function, unicode_literals 6 | 7 | import typing 8 | 9 | from .base import Opener 10 | from .registry import registry 11 | 12 | if typing.TYPE_CHECKING: 13 | from typing import Text 14 | 15 | from ..memoryfs import MemoryFS # noqa: F401 16 | from .parse import ParseResult 17 | 18 | 19 | @registry.install 20 | class MemOpener(Opener): 21 | """`MemoryFS` opener.""" 22 | 23 | protocols = ["mem"] 24 | 25 | def open_fs( 26 | self, 27 | fs_url, # type: Text 28 | parse_result, # type: ParseResult 29 | writeable, # type: bool 30 | create, # type: bool 31 | cwd, # type: Text 32 | ): 33 | # type: (...) -> MemoryFS 34 | from ..memoryfs import MemoryFS 35 | 36 | mem_fs = MemoryFS() 37 | return mem_fs 38 | -------------------------------------------------------------------------------- /fs/opener/osfs.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | """`OSFS` opener definition. 
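A small usage sketch (not part of the source files above) showing how these registered openers are reached through ``fs.open_fs``; the ``mem://`` URL needs no setup, and a bare path falls back to the default ``osfs`` protocol:

from fs import open_fs

# "mem://" always opens a brand-new, empty MemoryFS.
mem_fs = open_fs("mem://")
mem_fs.writetext("hello.txt", "Hello, World")
assert not open_fs("mem://").exists("hello.txt")

# A plain path (no "://") is treated as "osfs://" relative to the cwd.
cwd_fs = open_fs(".")
mem_fs.close()
cwd_fs.close()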
3 | """ 4 | 5 | from __future__ import absolute_import, print_function, unicode_literals 6 | 7 | import typing 8 | 9 | from .base import Opener 10 | from .registry import registry 11 | 12 | if typing.TYPE_CHECKING: 13 | from typing import Text 14 | 15 | from ..osfs import OSFS # noqa: F401 16 | from .parse import ParseResult 17 | 18 | 19 | @registry.install 20 | class OSFSOpener(Opener): 21 | """`OSFS` opener.""" 22 | 23 | protocols = ["file", "osfs"] 24 | 25 | def open_fs( 26 | self, 27 | fs_url, # type: Text 28 | parse_result, # type: ParseResult 29 | writeable, # type: bool 30 | create, # type: bool 31 | cwd, # type: Text 32 | ): 33 | # type: (...) -> OSFS 34 | from os.path import abspath, expanduser, join, normpath 35 | 36 | from ..osfs import OSFS 37 | 38 | _path = abspath(join(cwd, expanduser(parse_result.resource))) 39 | path = normpath(_path) 40 | osfs = OSFS(path, create=create) 41 | return osfs 42 | -------------------------------------------------------------------------------- /fs/opener/parse.py: -------------------------------------------------------------------------------- 1 | """Function to parse FS URLs in to their constituent parts. 2 | """ 3 | 4 | from __future__ import absolute_import, print_function, unicode_literals 5 | 6 | import typing 7 | 8 | import collections 9 | import re 10 | import six 11 | from six.moves.urllib.parse import parse_qs, unquote 12 | 13 | from .errors import ParseError 14 | 15 | if typing.TYPE_CHECKING: 16 | from typing import Optional, Text 17 | 18 | 19 | class ParseResult( 20 | collections.namedtuple( 21 | "ParseResult", 22 | ["protocol", "username", "password", "resource", "params", "path"], 23 | ) 24 | ): 25 | """A named tuple containing fields of a parsed FS URL. 26 | 27 | Attributes: 28 | protocol (str): The protocol part of the url, e.g. ``osfs`` 29 | or ``ftp``. 30 | username (str, optional): A username, or `None`. 31 | password (str, optional): A password, or `None`. 32 | resource (str): A *resource*, typically a domain and path, e.g. 33 | ``ftp.example.org/dir``. 34 | params (dict): A dictionary of parameters extracted from the 35 | query string. 36 | path (str, optional): A path within the filesystem, or `None`. 37 | 38 | """ 39 | 40 | 41 | _RE_FS_URL = re.compile( 42 | r""" 43 | ^ 44 | (.*?) 45 | :\/\/ 46 | 47 | (?: 48 | (?:(.*?)@(.*?)) 49 | |(.*?) 50 | ) 51 | 52 | (?: 53 | !(.*?)$ 54 | )*$ 55 | """, 56 | re.VERBOSE, 57 | ) 58 | 59 | 60 | def parse_fs_url(fs_url): 61 | # type: (Text) -> ParseResult 62 | """Parse a Filesystem URL and return a `ParseResult`. 63 | 64 | Arguments: 65 | fs_url (str): A filesystem URL. 66 | 67 | Returns: 68 | ~fs.opener.parse.ParseResult: a parse result instance. 69 | 70 | Raises: 71 | ~fs.errors.ParseError: if the FS URL is not valid. 
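A worked example of ``parse_fs_url`` (the URL below is illustrative; the credentials and host are made up):

from fs.opener.parse import parse_fs_url

result = parse_fs_url("ftp://will:daffodil@ftp.example.org/private?timeout=30!/files")
assert result.protocol == "ftp"
assert result.username == "will"
assert result.password == "daffodil"
assert result.resource == "ftp.example.org/private"
assert result.params == {"timeout": "30"}
assert result.path == "/files"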
72 | 73 | """ 74 | match = _RE_FS_URL.match(fs_url) 75 | if match is None: 76 | raise ParseError("{!r} is not a fs2 url".format(fs_url)) 77 | 78 | fs_name, credentials, url1, url2, path = match.groups() 79 | if not credentials: 80 | username = None # type: Optional[Text] 81 | password = None # type: Optional[Text] 82 | url = url2 83 | else: 84 | username, _, password = credentials.partition(":") 85 | username = unquote(username) 86 | password = unquote(password) 87 | url = url1 88 | url, has_qs, qs = url.partition("?") 89 | resource = unquote(url) 90 | if has_qs: 91 | _params = parse_qs(qs, keep_blank_values=True) 92 | params = {k: unquote(v[0]) for k, v in six.iteritems(_params)} 93 | else: 94 | params = {} 95 | return ParseResult(fs_name, username, password, resource, params, path) 96 | -------------------------------------------------------------------------------- /fs/opener/py.typed: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PyFilesystem/pyfilesystem2/77a8562785fc37cb2e30bdcd39c133097ba62dce/fs/opener/py.typed -------------------------------------------------------------------------------- /fs/opener/tarfs.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | """`TarFS` opener definition. 3 | """ 4 | 5 | from __future__ import absolute_import, print_function, unicode_literals 6 | 7 | import typing 8 | 9 | from .base import Opener 10 | from .errors import NotWriteable 11 | from .registry import registry 12 | 13 | if typing.TYPE_CHECKING: 14 | from typing import Text 15 | 16 | from ..tarfs import TarFS # noqa: F401 17 | from .parse import ParseResult 18 | 19 | 20 | @registry.install 21 | class TarOpener(Opener): 22 | """`TarFS` opener.""" 23 | 24 | protocols = ["tar"] 25 | 26 | def open_fs( 27 | self, 28 | fs_url, # type: Text 29 | parse_result, # type: ParseResult 30 | writeable, # type: bool 31 | create, # type: bool 32 | cwd, # type: Text 33 | ): 34 | # type: (...) -> TarFS 35 | from ..tarfs import TarFS 36 | 37 | if not create and writeable: 38 | raise NotWriteable("Unable to open existing TAR file for writing") 39 | tar_fs = TarFS(parse_result.resource, write=create) 40 | return tar_fs 41 | -------------------------------------------------------------------------------- /fs/opener/tempfs.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | """`TempFS` opener definition. 3 | """ 4 | 5 | from __future__ import absolute_import, print_function, unicode_literals 6 | 7 | import typing 8 | 9 | from .base import Opener 10 | from .registry import registry 11 | 12 | if typing.TYPE_CHECKING: 13 | from typing import Text 14 | 15 | from ..tempfs import TempFS # noqa: F401 16 | from .parse import ParseResult 17 | 18 | 19 | @registry.install 20 | class TempOpener(Opener): 21 | """`TempFS` opener.""" 22 | 23 | protocols = ["temp"] 24 | 25 | def open_fs( 26 | self, 27 | fs_url, # type: Text 28 | parse_result, # type: ParseResult 29 | writeable, # type: bool 30 | create, # type: bool 31 | cwd, # type: Text 32 | ): 33 | # type: (...) -> TempFS 34 | from ..tempfs import TempFS 35 | 36 | temp_fs = TempFS(identifier=parse_result.resource) 37 | return temp_fs 38 | -------------------------------------------------------------------------------- /fs/opener/zipfs.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | """`ZipFS` opener definition. 
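Both the tar and zip openers refuse to open an existing archive for writing; passing ``create`` is the only way to obtain a writeable archive. A sketch of that guard, assuming ``backup.zip`` is a scratch file in the current working directory:

import os
from fs import open_fs
from fs.opener.errors import NotWriteable

# Creating a brand-new archive is allowed...
with open_fs("zip://backup.zip", create=True) as new_zip:
    new_zip.writetext("hello.txt", "Hello")

# ...but asking for a writeable handle on an existing archive is refused.
try:
    open_fs("zip://backup.zip", writeable=True)
except NotWriteable:
    pass
os.remove("backup.zip")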
3 | """ 4 | 5 | from __future__ import absolute_import, print_function, unicode_literals 6 | 7 | import typing 8 | 9 | from .base import Opener 10 | from .errors import NotWriteable 11 | from .registry import registry 12 | 13 | if typing.TYPE_CHECKING: 14 | from typing import Text 15 | 16 | from ..zipfs import ZipFS # noqa: F401 17 | from .parse import ParseResult 18 | 19 | 20 | @registry.install 21 | class ZipOpener(Opener): 22 | """`ZipFS` opener.""" 23 | 24 | protocols = ["zip"] 25 | 26 | def open_fs( 27 | self, 28 | fs_url, # type: Text 29 | parse_result, # type: ParseResult 30 | writeable, # type: bool 31 | create, # type: bool 32 | cwd, # type: Text 33 | ): 34 | # type: (...) -> ZipFS 35 | from ..zipfs import ZipFS 36 | 37 | if not create and writeable: 38 | raise NotWriteable("Unable to open existing ZIP file for writing") 39 | zip_fs = ZipFS(parse_result.resource, write=create) 40 | return zip_fs 41 | -------------------------------------------------------------------------------- /fs/py.typed: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PyFilesystem/pyfilesystem2/77a8562785fc37cb2e30bdcd39c133097ba62dce/fs/py.typed -------------------------------------------------------------------------------- /fs/subfs.py: -------------------------------------------------------------------------------- 1 | """Manage a directory in a *parent* filesystem. 2 | """ 3 | 4 | from __future__ import print_function, unicode_literals 5 | 6 | import typing 7 | 8 | import six 9 | 10 | from .path import abspath, join, normpath, relpath 11 | from .wrapfs import WrapFS 12 | 13 | if typing.TYPE_CHECKING: 14 | from typing import Text, Tuple 15 | 16 | from .base import FS # noqa: F401 17 | 18 | 19 | _F = typing.TypeVar("_F", bound="FS", covariant=True) 20 | 21 | 22 | @six.python_2_unicode_compatible 23 | class SubFS(WrapFS[_F], typing.Generic[_F]): 24 | """A sub-directory on a parent filesystem. 25 | 26 | A SubFS is a filesystem object that maps to a sub-directory of 27 | another filesystem. This is the object that is returned by 28 | `~fs.base.FS.opendir`. 29 | 30 | """ 31 | 32 | def __init__(self, parent_fs, path): # noqa: D107 33 | # type: (_F, Text) -> None 34 | super(SubFS, self).__init__(parent_fs) 35 | self._sub_dir = abspath(normpath(path)) 36 | 37 | def __repr__(self): 38 | # type: () -> Text 39 | return "{}({!r}, {!r})".format( 40 | self.__class__.__name__, self._wrap_fs, self._sub_dir 41 | ) 42 | 43 | def __str__(self): 44 | # type: () -> Text 45 | return "{parent}{dir}".format(parent=self._wrap_fs, dir=self._sub_dir) 46 | 47 | def delegate_fs(self): 48 | # type: () -> _F 49 | return self._wrap_fs 50 | 51 | def delegate_path(self, path): 52 | # type: (Text) -> Tuple[_F, Text] 53 | _path = join(self._sub_dir, relpath(normpath(path))) 54 | return self._wrap_fs, _path 55 | 56 | 57 | class ClosingSubFS(SubFS[_F], typing.Generic[_F]): 58 | """A version of `SubFS` which closes its parent when closed.""" 59 | 60 | def close(self): 61 | # type: () -> None 62 | self.delegate_fs().close() 63 | super(ClosingSubFS, self).close() 64 | -------------------------------------------------------------------------------- /fs/tempfs.py: -------------------------------------------------------------------------------- 1 | """Manage filesystems in temporary locations. 2 | 3 | A temporary filesytem is stored in a location defined by your OS 4 | (``/tmp`` on linux). The contents are deleted when the filesystem 5 | is closed. 
6 | 7 | A `TempFS` is a good way of preparing a directory structure in advance, 8 | that you can later copy. It can also be used as a temporary data store. 9 | 10 | """ 11 | 12 | from __future__ import print_function, unicode_literals 13 | 14 | import typing 15 | 16 | import shutil 17 | import six 18 | import tempfile 19 | 20 | from . import errors 21 | from .osfs import OSFS 22 | 23 | if typing.TYPE_CHECKING: 24 | from typing import Optional, Text 25 | 26 | 27 | @six.python_2_unicode_compatible 28 | class TempFS(OSFS): 29 | """A temporary filesystem on the OS. 30 | 31 | Temporary filesystems are created using the `tempfile.mkdtemp` 32 | function to obtain a temporary folder in an OS-specific location. 33 | You can provide an alternative location with the ``temp_dir`` 34 | argument of the constructor. 35 | 36 | Examples: 37 | Create with the constructor:: 38 | 39 | >>> from fs.tempfs import TempFS 40 | >>> tmp_fs = TempFS() 41 | 42 | Or via an FS URL:: 43 | 44 | >>> import fs 45 | >>> tmp_fs = fs.open_fs("temp://") 46 | 47 | Use a specific identifier for the temporary folder to better 48 | illustrate its purpose:: 49 | 50 | >>> named_tmp_fs = fs.open_fs("temp://local_copy") 51 | >>> named_tmp_fs = TempFS(identifier="local_copy") 52 | 53 | """ 54 | 55 | def __init__( 56 | self, 57 | identifier="__tempfs__", # type: Text 58 | temp_dir=None, # type: Optional[Text] 59 | auto_clean=True, # type: bool 60 | ignore_clean_errors=True, # type: bool 61 | ): 62 | # type: (...) -> None 63 | """Create a new `TempFS` instance. 64 | 65 | Arguments: 66 | identifier (str): A string to distinguish the directory within 67 | the OS temp location, used as part of the directory name. 68 | temp_dir (str, optional): An OS path to your temp directory 69 | (leave as `None` to auto-detect). 70 | auto_clean (bool): If `True` (the default), the directory 71 | contents will be wiped on close. 72 | ignore_clean_errors (bool): If `True` (the default), any errors 73 | in the clean process will be suppressed. If `False`, they 74 | will be raised. 75 | 76 | """ 77 | self.identifier = identifier 78 | self._auto_clean = auto_clean 79 | self._ignore_clean_errors = ignore_clean_errors 80 | self._cleaned = False 81 | 82 | self.identifier = identifier.replace("/", "-") 83 | 84 | self._temp_dir = tempfile.mkdtemp(identifier or "fsTempFS", dir=temp_dir) 85 | super(TempFS, self).__init__(self._temp_dir) 86 | 87 | def __repr__(self): 88 | # type: () -> Text 89 | return "TempFS()" 90 | 91 | def __str__(self): 92 | # type: () -> Text 93 | return "<tempfs '{}'>".format(self._temp_dir) 94 | 95 | def close(self): 96 | # type: () -> None 97 | """Close the filesystem and release any resources. 98 | 99 | It is important to call this method when you have finished 100 | working with the filesystem. Some filesystems may not finalize 101 | changes until they are closed (archives for example). You may 102 | call this method explicitly (it is safe to call close multiple 103 | times), or you can use the filesystem as a context manager to 104 | automatically close. 105 | 106 | Hint: 107 | Depending on the value of ``auto_clean`` passed when creating 108 | the `TempFS`, the underlying temporary folder may be removed 109 | or not.
110 | 111 | Example: 112 | >>> tmp_fs = TempFS(auto_clean=False) 113 | >>> syspath = tmp_fs.getsyspath("/") 114 | >>> tmp_fs.close() 115 | >>> os.path.exists(syspath) 116 | True 117 | 118 | """ 119 | if self._auto_clean: 120 | self.clean() 121 | super(TempFS, self).close() 122 | 123 | def clean(self): 124 | # type: () -> None 125 | """Clean (delete) temporary files created by this filesystem.""" 126 | if self._cleaned: 127 | return 128 | 129 | try: 130 | shutil.rmtree(self._temp_dir) 131 | except Exception as error: 132 | if not self._ignore_clean_errors: 133 | raise errors.OperationFailed( 134 | msg="failed to remove temporary directory; {}".format(error), 135 | exc=error, 136 | ) 137 | self._cleaned = True 138 | -------------------------------------------------------------------------------- /fs/time.py: -------------------------------------------------------------------------------- 1 | """Time related tools. 2 | """ 3 | 4 | from __future__ import print_function, unicode_literals 5 | 6 | import typing 7 | 8 | from calendar import timegm 9 | from datetime import datetime 10 | 11 | try: 12 | from datetime import timezone 13 | except ImportError: 14 | from ._tzcompat import timezone # type: ignore 15 | 16 | if typing.TYPE_CHECKING: 17 | from typing import Optional 18 | 19 | 20 | def datetime_to_epoch(d): 21 | # type: (datetime) -> int 22 | """Convert datetime to epoch.""" 23 | return timegm(d.utctimetuple()) 24 | 25 | 26 | @typing.overload 27 | def epoch_to_datetime(t): # noqa: D103 28 | # type: (None) -> None 29 | pass 30 | 31 | 32 | @typing.overload 33 | def epoch_to_datetime(t): # noqa: D103 34 | # type: (int) -> datetime 35 | pass 36 | 37 | 38 | def epoch_to_datetime(t): 39 | # type: (Optional[int]) -> Optional[datetime] 40 | """Convert epoch time to a UTC datetime.""" 41 | if t is None: 42 | return None 43 | return datetime.fromtimestamp(t, tz=timezone.utc) 44 | -------------------------------------------------------------------------------- /fs/tools.py: -------------------------------------------------------------------------------- 1 | """Miscellaneous tools for operating on filesystems. 2 | """ 3 | 4 | from __future__ import print_function, unicode_literals 5 | 6 | import typing 7 | 8 | from . import errors 9 | from .errors import DirectoryNotEmpty, ResourceNotFound 10 | from .path import abspath, dirname, normpath, recursepath 11 | 12 | if typing.TYPE_CHECKING: 13 | from typing import IO, List, Optional, Text, Union 14 | 15 | from .base import FS 16 | 17 | 18 | def remove_empty(fs, path): 19 | # type: (FS, Text) -> None 20 | """Remove all empty parents. 21 | 22 | Arguments: 23 | fs (FS): A filesystem instance. 24 | path (str): Path to a directory on the filesystem. 25 | 26 | """ 27 | path = abspath(normpath(path)) 28 | try: 29 | while path not in ("", "/"): 30 | fs.removedir(path) 31 | path = dirname(path) 32 | except DirectoryNotEmpty: 33 | pass 34 | 35 | 36 | def copy_file_data(src_file, dst_file, chunk_size=None): 37 | # type: (IO, IO, Optional[int]) -> None 38 | """Copy data from one file object to another. 39 | 40 | Arguments: 41 | src_file (io.IOBase): File open for reading. 42 | dst_file (io.IOBase): File open for writing. 43 | chunk_size (int): Number of bytes to copy at 44 | a time (or `None` to use sensible default). 
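A minimal usage sketch for ``copy_file_data`` above, using standard-library ``io.BytesIO`` objects in place of real file objects (any readable/writable pair behaves the same):

import io
from fs.tools import copy_file_data

src_file = io.BytesIO(b"Hello, World")
dst_file = io.BytesIO()
# A smaller chunk_size only means more iterations of the read/write loop.
copy_file_data(src_file, dst_file, chunk_size=4)
assert dst_file.getvalue() == b"Hello, World"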
45 | 46 | """ 47 | _chunk_size = 1024 * 1024 if chunk_size is None else chunk_size 48 | read = src_file.read 49 | write = dst_file.write 50 | # The 'or None' is so that it works with binary and text files 51 | for chunk in iter( 52 | lambda: read(_chunk_size) or None, None 53 | ): # type: Optional[Union[bytes, str]] 54 | write(chunk) 55 | 56 | 57 | def get_intermediate_dirs(fs, dir_path): 58 | # type: (FS, Text) -> List[Text] 59 | """Get a list of non-existing intermediate directories. 60 | 61 | Arguments: 62 | fs (FS): A filesystem instance. 63 | dir_path (str): A path to a new directory on the filesystem. 64 | 65 | Returns: 66 | list: A list of non-existing paths. 67 | 68 | Raises: 69 | ~fs.errors.DirectoryExpected: If a path component 70 | references a file and not a directory. 71 | 72 | """ 73 | intermediates = [] 74 | with fs.lock(): 75 | for path in recursepath(abspath(dir_path), reverse=True): 76 | try: 77 | resource = fs.getinfo(path) 78 | except ResourceNotFound: 79 | intermediates.append(abspath(path)) 80 | else: 81 | if resource.is_dir: 82 | break 83 | raise errors.DirectoryExpected(dir_path) 84 | return intermediates[::-1][:-1] 85 | 86 | 87 | def is_thread_safe(*filesystems): 88 | # type: (FS) -> bool 89 | """Check if all filesystems are thread-safe. 90 | 91 | Arguments: 92 | filesystems (FS): Filesystems instances to check. 93 | 94 | Returns: 95 | bool: if all filesystems are thread safe. 96 | 97 | """ 98 | return all(fs.getmeta().get("thread_safe", False) for fs in filesystems) 99 | -------------------------------------------------------------------------------- /fs/wildcard.py: -------------------------------------------------------------------------------- 1 | """Match wildcard filenames. 2 | """ 3 | # Adapted from https://hg.python.org/cpython/file/2.7/Lib/fnmatch.py 4 | 5 | from __future__ import print_function, unicode_literals 6 | 7 | import typing 8 | 9 | import re 10 | from functools import partial 11 | 12 | from .lrucache import LRUCache 13 | 14 | if typing.TYPE_CHECKING: 15 | from typing import Callable, Iterable, Pattern, Text, Tuple 16 | 17 | 18 | _PATTERN_CACHE = LRUCache(1000) # type: LRUCache[Tuple[Text, bool], Pattern] 19 | 20 | 21 | def match(pattern, name): 22 | # type: (Text, Text) -> bool 23 | """Test whether a name matches a wildcard pattern. 24 | 25 | Arguments: 26 | pattern (str): A wildcard pattern, e.g. ``"*.py"``. 27 | name (str): A filename. 28 | 29 | Returns: 30 | bool: `True` if the filename matches the pattern. 31 | 32 | """ 33 | try: 34 | re_pat = _PATTERN_CACHE[(pattern, True)] 35 | except KeyError: 36 | res = "(?ms)" + _translate(pattern) + r"\Z" 37 | _PATTERN_CACHE[(pattern, True)] = re_pat = re.compile(res) 38 | return re_pat.match(name) is not None 39 | 40 | 41 | def imatch(pattern, name): 42 | # type: (Text, Text) -> bool 43 | """Test whether a name matches a wildcard pattern (case insensitive). 44 | 45 | Arguments: 46 | pattern (str): A wildcard pattern, e.g. ``"*.py"``. 47 | name (bool): A filename. 48 | 49 | Returns: 50 | bool: `True` if the filename matches the pattern. 51 | 52 | """ 53 | try: 54 | re_pat = _PATTERN_CACHE[(pattern, False)] 55 | except KeyError: 56 | res = "(?ms)" + _translate(pattern, case_sensitive=False) + r"\Z" 57 | _PATTERN_CACHE[(pattern, False)] = re_pat = re.compile(res, re.IGNORECASE) 58 | return re_pat.match(name) is not None 59 | 60 | 61 | def match_any(patterns, name): 62 | # type: (Iterable[Text], Text) -> bool 63 | """Test if a name matches any of a list of patterns. 
64 | 65 | Will return `True` if ``patterns`` is an empty list. 66 | 67 | Arguments: 68 | patterns (list): A list of wildcard pattern, e.g ``["*.py", 69 | "*.pyc"]`` 70 | name (str): A filename. 71 | 72 | Returns: 73 | bool: `True` if the name matches at least one of the patterns. 74 | 75 | """ 76 | if not patterns: 77 | return True 78 | return any(match(pattern, name) for pattern in patterns) 79 | 80 | 81 | def imatch_any(patterns, name): 82 | # type: (Iterable[Text], Text) -> bool 83 | """Test if a name matches any of a list of patterns (case insensitive). 84 | 85 | Will return `True` if ``patterns`` is an empty list. 86 | 87 | Arguments: 88 | patterns (list): A list of wildcard pattern, e.g ``["*.py", 89 | "*.pyc"]`` 90 | name (str): A filename. 91 | 92 | Returns: 93 | bool: `True` if the name matches at least one of the patterns. 94 | 95 | """ 96 | if not patterns: 97 | return True 98 | return any(imatch(pattern, name) for pattern in patterns) 99 | 100 | 101 | def get_matcher(patterns, case_sensitive): 102 | # type: (Iterable[Text], bool) -> Callable[[Text], bool] 103 | """Get a callable that matches names against the given patterns. 104 | 105 | Arguments: 106 | patterns (list): A list of wildcard pattern. e.g. ``["*.py", 107 | "*.pyc"]`` 108 | case_sensitive (bool): If ``True``, then the callable will be case 109 | sensitive, otherwise it will be case insensitive. 110 | 111 | Returns: 112 | callable: a matcher that will return `True` if the name given as 113 | an argument matches any of the given patterns. 114 | 115 | Example: 116 | >>> from fs import wildcard 117 | >>> is_python = wildcard.get_matcher(['*.py'], True) 118 | >>> is_python('__init__.py') 119 | True 120 | >>> is_python('foo.txt') 121 | False 122 | 123 | """ 124 | if not patterns: 125 | return lambda name: True 126 | if case_sensitive: 127 | return partial(match_any, patterns) 128 | else: 129 | return partial(imatch_any, patterns) 130 | 131 | 132 | def _translate(pattern, case_sensitive=True): 133 | # type: (Text, bool) -> Text 134 | """Translate a wildcard pattern to a regular expression. 135 | 136 | There is no way to quote meta-characters. 137 | 138 | Arguments: 139 | pattern (str): A wildcard pattern. 140 | case_sensitive (bool): Set to `False` to use a case 141 | insensitive regex (default `True`). 142 | 143 | Returns: 144 | str: A regex equivalent to the given pattern. 145 | 146 | """ 147 | if not case_sensitive: 148 | pattern = pattern.lower() 149 | i, n = 0, len(pattern) 150 | res = [] 151 | while i < n: 152 | c = pattern[i] 153 | i = i + 1 154 | if c == "*": 155 | res.append("[^/]*") 156 | elif c == "?": 157 | res.append(".") 158 | elif c == "[": 159 | j = i 160 | if j < n and pattern[j] == "!": 161 | j = j + 1 162 | if j < n and pattern[j] == "]": 163 | j = j + 1 164 | while j < n and pattern[j] != "]": 165 | j = j + 1 166 | if j >= n: 167 | res.append("\\[") 168 | else: 169 | stuff = pattern[i:j].replace("\\", "\\\\") 170 | i = j + 1 171 | if stuff[0] == "!": 172 | stuff = "^" + stuff[1:] 173 | elif stuff[0] == "^": 174 | stuff = "\\" + stuff 175 | res.append("[%s]" % stuff) 176 | else: 177 | res.append(re.escape(c)) 178 | return "".join(res) 179 | -------------------------------------------------------------------------------- /requirements-readthedocs.txt: -------------------------------------------------------------------------------- 1 | # requirements for readthedocs.io 2 | -e . 
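Because ``_translate`` in the wildcard module above maps ``*`` to ``[^/]*``, a wildcard never matches across a ``/`` separator. A short sketch of the public helpers built on it:

from fs import wildcard

assert wildcard.match("*.py", "module.py")
assert not wildcard.match("*.py", "lib/module.py")  # "*" stops at "/"
assert wildcard.imatch("*.PY", "module.py")  # case-insensitive variant

is_source = wildcard.get_matcher(["*.py", "*.pyc"], case_sensitive=True)
assert is_source("path.py")
assert not is_source("README.md")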
3 | -------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | # --- Project configuration ------------------------------------------------- 2 | 3 | [metadata] 4 | version = attr: fs._version.__version__ 5 | name = fs 6 | author = Will McGugan 7 | author_email = will@willmcgugan.com 8 | maintainer = Martin Larralde 9 | maintainer_email = martin.larralde@embl.de 10 | url = https://github.com/PyFilesystem/pyfilesystem2 11 | license = MIT 12 | license_file = LICENSE 13 | description = Python's filesystem abstraction layer 14 | long_description = file: README.md 15 | long_description_content_type = text/markdown 16 | platform = any 17 | classifiers = 18 | Development Status :: 5 - Production/Stable 19 | Intended Audience :: Developers 20 | License :: OSI Approved :: MIT License 21 | Operating System :: OS Independent 22 | Programming Language :: Python 23 | Programming Language :: Python :: 2.7 24 | Programming Language :: Python :: 3.5 25 | Programming Language :: Python :: 3.6 26 | Programming Language :: Python :: 3.7 27 | Programming Language :: Python :: 3.8 28 | Programming Language :: Python :: 3.9 29 | Programming Language :: Python :: 3.10 30 | Programming Language :: Python :: Implementation :: CPython 31 | Programming Language :: Python :: Implementation :: PyPy 32 | Topic :: System :: Filesystems 33 | Typing :: Typed 34 | project_urls = 35 | Bug Reports = https://github.com/PyFilesystem/pyfilesystem2/issues 36 | Documentation = https://pyfilesystem2.readthedocs.io/en/latest/ 37 | Wiki = https://www.pyfilesystem.org/ 38 | 39 | [options] 40 | zip_safe = false 41 | packages = find: 42 | setup_requires = 43 | setuptools >=38.3.0 44 | install_requires = 45 | appdirs~=1.4.3 46 | setuptools 47 | six ~=1.10 48 | enum34 ~=1.1.6 ; python_version < '3.4' 49 | typing ~=3.6 ; python_version < '3.6' 50 | backports.os ~=0.1 ; python_version < '3.0' 51 | 52 | [options.extras_require] 53 | scandir = 54 | scandir~=1.5 ; python_version < '3.5' 55 | 56 | [options.packages.find] 57 | exclude = tests 58 | 59 | [options.package_data] 60 | fs = py.typed 61 | 62 | [bdist_wheel] 63 | universal = 1 64 | 65 | # --- Individual linter configuration --------------------------------------- 66 | 67 | [pydocstyle] 68 | inherit = false 69 | ignore = D102,D105,D200,D203,D213,D406,D407 70 | match-dir = (?!tests)(?!docs)[^\.].* 71 | match = (?!test)(?!setup)[^\._].*\.py 72 | 73 | [mypy] 74 | ignore_missing_imports = true 75 | 76 | [mypy-fs.*] 77 | disallow_any_decorated = false 78 | disallow_any_generics = false 79 | disallow_any_unimported = true 80 | disallow_subclassing_any = true 81 | disallow_untyped_calls = false 82 | disallow_untyped_defs = false 83 | ignore_missing_imports = false 84 | warn_unused_ignores = false 85 | warn_return_any = false 86 | 87 | [mypy-fs.test] 88 | disallow_untyped_defs = false 89 | 90 | [flake8] 91 | extend-ignore = E203,E402,W503 92 | max-line-length = 88 93 | per-file-ignores = 94 | fs/__init__.py:F401 95 | fs/*/__init__.py:F401 96 | tests/*:E501 97 | fs/opener/*:F811 98 | fs/_fscompat.py:F401 99 | fs/_pathcompat.py:C401 100 | 101 | [isort] 102 | default_section = THIRDPARTY 103 | known_first_party = fs 104 | known_standard_library = sys, typing 105 | line_length = 88 106 | profile = black 107 | skip_gitignore = true 108 | 109 | # --- Test and coverage configuration ------------------------------------------ 110 | 111 | [coverage:run] 112 | branch = true 113 | omit = 
fs/test.py 114 | source = fs 115 | relative_files = true 116 | parallel = true 117 | 118 | [coverage:report] 119 | show_missing = true 120 | skip_covered = true 121 | exclude_lines = 122 | pragma: no cover 123 | if False: 124 | it typing.TYPE_CHECKING: 125 | @typing.overload 126 | @overload 127 | 128 | [tool:pytest] 129 | markers = 130 | slow: marks tests as slow (deselect with '-m "not slow"') 131 | 132 | # --- Tox automation configuration --------------------------------------------- 133 | 134 | [tox:tox] 135 | envlist = py{27,34}{,-scandir}, py{35,36,37,38,39,310}, pypy{27,36,37}, typecheck, codestyle, docstyle, codeformat 136 | sitepackages = false 137 | skip_missing_interpreters = true 138 | requires = 139 | setuptools >=38.3.0 140 | 141 | [testenv] 142 | commands = python -m coverage run --rcfile {toxinidir}/setup.cfg -m pytest {posargs} {toxinidir}/tests 143 | deps = 144 | -rtests/requirements.txt 145 | coverage~=5.0 146 | py{35,36,37,38,39,310,py36,py37}: pytest~=6.0 147 | py{27,34,py27}: pytest~=4.6 148 | py{35,36,37,38,39,310,py36,py37}: pytest-randomly~=3.5 149 | py{27,34,py27}: pytest-randomly~=1.2 150 | scandir: .[scandir] 151 | !scandir: . 152 | 153 | [testenv:typecheck] 154 | commands = mypy --config-file {toxinidir}/setup.cfg {toxinidir}/fs 155 | deps = 156 | . 157 | mypy==0.800 158 | 159 | [testenv:codestyle] 160 | commands = flake8 --config={toxinidir}/setup.cfg {toxinidir}/fs {toxinidir}/tests 161 | deps = 162 | flake8==3.7.9 163 | #flake8-builtins==1.5.3 164 | flake8-bugbear==19.8.0 165 | flake8-comprehensions==3.1.4 166 | flake8-mutable==1.2.0 167 | flake8-tuple==0.4.0 168 | 169 | [testenv:codeformat] 170 | commands = black --check {toxinidir}/fs 171 | deps = 172 | black==22.3.0 173 | 174 | [testenv:docstyle] 175 | commands = pydocstyle --config={toxinidir}/setup.cfg {toxinidir}/fs 176 | deps = 177 | pydocstyle==5.1.1 178 | 179 | [gh-actions] 180 | python = 181 | 2.7: py27, py27-scandir 182 | 3.4: py34, py34-scandir 183 | 3.5: py35 184 | 3.6: py36 185 | 3.7: py37 186 | 3.8: py38 187 | 3.9: py39 188 | 3.10: py310 189 | pypy-2.7: pypy27 190 | pypy-3.6: pypy36 191 | pypy-3.7: pypy37 192 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | import os 4 | 5 | with open(os.path.join("fs", "_version.py")) as f: 6 | exec(f.read()) 7 | 8 | from setuptools import setup 9 | 10 | setup(version=__version__) 11 | -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PyFilesystem/pyfilesystem2/77a8562785fc37cb2e30bdcd39c133097ba62dce/tests/__init__.py -------------------------------------------------------------------------------- /tests/mark.py: -------------------------------------------------------------------------------- 1 | def slow(cls): 2 | return cls 3 | -------------------------------------------------------------------------------- /tests/requirements.txt: -------------------------------------------------------------------------------- 1 | # the bare requirements for running tests 2 | 3 | # pyftpdlib is needed to spawn a FTP server for the 4 | # FTPFS test suite 5 | pyftpdlib ~=1.5 6 | 7 | # these are optional dependencies for pyftpdlib that 8 | # are not explicitly listed, we need to install these 9 | # ourselves 10 | psutil ~=5.0 11 | pysendfile ~=2.0 ; 
python_version <= "3.3" 12 | 13 | # mock is only available from Python 3.3 onward, and 14 | # mock v4+ doesn't support Python 2.7 anymore 15 | mock ~=3.0 ; python_version < "3.3" 16 | 17 | # parametrized to prevent code duplication in tests. 18 | parameterized ~=0.8 -------------------------------------------------------------------------------- /tests/test_appfs.py: -------------------------------------------------------------------------------- 1 | from __future__ import unicode_literals 2 | 3 | import shutil 4 | import six 5 | import tempfile 6 | import unittest 7 | 8 | try: 9 | from unittest import mock 10 | except ImportError: 11 | import mock 12 | 13 | import fs.test 14 | from fs import appfs 15 | 16 | 17 | class _TestAppFS(fs.test.FSTestCases): 18 | 19 | AppFS = None 20 | 21 | @classmethod 22 | def setUpClass(cls): 23 | super(_TestAppFS, cls).setUpClass() 24 | cls.tmpdir = tempfile.mkdtemp() 25 | 26 | @classmethod 27 | def tearDownClass(cls): 28 | shutil.rmtree(cls.tmpdir) 29 | 30 | def make_fs(self): 31 | with mock.patch( 32 | "appdirs.{}".format(self.AppFS.app_dir), 33 | autospec=True, 34 | spec_set=True, 35 | return_value=tempfile.mkdtemp(dir=self.tmpdir), 36 | ): 37 | return self.AppFS("fstest", "willmcgugan", "1.0") 38 | 39 | if six.PY2: 40 | 41 | def test_repr(self): 42 | self.assertEqual( 43 | repr(self.fs), 44 | "{}(u'fstest', author=u'willmcgugan', version=u'1.0')".format( 45 | self.AppFS.__name__ 46 | ), 47 | ) 48 | 49 | else: 50 | 51 | def test_repr(self): 52 | self.assertEqual( 53 | repr(self.fs), 54 | "{}('fstest', author='willmcgugan', version='1.0')".format( 55 | self.AppFS.__name__ 56 | ), 57 | ) 58 | 59 | def test_str(self): 60 | self.assertEqual( 61 | str(self.fs), "<{} 'fstest'>".format(self.AppFS.__name__.lower()) 62 | ) 63 | 64 | 65 | class TestUserDataFS(_TestAppFS, unittest.TestCase): 66 | AppFS = appfs.UserDataFS 67 | 68 | 69 | class TestUserConfigFS(_TestAppFS, unittest.TestCase): 70 | AppFS = appfs.UserConfigFS 71 | 72 | 73 | class TestUserCacheFS(_TestAppFS, unittest.TestCase): 74 | AppFS = appfs.UserCacheFS 75 | 76 | 77 | class TestSiteDataFS(_TestAppFS, unittest.TestCase): 78 | AppFS = appfs.SiteDataFS 79 | 80 | 81 | class TestSiteConfigFS(_TestAppFS, unittest.TestCase): 82 | AppFS = appfs.SiteConfigFS 83 | 84 | 85 | class TestUserLogFS(_TestAppFS, unittest.TestCase): 86 | AppFS = appfs.UserLogFS 87 | -------------------------------------------------------------------------------- /tests/test_archives.py: -------------------------------------------------------------------------------- 1 | # -*- encoding: UTF-8 2 | from __future__ import unicode_literals 3 | 4 | import os 5 | import stat 6 | from six import text_type 7 | 8 | from fs import errors, walk 9 | from fs.enums import ResourceType 10 | from fs.opener import open_fs 11 | from fs.test import UNICODE_TEXT 12 | 13 | 14 | class ArchiveTestCases(object): 15 | def make_source_fs(self): 16 | return open_fs("temp://") 17 | 18 | def build_source(self, fs): 19 | fs.makedirs("foo/bar/baz") 20 | fs.makedir("tmp") 21 | fs.writetext("Файл", "unicode filename") 22 | fs.writetext("top.txt", "Hello, World") 23 | fs.writetext("top2.txt", "Hello, World") 24 | fs.writetext("foo/bar/egg", "foofoo") 25 | fs.makedir("unicode") 26 | fs.writetext("unicode/text.txt", UNICODE_TEXT) 27 | 28 | def compress(self, fs): 29 | pass 30 | 31 | def load_archive(self): 32 | pass 33 | 34 | def remove_archive(self): 35 | pass 36 | 37 | def setUp(self): 38 | self.source_fs = source_fs = self.make_source_fs() 39 | 
self.build_source(source_fs) 40 | self.compress(source_fs) 41 | self.fs = self.load_archive() 42 | 43 | def tearDown(self): 44 | self.source_fs.close() 45 | self.fs.close() 46 | self.remove_archive() 47 | 48 | def test_repr(self): 49 | repr(self.fs) 50 | 51 | def test_str(self): 52 | self.assertIsInstance(text_type(self.fs), text_type) 53 | 54 | def test_readonly(self): 55 | with self.assertRaises(errors.ResourceReadOnly): 56 | self.fs.makedir("newdir") 57 | with self.assertRaises(errors.ResourceReadOnly): 58 | self.fs.remove("top.txt") 59 | with self.assertRaises(errors.ResourceReadOnly): 60 | self.fs.removedir("foo/bar/baz") 61 | with self.assertRaises(errors.ResourceReadOnly): 62 | self.fs.create("foo.txt") 63 | with self.assertRaises(errors.ResourceReadOnly): 64 | self.fs.setinfo("foo.txt", {}) 65 | 66 | def test_getinfo(self): 67 | root = self.fs.getinfo("/", ["details"]) 68 | self.assertEqual(root.name, "") 69 | self.assertTrue(root.is_dir) 70 | self.assertEqual(root.get("details", "type"), ResourceType.directory) 71 | 72 | bar = self.fs.getinfo("foo/bar", ["details"]) 73 | self.assertEqual(bar.name, "bar") 74 | self.assertTrue(bar.is_dir) 75 | self.assertEqual(bar.get("details", "type"), ResourceType.directory) 76 | 77 | top = self.fs.getinfo("top.txt", ["details", "access"]) 78 | self.assertEqual(top.size, 12) 79 | self.assertFalse(top.is_dir) 80 | 81 | try: 82 | source_syspath = self.source_fs.getsyspath("/top.txt") 83 | except errors.NoSysPath: 84 | pass 85 | else: 86 | if top.has_namespace("access"): 87 | self.assertEqual( 88 | top.permissions.mode, stat.S_IMODE(os.stat(source_syspath).st_mode) 89 | ) 90 | 91 | self.assertEqual(top.get("details", "type"), ResourceType.file) 92 | 93 | def test_listdir(self): 94 | self.assertEqual( 95 | sorted(self.source_fs.listdir("/")), sorted(self.fs.listdir("/")) 96 | ) 97 | for name in self.fs.listdir("/"): 98 | self.assertIsInstance(name, text_type) 99 | with self.assertRaises(errors.DirectoryExpected): 100 | self.fs.listdir("top.txt") 101 | with self.assertRaises(errors.ResourceNotFound): 102 | self.fs.listdir("nothere") 103 | 104 | def test_open(self): 105 | with self.fs.open("top.txt") as f: 106 | chars = [] 107 | while True: 108 | c = f.read(2) 109 | if not c: 110 | break 111 | chars.append(c) 112 | self.assertEqual("".join(chars), "Hello, World") 113 | with self.assertRaises(errors.ResourceNotFound): 114 | with self.fs.open("nothere.txt") as f: 115 | pass 116 | with self.assertRaises(errors.FileExpected): 117 | with self.fs.open("foo") as f: 118 | pass 119 | 120 | def test_gets(self): 121 | self.assertEqual(self.fs.readtext("top.txt"), "Hello, World") 122 | self.assertEqual(self.fs.readtext("foo/bar/egg"), "foofoo") 123 | self.assertEqual(self.fs.readbytes("top.txt"), b"Hello, World") 124 | self.assertEqual(self.fs.readbytes("foo/bar/egg"), b"foofoo") 125 | with self.assertRaises(errors.ResourceNotFound): 126 | self.fs.readbytes("what.txt") 127 | 128 | def test_walk_files(self): 129 | source_files = sorted(walk.walk_files(self.source_fs)) 130 | archive_files = sorted(walk.walk_files(self.fs)) 131 | 132 | self.assertEqual(source_files, archive_files) 133 | 134 | def test_implied_dir(self): 135 | self.fs.getinfo("foo/bar") 136 | self.fs.getinfo("foo") 137 | -------------------------------------------------------------------------------- /tests/test_base.py: -------------------------------------------------------------------------------- 1 | """Test (abstract) base FS class.""" 2 | 3 | from __future__ import unicode_literals 4 | 5 | 
import unittest 6 | 7 | from fs import errors 8 | from fs.base import FS 9 | 10 | 11 | class DummyFS(FS): 12 | def getinfo(self, path, namespaces=None): 13 | pass 14 | 15 | def listdir(self, path): 16 | pass 17 | 18 | def makedir(self, path, permissions=None, recreate=False): 19 | pass 20 | 21 | def openbin(self, path, mode="r", buffering=-1, **options): 22 | pass 23 | 24 | def remove(self, path): 25 | pass 26 | 27 | def removedir(self, path): 28 | pass 29 | 30 | def setinfo(self, path, info): 31 | pass 32 | 33 | 34 | class TestBase(unittest.TestCase): 35 | def setUp(self): 36 | self.fs = DummyFS() 37 | 38 | def test_validatepath(self): 39 | """Test validatepath method.""" 40 | with self.assertRaises(TypeError): 41 | self.fs.validatepath(b"bytes") 42 | 43 | self.fs._meta["invalid_path_chars"] = "Z" 44 | with self.assertRaises(errors.InvalidCharsInPath): 45 | self.fs.validatepath("Time for some ZZZs") 46 | 47 | self.fs.validatepath("fine") 48 | self.fs.validatepath("good.fine") 49 | 50 | self.fs._meta["invalid_path_chars"] = "" 51 | self.fs.validatepath("Time for some ZZZs") 52 | 53 | def mock_getsyspath(path): 54 | return path 55 | 56 | self.fs.getsyspath = mock_getsyspath 57 | 58 | self.fs._meta["max_sys_path_length"] = 10 59 | 60 | self.fs.validatepath("0123456789") 61 | self.fs.validatepath("012345678") 62 | self.fs.validatepath("01234567") 63 | 64 | with self.assertRaises(errors.InvalidPath): 65 | self.fs.validatepath("0123456789A") 66 | -------------------------------------------------------------------------------- /tests/test_bulk.py: -------------------------------------------------------------------------------- 1 | from __future__ import unicode_literals 2 | 3 | import unittest 4 | 5 | from fs._bulk import Copier, _Task 6 | from fs.errors import BulkCopyFailed 7 | 8 | 9 | class BrokenTask(_Task): 10 | def __call__(self): 11 | 1 / 0 12 | 13 | 14 | class TestBulk(unittest.TestCase): 15 | def test_worker_error(self): 16 | with self.assertRaises(BulkCopyFailed): 17 | with Copier(num_workers=2) as copier: 18 | copier.queue.put(BrokenTask()) 19 | -------------------------------------------------------------------------------- /tests/test_encoding.py: -------------------------------------------------------------------------------- 1 | from __future__ import unicode_literals 2 | 3 | import os 4 | import platform 5 | import shutil 6 | import six 7 | import tempfile 8 | import unittest 9 | 10 | import fs 11 | from fs.osfs import OSFS 12 | 13 | if platform.system() != "Windows": 14 | 15 | @unittest.skipIf(platform.system() == "Darwin", "Bad unicode not possible on OSX") 16 | class TestEncoding(unittest.TestCase): 17 | 18 | TEST_FILENAME = b"foo\xb1bar" 19 | # fsdecode throws error on Windows 20 | TEST_FILENAME_UNICODE = fs.fsdecode(TEST_FILENAME) 21 | 22 | def setUp(self): 23 | dir_path = self.dir_path = tempfile.mkdtemp() 24 | if six.PY2: 25 | with open(os.path.join(dir_path, self.TEST_FILENAME), "wb") as f: 26 | f.write(b"baz") 27 | else: 28 | with open( 29 | os.path.join(dir_path, self.TEST_FILENAME_UNICODE), "wb" 30 | ) as f: 31 | f.write(b"baz") 32 | 33 | def tearDown(self): 34 | shutil.rmtree(self.dir_path) 35 | 36 | def test_open(self): 37 | with OSFS(self.dir_path) as test_fs: 38 | self.assertTrue(test_fs.exists(self.TEST_FILENAME_UNICODE)) 39 | self.assertTrue(test_fs.isfile(self.TEST_FILENAME_UNICODE)) 40 | self.assertFalse(test_fs.isdir(self.TEST_FILENAME_UNICODE)) 41 | with test_fs.open(self.TEST_FILENAME_UNICODE, "rb") as f: 42 | self.assertEqual(f.read(), b"baz") 43 | 
self.assertEqual(test_fs.readtext(self.TEST_FILENAME_UNICODE), "baz") 44 | test_fs.remove(self.TEST_FILENAME_UNICODE) 45 | self.assertFalse(test_fs.exists(self.TEST_FILENAME_UNICODE)) 46 | 47 | def test_listdir(self): 48 | with OSFS(self.dir_path) as test_fs: 49 | dirlist = test_fs.listdir("/") 50 | self.assertEqual(dirlist, [self.TEST_FILENAME_UNICODE]) 51 | self.assertEqual(test_fs.readtext(dirlist[0]), "baz") 52 | 53 | def test_scandir(self): 54 | with OSFS(self.dir_path) as test_fs: 55 | for info in test_fs.scandir("/"): 56 | self.assertIsInstance(info.name, six.text_type) 57 | self.assertEqual(info.name, self.TEST_FILENAME_UNICODE) 58 | -------------------------------------------------------------------------------- /tests/test_enums.py: -------------------------------------------------------------------------------- 1 | import os 2 | import unittest 3 | 4 | from fs import enums 5 | 6 | 7 | class TestEnums(unittest.TestCase): 8 | def test_enums(self): 9 | self.assertEqual(enums.Seek.current, os.SEEK_CUR) 10 | self.assertEqual(enums.Seek.end, os.SEEK_END) 11 | self.assertEqual(enums.Seek.set, os.SEEK_SET) 12 | self.assertEqual(enums.ResourceType.unknown, 0) 13 | -------------------------------------------------------------------------------- /tests/test_error_tools.py: -------------------------------------------------------------------------------- 1 | from __future__ import unicode_literals 2 | 3 | import errno 4 | import unittest 5 | 6 | import fs.errors 7 | from fs.error_tools import convert_os_errors 8 | 9 | 10 | class TestErrorTools(unittest.TestCase): 11 | def test_convert_enoent(self): 12 | exception = OSError(errno.ENOENT, "resource not found") 13 | with self.assertRaises(fs.errors.ResourceNotFound) as ctx: 14 | with convert_os_errors("stat", "/tmp/test"): 15 | raise exception 16 | self.assertEqual(ctx.exception.exc, exception) 17 | self.assertEqual(ctx.exception.path, "/tmp/test") 18 | 19 | def test_convert_enametoolong(self): 20 | exception = OSError(errno.ENAMETOOLONG, "File name too long: test") 21 | with self.assertRaises(fs.errors.PathError) as ctx: 22 | with convert_os_errors("stat", "/tmp/test"): 23 | raise exception 24 | self.assertEqual(ctx.exception.exc, exception) 25 | self.assertEqual(ctx.exception.path, "/tmp/test") 26 | -------------------------------------------------------------------------------- /tests/test_errors.py: -------------------------------------------------------------------------------- 1 | from __future__ import unicode_literals 2 | 3 | import multiprocessing 4 | import unittest 5 | from six import text_type 6 | 7 | from fs import errors 8 | from fs.errors import CreateFailed 9 | 10 | 11 | class TestErrors(unittest.TestCase): 12 | def test_str(self): 13 | err = errors.FSError("oh dear") 14 | repr(err) 15 | self.assertEqual(text_type(err), "oh dear") 16 | 17 | def test_unsupported(self): 18 | err = errors.Unsupported("stuff") 19 | repr(err) 20 | self.assertEqual(text_type(err), "not supported") 21 | 22 | def test_raise_in_multiprocessing(self): 23 | # Without the __reduce__ methods in FSError subclasses, this test will hang forever. 
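# Background (an illustrative sketch, not taken from fs.errors verbatim):
# pickling an exception keeps only its ``args``, so a subclass whose
# __init__ requires extra parameters cannot be re-created in the parent
# process, and the worker result is never returned. Giving each subclass
# a __reduce__ that replays its constructor arguments, roughly
#     def __reduce__(self):
#         return type(self), (self.path,)
# for the path-based errors, lets the exception cross the process
# boundary intact.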
24 | tests = [ 25 | [errors.ResourceNotFound, "some_path"], 26 | [errors.FilesystemClosed], 27 | [errors.CreateFailed], 28 | [errors.NoSysPath, "some_path"], 29 | [errors.NoURL, "some_path", "some_purpose"], 30 | [errors.Unsupported], 31 | [errors.IllegalBackReference, "path"], 32 | [errors.MissingInfoNamespace, "path"], 33 | ] 34 | try: 35 | pool = multiprocessing.Pool(1) 36 | for args in tests: 37 | exc = args[0](*args[1:]) 38 | exc.__reduce__() 39 | with self.assertRaises(args[0]): 40 | pool.apply(_multiprocessing_test_task, args) 41 | finally: 42 | pool.close() 43 | 44 | 45 | def _multiprocessing_test_task(err, *args): 46 | raise err(*args) 47 | 48 | 49 | class TestCreateFailed(unittest.TestCase): 50 | def test_catch_all(self): 51 | 52 | errors = (ZeroDivisionError, ValueError, CreateFailed) 53 | 54 | @CreateFailed.catch_all 55 | def test(x): 56 | raise errors[x] 57 | 58 | for index, _exc in enumerate(errors): 59 | try: 60 | test(index) 61 | except Exception as e: 62 | self.assertIsInstance(e, CreateFailed) 63 | if e.exc is not None: 64 | self.assertNotIsInstance(e.exc, CreateFailed) 65 | -------------------------------------------------------------------------------- /tests/test_filesize.py: -------------------------------------------------------------------------------- 1 | from __future__ import unicode_literals 2 | 3 | import unittest 4 | 5 | from fs import filesize 6 | 7 | 8 | class TestFilesize(unittest.TestCase): 9 | def test_traditional(self): 10 | 11 | self.assertEqual(filesize.traditional(0), "0 bytes") 12 | self.assertEqual(filesize.traditional(1), "1 byte") 13 | self.assertEqual(filesize.traditional(2), "2 bytes") 14 | self.assertEqual(filesize.traditional(1024), "1.0 KB") 15 | 16 | self.assertEqual(filesize.traditional(1024 * 1024), "1.0 MB") 17 | 18 | self.assertEqual(filesize.traditional(1024 * 1024 + 1), "1.0 MB") 19 | 20 | self.assertEqual(filesize.traditional(1.5 * 1024 * 1024), "1.5 MB") 21 | 22 | def test_binary(self): 23 | 24 | self.assertEqual(filesize.binary(0), "0 bytes") 25 | self.assertEqual(filesize.binary(1), "1 byte") 26 | self.assertEqual(filesize.binary(2), "2 bytes") 27 | self.assertEqual(filesize.binary(1024), "1.0 KiB") 28 | 29 | self.assertEqual(filesize.binary(1024 * 1024), "1.0 MiB") 30 | 31 | self.assertEqual(filesize.binary(1024 * 1024 + 1), "1.0 MiB") 32 | 33 | self.assertEqual(filesize.binary(1.5 * 1024 * 1024), "1.5 MiB") 34 | 35 | def test_decimal(self): 36 | 37 | self.assertEqual(filesize.decimal(0), "0 bytes") 38 | self.assertEqual(filesize.decimal(1), "1 byte") 39 | self.assertEqual(filesize.decimal(2), "2 bytes") 40 | self.assertEqual(filesize.decimal(1000), "1.0 kB") 41 | 42 | self.assertEqual(filesize.decimal(1000 * 1000), "1.0 MB") 43 | 44 | self.assertEqual(filesize.decimal(1000 * 1000 + 1), "1.0 MB") 45 | 46 | self.assertEqual(filesize.decimal(1200 * 1000), "1.2 MB") 47 | 48 | def test_errors(self): 49 | 50 | with self.assertRaises(TypeError): 51 | filesize.traditional("foo") 52 | -------------------------------------------------------------------------------- /tests/test_fscompat.py: -------------------------------------------------------------------------------- 1 | from __future__ import unicode_literals 2 | 3 | import six 4 | import unittest 5 | 6 | from fs._fscompat import fsdecode, fsencode, fspath 7 | 8 | 9 | class PathMock(object): 10 | def __init__(self, path): 11 | self._path = path 12 | 13 | def __fspath__(self): 14 | return self._path 15 | 16 | 17 | class BrokenPathMock(object): 18 | def __init__(self, path): 19 | 
self._path = path 20 | 21 | def __fspath__(self): 22 | return self.broken 23 | 24 | 25 | class TestFSCompact(unittest.TestCase): 26 | def test_fspath(self): 27 | path = PathMock("foo") 28 | self.assertEqual(fspath(path), "foo") 29 | path = PathMock(b"foo") 30 | self.assertEqual(fspath(path), b"foo") 31 | path = "foo" 32 | assert path is fspath(path) 33 | 34 | with self.assertRaises(TypeError): 35 | fspath(100) 36 | 37 | with self.assertRaises(TypeError): 38 | fspath(PathMock(5)) 39 | 40 | with self.assertRaises(AttributeError): 41 | fspath(BrokenPathMock("foo")) 42 | 43 | def test_fsencode(self): 44 | encode_bytes = fsencode(b"foo") 45 | assert isinstance(encode_bytes, bytes) 46 | self.assertEqual(encode_bytes, b"foo") 47 | 48 | encode_bytes = fsencode("foo") 49 | assert isinstance(encode_bytes, bytes) 50 | self.assertEqual(encode_bytes, b"foo") 51 | 52 | with self.assertRaises(TypeError): 53 | fsencode(5) 54 | 55 | def test_fsdecode(self): 56 | decode_text = fsdecode(b"foo") 57 | assert isinstance(decode_text, six.text_type) 58 | decode_text = fsdecode("foo") 59 | assert isinstance(decode_text, six.text_type) 60 | with self.assertRaises(TypeError): 61 | fsdecode(5) 62 | -------------------------------------------------------------------------------- /tests/test_imports.py: -------------------------------------------------------------------------------- 1 | import sys 2 | 3 | import unittest 4 | 5 | 6 | class TestImports(unittest.TestCase): 7 | def test_import_path(self): 8 | """Test import fs also imports other symbols.""" 9 | restore_fs = sys.modules.pop("fs") 10 | sys.modules.pop("fs.path") 11 | try: 12 | import fs 13 | 14 | fs.path 15 | fs.Seek 16 | fs.ResourceType 17 | fs.open_fs 18 | finally: 19 | sys.modules["fs"] = restore_fs 20 | -------------------------------------------------------------------------------- /tests/test_info.py: -------------------------------------------------------------------------------- 1 | from __future__ import unicode_literals 2 | 3 | import unittest 4 | from datetime import datetime 5 | 6 | from fs.enums import ResourceType 7 | from fs.info import Info 8 | from fs.permissions import Permissions 9 | from fs.time import datetime_to_epoch 10 | 11 | try: 12 | from datetime import timezone 13 | except ImportError: 14 | from fs._tzcompat import timezone # type: ignore 15 | 16 | 17 | class TestInfo(unittest.TestCase): 18 | def test_empty(self): 19 | """Test missing info.""" 20 | info = Info({"basic": {}, "details": {}, "access": {}, "link": {}}) 21 | 22 | self.assertIsNone(info.name) 23 | self.assertIsNone(info.is_dir) 24 | self.assertEqual(info.type, ResourceType.unknown) 25 | self.assertIsNone(info.accessed) 26 | self.assertIsNone(info.modified) 27 | self.assertIsNone(info.created) 28 | self.assertIsNone(info.metadata_changed) 29 | self.assertIsNone(info.accessed) 30 | self.assertIsNone(info.permissions) 31 | self.assertIsNone(info.user) 32 | self.assertIsNone(info.group) 33 | self.assertIsNone(info.target) 34 | self.assertFalse(info.is_link) 35 | 36 | def test_access(self): 37 | info = Info( 38 | { 39 | "access": { 40 | "uid": 10, 41 | "gid": 12, 42 | "user": "will", 43 | "group": "devs", 44 | "permissions": ["u_r"], 45 | } 46 | } 47 | ) 48 | self.assertIsInstance(info.permissions, Permissions) 49 | self.assertEqual(info.permissions, Permissions(user="r")) 50 | self.assertEqual(info.user, "will") 51 | self.assertEqual(info.group, "devs") 52 | self.assertEqual(info.uid, 10) 53 | self.assertEqual(info.gid, 12) 54 | 55 | def test_link(self): 56 | info = 
Info({"link": {"target": "foo"}}) 57 | self.assertTrue(info.is_link) 58 | self.assertEqual(info.target, "foo") 59 | 60 | def test_basic(self): 61 | # Check simple file 62 | info = Info({"basic": {"name": "bar.py", "is_dir": False}}) 63 | self.assertEqual(info.name, "bar.py") 64 | self.assertIsInstance(info.is_dir, bool) 65 | self.assertFalse(info.is_dir) 66 | self.assertEqual(repr(info), "<file 'bar.py'>") 67 | self.assertEqual(info.suffix, ".py") 68 | 69 | # Check dir 70 | info = Info({"basic": {"name": "foo", "is_dir": True}}) 71 | self.assertTrue(info.is_dir) 72 | self.assertEqual(repr(info), "<dir 'foo'>") 73 | self.assertEqual(info.suffix, "") 74 | 75 | def test_details(self): 76 | dates = [ 77 | datetime(2016, 7, 5, tzinfo=timezone.utc), 78 | datetime(2016, 7, 6, tzinfo=timezone.utc), 79 | datetime(2016, 7, 7, tzinfo=timezone.utc), 80 | datetime(2016, 7, 8, tzinfo=timezone.utc), 81 | ] 82 | epochs = [datetime_to_epoch(d) for d in dates] 83 | 84 | info = Info( 85 | { 86 | "details": { 87 | "accessed": epochs[0], 88 | "modified": epochs[1], 89 | "created": epochs[2], 90 | "metadata_changed": epochs[3], 91 | "type": int(ResourceType.file), 92 | } 93 | } 94 | ) 95 | self.assertEqual(info.accessed, dates[0]) 96 | self.assertEqual(info.modified, dates[1]) 97 | self.assertEqual(info.created, dates[2]) 98 | self.assertEqual(info.metadata_changed, dates[3]) 99 | self.assertIsInstance(info.type, ResourceType) 100 | self.assertEqual(info.type, ResourceType.file) 101 | self.assertEqual(info.type, 2) 102 | 103 | def test_has_namespace(self): 104 | info = Info({"basic": {}, "details": {}}) 105 | self.assertTrue(info.has_namespace("basic")) 106 | self.assertTrue(info.has_namespace("details")) 107 | self.assertFalse(info.has_namespace("access")) 108 | 109 | def test_copy(self): 110 | info = Info({"basic": {"name": "bar", "is_dir": False}}) 111 | info_copy = info.copy() 112 | self.assertEqual(info.raw, info_copy.raw) 113 | 114 | def test_get(self): 115 | info = Info({"baz": {}}) 116 | self.assertIsNone(info.get("foo", "bar")) 117 | self.assertIsNone(info.get("baz", "bar")) 118 | 119 | def test_suffix(self): 120 | info = Info({"basic": {"name": "foo.tar.gz"}}) 121 | self.assertEqual(info.suffix, ".gz") 122 | self.assertEqual(info.suffixes, [".tar", ".gz"]) 123 | self.assertEqual(info.stem, "foo") 124 | info = Info({"basic": {"name": "foo"}}) 125 | self.assertEqual(info.suffix, "") 126 | self.assertEqual(info.suffixes, []) 127 | self.assertEqual(info.stem, "foo") 128 | 129 | info = Info({"basic": {"name": ".foo"}}) 130 | self.assertEqual(info.suffix, "") 131 | self.assertEqual(info.suffixes, []) 132 | self.assertEqual(info.stem, ".foo") 133 | -------------------------------------------------------------------------------- /tests/test_iotools.py: -------------------------------------------------------------------------------- 1 | from __future__ import unicode_literals 2 | 3 | import io 4 | import six 5 | import unittest 6 | 7 | from fs import iotools, tempfs 8 | from fs.test import UNICODE_TEXT 9 | 10 | 11 | class TestIOTools(unittest.TestCase): 12 | def setUp(self): 13 | self.fs = tempfs.TempFS("iotoolstest") 14 | 15 | def tearDown(self): 16 | self.fs.close() 17 | del self.fs 18 | 19 | def test_make_stream(self): 20 | """Test make_stream""" 21 | 22 | self.fs.writebytes("foo.bin", b"foofoo") 23 | 24 | with self.fs.openbin("foo.bin") as f: 25 | data = f.read() 26 | self.assertTrue(isinstance(data, bytes)) 27 | 28 | with self.fs.openbin("text.txt", "wb") as f: 29 | f.write(UNICODE_TEXT.encode("utf-8")) 30 | 31 | with
self.fs.openbin("text.txt") as f: 32 | with iotools.make_stream("text.txt", f, "rt") as f2: 33 | repr(f2) 34 | text = f2.read() 35 | self.assertIsInstance(text, six.text_type) 36 | 37 | def test_readinto(self): 38 | 39 | self.fs.writebytes("bytes.bin", b"foofoobarbar") 40 | 41 | with self.fs.openbin("bytes.bin") as bin_file: 42 | with iotools.make_stream("bytes.bin", bin_file, "rb") as f: 43 | data = bytearray(3) 44 | bytes_read = f.readinto(data) 45 | self.assertEqual(bytes_read, 3) 46 | self.assertEqual(bytes(data), b"foo") 47 | self.assertEqual(f.readline(1), b"f") 48 | 49 | def no_readinto(size): 50 | raise AttributeError 51 | 52 | with self.fs.openbin("bytes.bin") as bin_file: 53 | bin_file.readinto = no_readinto 54 | with iotools.make_stream("bytes.bin", bin_file, "rb") as f: 55 | data = bytearray(3) 56 | bytes_read = f.readinto(data) 57 | self.assertEqual(bytes_read, 3) 58 | self.assertEqual(bytes(data), b"foo") 59 | self.assertEqual(f.readline(1), b"f") 60 | 61 | def test_readinto1(self): 62 | 63 | self.fs.writebytes("bytes.bin", b"foofoobarbar") 64 | 65 | with self.fs.openbin("bytes.bin") as bin_file: 66 | with iotools.make_stream("bytes.bin", bin_file, "rb") as f: 67 | data = bytearray(3) 68 | bytes_read = f.readinto1(data) 69 | self.assertEqual(bytes_read, 3) 70 | self.assertEqual(bytes(data), b"foo") 71 | self.assertEqual(f.readline(1), b"f") 72 | 73 | def no_readinto(size): 74 | raise AttributeError 75 | 76 | with self.fs.openbin("bytes.bin") as bin_file: 77 | bin_file.readinto = no_readinto 78 | with iotools.make_stream("bytes.bin", bin_file, "rb") as f: 79 | data = bytearray(3) 80 | bytes_read = f.readinto1(data) 81 | self.assertEqual(bytes_read, 3) 82 | self.assertEqual(bytes(data), b"foo") 83 | self.assertEqual(f.readline(1), b"f") 84 | 85 | def test_isatty(self): 86 | with self.fs.openbin("text.txt", "wb") as f: 87 | with iotools.make_stream("text.txt", f, "wb") as f1: 88 | self.assertFalse(f1.isatty()) 89 | 90 | def test_readlines(self): 91 | self.fs.writebytes("foo", b"barbar\nline1\nline2") 92 | with self.fs.open("foo", "rb") as f: 93 | f = iotools.make_stream("foo", f, "rb") 94 | self.assertEqual(list(f), [b"barbar\n", b"line1\n", b"line2"]) 95 | with self.fs.open("foo", "rt") as f: 96 | f = iotools.make_stream("foo", f, "rb") 97 | self.assertEqual(f.readlines(), ["barbar\n", "line1\n", "line2"]) 98 | 99 | def test_readall(self): 100 | self.fs.writebytes("foo", b"foobar") 101 | with self.fs.open("foo", "rt") as f: 102 | self.assertEqual(f.read(), "foobar") 103 | 104 | def test_writelines(self): 105 | with self.fs.open("foo", "wb") as f: 106 | f = iotools.make_stream("foo", f, "rb") 107 | f.writelines([b"foo", b"bar", b"baz"]) 108 | self.assertEqual(self.fs.readbytes("foo"), b"foobarbaz") 109 | 110 | def test_seekable(self): 111 | 112 | f = io.BytesIO(b"HelloWorld") 113 | raw_wrapper = iotools.RawWrapper(f) 114 | self.assertTrue(raw_wrapper.seekable()) 115 | 116 | def no_seekable(): 117 | raise AttributeError("seekable") 118 | 119 | f.seekable = no_seekable 120 | 121 | def seek(pos, whence): 122 | raise IOError("no seek") 123 | 124 | raw_wrapper.seek = seek 125 | 126 | self.assertFalse(raw_wrapper.seekable()) 127 | 128 | def test_line_iterator(self): 129 | f = io.BytesIO(b"Hello\nWorld\n\nfoo") 130 | self.assertEqual( 131 | list(iotools.line_iterator(f)), [b"Hello\n", b"World\n", b"\n", b"foo"] 132 | ) 133 | 134 | f = io.BytesIO(b"Hello\nWorld\n\nfoo") 135 | self.assertEqual(list(iotools.line_iterator(f, 10)), [b"Hello\n", b"Worl"]) 136 | 137 | def 
test_make_stream_writer(self): 138 | f = io.BytesIO() 139 | s = iotools.make_stream("foo", f, "wb", buffering=1) 140 | self.assertIsInstance(s, io.BufferedWriter) 141 | s.write(b"Hello") 142 | self.assertEqual(f.getvalue(), b"Hello") 143 | 144 | def test_make_stream_reader(self): 145 | f = io.BytesIO(b"Hello") 146 | s = iotools.make_stream("foo", f, "rb", buffering=1) 147 | self.assertIsInstance(s, io.BufferedReader) 148 | self.assertEqual(s.read(), b"Hello") 149 | 150 | def test_make_stream_reader_writer(self): 151 | f = io.BytesIO(b"Hello") 152 | s = iotools.make_stream("foo", f, "+b", buffering=1) 153 | self.assertIsInstance(s, io.BufferedRandom) 154 | self.assertEqual(s.read(), b"Hello") 155 | s.write(b" World") 156 | self.assertEqual(f.getvalue(), b"Hello World") 157 | -------------------------------------------------------------------------------- /tests/test_lrucache.py: -------------------------------------------------------------------------------- 1 | from __future__ import unicode_literals 2 | 3 | import unittest 4 | 5 | from fs import lrucache 6 | 7 | 8 | class TestLRUCache(unittest.TestCase): 9 | def setUp(self): 10 | self.lrucache = lrucache.LRUCache(3) 11 | 12 | def test_lrucache(self): 13 | # insert some values 14 | self.lrucache["foo"] = 1 15 | self.lrucache["bar"] = 2 16 | self.lrucache["baz"] = 3 17 | self.assertIn("foo", self.lrucache) 18 | 19 | # Cache size is 3, so the following should kick oldest one out 20 | self.lrucache["egg"] = 4 21 | self.assertNotIn("foo", self.lrucache) 22 | self.assertIn("egg", self.lrucache) 23 | 24 | # cache is now full 25 | # look up two keys 26 | self.lrucache["bar"] 27 | self.lrucache["baz"] 28 | 29 | # Insert a new value 30 | self.lrucache["eggegg"] = 5 31 | # Check it kicked out the 'oldest' key 32 | self.assertNotIn("egg", self.lrucache) 33 | -------------------------------------------------------------------------------- /tests/test_memoryfs.py: -------------------------------------------------------------------------------- 1 | from __future__ import unicode_literals 2 | 3 | import posixpath 4 | import unittest 5 | 6 | from fs import memoryfs 7 | from fs.test import UNICODE_TEXT, FSTestCases 8 | 9 | try: 10 | # Only supported on Python 3.4+ 11 | import tracemalloc 12 | except ImportError: 13 | tracemalloc = None 14 | 15 | 16 | class TestMemoryFS(FSTestCases, unittest.TestCase): 17 | """Test OSFS implementation.""" 18 | 19 | def make_fs(self): 20 | return memoryfs.MemoryFS() 21 | 22 | def _create_many_files(self): 23 | for parent_dir in {"/", "/one", "/one/two", "/one/other-two/three"}: 24 | self.fs.makedirs(parent_dir, recreate=True) 25 | for file_id in range(50): 26 | self.fs.writetext( 27 | posixpath.join(parent_dir, str(file_id)), UNICODE_TEXT 28 | ) 29 | 30 | @unittest.skipUnless( 31 | tracemalloc, reason="`tracemalloc` isn't supported on this Python version." 32 | ) 33 | def test_close_mem_free(self): 34 | """Ensure all file memory is freed when calling close(). 35 | 36 | Prevents regression against issue #308. 
37 | """ 38 | trace_filters = [tracemalloc.Filter(True, "*/memoryfs.py")] 39 | tracemalloc.start() 40 | 41 | before = tracemalloc.take_snapshot().filter_traces(trace_filters) 42 | self._create_many_files() 43 | after_create = tracemalloc.take_snapshot().filter_traces(trace_filters) 44 | 45 | self.fs.close() 46 | after_close = tracemalloc.take_snapshot().filter_traces(trace_filters) 47 | tracemalloc.stop() 48 | 49 | [diff_create] = after_create.compare_to( 50 | before, key_type="filename", cumulative=True 51 | ) 52 | self.assertGreater( 53 | diff_create.size_diff, 54 | 0, 55 | "Memory usage didn't increase after creating files; diff is %0.2f KiB." 56 | % (diff_create.size_diff / 1024.0), 57 | ) 58 | 59 | [diff_close] = after_close.compare_to( 60 | after_create, key_type="filename", cumulative=True 61 | ) 62 | self.assertLess( 63 | diff_close.size_diff, 64 | 0, 65 | "Memory usage increased after closing the file system; diff is %0.2f KiB." 66 | % (diff_close.size_diff / 1024.0), 67 | ) 68 | 69 | def test_copy_preserve_time(self): 70 | self.fs.makedir("foo") 71 | self.fs.makedir("bar") 72 | self.fs.touch("foo/file.txt") 73 | 74 | src_datetime = self.fs.getmodified("foo/file.txt") 75 | 76 | self.fs.copy("foo/file.txt", "bar/file.txt", preserve_time=True) 77 | self.assertTrue(self.fs.exists("bar/file.txt")) 78 | 79 | dst_datetime = self.fs.getmodified("bar/file.txt") 80 | self.assertEqual(dst_datetime, src_datetime) 81 | 82 | 83 | class TestMemoryFile(unittest.TestCase): 84 | def setUp(self): 85 | self.fs = memoryfs.MemoryFS() 86 | 87 | def tearDown(self): 88 | self.fs.close() 89 | 90 | def test_readline_writing(self): 91 | with self.fs.openbin("test.txt", "w") as f: 92 | self.assertRaises(IOError, f.readline) 93 | 94 | def test_readinto_writing(self): 95 | with self.fs.openbin("test.txt", "w") as f: 96 | self.assertRaises(IOError, f.readinto, bytearray(10)) 97 | -------------------------------------------------------------------------------- /tests/test_mirror.py: -------------------------------------------------------------------------------- 1 | from __future__ import unicode_literals 2 | 3 | import unittest 4 | from parameterized import parameterized_class 5 | 6 | from fs import open_fs 7 | from fs.mirror import mirror 8 | 9 | 10 | @parameterized_class(("WORKERS",), [(0,), (1,), (2,), (4,)]) 11 | class TestMirror(unittest.TestCase): 12 | def _contents(self, fs): 13 | """Extract an FS in to a simple data structure.""" 14 | namespaces = ("details", "metadata_changed", "modified") 15 | contents = [] 16 | for path, dirs, files in fs.walk(): 17 | for info in dirs: 18 | _path = info.make_path(path) 19 | contents.append((_path, "dir", b"")) 20 | for info in files: 21 | _path = info.make_path(path) 22 | _bytes = fs.readbytes(_path) 23 | _info = fs.getinfo(_path, namespaces) 24 | contents.append( 25 | ( 26 | _path, 27 | "file", 28 | _bytes, 29 | _info.modified, 30 | _info.metadata_changed, 31 | ) 32 | ) 33 | return sorted(contents) 34 | 35 | def assert_compare_fs(self, fs1, fs2): 36 | """Assert filesystems and contents are the same.""" 37 | self.assertEqual(self._contents(fs1), self._contents(fs2)) 38 | 39 | def test_empty_mirror(self): 40 | m1 = open_fs("mem://") 41 | m2 = open_fs("mem://") 42 | mirror(m1, m2, workers=self.WORKERS, preserve_time=True) 43 | self.assert_compare_fs(m1, m2) 44 | 45 | def test_mirror_one_file(self): 46 | m1 = open_fs("mem://") 47 | m1.writetext("foo", "hello") 48 | m2 = open_fs("mem://") 49 | mirror(m1, m2, workers=self.WORKERS, preserve_time=True) 50 | 
self.assert_compare_fs(m1, m2) 51 | 52 | def test_mirror_one_file_one_dir(self): 53 | m1 = open_fs("mem://") 54 | m1.writetext("foo", "hello") 55 | m1.makedir("bar") 56 | m2 = open_fs("mem://") 57 | mirror(m1, m2, workers=self.WORKERS, preserve_time=True) 58 | self.assert_compare_fs(m1, m2) 59 | 60 | def test_mirror_delete_replace(self): 61 | m1 = open_fs("mem://") 62 | m1.writetext("foo", "hello") 63 | m1.makedir("bar") 64 | m2 = open_fs("mem://") 65 | mirror(m1, m2, workers=self.WORKERS, preserve_time=True) 66 | self.assert_compare_fs(m1, m2) 67 | m2.remove("foo") 68 | mirror(m1, m2, workers=self.WORKERS, preserve_time=True) 69 | self.assert_compare_fs(m1, m2) 70 | m2.removedir("bar") 71 | mirror(m1, m2, workers=self.WORKERS, preserve_time=True) 72 | self.assert_compare_fs(m1, m2) 73 | 74 | def test_mirror_extra_dir(self): 75 | m1 = open_fs("mem://") 76 | m1.writetext("foo", "hello") 77 | m1.makedir("bar") 78 | m2 = open_fs("mem://") 79 | m2.makedir("baz") 80 | mirror(m1, m2, workers=self.WORKERS, preserve_time=True) 81 | self.assert_compare_fs(m1, m2) 82 | 83 | def test_mirror_extra_file(self): 84 | m1 = open_fs("mem://") 85 | m1.writetext("foo", "hello") 86 | m1.makedir("bar") 87 | m2 = open_fs("mem://") 88 | m2.makedir("baz") 89 | m2.touch("egg") 90 | mirror(m1, m2, workers=self.WORKERS, preserve_time=True) 91 | self.assert_compare_fs(m1, m2) 92 | 93 | def test_mirror_wrong_type(self): 94 | m1 = open_fs("mem://") 95 | m1.writetext("foo", "hello") 96 | m1.makedir("bar") 97 | m2 = open_fs("mem://") 98 | m2.makedir("foo") 99 | m2.touch("bar") 100 | mirror(m1, m2, workers=self.WORKERS, preserve_time=True) 101 | self.assert_compare_fs(m1, m2) 102 | 103 | def test_mirror_update(self): 104 | m1 = open_fs("mem://") 105 | m1.writetext("foo", "hello") 106 | m1.makedir("bar") 107 | m2 = open_fs("mem://") 108 | mirror(m1, m2, workers=self.WORKERS, preserve_time=True) 109 | self.assert_compare_fs(m1, m2) 110 | m2.appendtext("foo", " world!") 111 | mirror(m1, m2, workers=self.WORKERS, preserve_time=True) 112 | self.assert_compare_fs(m1, m2) 113 | -------------------------------------------------------------------------------- /tests/test_mode.py: -------------------------------------------------------------------------------- 1 | from __future__ import unicode_literals 2 | 3 | import unittest 4 | from six import text_type 5 | 6 | from fs.mode import Mode, check_readable, check_writable 7 | 8 | 9 | class TestMode(unittest.TestCase): 10 | def test_checks(self): 11 | self.assertTrue(check_readable("r")) 12 | self.assertTrue(check_readable("r+")) 13 | self.assertTrue(check_readable("rt")) 14 | self.assertTrue(check_readable("rb")) 15 | 16 | self.assertFalse(check_readable("w")) 17 | self.assertTrue(check_readable("w+")) 18 | self.assertFalse(check_readable("wt")) 19 | self.assertFalse(check_readable("wb")) 20 | self.assertFalse(check_readable("a")) 21 | 22 | self.assertTrue(check_writable("w")) 23 | self.assertTrue(check_writable("w+")) 24 | self.assertTrue(check_writable("r+")) 25 | self.assertFalse(check_writable("r")) 26 | self.assertTrue(check_writable("a")) 27 | 28 | def test_mode_object(self): 29 | with self.assertRaises(ValueError): 30 | Mode("") 31 | with self.assertRaises(ValueError): 32 | Mode("J") 33 | with self.assertRaises(ValueError): 34 | Mode("b") 35 | with self.assertRaises(ValueError): 36 | Mode("rtb") 37 | 38 | mode = Mode("w") 39 | repr(mode) 40 | self.assertEqual(text_type(mode), "w") 41 | self.assertTrue(mode.create) 42 | self.assertFalse(mode.reading) 43 | 
self.assertTrue(mode.writing) 44 | self.assertFalse(mode.appending) 45 | self.assertFalse(mode.updating) 46 | self.assertTrue(mode.truncate) 47 | self.assertFalse(mode.exclusive) 48 | self.assertFalse(mode.binary) 49 | self.assertTrue(mode.text) 50 | -------------------------------------------------------------------------------- /tests/test_mountfs.py: -------------------------------------------------------------------------------- 1 | from __future__ import unicode_literals 2 | 3 | import unittest 4 | 5 | from fs.memoryfs import MemoryFS 6 | from fs.mountfs import MountError, MountFS 7 | from fs.tempfs import TempFS 8 | from fs.test import FSTestCases 9 | 10 | 11 | class TestMountFS(FSTestCases, unittest.TestCase): 12 | """Test OSFS implementation.""" 13 | 14 | def make_fs(self): 15 | fs = MountFS() 16 | mem_fs = MemoryFS() 17 | fs.mount("/", mem_fs) 18 | return fs 19 | 20 | 21 | class TestMountFS2(FSTestCases, unittest.TestCase): 22 | """Test OSFS implementation.""" 23 | 24 | def make_fs(self): 25 | fs = MountFS() 26 | mem_fs = MemoryFS() 27 | fs.mount("/foo", mem_fs) 28 | return fs.opendir("foo") 29 | 30 | 31 | class TestMountFSBehaviours(unittest.TestCase): 32 | def test_bad_mount(self): 33 | mount_fs = MountFS() 34 | with self.assertRaises(TypeError): 35 | mount_fs.mount("foo", 5) 36 | with self.assertRaises(TypeError): 37 | mount_fs.mount("foo", b"bar") 38 | 39 | def test_listdir(self): 40 | mount_fs = MountFS() 41 | self.assertEqual(mount_fs.listdir("/"), []) 42 | m1 = MemoryFS() 43 | m3 = MemoryFS() 44 | m4 = TempFS() 45 | mount_fs.mount("/m1", m1) 46 | mount_fs.mount("/m2", "temp://") 47 | mount_fs.mount("/m3", m3) 48 | with self.assertRaises(MountError): 49 | mount_fs.mount("/m3/foo", m4) 50 | self.assertEqual(sorted(mount_fs.listdir("/")), ["m1", "m2", "m3"]) 51 | m3.makedir("foo") 52 | self.assertEqual(sorted(mount_fs.listdir("/m3")), ["foo"]) 53 | 54 | def test_auto_close(self): 55 | """Test MountFS auto close is working""" 56 | mount_fs = MountFS() 57 | m1 = MemoryFS() 58 | m2 = MemoryFS() 59 | mount_fs.mount("/m1", m1) 60 | mount_fs.mount("/m2", m2) 61 | self.assertFalse(m1.isclosed()) 62 | self.assertFalse(m2.isclosed()) 63 | mount_fs.close() 64 | self.assertTrue(m1.isclosed()) 65 | self.assertTrue(m2.isclosed()) 66 | 67 | def test_no_auto_close(self): 68 | """Test MountFS auto close can be disabled""" 69 | mount_fs = MountFS(auto_close=False) 70 | m1 = MemoryFS() 71 | m2 = MemoryFS() 72 | mount_fs.mount("/m1", m1) 73 | mount_fs.mount("/m2", m2) 74 | self.assertFalse(m1.isclosed()) 75 | self.assertFalse(m2.isclosed()) 76 | mount_fs.close() 77 | self.assertFalse(m1.isclosed()) 78 | self.assertFalse(m2.isclosed()) 79 | 80 | def test_empty(self): 81 | """Test MountFS with nothing mounted.""" 82 | mount_fs = MountFS() 83 | self.assertEqual(mount_fs.listdir("/"), []) 84 | 85 | def test_mount_self(self): 86 | mount_fs = MountFS() 87 | with self.assertRaises(ValueError): 88 | mount_fs.mount("/", mount_fs) 89 | 90 | def test_desc(self): 91 | mount_fs = MountFS() 92 | mount_fs.desc("/") 93 | -------------------------------------------------------------------------------- /tests/test_multifs.py: -------------------------------------------------------------------------------- 1 | from __future__ import unicode_literals 2 | 3 | import unittest 4 | 5 | from fs import errors 6 | from fs.memoryfs import MemoryFS 7 | from fs.multifs import MultiFS 8 | from fs.test import FSTestCases 9 | 10 | 11 | class TestMultiFS(FSTestCases, unittest.TestCase): 12 | """Test OSFS implementation.""" 13 | 
14 | def setUp(self): 15 | fs = MultiFS() 16 | mem_fs = MemoryFS() 17 | fs.add_fs("mem", mem_fs, write=True) 18 | self.fs = fs 19 | self.mem_fs = mem_fs 20 | 21 | def make_fs(self): 22 | fs = MultiFS() 23 | mem_fs = MemoryFS() 24 | fs.add_fs("mem", mem_fs, write=True) 25 | return fs 26 | 27 | def test_get_fs(self): 28 | self.assertIs(self.fs.get_fs("mem"), self.mem_fs) 29 | 30 | def test_which(self): 31 | self.fs.writebytes("foo", b"bar") 32 | self.assertEqual(self.fs.which("foo"), ("mem", self.mem_fs)) 33 | self.assertEqual(self.fs.which("bar", "w"), ("mem", self.mem_fs)) 34 | self.assertEqual(self.fs.which("baz"), (None, None)) 35 | 36 | def test_auto_close(self): 37 | """Test MultiFS auto close is working""" 38 | multi_fs = MultiFS() 39 | m1 = MemoryFS() 40 | m2 = MemoryFS() 41 | multi_fs.add_fs("m1", m1) 42 | multi_fs.add_fs("m2", m2) 43 | self.assertFalse(m1.isclosed()) 44 | self.assertFalse(m2.isclosed()) 45 | multi_fs.close() 46 | self.assertTrue(m1.isclosed()) 47 | self.assertTrue(m2.isclosed()) 48 | 49 | def test_no_auto_close(self): 50 | """Test MultiFS auto close can be disabled""" 51 | multi_fs = MultiFS(auto_close=False) 52 | self.assertEqual(repr(multi_fs), "MultiFS(auto_close=False)") 53 | m1 = MemoryFS() 54 | m2 = MemoryFS() 55 | multi_fs.add_fs("m1", m1) 56 | multi_fs.add_fs("m2", m2) 57 | self.assertFalse(m1.isclosed()) 58 | self.assertFalse(m2.isclosed()) 59 | multi_fs.close() 60 | self.assertFalse(m1.isclosed()) 61 | self.assertFalse(m2.isclosed()) 62 | 63 | def test_opener(self): 64 | """Test use of FS URLs.""" 65 | multi_fs = MultiFS() 66 | with self.assertRaises(TypeError): 67 | multi_fs.add_fs("foo", 5) 68 | multi_fs.add_fs("f1", "mem://") 69 | multi_fs.add_fs("f2", "temp://") 70 | self.assertIsInstance(multi_fs.get_fs("f1"), MemoryFS) 71 | 72 | def test_priority(self): 73 | """Test priority order is working""" 74 | m1 = MemoryFS() 75 | m2 = MemoryFS() 76 | m3 = MemoryFS() 77 | m1.writebytes("name", b"m1") 78 | m2.writebytes("name", b"m2") 79 | m3.writebytes("name", b"m3") 80 | multi_fs = MultiFS(auto_close=False) 81 | multi_fs.add_fs("m1", m1) 82 | multi_fs.add_fs("m2", m2) 83 | multi_fs.add_fs("m3", m3) 84 | self.assertEqual(multi_fs.readbytes("name"), b"m3") 85 | 86 | m1 = MemoryFS() 87 | m2 = MemoryFS() 88 | m3 = MemoryFS() 89 | m1.writebytes("name", b"m1") 90 | m2.writebytes("name", b"m2") 91 | m3.writebytes("name", b"m3") 92 | multi_fs = MultiFS(auto_close=False) 93 | multi_fs.add_fs("m1", m1) 94 | multi_fs.add_fs("m2", m2, priority=10) 95 | multi_fs.add_fs("m3", m3) 96 | self.assertEqual(multi_fs.readbytes("name"), b"m2") 97 | 98 | m1 = MemoryFS() 99 | m2 = MemoryFS() 100 | m3 = MemoryFS() 101 | m1.writebytes("name", b"m1") 102 | m2.writebytes("name", b"m2") 103 | m3.writebytes("name", b"m3") 104 | multi_fs = MultiFS(auto_close=False) 105 | multi_fs.add_fs("m1", m1) 106 | multi_fs.add_fs("m2", m2, priority=10) 107 | multi_fs.add_fs("m3", m3, priority=10) 108 | self.assertEqual(multi_fs.readbytes("name"), b"m3") 109 | 110 | m1 = MemoryFS() 111 | m2 = MemoryFS() 112 | m3 = MemoryFS() 113 | m1.writebytes("name", b"m1") 114 | m2.writebytes("name", b"m2") 115 | m3.writebytes("name", b"m3") 116 | multi_fs = MultiFS(auto_close=False) 117 | multi_fs.add_fs("m1", m1, priority=11) 118 | multi_fs.add_fs("m2", m2, priority=10) 119 | multi_fs.add_fs("m3", m3, priority=10) 120 | self.assertEqual(multi_fs.readbytes("name"), b"m1") 121 | 122 | def test_no_writable(self): 123 | fs = MultiFS() 124 | with self.assertRaises(errors.ResourceReadOnly): 125 | fs.writebytes("foo", 
b"bar") 126 | 127 | def test_validate_path(self): 128 | self.fs.write_fs = None 129 | self.fs.validatepath("foo") 130 | 131 | def test_listdir_duplicates(self): 132 | m1 = MemoryFS() 133 | m2 = MemoryFS() 134 | m1.touch("foo") 135 | m2.touch("foo") 136 | multi_fs = MultiFS() 137 | multi_fs.add_fs("m1", m1) 138 | multi_fs.add_fs("m2", m2) 139 | self.assertEqual(multi_fs.listdir("/"), ["foo"]) 140 | -------------------------------------------------------------------------------- /tests/test_new_name.py: -------------------------------------------------------------------------------- 1 | from __future__ import unicode_literals 2 | 3 | import unittest 4 | import warnings 5 | 6 | from fs.base import _new_name 7 | 8 | 9 | class TestNewNameDecorator(unittest.TestCase): 10 | def double(self, n): 11 | "Double a number" 12 | return n * 2 13 | 14 | times_2 = _new_name(double, "times_2") 15 | 16 | def test_old_name(self): 17 | """Test _new_name method issues a warning""" 18 | with warnings.catch_warnings(record=True) as w: 19 | warnings.simplefilter("always") 20 | result = self.times_2(2) 21 | self.assertEqual(len(w), 1) 22 | self.assertEqual(w[0].category, DeprecationWarning) 23 | self.assertEqual( 24 | str(w[0].message), 25 | "method 'times_2' has been deprecated, please rename to 'double'", 26 | ) 27 | self.assertEqual(result, 4) 28 | -------------------------------------------------------------------------------- /tests/test_permissions.py: -------------------------------------------------------------------------------- 1 | from __future__ import print_function, unicode_literals 2 | 3 | import unittest 4 | from six import text_type 5 | 6 | from fs.permissions import Permissions, make_mode 7 | 8 | 9 | class TestPermissions(unittest.TestCase): 10 | def test_make_mode(self): 11 | self.assertEqual(make_mode(None), 0o777) 12 | self.assertEqual(make_mode(0o755), 0o755) 13 | self.assertEqual(make_mode(["u_r", "u_w", "u_x"]), 0o700) 14 | self.assertEqual(make_mode(Permissions(user="rwx")), 0o700) 15 | 16 | def test_parse(self): 17 | self.assertEqual(Permissions.parse("---------").mode, 0) 18 | self.assertEqual(Permissions.parse("rwxrw-r--").mode, 0o764) 19 | 20 | def test_create(self): 21 | self.assertEqual(Permissions.create(None).mode, 0o777) 22 | self.assertEqual(Permissions.create(0o755).mode, 0o755) 23 | self.assertEqual(Permissions.create(["u_r", "u_w", "u_x"]).mode, 0o700) 24 | self.assertEqual(Permissions.create(Permissions(user="rwx")).mode, 0o700) 25 | with self.assertRaises(ValueError): 26 | Permissions.create("foo") 27 | 28 | def test_constructor(self): 29 | p = Permissions(names=["foo", "bar"]) 30 | self.assertIn("foo", p) 31 | self.assertIn("bar", p) 32 | self.assertNotIn("baz", p) 33 | 34 | p = Permissions(user="r", group="w", other="x") 35 | self.assertIn("u_r", p) 36 | self.assertIn("g_w", p) 37 | self.assertIn("o_x", p) 38 | self.assertNotIn("sticky", p) 39 | self.assertNotIn("setuid", p) 40 | self.assertNotIn("setguid", p) 41 | 42 | p = Permissions( 43 | user="rwx", group="rwx", other="rwx", sticky=True, setuid=True, setguid=True 44 | ) 45 | self.assertIn("sticky", p) 46 | self.assertIn("setuid", p) 47 | self.assertIn("setguid", p) 48 | 49 | p = Permissions(mode=0o421) 50 | self.assertIn("u_r", p) 51 | self.assertIn("g_w", p) 52 | self.assertIn("o_x", p) 53 | self.assertNotIn("u_w", p) 54 | self.assertNotIn("g_x", p) 55 | self.assertNotIn("o_r", p) 56 | self.assertNotIn("sticky", p) 57 | self.assertNotIn("setuid", p) 58 | self.assertNotIn("setguid", p) 59 | 60 | def 
test_properties(self): 61 | p = Permissions() 62 | self.assertFalse(p.u_r) 63 | self.assertNotIn("u_r", p) 64 | p.u_r = True 65 | self.assertIn("u_r", p) 66 | self.assertTrue(p.u_r) 67 | p.u_r = False 68 | self.assertNotIn("u_r", p) 69 | self.assertFalse(p.u_r) 70 | 71 | self.assertFalse(p.u_w) 72 | p.add("u_w") 73 | self.assertTrue(p.u_w) 74 | p.remove("u_w") 75 | self.assertFalse(p.u_w) 76 | 77 | def test_repr(self): 78 | self.assertEqual( 79 | repr(Permissions()), "Permissions(user='', group='', other='')" 80 | ) 81 | self.assertEqual(repr(Permissions(names=["foo"])), "Permissions(names=['foo'])") 82 | repr(Permissions(user="rwx", group="rw", other="r")) 83 | repr(Permissions(user="rwx", group="rw", other="r", sticky=True)) 84 | repr(Permissions(user="rwx", group="rw", other="r", setuid=True)) 85 | repr(Permissions(user="rwx", group="rw", other="r", setguid=True)) 86 | 87 | def test_as_str(self): 88 | p = Permissions(user="rwx", group="rwx", other="rwx") 89 | self.assertEqual(p.as_str(), "rwxrwxrwx") 90 | self.assertEqual(str(p), "rwxrwxrwx") 91 | p = Permissions(mode=0o777, setuid=True, setguid=True, sticky=True) 92 | self.assertEqual(p.as_str(), "rwsrwsrwt") 93 | 94 | def test_mode(self): 95 | p = Permissions(user="rwx", group="rw", other="") 96 | self.assertEqual(p.mode, 0o760) 97 | 98 | def test_serialize(self): 99 | p = Permissions(names=["foo"]) 100 | self.assertEqual(p.dump(), ["foo"]) 101 | pp = Permissions.load(["foo"]) 102 | self.assertIn("foo", pp) 103 | 104 | def test_iter(self): 105 | iter_p = iter(Permissions(names=["foo"])) 106 | self.assertEqual(list(iter_p), ["foo"]) 107 | 108 | def test_equality(self): 109 | self.assertEqual(Permissions(mode=0o700), Permissions(user="rwx")) 110 | self.assertNotEqual(Permissions(mode=0o500), Permissions(user="rwx")) 111 | 112 | self.assertEqual(Permissions(mode=0o700), ["u_r", "u_w", "u_x"]) 113 | 114 | def test_copy(self): 115 | p = Permissions(mode=0o700) 116 | p_copy = p.copy() 117 | self.assertIsNot(p, p_copy) 118 | self.assertEqual(p, p_copy) 119 | 120 | def test_check(self): 121 | p = Permissions(user="rwx") 122 | self.assertTrue(p.check("u_r")) 123 | self.assertTrue(p.check("u_r", "u_w")) 124 | self.assertTrue(p.check("u_r", "u_w", "u_x")) 125 | self.assertFalse(p.check("u_r", "g_w")) 126 | self.assertFalse(p.check("g_r", "g_w")) 127 | self.assertFalse(p.check("foo")) 128 | 129 | def test_mode_set(self): 130 | p = Permissions(user="r") 131 | self.assertEqual(text_type(p), "r--------") 132 | p.mode = 0o700 133 | self.assertEqual(text_type(p), "rwx------") 134 | -------------------------------------------------------------------------------- /tests/test_subfs.py: -------------------------------------------------------------------------------- 1 | from __future__ import unicode_literals 2 | 3 | import os 4 | import shutil 5 | import tempfile 6 | import unittest 7 | 8 | from fs import osfs 9 | from fs.memoryfs import MemoryFS 10 | from fs.path import relpath 11 | from fs.subfs import SubFS 12 | 13 | from .test_osfs import TestOSFS 14 | 15 | 16 | class TestSubFS(TestOSFS): 17 | """Test OSFS implementation.""" 18 | 19 | def setUp(self): 20 | self.temp_dir = tempfile.mkdtemp("fstest") 21 | self.parent_fs = osfs.OSFS(self.temp_dir) 22 | self.parent_fs.makedir("__subdir__") 23 | self.fs = self.parent_fs.opendir("__subdir__") 24 | 25 | def tearDown(self): 26 | shutil.rmtree(self.temp_dir) 27 | self.parent_fs.close() 28 | self.fs.close() 29 | 30 | def _get_real_path(self, path): 31 | _path = os.path.join(self.temp_dir, "__subdir__", 
relpath(path)) 32 | return _path 33 | 34 | 35 | class CustomSubFS(SubFS): 36 | """Just a custom class to change the type""" 37 | 38 | def custom_function(self, custom_path): 39 | fs, delegate_path = self.delegate_path(custom_path) 40 | fs.custom_function(delegate_path) 41 | 42 | 43 | class CustomSubFS2(SubFS): 44 | """Just a custom class to change the type""" 45 | 46 | 47 | class CustomFS(MemoryFS): 48 | subfs_class = CustomSubFS 49 | 50 | def __init__(self): 51 | super(CustomFS, self).__init__() 52 | self.custom_path = None 53 | 54 | def custom_function(self, custom_path): 55 | self.custom_path = custom_path 56 | 57 | 58 | class TestCustomSubFS(unittest.TestCase): 59 | """Test customization of the SubFS returned from opendir etc""" 60 | 61 | def test_opendir(self): 62 | fs = CustomFS() 63 | fs.makedir("__subdir__") 64 | subfs = fs.opendir("__subdir__") 65 | # By default, you get the fs's defined custom SubFS 66 | assert isinstance(subfs, CustomSubFS) 67 | 68 | subfs.custom_function("filename") 69 | assert fs.custom_path == "/__subdir__/filename" 70 | 71 | # Providing the factory explicitly still works 72 | subfs = fs.opendir("__subdir__", factory=CustomSubFS2) 73 | assert isinstance(subfs, CustomSubFS2) 74 | -------------------------------------------------------------------------------- /tests/test_tempfs.py: -------------------------------------------------------------------------------- 1 | from __future__ import unicode_literals 2 | 3 | import os 4 | 5 | from fs import errors 6 | from fs.tempfs import TempFS 7 | 8 | from .test_osfs import TestOSFS 9 | 10 | try: 11 | from unittest import mock 12 | except ImportError: 13 | import mock 14 | 15 | 16 | class TestTempFS(TestOSFS): 17 | """Test OSFS implementation.""" 18 | 19 | def make_fs(self): 20 | return TempFS() 21 | 22 | def test_clean(self): 23 | t = TempFS() 24 | _temp_dir = t.getsyspath("/") 25 | self.assertTrue(os.path.isdir(_temp_dir)) 26 | t.close() 27 | self.assertFalse(os.path.isdir(_temp_dir)) 28 | 29 | @mock.patch("shutil.rmtree", create=True) 30 | def test_clean_error(self, rmtree): 31 | rmtree.side_effect = Exception("boom") 32 | with self.assertRaises(errors.OperationFailed): 33 | t = TempFS(ignore_clean_errors=False) 34 | t.writebytes("foo", b"bar") 35 | t.close() 36 | -------------------------------------------------------------------------------- /tests/test_time.py: -------------------------------------------------------------------------------- 1 | from __future__ import print_function, unicode_literals 2 | 3 | import unittest 4 | from datetime import datetime 5 | 6 | from fs.time import datetime_to_epoch, epoch_to_datetime 7 | 8 | try: 9 | from datetime import timezone 10 | except ImportError: 11 | from fs._tzcompat import timezone # type: ignore 12 | 13 | 14 | class TestEpoch(unittest.TestCase): 15 | def test_epoch_to_datetime(self): 16 | self.assertEqual( 17 | epoch_to_datetime(142214400), datetime(1974, 7, 5, tzinfo=timezone.utc) 18 | ) 19 | 20 | def test_datetime_to_epoch(self): 21 | self.assertEqual( 22 | datetime_to_epoch(datetime(1974, 7, 5, tzinfo=timezone.utc)), 142214400 23 | ) 24 | -------------------------------------------------------------------------------- /tests/test_tools.py: -------------------------------------------------------------------------------- 1 | from __future__ import unicode_literals 2 | 3 | import unittest 4 | 5 | from fs import tools 6 | from fs.mode import validate_open_mode, validate_openbin_mode 7 | from fs.opener import open_fs 8 | 9 | 10 | class TestTools(unittest.TestCase): 11 
| def test_remove_empty(self): 12 | fs = open_fs("temp://") 13 | fs.makedirs("foo/bar/baz/egg/") 14 | fs.create("foo/bar/test.txt") 15 | 16 | tools.remove_empty(fs, "foo/bar/baz/egg") 17 | self.assertFalse(fs.isdir("foo/bar/baz")) 18 | self.assertTrue(fs.isdir("foo/bar")) 19 | fs.remove("foo/bar/test.txt") 20 | 21 | tools.remove_empty(fs, "foo/bar") 22 | self.assertEqual(fs.listdir("/"), []) 23 | 24 | def test_validate_openbin_mode(self): 25 | with self.assertRaises(ValueError): 26 | validate_openbin_mode("X") 27 | with self.assertRaises(ValueError): 28 | validate_openbin_mode("") 29 | with self.assertRaises(ValueError): 30 | validate_openbin_mode("rX") 31 | with self.assertRaises(ValueError): 32 | validate_openbin_mode("rt") 33 | validate_openbin_mode("r") 34 | validate_openbin_mode("w") 35 | validate_openbin_mode("a") 36 | validate_openbin_mode("r+") 37 | validate_openbin_mode("w+") 38 | validate_openbin_mode("a+") 39 | 40 | def test_validate_open_mode(self): 41 | with self.assertRaises(ValueError): 42 | validate_open_mode("X") 43 | with self.assertRaises(ValueError): 44 | validate_open_mode("") 45 | with self.assertRaises(ValueError): 46 | validate_open_mode("rX") 47 | 48 | validate_open_mode("rt") 49 | validate_open_mode("r") 50 | validate_open_mode("rb") 51 | validate_open_mode("w") 52 | validate_open_mode("a") 53 | validate_open_mode("r+") 54 | validate_open_mode("w+") 55 | validate_open_mode("a+") 56 | -------------------------------------------------------------------------------- /tests/test_tree.py: -------------------------------------------------------------------------------- 1 | from __future__ import print_function, unicode_literals 2 | 3 | import io 4 | import unittest 5 | 6 | from fs import tree 7 | from fs.memoryfs import MemoryFS 8 | 9 | 10 | class TestInfo(unittest.TestCase): 11 | def setUp(self): 12 | self.fs = MemoryFS() 13 | self.fs.makedir("foo") 14 | self.fs.makedir("bar") 15 | self.fs.makedir("baz") 16 | self.fs.makedirs("foo/egg1") 17 | self.fs.makedirs("foo/egg2") 18 | self.fs.create("/root1") 19 | self.fs.create("/root2") 20 | self.fs.create("/foo/test.txt") 21 | self.fs.create("/foo/test2.txt") 22 | self.fs.create("/foo/.hidden") 23 | self.fs.makedirs("/deep/deep1/deep2/deep3/deep4/deep5/deep6") 24 | 25 | def test_tree(self): 26 | 27 | output_file = io.StringIO() 28 | 29 | tree.render(self.fs, file=output_file) 30 | 31 | expected = "|-- bar\n|-- baz\n|-- deep\n| `-- deep1\n| `-- deep2\n| `-- deep3\n| `-- deep4\n| `-- deep5\n|-- foo\n| |-- egg1\n| |-- egg2\n| |-- .hidden\n| |-- test.txt\n| `-- test2.txt\n|-- root1\n`-- root2\n" 32 | self.assertEqual(output_file.getvalue(), expected) 33 | 34 | def test_tree_encoding(self): 35 | 36 | output_file = io.StringIO() 37 | 38 | tree.render(self.fs, file=output_file, with_color=True) 39 | 40 | print(repr(output_file.getvalue())) 41 | 42 | expected = "\x1b[32m\u251c\u2500\u2500\x1b[0m \x1b[1;34mbar\x1b[0m\n\x1b[32m\u251c\u2500\u2500\x1b[0m \x1b[1;34mbaz\x1b[0m\n\x1b[32m\u251c\u2500\u2500\x1b[0m \x1b[1;34mdeep\x1b[0m\n\x1b[32m\u2502 \u2514\u2500\u2500\x1b[0m \x1b[1;34mdeep1\x1b[0m\n\x1b[32m\u2502 \u2514\u2500\u2500\x1b[0m \x1b[1;34mdeep2\x1b[0m\n\x1b[32m\u2502 \u2514\u2500\u2500\x1b[0m \x1b[1;34mdeep3\x1b[0m\n\x1b[32m\u2502 \u2514\u2500\u2500\x1b[0m \x1b[1;34mdeep4\x1b[0m\n\x1b[32m\u2502 \u2514\u2500\u2500\x1b[0m \x1b[1;34mdeep5\x1b[0m\n\x1b[32m\u251c\u2500\u2500\x1b[0m \x1b[1;34mfoo\x1b[0m\n\x1b[32m\u2502 \u251c\u2500\u2500\x1b[0m \x1b[1;34megg1\x1b[0m\n\x1b[32m\u2502 \u251c\u2500\u2500\x1b[0m 
\x1b[1;34megg2\x1b[0m\n\x1b[32m\u2502 \u251c\u2500\u2500\x1b[0m \x1b[33m.hidden\x1b[0m\n\x1b[32m\u2502 \u251c\u2500\u2500\x1b[0m test.txt\n\x1b[32m\u2502 \u2514\u2500\u2500\x1b[0m test2.txt\n\x1b[32m\u251c\u2500\u2500\x1b[0m root1\n\x1b[32m\u2514\u2500\u2500\x1b[0m root2\n" 43 | self.assertEqual(output_file.getvalue(), expected) 44 | 45 | def test_tree_bytes_no_dirs_first(self): 46 | 47 | output_file = io.StringIO() 48 | 49 | tree.render(self.fs, file=output_file, dirs_first=False) 50 | 51 | expected = "|-- bar\n|-- baz\n|-- deep\n| `-- deep1\n| `-- deep2\n| `-- deep3\n| `-- deep4\n| `-- deep5\n|-- foo\n| |-- .hidden\n| |-- egg1\n| |-- egg2\n| |-- test.txt\n| `-- test2.txt\n|-- root1\n`-- root2\n" 52 | self.assertEqual(output_file.getvalue(), expected) 53 | 54 | def test_error(self): 55 | output_file = io.StringIO() 56 | 57 | filterdir = self.fs.filterdir 58 | 59 | def broken_filterdir(path, **kwargs): 60 | if path.startswith("/deep/deep1/"): 61 | # Because error messages differ accross Python versions 62 | raise Exception("integer division or modulo by zero") 63 | return filterdir(path, **kwargs) 64 | 65 | self.fs.filterdir = broken_filterdir 66 | tree.render(self.fs, file=output_file, with_color=True) 67 | 68 | expected = "\x1b[32m\u251c\u2500\u2500\x1b[0m \x1b[1;34mbar\x1b[0m\n\x1b[32m\u251c\u2500\u2500\x1b[0m \x1b[1;34mbaz\x1b[0m\n\x1b[32m\u251c\u2500\u2500\x1b[0m \x1b[1;34mdeep\x1b[0m\n\x1b[32m\u2502 \u2514\u2500\u2500\x1b[0m \x1b[1;34mdeep1\x1b[0m\n\x1b[32m\u2502 \u2514\u2500\u2500\x1b[0m \x1b[1;34mdeep2\x1b[0m\n\x1b[32m\u2502 \u2514\u2500\u2500\x1b[0m \x1b[31merror (integer division or modulo by zero)\x1b[0m\n\x1b[32m\u251c\u2500\u2500\x1b[0m \x1b[1;34mfoo\x1b[0m\n\x1b[32m\u2502 \u251c\u2500\u2500\x1b[0m \x1b[1;34megg1\x1b[0m\n\x1b[32m\u2502 \u251c\u2500\u2500\x1b[0m \x1b[1;34megg2\x1b[0m\n\x1b[32m\u2502 \u251c\u2500\u2500\x1b[0m \x1b[33m.hidden\x1b[0m\n\x1b[32m\u2502 \u251c\u2500\u2500\x1b[0m test.txt\n\x1b[32m\u2502 \u2514\u2500\u2500\x1b[0m test2.txt\n\x1b[32m\u251c\u2500\u2500\x1b[0m root1\n\x1b[32m\u2514\u2500\u2500\x1b[0m root2\n" 69 | tree_output = output_file.getvalue() 70 | print(repr(tree_output)) 71 | 72 | self.assertEqual(expected, tree_output) 73 | 74 | output_file = io.StringIO() 75 | tree.render(self.fs, file=output_file, with_color=False) 76 | 77 | expected = "|-- bar\n|-- baz\n|-- deep\n| `-- deep1\n| `-- deep2\n| `-- error (integer division or modulo by zero)\n|-- foo\n| |-- egg1\n| |-- egg2\n| |-- .hidden\n| |-- test.txt\n| `-- test2.txt\n|-- root1\n`-- root2\n" 78 | self.assertEqual(expected, output_file.getvalue()) 79 | -------------------------------------------------------------------------------- /tests/test_url_tools.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | """Test url tools. 
""" 3 | from __future__ import unicode_literals 4 | 5 | import platform 6 | import unittest 7 | 8 | from fs._url_tools import url_quote 9 | 10 | 11 | class TestBase(unittest.TestCase): 12 | def test_quote(self): 13 | test_fixtures = [ 14 | # test_snippet, expected 15 | ["foo/bar/egg/foofoo", "foo/bar/egg/foofoo"], 16 | ["foo/bar ha/barz", "foo/bar%20ha/barz"], 17 | ["example b.txt", "example%20b.txt"], 18 | ["exampleㄓ.txt", "example%E3%84%93.txt"], 19 | ] 20 | if platform.system() == "Windows": 21 | test_fixtures.extend( 22 | [ 23 | ["C:\\My Documents\\test.txt", "C:/My%20Documents/test.txt"], 24 | ["C:/My Documents/test.txt", "C:/My%20Documents/test.txt"], 25 | # on Windows '\' is regarded as path separator 26 | ["test/forward\\slash", "test/forward/slash"], 27 | ] 28 | ) 29 | else: 30 | test_fixtures.extend( 31 | [ 32 | # colon:tmp is bad path under Windows 33 | ["test/colon:tmp", "test/colon%3Atmp"], 34 | # Unix treat \ as %5C 35 | ["test/forward\\slash", "test/forward%5Cslash"], 36 | ] 37 | ) 38 | for test_snippet, expected in test_fixtures: 39 | self.assertEqual(url_quote(test_snippet), expected) 40 | -------------------------------------------------------------------------------- /tests/test_wildcard.py: -------------------------------------------------------------------------------- 1 | """Test f2s.fnmatch.""" 2 | 3 | from __future__ import unicode_literals 4 | 5 | import unittest 6 | 7 | from fs import wildcard 8 | 9 | 10 | class TestFNMatch(unittest.TestCase): 11 | """Test wildcard.""" 12 | 13 | def test_wildcard(self): 14 | self.assertTrue(wildcard.match("*.py", "file.py")) 15 | self.assertTrue(wildcard.match("????.py", "????.py")) 16 | self.assertTrue(wildcard.match("file.py", "file.py")) 17 | self.assertTrue(wildcard.match("file.py[co]", "file.pyc")) 18 | self.assertTrue(wildcard.match("file.py[co]", "file.pyo")) 19 | self.assertTrue(wildcard.match("file.py[!c]", "file.py0")) 20 | self.assertTrue(wildcard.match("file.py[^]", "file.py^")) 21 | 22 | self.assertFalse(wildcard.match("*.jpg", "file.py")) 23 | self.assertFalse(wildcard.match("toolong.py", "????.py")) 24 | self.assertFalse(wildcard.match("file.pyc", "file.py")) 25 | self.assertFalse(wildcard.match("file.py[co]", "file.pyz")) 26 | self.assertFalse(wildcard.match("file.py[!o]", "file.pyo")) 27 | self.assertFalse(wildcard.match("file.py[]", "file.py0")) 28 | 29 | self.assertTrue(wildcard.imatch("*.py", "FILE.py")) 30 | self.assertTrue(wildcard.imatch("*.py", "file.PY")) 31 | 32 | def test_match_any(self): 33 | self.assertTrue(wildcard.match_any([], "foo.py")) 34 | self.assertTrue(wildcard.imatch_any([], "foo.py")) 35 | self.assertTrue(wildcard.match_any(["*.py", "*.pyc"], "foo.pyc")) 36 | self.assertTrue(wildcard.imatch_any(["*.py", "*.pyc"], "FOO.pyc")) 37 | 38 | def test_get_matcher(self): 39 | matcher = wildcard.get_matcher([], True) 40 | self.assertTrue(matcher("foo.py")) 41 | matcher = wildcard.get_matcher(["*.py"], True) 42 | self.assertTrue(matcher("foo.py")) 43 | self.assertFalse(matcher("foo.PY")) 44 | matcher = wildcard.get_matcher(["*.py"], False) 45 | self.assertTrue(matcher("foo.py")) 46 | self.assertTrue(matcher("FOO.py")) 47 | -------------------------------------------------------------------------------- /tests/test_wrapfs.py: -------------------------------------------------------------------------------- 1 | from __future__ import unicode_literals 2 | 3 | import unittest 4 | from six import text_type 5 | 6 | from fs import wrapfs 7 | from fs.opener import open_fs 8 | 9 | 10 | class 
WrappedFS(wrapfs.WrapFS): 11 | wrap_name = "test" 12 | 13 | 14 | class TestWrapFS(unittest.TestCase): 15 | def setUp(self): 16 | self.wrapped_fs = open_fs("mem://") 17 | self.fs = WrappedFS(self.wrapped_fs) 18 | 19 | def test_encode(self): 20 | self.assertEqual((self.wrapped_fs, "foo"), self.fs.delegate_path("foo")) 21 | self.assertEqual((self.wrapped_fs, "bar"), self.fs.delegate_path("bar")) 22 | self.assertIs(self.wrapped_fs, self.fs.delegate_fs()) 23 | 24 | def test_repr(self): 25 | self.assertEqual(repr(self.fs), "WrappedFS(MemoryFS())") 26 | 27 | def test_str(self): 28 | self.assertEqual(text_type(self.fs), "<memfs>(test)") 29 | self.assertEqual(text_type(wrapfs.WrapFS(open_fs("mem://"))), "<memfs>") 30 | --------------------------------------------------------------------------------