├── docs ├── _static │ ├── .empty │ └── custom.css ├── _templates │ └── .empty ├── releases.rst ├── proposed │ ├── index.rst │ ├── conflict-api.rst │ └── recovery.rst ├── Makefile ├── invite-diagram.seqdiag ├── invite-diagram-readonly.seqdiag ├── limitations.rst ├── backdoors.rst ├── release-process.rst ├── index.rst └── CODE_OF_CONDUCT.rst ├── integration ├── __init__.py ├── test_magic_folder.py ├── README ├── test_status.py ├── test_list.py ├── test_invite.py ├── test_general_cli.py ├── test_same_files.py ├── test_kitties.py ├── test_add.py └── test_tahoe_objects.py ├── newsfragments ├── 750.minor ├── 764.minor ├── 769.minor ├── 770.minor ├── 773.minor ├── .keep-directory ├── 764.feature └── 760.installation ├── src ├── magic_folder │ ├── join.py │ ├── util │ │ ├── __init__.py │ │ ├── encoding.py │ │ ├── attrs_zope.py │ │ ├── observer.py │ │ ├── wrap.py │ │ └── file.py │ ├── test │ │ ├── cli │ │ │ ├── __init__.py │ │ │ └── test_api_cli.py │ │ ├── plugins │ │ │ └── magic_folder_tests_dropin.py │ │ ├── agentutil.py │ │ ├── test_magicpath.py │ │ ├── test_endpoints.py │ │ ├── test_util_attrs_zope.py │ │ ├── test_strategies.py │ │ ├── test_util_database.py │ │ ├── __init__.py │ │ ├── test_util_file.py │ │ ├── test_pid.py │ │ ├── test_common.py │ │ ├── eliotutil.py │ │ └── matchers.py │ ├── testing │ │ └── __init__.py │ ├── __init__.py │ ├── __main__.py │ ├── magicpath.py │ ├── _coverage.py │ ├── show_config.py │ ├── initialize.py │ ├── _endpoint_parser.py │ ├── list.py │ ├── migrate.py │ ├── endpoints.py │ ├── pid.py │ └── common.py └── twisted │ └── plugins │ └── magic_folder_dropin.py ├── .github ├── pull_request_template.md └── workflows │ ├── codechecks.yml │ ├── macos.yaml │ ├── windows.yml │ └── linux.yml ├── requirements ├── read-the-docs.txt ├── tahoe-integration-1.17.txt ├── tahoe-integration-1.18.txt ├── tahoe-integration-1.19.txt ├── tahoe-integration-master.txt ├── tox.in ├── build.in ├── README ├── test.in ├── tox.txt ├── base.in └── platform.txt ├── 
.coveragerc ├── MANIFEST.in ├── misc ├── build_helpers │ ├── sqlite_version.py │ ├── update-version.py │ ├── platform-pins.py │ └── run-deprecations.py └── coding_tools │ └── check-debugging.py ├── pytest.ini ├── signatures ├── README.rst ├── magic-folder-22.10.0.tar.gz.asc ├── magic-folder-22.10.1.tar.gz.asc ├── magic-folder-22.2.0.tar.gz.asc ├── magic-folder-22.2.1.tar.gz.asc ├── magic-folder-22.5.0.tar.gz.asc ├── magic-folder-22.5.1.tar.gz.asc ├── magic-folder-22.8.0.tar.gz.asc ├── magic-folder-22.9.0.tar.gz.asc ├── magic-folder-23.1.1.tar.gz.asc ├── magic-folder-23.3.0.tar.gz.asc ├── magic-folder-23.3.1.tar.gz.asc ├── magic-folder-23.5.0.tar.gz.asc ├── magic-folder-23.6.0.tar.gz.asc ├── magic_folder-22.1.0.tar.gz.asc ├── magic_folder-22.1.0-py3-none-any.whl.asc ├── magic_folder-22.10.0-py3-none-any.whl.asc ├── magic_folder-22.10.1-py3-none-any.whl.asc ├── magic_folder-22.2.0-py3-none-any.whl.asc ├── magic_folder-22.2.1-py3-none-any.whl.asc ├── magic_folder-22.5.0-py3-none-any.whl.asc ├── magic_folder-22.5.1-py3-none-any.whl.asc ├── magic_folder-22.8.0-py3-none-any.whl.asc ├── magic_folder-22.9.0-py3-none-any.whl.asc ├── magic_folder-23.1.1-py3-none-any.whl.asc ├── magic_folder-23.3.0-py3-none-any.whl.asc ├── magic_folder-23.3.1-py3-none-any.whl.asc ├── magic_folder-23.5.0-py3-none-any.whl.asc └── magic_folder-23.6.0-py3-none-any.whl.asc ├── .flake8 ├── .gitignore ├── .readthedocs.yaml ├── pyproject.toml ├── Makefile ├── setup.py ├── CREDITS └── README.rst /docs/_static/.empty: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /docs/_templates/.empty: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /integration/__init__.py: -------------------------------------------------------------------------------- 1 | 
-------------------------------------------------------------------------------- /newsfragments/750.minor: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /newsfragments/764.minor: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /newsfragments/769.minor: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /newsfragments/770.minor: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /newsfragments/773.minor: -------------------------------------------------------------------------------- 1 | 2 | -------------------------------------------------------------------------------- /src/magic_folder/join.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /.github/pull_request_template.md: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /newsfragments/.keep-directory: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /src/magic_folder/util/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /src/magic_folder/test/cli/__init__.py: -------------------------------------------------------------------------------- 1 | 
-------------------------------------------------------------------------------- /src/magic_folder/testing/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /requirements/read-the-docs.txt: -------------------------------------------------------------------------------- 1 | sphinx-rtd-theme 2 | -------------------------------------------------------------------------------- /newsfragments/764.feature: -------------------------------------------------------------------------------- 1 | Update CI, due to various deprecations and time passing 2 | -------------------------------------------------------------------------------- /docs/_static/custom.css: -------------------------------------------------------------------------------- 1 | #known-issues-and-limitations li { 2 | margin-bottom: 1em; 3 | } 4 | -------------------------------------------------------------------------------- /newsfragments/760.installation: -------------------------------------------------------------------------------- 1 | Replaced deprecated `appdirs` dependency with `platformdirs` 2 | -------------------------------------------------------------------------------- /requirements/tahoe-integration-1.17.txt: -------------------------------------------------------------------------------- 1 | tahoe-lafs==1.17.1 2 | attrs<24.1.0 3 | cryptography<43.0.0 4 | -------------------------------------------------------------------------------- /requirements/tahoe-integration-1.18.txt: -------------------------------------------------------------------------------- 1 | tahoe-lafs==1.18.0 2 | attrs<24.1.0 3 | cryptography<43.0.0 4 | -------------------------------------------------------------------------------- /requirements/tahoe-integration-1.19.txt: -------------------------------------------------------------------------------- 1 | tahoe-lafs==1.19.0 2 | attrs<24.1.0 3 | 
cryptography<43.0.0 4 | -------------------------------------------------------------------------------- /requirements/tahoe-integration-master.txt: -------------------------------------------------------------------------------- 1 | git+https://github.com/tahoe-lafs/tahoe-lafs@master#egg=tahoe-lafs 2 | -------------------------------------------------------------------------------- /src/magic_folder/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright (C) Least Authority TFA GmbH 2 | 3 | 4 | 5 | __all__ = [ 6 | "__version__", 7 | ] 8 | 9 | from ._version import version as __version__ 10 | -------------------------------------------------------------------------------- /src/magic_folder/__main__.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) Least Authority TFA GmbH. 2 | # See COPYING.* for details. 3 | 4 | if __name__ == '__main__': 5 | from magic_folder.cli import _entry 6 | _entry() 7 | -------------------------------------------------------------------------------- /requirements/tox.in: -------------------------------------------------------------------------------- 1 | -r test.in 2 | 3 | wheel 4 | subunitreporter 5 | # because the integration tests import tahoe stuff 6 | # (this doesn't successfully "override" base.in .. 
that would be ideal 7 | attrs<24.1.0 8 | cryptography<43.0.0 9 | -------------------------------------------------------------------------------- /.coveragerc: -------------------------------------------------------------------------------- 1 | # -*- mode: conf -*- 2 | 3 | [run] 4 | # only record trace data for this package 5 | source = 6 | magic_folder 7 | # and don't trace the test files themselves 8 | omit = 9 | */magic_folder/test/* 10 | parallel = True 11 | branch = True 12 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include README.rst 2 | include COPYING.GPL COPYING.TGPPL.rst CREDITS Makefile NEWS.rst 3 | include tox.ini 4 | include .coveragerc 5 | recursive-include requirements *.in *.txt 6 | graft docs 7 | graft misc 8 | graft integration 9 | 10 | global-exclude *~ *.pyc 11 | -------------------------------------------------------------------------------- /docs/releases.rst: -------------------------------------------------------------------------------- 1 | Releases of Magic Folder 2 | ======================== 3 | 4 | Versions follow the year and month of their release, with a minor number increasing for each release that month. 5 | 6 | See also the :ref:`release-process` for further explanation. 7 | 8 | 9 | .. 
include:: ../NEWS.rst 10 | -------------------------------------------------------------------------------- /misc/build_helpers/sqlite_version.py: -------------------------------------------------------------------------------- 1 | # Copyright (C) Least Authority TFA GmbH 2 | from __future__ import absolute_import, division, print_function 3 | 4 | """ 5 | dump the underlying sqlite3 version 6 | """ 7 | 8 | import sqlite3 9 | 10 | print("sqlite3 version: {}".format(sqlite3.sqlite_version)) 11 | -------------------------------------------------------------------------------- /pytest.ini: -------------------------------------------------------------------------------- 1 | [pytest] 2 | testpaths = integration 3 | filterwarnings = 4 | # This is a poor interaction of eliot's inline_callbacks and twisted 5 | # Once we are on python 3, we can stop using returnValue at all. 6 | ignore:.*returnValue should only be invoked by functions decorated with inlineCallbacks.* 7 | -------------------------------------------------------------------------------- /signatures/README.rst: -------------------------------------------------------------------------------- 1 | Release Signatures 2 | ================== 3 | 4 | PyPI recently decided to delete all signatures and ignore uploaded signatures for wheels and source releases. 5 | 6 | There is no alternative offered. 7 | 8 | Until there's some other way to offer signatures, this directory will have to serve. 
9 | -------------------------------------------------------------------------------- /requirements/build.in: -------------------------------------------------------------------------------- 1 | # these requirements are for developers producing release builds 2 | # see also the Makefile and DEVELOPERS for more information 3 | -r base.in 4 | setuptools_scm>=6.4.0 5 | dulwich>=0.20.30 6 | twine>=3.7.1 7 | towncrier>=21.3.0 8 | readme_renderer>=32.0 9 | wheel>=0.37.1 10 | gpg>=1.10.0 11 | -------------------------------------------------------------------------------- /.flake8: -------------------------------------------------------------------------------- 1 | [flake8] 2 | extend-ignore = 3 | # existing errors 4 | E30,E501, 5 | E124,E26,E401,E402,E731, 6 | # flake8-future-import '__future__ import "..." present' 7 | FI5, 8 | # __future__ "annotations" missing 9 | FI18, 10 | extend-exclude = src/magic_folder/_version.py 11 | # flake8-future-import options 12 | min-version = 3.8 13 | require-code = true 14 | -------------------------------------------------------------------------------- /src/twisted/plugins/magic_folder_dropin.py: -------------------------------------------------------------------------------- 1 | from __future__ import ( 2 | absolute_import, 3 | division, 4 | print_function, 5 | ) 6 | 7 | from twisted.application.service import ( 8 | ServiceMaker, 9 | ) 10 | 11 | magic_folder = ServiceMaker( 12 | "Magic-Folder for Tahoe-LAFS", 13 | "magic_folder.cli", 14 | "Tahoe-LAFS-based file synchronization", 15 | "magic_folder", 16 | ) 17 | -------------------------------------------------------------------------------- /src/magic_folder/test/plugins/magic_folder_tests_dropin.py: -------------------------------------------------------------------------------- 1 | # This source file is not in a package directory. It does not live in the 2 | # magic_folder package hierarchy at all. It lives in twisted.plugins. 
Thus, 3 | # we must use absolute imports for anything we want from magic_folder. 4 | 5 | from magic_folder.test.common import ( 6 | AdoptedServerPort, 7 | ) 8 | 9 | adoptedEndpointParser = AdoptedServerPort() 10 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | venv 2 | 3 | *.pyc 4 | *~ 5 | 6 | # generated files 7 | src/magic_folder.egg-info/ 8 | 9 | src/twisted/plugins/dropin.cache 10 | src/magic_folder/test/plugins/dropin.cache 11 | 12 | _trial_temp/ 13 | .hypothesis/ 14 | .tox/ 15 | 16 | .coverage* 17 | 18 | # eliot files 19 | magic-folder-cli.*.eliot 20 | eliot.log 21 | integration.eliot.json 22 | docs/_build/ 23 | PRIVATE-release-token 24 | pypirc-magicfolder 25 | -------------------------------------------------------------------------------- /requirements/README: -------------------------------------------------------------------------------- 1 | The files in here are cobbled together to make the _actual_ 2 | requirements files, including pins. 3 | 4 | To re-pin all requirements, run: 5 | 6 | tox -e pin-requirements 7 | 8 | To add a new (direct) requirement that simply didn't exist before, run: 9 | 10 | tox -e pin-requirements -- --no-upgrade 11 | 12 | The place to actually add the new requirement is in one of the *.in 13 | files. That is: "base.in" for a general dependency or "test.in" for a 14 | development-only dependency. 
15 | -------------------------------------------------------------------------------- /.readthedocs.yaml: -------------------------------------------------------------------------------- 1 | # .readthedocs.yaml 2 | # Read the Docs configuration file 3 | # See https://docs.readthedocs.io/en/stable/config-file/v2.html for details 4 | 5 | # Required 6 | version: 2 7 | 8 | # Set the version of Python and other tools you might need 9 | build: 10 | os: ubuntu-22.04 11 | tools: 12 | python: "3.11" 13 | 14 | # Build documentation in the docs/ directory with Sphinx 15 | sphinx: 16 | configuration: docs/conf.py 17 | 18 | python: 19 | install: 20 | - requirements: requirements/read-the-docs.txt 21 | -------------------------------------------------------------------------------- /signatures/magic-folder-22.10.0.tar.gz.asc: -------------------------------------------------------------------------------- 1 | -----BEGIN PGP SIGNATURE----- 2 | 3 | iQFFBAABCgAvFiEEnVor1WiOy4id680/wmAoAxKAaacFAmM4siMRHG1lZWphaEBt 4 | ZWVqYWguY2EACgkQwmAoAxKAaafq+wgAm3rFl/jnCVX33T3/ag+femYrpXZzCQHK 5 | ZMfS8+q0v6O71Sj6qtu6csuKqh3jgujUwrgfKadwmmlTHKZHH2Y/n+4epKHwE4rH 6 | CPEXEJ87cDwT/0tvIaI9OnZLQcjeXf7Fwlr2JICp5gr+MtN4T6S40xtVJWkEAxZG 7 | 32piXYXEwMjlGdANFFRvfs7XwF6wVw2uHlm3GQpfTxKSK82MjPPX3nsWWZ2ARu9c 8 | RcejUTR+y6/9CQHIRuS+4Nf5BgbbkjHFZiNMXP15QsO299K8BdAN8s/dPYQZeTxW 9 | MqS7JDVFYsL2CKbV/hbk6A088ErK4irirLfprIciOgBGiYWxLKf/NQ== 10 | =DWrb 11 | -----END PGP SIGNATURE----- 12 | -------------------------------------------------------------------------------- /signatures/magic-folder-22.10.1.tar.gz.asc: -------------------------------------------------------------------------------- 1 | -----BEGIN PGP SIGNATURE----- 2 | 3 | iQFFBAABCgAvFiEEnVor1WiOy4id680/wmAoAxKAaacFAmM7QPcRHG1lZWphaEBt 4 | ZWVqYWguY2EACgkQwmAoAxKAaac3pAgAjr40DVyTJRReRRGaYZynNfuy9I0Tp4i6 5 | zB/BZTj4QYk11VXjFmEe1Pv+wXLMknUzYPaJ1QbvoQFzoc3rxu8Qcl1QgqOY44zY 6 | JO2LtNDpxB9r7PdFv73SFi72ize3hiHGc3JPFnWqez7uA7yhITmjl4Pdahr1y5hq 7 | 
KDkl8xX1DtuptDLFwptBQGO3pKZSUZhsRriLSG5hd9sbhfjngzmTjd8oHuSyc4Xi 8 | cHLLameZEjbz2SFDdZVgIdkKE2IkFIW/ZjvWPE4v/8ICijUMCIFPfb7Ki77OHKuB 9 | OYjN4voZryNfNv7ctzdYk0UYf3qDMCdvjtvupdiOpZyciQu1sTG69Q== 10 | =u7ZO 11 | -----END PGP SIGNATURE----- 12 | -------------------------------------------------------------------------------- /signatures/magic-folder-22.2.0.tar.gz.asc: -------------------------------------------------------------------------------- 1 | -----BEGIN PGP SIGNATURE----- 2 | 3 | iQFFBAABCgAvFiEEnVor1WiOy4id680/wmAoAxKAaacFAmIKfzoRHG1lZWphaEBt 4 | ZWVqYWguY2EACgkQwmAoAxKAaacZxQf/bbI4kfDXG8tV50buxHfv6n3pWbAqP9Od 5 | LrqiswN/K7AjLd8oZXMRN+9ExljPu0fnygXhqVB+oM4SXQFHye/fxOTjIP3ZY7hA 6 | yYyT8pghLkxwY2pqNupaLydtipfhdiZop1dd7/Ey+nRA3pSrpYNR83N9xNsHGds0 7 | FCmnHLP9yXOo1VuQgHipRPTK/N1sTDDJVUiQpzcBfBW7jLk2e0lTuv0OtrfbJZxA 8 | ocI7TGpYBFh2suoB1TefK5hvbBP+xM8GJ22V4F/ZtD43PeCgCj2n3Y+7yQCsCRR0 9 | g7ly/cXArkRD9qDxdTL/lroiRIg2GH8lm1KYbyxyr6DKUhqvizoTfA== 10 | =0dAV 11 | -----END PGP SIGNATURE----- 12 | -------------------------------------------------------------------------------- /signatures/magic-folder-22.2.1.tar.gz.asc: -------------------------------------------------------------------------------- 1 | -----BEGIN PGP SIGNATURE----- 2 | 3 | iQFFBAABCgAvFiEEnVor1WiOy4id680/wmAoAxKAaacFAmIWd8YRHG1lZWphaEBt 4 | ZWVqYWguY2EACgkQwmAoAxKAaaezUwf/WGkL2qkiHRUG4BHdRg+F9YKbD8NYVZsi 5 | M1ztNLUtReIBZ5oOSVsP7zK7xV7/pG9O31oaLkl9RKUSPtrzQQqPDPY2jUvFpGGd 6 | OSF7pZIRkd4DAEdtdUbcyhF++ZdMioDHbC3ugk7xVCT87EpWquQrq6xZza7DR3N6 7 | Aa1kXwSQPC9GjBso555u4zNRmngWcYzqb0xkxvjEGKhoZihe1vc0AejTyu/4YvsZ 8 | QPMY0CI/r4spPnv1eEl0ZOtwrE7vjNIk68mylfwDbov7VE3EmnsUe5B982KIkKFi 9 | Lr/arVoOA3CZeYcpOmoBjTFO6Z1pOrMYyNI9HWDeZZ6OmArnbJM5Wg== 10 | =wE0+ 11 | -----END PGP SIGNATURE----- 12 | -------------------------------------------------------------------------------- /signatures/magic-folder-22.5.0.tar.gz.asc: -------------------------------------------------------------------------------- 1 | 
-----BEGIN PGP SIGNATURE----- 2 | 3 | iQFFBAABCgAvFiEEnVor1WiOy4id680/wmAoAxKAaacFAmJ+y9wRHG1lZWphaEBt 4 | ZWVqYWguY2EACgkQwmAoAxKAaaehjgf9H9pWVwR2/O1lOf4k+9Z1FReIMZcLklYS 5 | mSl7kHgtKMku/KZ3DYhAVTSGeuXlxIpHhpq7vy9yqJQaYfAQ1rxHTkMzdzpg9rts 6 | bRpv6A/eYvrAY/u/q3Y3bMZ+AqmBTEhZs9i5P1V4UoYFlk6xo6dpZwFsI8LSB62A 7 | by3kIrhe90MoM3PJc4noO6WhedsM55RJByzq5UF5ege/JQdIsrLzukqWTfve2Rcd 8 | d4U9v7DxG4anlX50zqqd5TzIE47axkXnVnBlpSGi/v9pUCGgWKSMGRg8rPcKw+7r 9 | iYIfycoBtZ/td/4OoGsYzyrEdaBBmP5C5d2TSHUtgAJyZU5u27CWoQ== 10 | =bUki 11 | -----END PGP SIGNATURE----- 12 | -------------------------------------------------------------------------------- /signatures/magic-folder-22.5.1.tar.gz.asc: -------------------------------------------------------------------------------- 1 | -----BEGIN PGP SIGNATURE----- 2 | 3 | iQFFBAABCgAvFiEEnVor1WiOy4id680/wmAoAxKAaacFAmJ+yY0RHG1lZWphaEBt 4 | ZWVqYWguY2EACgkQwmAoAxKAaafLgAgAse6PSDENEyEyn7kpAh8AAif8kAEBW41L 5 | +3kBG8Uap0l122wbBXPlgITqZgNs8Xsz6Kg+e3u2mxwYF4VwJK6gC1cidLxUkRmc 6 | d3kI7NBPWW9Sr989DpyaVKmoUOWnVLnz+Cg3BgB/UHVzlZNuYa3t5Pf65lK6LIGz 7 | Kuql47s5VdrosfUjJEXsdoUiYwEmEa4m/9vjysxiGPTByXLQPlkgd01BPyaCP0e9 8 | 7/NZSTASl16lxVxwry7I2P0cSUXnQDkmz9ALJya5AmhjLVLPp+JYc7B7VLaaxyx/ 9 | 6DUHCXGq+WXjhQafvkVadp+8Nb3y0YWcY+gSJC909KX9XdRggxMgiA== 10 | =fcZR 11 | -----END PGP SIGNATURE----- 12 | -------------------------------------------------------------------------------- /signatures/magic-folder-22.8.0.tar.gz.asc: -------------------------------------------------------------------------------- 1 | -----BEGIN PGP SIGNATURE----- 2 | 3 | iQFFBAABCgAvFiEEnVor1WiOy4id680/wmAoAxKAaacFAmLreY0RHG1lZWphaEBt 4 | ZWVqYWguY2EACgkQwmAoAxKAaaf0FQf/W6eo/JFy+GiaZl4pX0RFI4B8OJpqhYij 5 | SrAdf5E93ikjEcQ6/I3Q/UydgeyjMutbV8x5rkMm55ioUqRgsaaOLGJ1+24FLVp0 6 | /y6yGuOYjoaQTrPF+BPQhYfwZ55iRYSDxLOOczDGwLrjbRFjkBMolvCjQZPEUbS+ 7 | DGxKkNZCZaq98oQtCOrlTsZ2dscRVin2xKWtPMqIX3ROJI2BwtrYnGGU8U8agepJ 8 | 24vJsXbMKobkM+tNKWNedPIpAKcOkZFpWfvOYhOHza80rPVkps7Uw4r3rDiGvL35 9 | 
+lUfjm224n6vGoNwPzqMQFRHLVewtbSSipxO8RQaEZDctG4G6DC3AA== 10 | =8TnF 11 | -----END PGP SIGNATURE----- 12 | -------------------------------------------------------------------------------- /signatures/magic-folder-22.9.0.tar.gz.asc: -------------------------------------------------------------------------------- 1 | -----BEGIN PGP SIGNATURE----- 2 | 3 | iQFFBAABCgAvFiEEnVor1WiOy4id680/wmAoAxKAaacFAmMiC4QRHG1lZWphaEBt 4 | ZWVqYWguY2EACgkQwmAoAxKAaaeSSAf+ObhlxgydYEcXRxoeVKmm7GfTEo0amTAf 5 | HEQTUdPxLvyVl9xtxr/3AhSlZpxLxDBd/81GP1jS2EF3QQwYyl3Agx8s9mvgIFDi 6 | /fK/RVUC9Ro0e3Dv7aJpD6FC1NrbEJn+qv9yYDFwf/ZE4Sz91f5J6FS2/DumgPG4 7 | iZz+AZ3HTW37ikdNrqUEv4JhyheEGFCBMnC/caXbBcxC9YM61wUDBOn0vexHtGL7 8 | OjyhAZJ0xI8B+1w70iZMdQ5MI0/21XQyf2e35CBQK1GJM7U6mIYu2Wly8jttyxcx 9 | Lu0yd6u+H5ypmJSFHSz5/p0WretsL+7ZHzHAdbJIi2llSDxhaemAUA== 10 | =JjIf 11 | -----END PGP SIGNATURE----- 12 | -------------------------------------------------------------------------------- /signatures/magic-folder-23.1.1.tar.gz.asc: -------------------------------------------------------------------------------- 1 | -----BEGIN PGP SIGNATURE----- 2 | 3 | iQFFBAABCgAvFiEEnVor1WiOy4id680/wmAoAxKAaacFAmPYWNkRHG1lZWphaEBt 4 | ZWVqYWguY2EACgkQwmAoAxKAaadSiQf9EXjUoKSvLnov1HmSR82iEI1RBCzr+bPQ 5 | 2Sv3LzrywdY26bP/pgceh66EFBMthwNItrtbLYERlP9/6oIVSholQgOxw+S7+w7g 6 | 0xlKTrRtSLU6UjhTlJYIyILfoCk1+ZUZF5kAuTicv3wu9RM/UBPtuT4WeET9m2LZ 7 | /IB1iZs8XjN2ULUF0R1AgTsEb7Pew1xSYQnaX98tvziYtfL194OttzlijXBauNVZ 8 | lM0Owc/PqCPpvP1PrWrGBBB/caQOLW1AF1db85me9H6ucZnU7LB7JjCL/RAg/pPK 9 | 4tznGxQwBcBFHCeqts9NgRNyB0sgtNYRkCVs3i49ZsD13gR3a1perA== 10 | =e1PF 11 | -----END PGP SIGNATURE----- 12 | -------------------------------------------------------------------------------- /signatures/magic-folder-23.3.0.tar.gz.asc: -------------------------------------------------------------------------------- 1 | -----BEGIN PGP SIGNATURE----- 2 | 3 | iQFFBAABCgAvFiEEnVor1WiOy4id680/wmAoAxKAaacFAmQYoakRHG1lZWphaEBt 4 | 
ZWVqYWguY2EACgkQwmAoAxKAaaesKQgA00hnhI4QsqHAHk0KcYl77Va/taPhzCd2 5 | qi/gkgMePbDrXup6KpCQB4SMu+/yleg2soUYJLupHcFyz7gRtfDbIT1+8ZA9WMMq 6 | zlWk2FBmNz9WScAqyDPxUuyn1avlCoEl8RmBmBEGGLb3Ny/zmbMkvO3CN4HWRoji 7 | hVkRt6rdoaM7AiXmNzIhn7NEuf7ODVeRIdMZbYNRj4+O/ZZHvamouPevAG9a7UWt 8 | gE8n1TgNRzAt17CjGXbbrncLAg5/4KSEjG+ZTvxMeUYsdQC6Mxn9+jrBmH6AnACI 9 | 4Aj19M6JV8n8X0eGIJV5XwaufJSywpVHZVzOP/xpRuiGQiWFsCMpVA== 10 | =wqeK 11 | -----END PGP SIGNATURE----- 12 | -------------------------------------------------------------------------------- /signatures/magic-folder-23.3.1.tar.gz.asc: -------------------------------------------------------------------------------- 1 | -----BEGIN PGP SIGNATURE----- 2 | 3 | iQFFBAABCgAvFiEEnVor1WiOy4id680/wmAoAxKAaacFAmQYokURHG1lZWphaEBt 4 | ZWVqYWguY2EACgkQwmAoAxKAaafWpAf9FOBAthSuPndqcDt4BJ/IFsFDNFEFTZGf 5 | M+2s2OgnJB4MlM2cmCg0zbPwDMgrkDgDDiju6hGwtP6doPQ89ucyvt0HpoIqa830 6 | mmFyk53gx9HOdHKCnB5dyZFOheAdw8aPqHGu1iw9yWf1N7K8HIH9U7zGzoYyWpTV 7 | An/K0Y7Vs1HN+PjkBFP9bQEzlzbu0Lx05tTUZFsg6ZYtBXOjq0Fk7J8IjrPctJ1E 8 | QaLVBicYhoYkZ0F9AE2979gY4DHbLPg6w5sW0wqbADx8aPchOg+9DbiNw+H84aco 9 | ljrasOzpxLYYC86yM56MUIHhbGG2UZiSs7z2tf3YAi85H7OA1sHa9g== 10 | =nkkv 11 | -----END PGP SIGNATURE----- 12 | -------------------------------------------------------------------------------- /signatures/magic-folder-23.5.0.tar.gz.asc: -------------------------------------------------------------------------------- 1 | -----BEGIN PGP SIGNATURE----- 2 | 3 | iQFFBAABCgAvFiEEnVor1WiOy4id680/wmAoAxKAaacFAmRRWjwRHG1lZWphaEBt 4 | ZWVqYWguY2EACgkQwmAoAxKAaafKhgf/dUb2bFvWsdVbCxG6keIXPYKxsQsXzLMK 5 | Sk/dI3RLPXWu/nwFH+3JA1HH6jPoXwI7qfpvtS2Nsl3NcIWKQbzKHFU5r0k9WljT 6 | y4ALFUrRX7MCIAW/61OeBXG2XUnL4MlXE+PFfi8lu6OyL1+Z2ruds+ES00SygeXd 7 | MCbpYM1VzbScH8D1s92DsLE9RtW92XQqyRAoCebCxLXmt62CNwCLEHHX/9WmDd0N 8 | D1BAQm3QTDalYIlyIGTPTXIUV42maNmZtQP+DxlSK9r4TTyBgobXxCAe+YGcyIRh 9 | a+uKpqd3RK3H1wd/Zka02xC1tiZJDNhywcUX6UUW9IFrCaiehIiy9A== 10 | =VP5b 11 | -----END PGP SIGNATURE----- 12 | 
-------------------------------------------------------------------------------- /signatures/magic-folder-23.6.0.tar.gz.asc: -------------------------------------------------------------------------------- 1 | -----BEGIN PGP SIGNATURE----- 2 | 3 | iQFFBAABCgAvFiEEnVor1WiOy4id680/wmAoAxKAaacFAmSZMR8RHG1lZWphaEBt 4 | ZWVqYWguY2EACgkQwmAoAxKAaae4uQgA4xgfS3zRTCpTeSvgugxgxJVS9x0NMbnU 5 | IoEnNw/x9QYKppEeRVIuhtZx4N0cNasQFbF5BGzdvhOoBBNfCToo+Fm5RNRUnaPn 6 | B3n4990Vixl/QQmEo1T1+w3Oo0HtVrvwaBDTBs5tFs2Pv8WHVdnA+w2+/xjttP5n 7 | Zvdbr/UBRrIGSX4YwQGyA+9uciDEYnBPHNARV4SVncyDSYAaInlOEgqugFjQf86R 8 | 7P/BjjJ0WaFNvLOWtq+rTbjZYiBz/krguTmXIBXrXM70I4J2E+mQz0UfUqztibuv 9 | oWaPdnS9fdZdK7CKj6it3zZIIDw3yO+e5cn1N5llJjZP7gvsT19C+A== 10 | =HwRf 11 | -----END PGP SIGNATURE----- 12 | -------------------------------------------------------------------------------- /signatures/magic_folder-22.1.0.tar.gz.asc: -------------------------------------------------------------------------------- 1 | -----BEGIN PGP SIGNATURE----- 2 | 3 | iQFFBAABCgAvFiEEnVor1WiOy4id680/wmAoAxKAaacFAmH0PJ8RHG1lZWphaEBt 4 | ZWVqYWguY2EACgkQwmAoAxKAaafv8Qf8CRg71WitCxM6YzJgV4O2TKRj7EwPFqrt 5 | O6oL23zpSQD1PR20nozPB1tjQQ175AbY1wT5RvOca5tiUovDjENVI15EjVdr0eMK 6 | 4S+/2k2muKy/ylbHF9KpCmG4+3OhFQfVPmboUnrUV0psZIFvxm7Kqh2slXiVq5oh 7 | KAcBj4bXv0ohrGs+56m9WplrV2YXZD/mfBfIJBew5Q+DwsJyBr1xeGmehH/kV2Ri 8 | X7ALrAVKGOquKotObE+l8ClAvmYc/u5MxMOFyxsOzG8WpWfycTs8sg//S2546pIM 9 | UaFAEegWknBGyHjC6HohRl0BGLQu75Ip/8YT82Dqss0mmCAYsdM/rg== 10 | =2+uC 11 | -----END PGP SIGNATURE----- 12 | -------------------------------------------------------------------------------- /signatures/magic_folder-22.1.0-py3-none-any.whl.asc: -------------------------------------------------------------------------------- 1 | -----BEGIN PGP SIGNATURE----- 2 | 3 | iQFFBAABCgAvFiEEnVor1WiOy4id680/wmAoAxKAaacFAmH0PJ0RHG1lZWphaEBt 4 | ZWVqYWguY2EACgkQwmAoAxKAaacLFQf9GUz2VFE0MSuugSFnd+9vXC4/6KBCytUY 5 | nP/qVXpsmHYcodin/LUm4CXh45PnuZ02LMq+w0pkkhXcQkXZahAC6vyxNO5oXfbZ 6 
| DnkrrF+UpeqC3YoXxEPU21JhsZW1WP51n5utKVcnMBJzKrp/T94Xydfdvz3gDDvP 7 | ZAFqn4APnAtufLtpFQ9rEdkR54umN8zaO2Sd7Lq5h6EbQ9hoiMcBXjQI6lk8i5Zi 8 | sPtv3UIWMixNUnK55k+SoYmv8y5sksQmbrjKJb2HlNcSpNNY/T5+Yk95ZU52SnhA 9 | TTHdQKJkCoFdcJesHjdBum14QoZlUsP6oZt5FFDvf3F1sLl+f9j+kA== 10 | =cIiV 11 | -----END PGP SIGNATURE----- 12 | -------------------------------------------------------------------------------- /signatures/magic_folder-22.10.0-py3-none-any.whl.asc: -------------------------------------------------------------------------------- 1 | -----BEGIN PGP SIGNATURE----- 2 | 3 | iQFFBAABCgAvFiEEnVor1WiOy4id680/wmAoAxKAaacFAmM4siARHG1lZWphaEBt 4 | ZWVqYWguY2EACgkQwmAoAxKAaaertwf/VBk1OB5RUmYG2C+y5TFOUnCZb9cTLKqG 5 | MkkynbX2ge5EnMkBX3GmS/KH0j1sq40ZFNymsHHEmufDqxBhRBdNBqC2dZwQDCGu 6 | wC1GzpshEWUd/w2STWSopWrMwcRNdpKaXZ09yILR9OFedSQG4spno7v2gf+xpf8k 7 | D5to2cJ+t8JQs4ENGruqvsP/wvfX4cV6qH8f45o0868PtEWPHVMurJ08t8oSLuOH 8 | JJpw2oxKNye0Bw0ZUDrS+VF7jw+6eV0MTeufFvM5ygWqPX4zDnP+Wx8PsDNbH8Zz 9 | dVeGstLcfv/4cA4AM14vAOVcnk7Uiy9/GCC+s2cESE1DYLcv6PsfHw== 10 | =yxYj 11 | -----END PGP SIGNATURE----- 12 | -------------------------------------------------------------------------------- /signatures/magic_folder-22.10.1-py3-none-any.whl.asc: -------------------------------------------------------------------------------- 1 | -----BEGIN PGP SIGNATURE----- 2 | 3 | iQFFBAABCgAvFiEEnVor1WiOy4id680/wmAoAxKAaacFAmM7QPURHG1lZWphaEBt 4 | ZWVqYWguY2EACgkQwmAoAxKAaacWPAf+JFJfI7f2hE6sOCqcTWC9QEdWHXRxvFTw 5 | cVZszgkAGH7wUXRm6qrXD0rJ3RZyp2q7ENa3erDQn2mLYyAvc2S0BqtQ9Otj4XVT 6 | XM/6sWr9w+bdu3bBndh6NK2RLZt51xefddIa1cINoxy3g257EyMxT7QGAbP93Ph5 7 | BWBH/GM+Tz441JT5oAny+CdC5tFT4f2NWdu/hmKs9RQVpA9BaxUgT3tQ0gChvmyh 8 | wDiykbOoiwWY3HpC7KgeI4oT8VALCox5vcj4+VePY56aMwycBXGF2O5hyWiGuE9m 9 | Lh8GJJ7PZ6AGWa3NopeyzZptc6BYrNYXqHt0A+FUBQSWAHxOr4dQrQ== 10 | =oK+d 11 | -----END PGP SIGNATURE----- 12 | -------------------------------------------------------------------------------- 
/signatures/magic_folder-22.2.0-py3-none-any.whl.asc: -------------------------------------------------------------------------------- 1 | -----BEGIN PGP SIGNATURE----- 2 | 3 | iQFFBAABCgAvFiEEnVor1WiOy4id680/wmAoAxKAaacFAmIKfzgRHG1lZWphaEBt 4 | ZWVqYWguY2EACgkQwmAoAxKAaaddmQgAus1V0Hi5lM5TDGd0vbB00uPxnSS7Gt1B 5 | EPLYSIv2SuPZ3MniBydiss1GUXna9MdZ7MOlEw7gBPfQ0+NLrxtELlIoXIcGv/y2 6 | Y+7v8nsGoWUeZ2LeProeLpsIclI4IRgR3KyZ2y5zTy9S3yiB2dIw2zKrkRGKMmCm 7 | b1cHmkaYMUa9k85E/p5PQrAJWWCAzUF6MVGcecKfyscVJEfjs0OxFdZHQZitERdp 8 | kUm7El63+xMT6eZBt9kPnGSjCu/ffqYNN4ECFHYKb+CGjev+/WWiNp83whAXSEyz 9 | g8jAKwWhEGko73REQK115DGBw0Vdnt9PHxBoC80+ZAxlok4r3sEbGA== 10 | =v9Jt 11 | -----END PGP SIGNATURE----- 12 | -------------------------------------------------------------------------------- /signatures/magic_folder-22.2.1-py3-none-any.whl.asc: -------------------------------------------------------------------------------- 1 | -----BEGIN PGP SIGNATURE----- 2 | 3 | iQFFBAABCgAvFiEEnVor1WiOy4id680/wmAoAxKAaacFAmIWd8QRHG1lZWphaEBt 4 | ZWVqYWguY2EACgkQwmAoAxKAaadpQAgA3LaHca22iUrhXWY5Rcnn39m2EtuJolj/ 5 | y+SX/YkLpke+RPJ5Z65rNLRwCJOt6fo/xDBnSxmLs5Uy3KP7U4h1OC2B9gGK0cQL 6 | vjS9xkBFH4AgTZ2UBmsfhL1MK+8YYeZnmk70mC/oNufe+crMO1n6N0oZxLWbh/zd 7 | GoCPFm7VgH5H9MtMmrzFbX016Wi2g4eZHrE6o8M4tpXsRsatONlL4BRkwKjgU+xq 8 | TGKhWKC+nxODzZcqYEgcWbShrqA1TLQWa8WGzf9uUvsrfqmQMvptPBC82RVMwPUu 9 | HUTFxb5m/TP91vfI0veCB0cchy7GjDQ9dHGtBG0/CmlluZdzvDMFsg== 10 | =kgwZ 11 | -----END PGP SIGNATURE----- 12 | -------------------------------------------------------------------------------- /signatures/magic_folder-22.5.0-py3-none-any.whl.asc: -------------------------------------------------------------------------------- 1 | -----BEGIN PGP SIGNATURE----- 2 | 3 | iQFFBAABCgAvFiEEnVor1WiOy4id680/wmAoAxKAaacFAmJ+y9gRHG1lZWphaEBt 4 | ZWVqYWguY2EACgkQwmAoAxKAaadYBQf+ODB4egoXGtbomyKQ1V+viozaAkb2LkAb 5 | R0UlgEEosf3swEaBfh2rfXNxuorEE3yF7nkMjWNysPt2UU/4DDXEhJpnQjenj7MR 6 | 
rRVReyiHrQ3QEY6cA/y3nOQ4bSpyY7sLXy23qnYGxFlAheVot9HW0owD9PLV+ATI 7 | 1rWyZjU/ci5q/6Jd0tNfmJR5Qf92LGbeosaRqW0lcAk4kR9Mv5wzcLX+me96aXij 8 | UmaSOkxzf6/3Nj3QcZG2Tq+UV7qpLiD0Vo5YfgEeiVX/ZhCUx0Hs4hlbs92RM2tn 9 | 1lEL7kGZMmkOWHE6saqqtKRc7FfDQd2vyQdOb3XP17U5KQ0FSf/EUA== 10 | =0qY/ 11 | -----END PGP SIGNATURE----- 12 | -------------------------------------------------------------------------------- /signatures/magic_folder-22.5.1-py3-none-any.whl.asc: -------------------------------------------------------------------------------- 1 | -----BEGIN PGP SIGNATURE----- 2 | 3 | iQFFBAABCgAvFiEEnVor1WiOy4id680/wmAoAxKAaacFAmJ+yYsRHG1lZWphaEBt 4 | ZWVqYWguY2EACgkQwmAoAxKAaacQagf/U5ZmveuXbNxF+vJVPzIdVjSS9YyZPP4r 5 | V9t6H393yyt3SchMmJsrfHr4qxzQqJbbi//+vpPn8NVCY3enGn4AH9F4Uqvz4K8u 6 | ztzjd+GWOj1yNsMrvSyk2KHEAUnmWYS9D3qoNON2pew7nzW3Aw3zt+80LSW3Nb01 7 | PkcuUIOpw40JkvA+x2+rnlL5pKTCSSlzG+0OPGDzqRDpZGXKyolULkFZCxLFiMcZ 8 | 9wXcHJbyEDtqhLDAugKwZsNYNQ2+WO4QUEDQbZRzxqqBaB2dMQ6uspcFzRykyvwk 9 | 0kH+Uf7xB4LM7QbFAfuNda60IrJ1YWJaEZzEKQDoRzfg7TqsLH3MIQ== 10 | =uqVk 11 | -----END PGP SIGNATURE----- 12 | -------------------------------------------------------------------------------- /signatures/magic_folder-22.8.0-py3-none-any.whl.asc: -------------------------------------------------------------------------------- 1 | -----BEGIN PGP SIGNATURE----- 2 | 3 | iQFFBAABCgAvFiEEnVor1WiOy4id680/wmAoAxKAaacFAmLreYsRHG1lZWphaEBt 4 | ZWVqYWguY2EACgkQwmAoAxKAaacp+Af9EiTLZvDs7/GO0/90gvhcnrQhKP5riB/j 5 | ++domZrGebnWGkfeCnvMtqMD0zSmHPXJ6fo+HzR351TO3s4UbwdzL/7eN5PKe1zJ 6 | aLn4/d2231dhJpyFUlfDRjfZUgD477SubTVHLak8pRg7bdBc6nI56sQb/jTGyhHc 7 | eDR5ixp5lCLfB2iw3JfEaynhMVGlPR/ApFZ27ZB0ytFNbNSbFVLlc6USePXAiBfG 8 | kEovAgZBUj2uRNCBBbn+5dmulBzrI7cqPcc0rQjJh+aiKa4Hq1SxkvlDsIoWjwe2 9 | pNSD1y7Mi+Nfk6jIovyqBMEnXnGEYH+PHORDezsM6E/jKKopwIcnjw== 10 | =E/G3 11 | -----END PGP SIGNATURE----- 12 | -------------------------------------------------------------------------------- 
/signatures/magic_folder-22.9.0-py3-none-any.whl.asc: -------------------------------------------------------------------------------- 1 | -----BEGIN PGP SIGNATURE----- 2 | 3 | iQFFBAABCgAvFiEEnVor1WiOy4id680/wmAoAxKAaacFAmMiC4ERHG1lZWphaEBt 4 | ZWVqYWguY2EACgkQwmAoAxKAaaeKEQf8DR/fdx5rRfWNY1N/WvWlj2OQWOjvEXKT 5 | hMhRiW9rACvhcDVfNPdXvVdFrQf6GsdibA1x0y57weQ8E8VpQA/YiZCV7/X2qERQ 6 | 7Dz73oXIBZ+7IGFwU+XH0bhpn1GH4vxl2LHeFEEPrZu+DOQ7hT3lOvLdaMzWosv9 7 | oL2eRcnw9pAnYVVVXCiuGNQvK3Lo+Fm2jJl9oqOuTja2AEw3SiiRr3d8y5xuAPB8 8 | tnvCVJ9m23rUBwmz3wAOpUtz9BdZhF2WoZA3uu5FVm947n3eANgv3Liq++gpNHGz 9 | O7SfglNelyU57BwLdCt0wkpg7ggQd87aaWCQ5vx3VlvzMQ4tpTZEmw== 10 | =h+CJ 11 | -----END PGP SIGNATURE----- 12 | -------------------------------------------------------------------------------- /signatures/magic_folder-23.1.1-py3-none-any.whl.asc: -------------------------------------------------------------------------------- 1 | -----BEGIN PGP SIGNATURE----- 2 | 3 | iQFFBAABCgAvFiEEnVor1WiOy4id680/wmAoAxKAaacFAmPYWNcRHG1lZWphaEBt 4 | ZWVqYWguY2EACgkQwmAoAxKAaadbJAf9FoM/RIToe4Wo5HJbjm9tjJXb3zqASWSc 5 | FrL0I/skOnHHhsUNI2jImhMR3jnj7JFbIvsSwm6yo2vOJCIpfxgBRfMDy6tZ9tWq 6 | MOj8Tx7rr4jjokt5/I3yeUSenkNPjB+QCgNQicoxYItirA4IjvMgbSMDJMjUsX/Q 7 | yKjRaZVt0KL1Fjr8E8yAJHr30VY3jeiwJP+r6Ac5cfSJL5IvMgt66rwdxiHktIUy 8 | aWETvH483hDmLlDe6YRzaoqW2jKEneMoPENl2UOwW+8kUMIQGSrvMwoNkaTes8eI 9 | oKcZWdWPMzp+hBYSi5gagETZ0I2HysDBSbeMIPhOPlR78RI+Yx7AWw== 10 | =piIK 11 | -----END PGP SIGNATURE----- 12 | -------------------------------------------------------------------------------- /signatures/magic_folder-23.3.0-py3-none-any.whl.asc: -------------------------------------------------------------------------------- 1 | -----BEGIN PGP SIGNATURE----- 2 | 3 | iQFFBAABCgAvFiEEnVor1WiOy4id680/wmAoAxKAaacFAmQYoaYRHG1lZWphaEBt 4 | ZWVqYWguY2EACgkQwmAoAxKAaacrBAf/Z8q5KCSQwMEl9x8xgmYGscVUoGggL4rK 5 | xdBC6IlsdvPIr6LFf4Z3HfXmUXv3YNnC9seVFPZrt+WeQTrBjGJGrFKRYx7POTfD 6 | 
gjpk2OXgVUF+BuyjvDt/8XUUFc4eav+T75XmSeRZlgVivUgA9qzvSpWjFVXor/ur 7 | CVgyBJAdh+Hoz3GZVE/GIbsgQ9UdnTRovVHIuVR3BVAT4x631Y2gTi5pITVhWFxZ 8 | AMEQK7eLbDcuaBAr+wZpcesRKIZn7CR/W8kvSnOk7N0vj03zu08liFMICvvW+2Av 9 | GHyiP8Gt3Upm13vy//W0fza7HESteexOjuxDehpG9KqZR1pIQFrOFA== 10 | =LkeQ 11 | -----END PGP SIGNATURE----- 12 | -------------------------------------------------------------------------------- /signatures/magic_folder-23.3.1-py3-none-any.whl.asc: -------------------------------------------------------------------------------- 1 | -----BEGIN PGP SIGNATURE----- 2 | 3 | iQFFBAABCgAvFiEEnVor1WiOy4id680/wmAoAxKAaacFAmQYokMRHG1lZWphaEBt 4 | ZWVqYWguY2EACgkQwmAoAxKAaadgPgf8DjxW1jVKPINdjobF7Bnwn21t4hkW69Xy 5 | TiLFrzjVU5VuFumPxXjC/tWAP1kbVDBZuNWtz9i7ynuuZEisoUGHogIjCUpV4BFd 6 | R+uIrypWwOlcgueV/Wr2KxKz4nSeWQtWBPoDPgCPHG3PW8Oi7awE2aJhw/8VJWp+ 7 | KtYqBrnSPB00spsExfE9ft/ZVMudvIsimKWX/1DVSs2xMOBJNZ3bY/KN8m3tRExi 8 | RgpgiUrQ3mmSArAoSudDwpr7nJ68U0xyFn9xDJiCFTx21xhIQNTj3AO3xslr30qY 9 | UY4mp4ye/vDX5Q0btCXvwU3EmG6RfSzJ0cAqUGmd2AwHe2Y8EEC0xA== 10 | =+QgM 11 | -----END PGP SIGNATURE----- 12 | -------------------------------------------------------------------------------- /signatures/magic_folder-23.5.0-py3-none-any.whl.asc: -------------------------------------------------------------------------------- 1 | -----BEGIN PGP SIGNATURE----- 2 | 3 | iQFFBAABCgAvFiEEnVor1WiOy4id680/wmAoAxKAaacFAmRRWjoRHG1lZWphaEBt 4 | ZWVqYWguY2EACgkQwmAoAxKAaacUgggAmicuEHb5kyRew+nXT2cHanYHzcZok0I2 5 | e7rKoAblhqV1RsK9pMzJKA1DVwkRBjyX0u0QrRHU72UJfiS0eyiHH5MV+Rl5QuSs 6 | aVPpVPpKkePL6Vv3VJ8VzeOAZuys7WOvNliUPl5y2Rft7v9V/U+XNUo5zGC4AcnO 7 | UxiE6i5r+ezgRgfFDd/0VtbJMWElaJtNIUsGBpx+9Phh3PXGi1/vpoW71/wYsThE 8 | 13OeQQIqJexphisckSjc+5ssR8nCdedyN72jRd1oJFHQyZTrVaUQoOdZA2EE/4FA 9 | 188rZbBewSDoRXFP65lqdRVaxH9NQodemFW/FWI0L3N59k1oTJEn0A== 10 | =p/IV 11 | -----END PGP SIGNATURE----- 12 | -------------------------------------------------------------------------------- 
def path2magic(path):
    """
    Mangle a relative path for storage: each '/' becomes '@_' and each
    literal '@' becomes '@@', so the result contains no path separators.
    """
    escaped = []
    for ch in path:
        if ch == u'/':
            escaped.append(u'@_')
        elif ch == u'@':
            escaped.append(u'@@')
        else:
            escaped.append(ch)
    return u''.join(escaped)

def magic2path(path):
    """
    Reverse ``path2magic``: '@_' becomes '/' and '@@' becomes '@'.

    :raises InvalidMangledPath: if an '@' introduces an escape sequence
        other than '@_' or '@@'.
    """
    def _unescape(match):
        # unknown sequences raise KeyError, translated below
        return {u'@_': u'/', u'@@': u'@'}[match.group(0)]
    try:
        return re.sub(u'@.', _unescape, path)
    except KeyError as e:
        raise InvalidMangledPath(path, str(e))
Most of these files are plain text and should be read from a source tree. This
def normalize(text):
    """
    Normalize ``text`` to Unicode NFC (canonical composition) form, so
    that visually identical strings compare equal regardless of how
    they were originally composed.

    :param unicode text: some unicode string

    :returns: the NFC-normalized form of ``text``.
    """
    return unicodedata.normalize("NFC", text)
18 | colorama 19 | pytest-twisted 20 | hypothesis >= 3.6.1 21 | towncrier >= 21.3.0 22 | testtools 23 | fixtures 24 | # Used to suspend tahoe on windows 25 | psutil 26 | # For testing invite/join via wormhole 27 | magic-wormhole-mailbox-server 28 | -------------------------------------------------------------------------------- /misc/coding_tools/check-debugging.py: -------------------------------------------------------------------------------- 1 | #! /usr/bin/python 2 | 3 | # ./check-debugging.py src 4 | 5 | from __future__ import ( 6 | absolute_import, 7 | division, 8 | print_function, 9 | ) 10 | 11 | import sys, re, os 12 | 13 | ok = True 14 | umids = {} 15 | 16 | for starting_point in sys.argv[1:]: 17 | for root, dirs, files in os.walk(starting_point): 18 | for fn in [f for f in files if f.endswith(".py")]: 19 | fn = os.path.join(root, fn) 20 | for lineno, line in enumerate(open(fn, "r").readlines()): 21 | lineno = lineno+1 22 | mo = re.search(r"\.setDebugging\(True\)", line) 23 | if mo: 24 | print("Do not use defer.setDebugging(True) in production") 25 | print("First used here: %s:%d" % (fn, lineno)) 26 | sys.exit(1) 27 | print("No cases of defer.setDebugging(True) were found, good!") 28 | sys.exit(0) 29 | -------------------------------------------------------------------------------- /src/magic_folder/test/agentutil.py: -------------------------------------------------------------------------------- 1 | # Copyright 2020 Least Authority TFA GmbH 2 | # See COPYING for details. 3 | 4 | """ 5 | Testing helpers related to Twisted's ``IAgent``. 
6 | """ 7 | 8 | from zope.interface import ( 9 | implementer, 10 | ) 11 | from zope.interface.verify import ( 12 | verifyClass, 13 | ) 14 | 15 | import attr 16 | import attr.validators 17 | 18 | from twisted.internet.defer import ( 19 | fail, 20 | ) 21 | 22 | from twisted.python.failure import ( 23 | Failure, 24 | ) 25 | 26 | from twisted.web.iweb import ( 27 | IAgent, 28 | ) 29 | 30 | @implementer(IAgent) 31 | @attr.s 32 | class FailingAgent(object): 33 | """ 34 | An ``IAgent`` implementation which returns failures for every request 35 | attempt. 36 | 37 | :ivar Failure reason: The reason to give for every failure. 38 | """ 39 | reason = attr.ib(validator=attr.validators.instance_of(Failure)) 40 | 41 | def request(self, method, url, headers=None, bodyProducer=None): 42 | return fail(self.reason) 43 | 44 | verifyClass(IAgent, FailingAgent) 45 | -------------------------------------------------------------------------------- /.github/workflows/codechecks.yml: -------------------------------------------------------------------------------- 1 | name: Code Linting 2 | 3 | on: 4 | push: 5 | branches: [ main ] 6 | pull_request: 7 | branches: [ main ] 8 | 9 | jobs: 10 | codechecks: 11 | strategy: 12 | matrix: 13 | python-version: 14 | - "3.9" 15 | runs-on: ubuntu-24.04 16 | steps: 17 | - name: Check out source code 18 | uses: actions/checkout@v2 19 | with: 20 | # Get enough history for the tags we get next to be meaningful. 0 21 | # means all history. 22 | fetch-depth: "0" 23 | # Checkout head of the branch of the PR, or the exact revision 24 | # specified for non-PR builds. 
25 | ref: "${{ github.event.pull_request.head.sha || github.sha }}" 26 | 27 | - name: Set up Python ${{ matrix.python-version }} 28 | uses: actions/setup-python@v2 29 | with: 30 | python-version: ${{ matrix.python-version }} 31 | 32 | - name: Install dependencies 33 | run: | 34 | python -m pip install --upgrade pip 35 | pip install tox 36 | 37 | - name: codechecks 38 | run: tox -e codechecks 39 | -------------------------------------------------------------------------------- /requirements/tox.txt: -------------------------------------------------------------------------------- 1 | # SHA1:1f60a2a0aee9d37c18c2283cf3e90068f5e301ee 2 | # 3 | # This file is autogenerated by pip-compile-multi 4 | # To update, run: 5 | # 6 | # pip-compile-multi 7 | # 8 | -r test.txt 9 | iso8601==2.1.0 \ 10 | --hash=sha256:6b1d3829ee8921c4301998c909f7829fa9ed3cbdac0d3b16af2d743aed1ba8df \ 11 | --hash=sha256:aac4145c4dcb66ad8b648a02830f5e2ff6c24af20f4f482689be402db2429242 12 | # via python-subunit 13 | python-subunit==1.4.4 \ 14 | --hash=sha256:1079363131aa1d3f45259237265bc2e61a77e35f20edfb6e3d1d2558a2cdea34 \ 15 | --hash=sha256:27b27909cfb20c3aa59add6ff97471afd869daa3c9035ac7ef5eed8dc394f7a5 16 | # via subunitreporter 17 | subunitreporter==23.8.0 \ 18 | --hash=sha256:af710200f4d6ead8be0420083cfb588e593d72a9734e4d65e1303940a3c7c766 \ 19 | --hash=sha256:d0cdcd8d6d5682864a5165120e91d269a5faf551beba97007371841b2e69b5b3 20 | # via -r requirements/tox.in 21 | wheel==0.45.1 \ 22 | --hash=sha256:661e1abd9198507b1409a20c02106d9670b2576e916d58f520316666abca6729 \ 23 | --hash=sha256:708e7481cc80179af0e556bbf0cc00b8444c7321e2700b8d8580231d13017248 24 | # via -r requirements/tox.in 25 | 26 | # The following packages are considered to be unsafe in a requirements file: 27 | -------------------------------------------------------------------------------- /src/magic_folder/_coverage.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | from 
class _CoverageService(Service):
    """
    A Twisted service that starts process-wide coverage measurement when
    the service starts and flushes the collected data when it stops.
    """
    def startService(self):
        # imported lazily so merely loading this module doesn't require
        # the (optional) coverage package
        import coverage

        Service.startService(self)

        # this doesn't change the shell's notion of the environment, but it
        # makes the test in process_startup() succeed, which is the goal here.
        os.environ["COVERAGE_PROCESS_START"] = ".coveragerc"

        # maybe-start the global coverage, unless it already got started
        self.cov = coverage.process_startup()
        if self.cov is None:
            self.cov = coverage.process_startup.coverage

    def stopService(self):
        """
        Make sure that coverage has stopped; internally, it depends on atexit
        handlers running which doesn't always happen (Twisted's shutdown hook
        also won't run if os._exit() is called, but it runs more often than
        atexit handlers).
        """
        self.cov.stop()
        self.cov.save()


def coverage_service():
    """
    Return a service which will arrange for coverage to be collected (or fail
    if the ``coverage`` package is not installed).
    """
    return _CoverageService()
// to build me:
// pip install seqdiag
// seqdiag --no-transparency invite-diagram-readonly.seqdiag

// Illustration of the flow between Laptop and Phone devices via the
// magic-wormhole mailbox when doing an invitation where the "Phone"
// device will be read-only (that is, not ever be able to write new
// snapshots to the magic-folder)
@attrs(repr=False, slots=True, hash=True)
class _ProvidesValidator:
    """
    attrs validator ensuring an attribute value provides a given
    ``zope.interface`` interface.

    :ivar interface: the ``zope.interface.Interface`` the value must provide.
    """
    interface = attrib()

    def __call__(self, inst, attr, value):
        """
        We use a callable class to be able to change the ``__repr__``.

        :raises TypeError: if ``value`` does not provide ``self.interface``.
        """
        if not self.interface.providedBy(value):
            msg = "'{name}' must provide {interface!r} which {value!r} doesn't.".format(
                name=attr.name, interface=self.interface, value=value
            )
            raise TypeError(
                msg,
                attr,
                self.interface,
                value,
            )

    def __repr__(self):
        # The original returned an empty string, which makes validator
        # instances indistinguishable in attrs error messages and other
        # debugging output; name the interface instead.
        return "<provides validator for interface {!r}>".format(self.interface)
39 | """ 40 | return _ProvidesValidator(interface) 41 | -------------------------------------------------------------------------------- /integration/README: -------------------------------------------------------------------------------- 1 | Integration Tests 2 | ================= 3 | 4 | This directory contains "integration"-style tests for magic-folders. 5 | They are written using the "py.test" framework and use @pytest.fixture functions to set up pre-requisites. 6 | That is, the Tahoe processes necessary to run a grid and some magic-folders instances. 7 | 8 | 9 | Eliot, Deferreds and co-routines 10 | -------------------------------- 11 | 12 | pytest has a special "pytest_twisted.inlineCallbacks" decorator. 13 | eliot has a special "inline_callbacks" decorator. 14 | In older versions, it was possible to decorate a test with both because the pytest version just "marked" the test. 15 | In newer versions of pytest / pytest_twisted, the above is not possible. 16 | 17 | Luckily, it _is_ possible to use `@pytest_twisted.ensureDeferred` alongside the Eliot decorator. 18 | 19 | So, this means **all tests must be coroutines specified with async-def**. 20 | That is:: 21 | 22 | @eliot.twisted.inline_callbacks 23 | @pytest_twisted.ensureDeferred 24 | async def test_foo(fixture, another_fixture): 25 | with eliot.start_action(...): 26 | pass 27 | 28 | 29 | Install and Run 30 | --------------- 31 | 32 | Install: 33 | 34 | pip install -e .[test] 35 | 36 | run: 37 | 38 | py.test -s -v integration/ 39 | 40 | The fixtures also set up a "flogtool gather" process and dump all the 41 | logs from all the running processes (introducer, 5 storage nodes, 42 | alice, bob) to a tempfile. 43 | -------------------------------------------------------------------------------- /src/magic_folder/test/test_magicpath.py: -------------------------------------------------------------------------------- 1 | # Copyright 2020 The Magic-Folder Developers 2 | # See COPYING for details. 
# Copyright 2020 Least Authority TFA GmbH
# See COPYING for details.

"""
Implements the magic-folder show-config command.
"""

import sys
from json import (
    dumps,
)

from nacl.encoding import (
    Base32Encoder,
)

from twisted.internet.defer import (
    succeed,
)


def magic_folder_show_config(config, stdout=None):
    """
    Dump configuration as JSON.

    :param GlobalConfigDatabase config: a magic-folder config directory

    :param stdout: a writable file-like object to print to; defaults to
        ``sys.stdout``

    :returns Deferred[int]: an already-fired Deferred with exit code 0
    """

    def folder_json(mf):
        # Summarize one magic-folder.
        # NOTE(review): the JSON key says "private" but the value encoded
        # here is ``verify_key`` (the public key) -- confirm which is
        # intended; changing either would alter the command's output.
        return {
            "name": mf.name,
            "author_name": mf.author.name,
            "author_private_key": mf.author.verify_key.encode(Base32Encoder).decode("utf8"),
            "stash_path": mf.stash_path.path,
        }

    magic_folders = {
        name: folder_json(config.get_magic_folder(name))
        for name in config.list_magic_folders()
    }
    json = {
        "tahoe_node_directory": config.tahoe_node_directory.path,
        "api_endpoint": config.api_endpoint,
        "api_client_endpoint": config.api_client_endpoint,
        "api_token": config.api_token.decode("utf8"),
        "wormhole_uri": config.wormhole_uri,
        "magic_folders": magic_folders,
    }
    if stdout is None:
        stdout = sys.stdout
    print(dumps(json, indent=4), file=stdout)
    return succeed(0)
3 | """ 4 | 5 | from testtools.matchers import ( 6 | Equals, 7 | ) 8 | from twisted.internet.address import ( 9 | IPv4Address, 10 | IPv6Address, 11 | ) 12 | 13 | from .common import ( 14 | SyncTestCase, 15 | ) 16 | from ..endpoints import ( 17 | client_endpoint_from_address, 18 | ) 19 | 20 | 21 | class AddressTests(SyncTestCase): 22 | """ 23 | Address parsing and serializing 24 | """ 25 | 26 | def test_v4_address(self): 27 | """ 28 | An IPv4Address is converted properly 29 | """ 30 | addr = IPv4Address("TCP", "127.0.0.1", 1234) 31 | description = client_endpoint_from_address(addr) 32 | self.assertThat( 33 | description, 34 | Equals("tcp:127.0.0.1:1234") 35 | ) 36 | 37 | def test_v6_address(self): 38 | """ 39 | An IPv6Address is converted properly 40 | """ 41 | addr = IPv6Address("TCP", "::1", 1234) 42 | description = client_endpoint_from_address(addr) 43 | self.assertThat( 44 | description, 45 | Equals(r"tcp:\:\:1:1234") 46 | ) 47 | 48 | def test_udp_address(self): 49 | """ 50 | An IPv4Address that is UDP is not converted 51 | """ 52 | addr = IPv4Address("UDP", "127.0.0.1", 1234) 53 | description = client_endpoint_from_address(addr) 54 | self.assertThat( 55 | description, 56 | Equals(None) 57 | ) 58 | -------------------------------------------------------------------------------- /docs/limitations.rst: -------------------------------------------------------------------------------- 1 | .. _Known Issues in Magic-Folder: 2 | 3 | Known Issues and Limitations 4 | ============================ 5 | 6 | * If a file enters the "conflicted" state there is no way to get it out 7 | of this state currently (see `Issue 102`_) 8 | 9 | * The status WebSocket (see :ref:`status-api`) endpoint produces a lot of output when there 10 | are lots of files; see `Issue 686`_ for more. 11 | 12 | * Unicode filenames are supported on both Linux and Windows, but on 13 | Linux the local name of a file must be encoded correctly in order 14 | for it to be uploaded. 
The expected encoding is that printed by
  ``python -c "import sys; print(sys.getfilesystemencoding())"``.
@attr.s
@implementer(IStreamServerEndpoint)
class ListenObserver(object):
    """
    Calls .listen on the given endpoint and allows observers to be
    notified when that listen succeeds (or fails).
    """
    # the real endpoint we wrap; must provide IStreamServerEndpoint
    _endpoint = attr.ib(validator=[provides(IStreamServerEndpoint)])
    # Deferreds waiting to hear the outcome of listen()
    _observers = attr.ib(default=attr.Factory(list))
    # cached listen() outcome (a port or a Failure) once known, else None
    _listened_result = attr.ib(default=None)

    def observe(self):
        """
        :returns Deferred: fires with the result of ``listen()`` --
            already fired if the result is known, otherwise when it
            becomes known.
        """
        if self._listened_result is not None:
            return succeed(self._listened_result)
        self._observers.append(Deferred())
        return self._observers[-1]

    def listen(self, protocolFactory):
        """
        ``IStreamServerEndpoint`` API: delegate to the wrapped endpoint
        and arrange to deliver the outcome to all observers.
        """
        d = self._endpoint.listen(protocolFactory)
        d.addBoth(self._deliver_result)
        return d

    def _deliver_result(self, result):
        # Cache the result, then fan it out to every waiting observer.
        # The observer list is swapped out *before* firing so callbacks
        # that re-observe see the cached-result fast path.
        self._listened_result = result
        observers = self._observers
        self._observers = []
        for o in observers:
            o.callback(result)
        if isinstance(result, Failure):
            # we've handled the error -- by passing it off to our
            # observer(s) -- so this chain doesn't need to anymore
            return None
        return result
def magic_folder_initialize(config_dir, listen_endpoint_str, tahoe_node_directory, client_endpoint_str,
                            mailbox_url):
    """
    Initialize a magic-folder daemon configuration with the specified required options in ``config_dir``.

    :param FilePath config_dir: a non-existent directory in which to put configuration

    :param unicode listen_endpoint_str: a Twisted server-string where we
        will listen for REST API requests (e.g. "tcp:1234")

    :param FilePath tahoe_node_directory: the directory containing our
        Tahoe-LAFS client's configuration

    :param unicode client_endpoint_str: Twisted client-string to our API
        (or None to autoconvert the listen_endpoint)

    :param unicode mailbox_url: the WebSocket URL of the Magic
        Wormhole mailbox server to contact for wormhole sessions

    :return Deferred[GlobalConfigDatabase]: the configuration of the
        new instance. Otherwise an appropriate exception is raised.
    """

    if client_endpoint_str is None:
        # derive a client-side endpoint string from the server one
        client_endpoint_str = server_endpoint_str_to_client(listen_endpoint_str)

    cfg = create_global_configuration(
        config_dir,
        listen_endpoint_str,
        tahoe_node_directory,
        client_endpoint_str,
        mailbox_url,
    )

    # synchronous today, but callers expect a Deferred
    return succeed(cfg)
23 | """ 24 | parts = endpoint_description.split(u":") 25 | return _ENDPOINT_CONVERTERS[parts[0].lower()](parts[1:]) 26 | 27 | 28 | def _tcp_to_http_api_root(parts): 29 | """ 30 | Construct an HTTP URL. 31 | """ 32 | port, host = _get_tcpish_parts(parts) 33 | return URL( 34 | scheme=u"http", 35 | host=host, 36 | port=port, 37 | ).get_decoded_url() 38 | 39 | 40 | def _ssl_to_https_api_root(parts): 41 | """ 42 | Construct an HTTPS URL. 43 | """ 44 | port, host = _get_tcpish_parts(parts) 45 | return URL( 46 | scheme=u"https", 47 | host=host, 48 | port=port, 49 | ).get_decoded_url() 50 | 51 | 52 | def _get_tcpish_parts(parts): 53 | """ 54 | Split up the details of an endpoint with a host and port number (like 55 | **tcp** and **ssl**). 56 | """ 57 | port_number = int(parts.pop(0)) 58 | kwargs = dict(part.split(u"=", 1) for part in parts) 59 | interface = kwargs.get(u"interface", u"127.0.0.1") 60 | if interface == u"0.0.0.0": 61 | interface = u"127.0.0.1" 62 | return port_number, interface 63 | 64 | 65 | _ENDPOINT_CONVERTERS = { 66 | u"tcp": _tcp_to_http_api_root, 67 | u"ssl": _ssl_to_https_api_root, 68 | } 69 | -------------------------------------------------------------------------------- /src/magic_folder/test/test_util_attrs_zope.py: -------------------------------------------------------------------------------- 1 | import zope.interface 2 | from attr import ( 3 | Attribute, 4 | ) 5 | from attr._make import ( 6 | NOTHING, 7 | ) 8 | from magic_folder.util.attrs_zope import ( 9 | provides, 10 | ) 11 | from testtools import ( 12 | ExpectedException, 13 | ) 14 | 15 | from .common import ( 16 | SyncTestCase, 17 | ) 18 | 19 | 20 | class IFoo(zope.interface.Interface): 21 | """ 22 | An interface. 23 | """ 24 | 25 | def f(): 26 | """ 27 | A function called f. 
28 | """ 29 | 30 | 31 | def simple_attr(name): 32 | return Attribute( 33 | name=name, 34 | default=NOTHING, 35 | validator=None, 36 | repr=True, 37 | cmp=None, 38 | eq=True, 39 | hash=None, 40 | init=True, 41 | converter=None, 42 | kw_only=False, 43 | inherited=False, 44 | ) 45 | 46 | 47 | class TestProvides(SyncTestCase): 48 | """ 49 | Tests for `provides`. 50 | """ 51 | 52 | def test_success(self): 53 | """ 54 | Nothing happens if value provides requested interface. 55 | """ 56 | 57 | @zope.interface.implementer(IFoo) 58 | class C(object): 59 | def f(self): 60 | pass 61 | 62 | v = provides(IFoo) 63 | v(None, simple_attr("x"), C()) 64 | 65 | def test_fail(self): 66 | """ 67 | Raises `TypeError` if interfaces isn't provided by value. 68 | """ 69 | value = object() 70 | a = simple_attr("x") 71 | 72 | v = provides(IFoo) 73 | with ExpectedException(TypeError): 74 | v(None, a, value) 75 | 76 | def test_repr(self): 77 | """ 78 | Returned validator has a useful `__repr__`. 79 | """ 80 | v = provides(IFoo) 81 | assert ( 82 | "".format( 83 | interface=IFoo 84 | ) 85 | ) == repr(v) 86 | -------------------------------------------------------------------------------- /docs/backdoors.rst: -------------------------------------------------------------------------------- 1 | .. -*- coding: utf-8-with-signature -*- 2 | 3 | Statement on Backdoors 4 | ====================== 5 | 6 | October 5, 2010 7 | 8 | The New York Times has `recently reported`_ that the current U.S. administration is proposing a bill that would apparently, 9 | if passed, 10 | require communication systems to facilitate government wiretapping and access to encrypted data. 11 | 12 | (login required; username/password pairs available at `bugmenot`_). 13 | 14 | .. _recently reported: https://www.nytimes.com/2010/09/27/us/27wiretap.html 15 | .. 
_bugmenot: http://www.bugmenot.com/view/nytimes.com 16 | 17 | Commentary by the `Electronic Frontier Foundation`_, `Peter Suderman / Reason`_, `Julian Sanchez / Cato Institute`_. 18 | 19 | .. _Electronic Frontier Foundation: https://www.eff.org/deeplinks/2010/09/government-seeks 20 | .. _Peter Suderman / Reason: http://reason.com/blog/2010/09/27/obama-administration-frustrate 21 | .. _Julian Sanchez / Cato Institute: http://www.cato-at-liberty.org/designing-an-insecure-internet/ 22 | 23 | The core Magic-Folder developers promise never to change Magic-Folder to facilitate government access to data stored or transmitted by it. 24 | Even if it were desirable to facilitate such access -- 25 | which it is not -- 26 | we believe it would not be technically feasible to do so without severely compromising Magic-Folder's security against other attackers. 27 | There have been many examples in which backdoors intended for use by government have introduced vulnerabilities exploitable by other parties 28 | (a notable example being the Greek cellphone eavesdropping scandal in 2004/5). 29 | RFCs `1984`_ and `2804`_ elaborate on the security case against such backdoors. 30 | 31 | .. _1984: https://tools.ietf.org/html/rfc1984 32 | .. _2804: https://tools.ietf.org/html/rfc2804 33 | 34 | Note that since Magic-Folder is open-source software, 35 | forks by people other than the current core developers are possible. 36 | In that event, 37 | we would try to persuade any such forks to adopt a similar policy. 38 | 39 | The following Magic-Folder developers agree with this statement: 40 | 41 | Jean-Paul Calderone 42 | -------------------------------------------------------------------------------- /requirements/base.in: -------------------------------------------------------------------------------- 1 | # zope.interface >= 3.6.0 is required for Twisted >= 12.1.0. 2 | # zope.interface 3.6.3 and 3.6.4 are incompatible with Nevow (#1435). 
3 | zope.interface >= 3.6.0, != 3.6.3, != 3.6.4 4 | 5 | humanize >= 1.0.0 6 | 7 | eliot 8 | 9 | attrs<24.1.0 10 | 11 | # WebSocket library for twisted and asyncio 12 | # (python 3.8 support ends at 23.1.2) 13 | autobahn >= 19.5.2 14 | 15 | hyperlink 16 | 17 | # Of course, we depend on Twisted. Let Tahoe-LAFS' Twisted dependency 18 | # declaration serve, though. Otherwise we have to be careful to agree on 19 | # which extras to pull in. 20 | # 21 | # Although there are still some allmydata.* imports (so we depend on 22 | # tahoe-lafs) these are confied to allmydata.uri so this dependency is 23 | # open-ended. 24 | # 25 | # See also https://github.com/LeastAuthority/magic-folder/issues/676 which covers 26 | # getting rid of these final imports 27 | 28 | tahoe-lafs >= 1.18.0 29 | 30 | 31 | # something to do with Eliot? 32 | # see https://github.com/tahoe-lafs/magic-folder/issues/765 33 | twisted < 24.7.0 34 | 35 | # twisted-based HTTP client 36 | treq 37 | 38 | # find the default location for configuration on different OSes 39 | platformdirs 40 | 41 | # Python utilities that were originally extracted from tahoe 42 | # We use them directly, rather than the re-exports from allmydata 43 | pyutil >= 3.3.0 44 | 45 | # This is the version of cryptography required by tahoe-lafs 46 | cryptography >= 2.6, <43.0.0 47 | 48 | # last py2 release of klein 49 | klein>=20.6.0 50 | 51 | # Loading old magic-folders config for migration 52 | # Minimum version is the version packaged in the nix snapshot we use. 
53 | #PyYAML >= 5.1.1 54 | 55 | # used for double-checking if a process is actually running when our 56 | # PID-file still exists 57 | psutil >= 5.8.0 58 | filelock >= 3.8.0 59 | 60 | # see https://github.com/agronholm/cbor2/issues/208 61 | cbor2 != 5.6.0 62 | 63 | # somehow "incremental" makes this a req that pip-compile-multi 64 | # doesn't find when pinning (but that --require-hashes mode later 65 | # does find) 66 | tomli 67 | exceptiongroup 68 | importlib-metadata 69 | importlib-resources>=5 70 | 71 | -------------------------------------------------------------------------------- /docs/release-process.rst: -------------------------------------------------------------------------------- 1 | .. _release-process: 2 | 3 | Magic-Folder Release Process 4 | ============================ 5 | 6 | There is no schedule for magic-folders releases. 7 | We endeavor to keep ``main`` as always releasable. 8 | 9 | 10 | Versioning Scheme 11 | ----------------- 12 | 13 | We use a kind of Calendar Versioning (`https://calver.org/`_): 14 | `YY.MM.NN` where these values are: 15 | 16 | * `YY`: the last two digits of the current year; 17 | * `MM`: the two-digit month; 18 | * `NN`: a number that starts at 0 and increases for every release in a given month. 19 | 20 | 21 | API Stability and Compatibility 22 | ------------------------------- 23 | 24 | The recommended API is the HTTP API; there is a command-line wrapper of this called `magic-folder-api` which _should_ be in sync. 25 | There is no supported Python API. 26 | 27 | **Currently we make no stability guarantees.** 28 | 29 | Once we change the above statement, the version numbers in the protocols will be updated upon any breaking changes. 30 | Any such changes will also be noted in the release notes. 31 | 32 | No matter the above statement, anyting in the `/experimental` tree of the HTTP API may change in any release. 
33 | 34 | Integrations should: 35 | * run the Python daemon as a "black box" 36 | * not depend on any on-disc files 37 | * use the HTTP API to communicate 38 | 39 | The `magic-folder-api` command is intended as a convenience around the HTTP API and _should_ be in sync with that API (if it is not, that is a bug). 40 | Generally, this endeavors to return the same information in the same way as the HTTP API itself (usually JSON). 41 | 42 | The `magic-folder` command and sub-commands are mostly intended for "human" use so parsing their output *should not* be considered stable. 43 | For automated use it is preferable to use the "low-level" `magic-folder-api` or the HTTP API instead. 44 | (Please reach out if your needs are not served by the latter). 45 | 46 | 47 | Updating the Version 48 | -------------------- 49 | 50 | The version is stored as signed Git tags. 51 | `setuptools_scm` handles turning the Git tag into a Python version. 52 | 53 | 54 | Making a Release 55 | ================ 56 | 57 | The exact process for creating a release is in the `DEVELOPERS` file. 58 | There are also explicit low-level steps in the top-level `Makefile`. 59 | -------------------------------------------------------------------------------- /src/magic_folder/test/test_strategies.py: -------------------------------------------------------------------------------- 1 | # Copyright 2020 The Magic-Folder Developers 2 | # See COPYING for details. 3 | 4 | """ 5 | Tests for Hypothesis strategies for the test suite. 6 | """ 7 | 8 | from hypothesis import ( 9 | given, 10 | assume, 11 | ) 12 | 13 | from testtools.matchers import ( 14 | Equals, 15 | ) 16 | 17 | from twisted.python.filepath import ( 18 | FilePath, 19 | ) 20 | 21 | from .common import ( 22 | SyncTestCase, 23 | ) 24 | 25 | from .strategies import ( 26 | tahoe_lafs_chk_capabilities, 27 | tahoe_lafs_dir_capabilities, 28 | path_segments, 29 | ) 30 | 31 | class StrategyTests(SyncTestCase): 32 | """ 33 | Tests for various strategies. 
34 | """ 35 | @given(tahoe_lafs_chk_capabilities()) 36 | def test_chk_roundtrips(self, cap): 37 | """ 38 | Values built by ``tahoe_lafs_chk_capabilities`` round-trip through ASCII 39 | and ``allmydata.uri.from_string`` and their ``to_string`` method. 40 | """ 41 | serialized = cap._tahoe_cap.to_string().decode("ascii") 42 | self.assertThat( 43 | cap.danger_real_capability_string(), 44 | Equals(serialized), 45 | ) 46 | 47 | @given(tahoe_lafs_dir_capabilities()) 48 | def test_dir_roundtrips(self, cap): 49 | """ 50 | Values built by ``tahoe_lafs_dir_capabilities`` round-trip through ASCII 51 | and ``allmydata.uri.from_string`` and their ``to_string`` method. 52 | """ 53 | serialized = cap._tahoe_cap.to_string().decode("ascii") 54 | self.assertThat( 55 | cap.danger_real_capability_string(), 56 | Equals(serialized), 57 | ) 58 | 59 | @given(path_segments()) 60 | def test_legal_path_segments(self, name): 61 | """ 62 | Path segments build by ``path_segments`` are legal for use in the 63 | filesystem. 64 | """ 65 | # Try to avoid accidentally scribbling all over the filesystem in the 66 | # test runner's environment if path_segments() ends up building 67 | # unfortunate values (/etc/passwd, /root/.bashrc, etc). 68 | assume(u"../" not in name) 69 | temp = FilePath(self.mktemp()) 70 | temp.makedirs() 71 | 72 | # Now ask the platform if this path is alright or not. 
73 | with temp.child(name).open("w"): 74 | pass 75 | -------------------------------------------------------------------------------- /integration/test_status.py: -------------------------------------------------------------------------------- 1 | import json 2 | import pytest_twisted 3 | from eliot.twisted import ( 4 | inline_callbacks, 5 | ) 6 | 7 | from magic_folder.util.capabilities import ( 8 | Capability, 9 | ) 10 | 11 | from .util import ( 12 | twisted_sleep, 13 | ) 14 | 15 | 16 | @inline_callbacks 17 | @pytest_twisted.ensureDeferred 18 | async def test_multiple_outstanding_downloads(request, reactor, alice, temp_filepath): 19 | """ 20 | The status API shows many outstanding downloads during a simulated 21 | recovery flow. 22 | """ 23 | 24 | filenames = ["one_____", "two_____", "three___"] 25 | magic0 = temp_filepath.child("outstanding0") 26 | magic0.makedirs() 27 | 28 | # create a folder with several files in it 29 | await alice.add(request, "outstanding0", magic0.path, author="laptop") 30 | for fname in filenames: 31 | p = magic0.child(fname) 32 | with p.open("w") as f: 33 | f.write(fname.encode("utf8") * 1024*1024*5) 34 | await alice.add_snapshot("outstanding0", p.path) 35 | 36 | alice_folders = await alice.list_(True) 37 | zero_cap = Capability.from_string(alice_folders["outstanding0"]["upload_dircap"]).to_readonly().danger_real_capability_string() 38 | 39 | # create a folder with no files in it 40 | magic1 = temp_filepath.child("outstanding1") 41 | magic1.makedirs() 42 | await alice.add(request, "outstanding1", magic1.path, author="desktop") 43 | 44 | # add the "other" folder as a participant .. 
simulate recovery 45 | await alice.add_participant("outstanding1", "old", zero_cap) 46 | 47 | # monitor the "downloads" status for up to 10 seconds, and collect 48 | # any relpath's mentiond there -- we should see (at some point) 49 | # everything in "filenames" 50 | start = reactor.seconds() 51 | noticed = set() 52 | while (reactor.seconds() - start < 10) and len(noticed) != len(filenames): 53 | status_data = await alice.status() 54 | status = json.loads(status_data) 55 | downloads = [ 56 | down 57 | for down in status["events"] 58 | if down["kind"].startswith("download-") 59 | ] 60 | noticed = noticed.union({ 61 | d["relpath"] 62 | for d in downloads 63 | }) 64 | await twisted_sleep(reactor, .2) 65 | 66 | print("noticed downloads: {}".format(noticed)) 67 | assert noticed == set(filenames) 68 | -------------------------------------------------------------------------------- /src/magic_folder/list.py: -------------------------------------------------------------------------------- 1 | # Copyright 2020 Least Authority TFA GmbH 2 | # See COPYING for details. 3 | 4 | """ 5 | Implements ```magic-folder list``` command. 6 | """ 7 | 8 | import json 9 | 10 | from eliot.twisted import ( 11 | inline_callbacks, 12 | ) 13 | 14 | @inline_callbacks 15 | def magic_folder_list(reactor, config, client, output, as_json=False, include_secret_information=False): 16 | """ 17 | List folders associated with a node. 18 | 19 | :param GlobalConfigDatabase config: our configuration 20 | 21 | :param MagicFolderClient client: a client that connects to the 22 | magic-folder API. 23 | 24 | :param output: a file-like object to which the output will be written 25 | 26 | :param bool as_json: return answer in JSON 27 | 28 | :param bool include_secret_information: include sensitive private 29 | information (such as long-term keys) if True (default: False). 30 | 31 | :return: JSON response from `GET /v1/magic-folder`. 
32 | """ 33 | mf_info = yield client.list_folders(include_secret_information) 34 | 35 | if as_json: 36 | output.write(u"{}\n".format(json.dumps(mf_info, indent=4))) 37 | return 38 | _list_human(mf_info, output, include_secret_information) 39 | 40 | 41 | def _list_human(info, output, include_secrets): 42 | """ 43 | List our magic-folders for a human user. 44 | """ 45 | if include_secrets: 46 | template = ( 47 | u" location: {magic_path}\n" 48 | u" stash-dir: {stash_path}\n" 49 | u" author: {author[name]} (private_key: {author[signing_key]})\n" 50 | u" collective: {collective_dircap}\n" 51 | u" personal: {upload_dircap}\n" 52 | u" updates: every {poll_interval}s\n" 53 | u" admin: {is_admin}\n" 54 | ) 55 | else: 56 | template = ( 57 | u" location: {magic_path}\n" 58 | u" stash-dir: {stash_path}\n" 59 | u" author: {author[name]} (public_key: {author[verify_key]})\n" 60 | u" updates: every {poll_interval}s\n" 61 | u" admin: {is_admin}\n" 62 | ) 63 | 64 | if info: 65 | output.write(u"Configured magic-folders:\n") 66 | for name, details in info.items(): 67 | output.write(u"{}:\n".format(name)) 68 | output.write(template.format(**details)) 69 | else: 70 | output.write(u"No magic-folders") 71 | -------------------------------------------------------------------------------- /src/magic_folder/util/wrap.py: -------------------------------------------------------------------------------- 1 | 2 | 3 | class _Wrap(object): 4 | """ 5 | A wrapper class that directs calls to a child object, except for 6 | explicit overrides. 
7 | """ 8 | 9 | def __init__(self, original, **kwargs): 10 | self._original = original 11 | self._wrapper_enabled = kwargs.pop("_wrapper_enabled", True) 12 | self._overrides = kwargs 13 | 14 | def enable_wrapper(self): 15 | self._wrapper_enabled = True 16 | 17 | def __getattr__(self, name): 18 | over = self._overrides.get(name, None) 19 | if not self._wrapper_enabled: 20 | over = None 21 | if over is not None: 22 | return over 23 | return getattr(self._original, name) 24 | 25 | 26 | class _DelayedWrap(object): 27 | """ 28 | A wrapper class that directs calls to a child object, except for 29 | explicit overrides. Until .enable_wrapper() is called, it does 30 | _not_ override any calls. 31 | """ 32 | 33 | def __init__(self, original, **kwargs): 34 | self._original = original 35 | self._wrapper_enabled = False 36 | self._overrides = kwargs 37 | 38 | def enable_wrapper(self): 39 | self._wrapper_enabled = True 40 | 41 | def __getattr__(self, name): 42 | over = self._overrides.get(name, None) 43 | if not self._wrapper_enabled: 44 | over = None 45 | if over is not None: 46 | return over 47 | return getattr(self._original, name) 48 | 49 | 50 | def wrap_frozen(original, **kwargs): 51 | """ 52 | :param object original: the original immutable object to wrap 53 | 54 | :param dict kwargs: mapping names to values, all these are 55 | overridden in the returned wrapper. 56 | 57 | :returns: an object that behaves like the original except for the 58 | overridden attributes found in kwargs. 59 | """ 60 | return _Wrap(original, **kwargs) 61 | 62 | 63 | def delayed_wrap_frozen(original, **kwargs): 64 | """ 65 | :param object original: the original immutable object to wrap 66 | 67 | :param dict kwargs: mapping names to values, all these are 68 | overridden in the returned wrapper (after .enabled_wrapper() 69 | is called) 70 | 71 | :returns: an object that behaves like the original. 
def delayed_wrap_frozen(original, **kwargs):
    """
    Wrap the immutable object ``original`` so that the keyword
    overrides in ``kwargs`` only take effect once ``enable_wrapper()``
    is called on the returned object.

    :param object original: the original immutable object to wrap

    :param dict kwargs: mapping of attribute names to override values,
        applied only after ``enable_wrapper()`` is called

    :returns: an object behaving like ``original`` until the wrapper
        is enabled, after which the overrides take precedence.
    """
    wrapper = _DelayedWrap(original, **kwargs)
    return wrapper
dist/magic_folder-`git describe --abbrev=0`-py3-none-any.whl 45 | testmf_venv/bin/magic-folder --version 46 | testmf_venv/bin/magic-folder-api --version 47 | testmf_venv/bin/pip uninstall -y magic_folder 48 | testmf_venv/bin/pip install dist/magic-folder-`git describe --abbrev=0`.tar.gz 49 | testmf_venv/bin/magic-folder --version 50 | testmf_venv/bin/magic-folder-api --version 51 | rm -rf testmf_venv 52 | 53 | release-upload: 54 | twine upload --config-file pypirc-magicfolder dist/magic_folder-`git describe --abbrev=0`-py3-none-any.whl dist/magic_folder-`git describe --abbrev=0`-py3-none-any.whl.asc dist/magic-folder-`git describe --abbrev=0`.tar.gz dist/magic-folder-`git describe --abbrev=0`.tar.gz.asc 55 | git push origin-push main `git describe --abbrev=0` 56 | 57 | coverage: 58 | -coverage erase 59 | MAGIC_FOLDER_HYPOTHESIS_PROFILE=magic-folder-fast coverage run -m twisted.trial magic_folder 60 | coverage combine 61 | git diff main.. | cuv diff - 62 | -------------------------------------------------------------------------------- /integration/test_list.py: -------------------------------------------------------------------------------- 1 | import json 2 | 3 | from twisted.python.filepath import ( 4 | FilePath, 5 | ) 6 | 7 | import pytest_twisted 8 | 9 | from eliot.twisted import ( 10 | inline_callbacks, 11 | ) 12 | from eliot import ( 13 | start_action, 14 | ) 15 | 16 | from . import util 17 | 18 | # see "conftest.py" for the fixtures (e.g. 
"magic_folder") 19 | 20 | @inline_callbacks 21 | @pytest_twisted.ensureDeferred 22 | async def test_list(request, reactor, tahoe_venv, base_dir, introducer_furl, flog_gatherer, wormhole): 23 | """ 24 | 'magic-folder list' happy-path works 25 | """ 26 | 27 | with start_action(action_type=u"integration:test_list:zelda", include_args=[], include_result=False): 28 | zelda = await util.MagicFolderEnabledNode.create( 29 | reactor, 30 | tahoe_venv, 31 | request, 32 | base_dir, 33 | introducer_furl, 34 | flog_gatherer, 35 | name="zelda", 36 | tahoe_web_port="tcp:9982:interface=localhost", 37 | magic_folder_web_port="tcp:19982:interface=localhost", 38 | wormhole_url=wormhole.url, 39 | storage=True, 40 | ) 41 | 42 | output = await util._magic_folder_runner( 43 | reactor, request, "zelda", 44 | [ 45 | "--config", zelda.magic_config_directory, 46 | "list", 47 | ], 48 | ) 49 | assert output.strip() == "No magic-folders" 50 | 51 | magic_dir = FilePath(base_dir).child("zelda-magic") 52 | magic_dir.makedirs() 53 | 54 | output = await util._magic_folder_runner( 55 | reactor, request, "zelda", 56 | [ 57 | "--config", zelda.magic_config_directory, 58 | "add", 59 | "--author", "laptop", 60 | "--name", "workstuff", 61 | magic_dir.path, 62 | ], 63 | ) 64 | 65 | output = await util._magic_folder_runner( 66 | reactor, request, "zelda", 67 | [ 68 | "--config", zelda.magic_config_directory, 69 | "list", 70 | "--json", 71 | ], 72 | ) 73 | data = json.loads(output) 74 | 75 | assert list(data.keys()) == ["workstuff"] 76 | assert data["workstuff"]["name"] == "workstuff" 77 | assert int(data["workstuff"]["poll_interval"]) == 60 78 | assert data["workstuff"]["magic_path"] == magic_dir.path 79 | assert data["workstuff"]["is_admin"] is True 80 | 81 | # make sure we didn't reveal secrets 82 | assert "signing_key" not in data["workstuff"]["author"] 83 | assert "upload_dircap" not in data["workstuff"] 84 | assert "collective_dircap" not in data["workstuff"] 85 | 
@attr.s(frozen=True, order=False)
class PathState(object):
    """
    The filesystem information we use to check if a file has changed.
    """

    # size of the file in bytes (the validator also permits None)
    size = attr.ib(validator=attr.validators.instance_of((int, type(None))))
    # modification time in nanoseconds, or None
    mtime_ns = attr.ib(validator=attr.validators.instance_of((int, type(None))))
    # change (ctime) time in nanoseconds, or None
    ctime_ns = attr.ib(validator=attr.validators.instance_of((int, type(None))))
31 | """ 32 | 33 | is_dir = attr.ib(validator=attr.validators.instance_of(bool)) 34 | is_file = attr.ib(validator=attr.validators.instance_of(bool)) 35 | is_link = attr.ib(validator=attr.validators.instance_of(bool)) 36 | exists = attr.ib(validator=attr.validators.instance_of(bool)) 37 | state = attr.ib( 38 | validator=attr.validators.optional(attr.validators.instance_of(PathState)) 39 | ) 40 | 41 | 42 | def seconds_to_ns(t): 43 | return int(t * 1000000000) 44 | 45 | 46 | def ns_to_seconds(t): 47 | """ 48 | :param int t: nanoseconds 49 | :returns int: the seconds representation of 't' 50 | """ 51 | if t is None: 52 | return None 53 | return int(t) // 1000000000 54 | 55 | 56 | def ns_to_seconds_float(t): 57 | """ 58 | :param float t: nanoseconds 59 | :returns float: the seconds representation of 't' 60 | """ 61 | if t is None: 62 | return None 63 | return float(t) / 1000000000.0 64 | 65 | 66 | def get_pathinfo(path): 67 | # type: (FilePath) -> PathInfo 68 | try: 69 | statinfo = os.lstat(path.path) 70 | mode = statinfo.st_mode 71 | is_file = stat.S_ISREG(mode) 72 | if is_file: 73 | path_state = PathState( 74 | size=statinfo.st_size, 75 | mtime_ns=seconds_to_ns(statinfo.st_mtime), 76 | ctime_ns=seconds_to_ns(statinfo.st_ctime), 77 | ) 78 | else: 79 | path_state = None 80 | return PathInfo( 81 | is_dir=stat.S_ISDIR(mode), 82 | is_file=is_file, 83 | is_link=stat.S_ISLNK(mode), 84 | exists=True, 85 | state=path_state, 86 | ) 87 | except OSError as e: 88 | if e.errno == ENOENT: 89 | return PathInfo( 90 | is_dir=False, 91 | is_file=False, 92 | is_link=False, 93 | exists=False, 94 | state=None, 95 | ) 96 | raise 97 | -------------------------------------------------------------------------------- /misc/build_helpers/update-version.py: -------------------------------------------------------------------------------- 1 | # 2 | # this updates the (tagged) version of the software 3 | # 4 | # we use YY.MM. 
def create_new_version(git):
    """
    Compute the next unused CalVer string (``YY.M.N``) for this
    repository: today's year and month, with the smallest counter that
    does not already exist as a tag.

    :param git: a dulwich ``Repo``
    :returns str: the new version string (no tag is created)
    """
    today = datetime.now()
    taken = existing_tags(git)
    for n in itertools.count():
        candidate = "{}.{}.{}".format(today.year - 2000, today.month, n)
        if candidate not in taken:
            return candidate
72 | now = datetime.now() 73 | s = now.utctimetuple() 74 | ts = int( 75 | time.mktime( 76 | time.struct_time((s.tm_year, s.tm_mon, s.tm_mday, 0, 0, 0, 0, s.tm_yday, 0)) 77 | ) 78 | ) 79 | tag_create( 80 | repo=git, 81 | tag=v.encode("utf8"), 82 | author=author.encode("utf8"), 83 | message="Release {}".format(v).encode("utf8"), 84 | annotated=True, 85 | objectish=b"HEAD", 86 | sign=author.encode("utf8"), 87 | tag_time=ts, 88 | tag_timezone=0, 89 | ) 90 | 91 | print("Tag created locally, it is not pushed") 92 | print("To push it run something like:") 93 | print(" git push origin {}".format(v)) 94 | 95 | 96 | if __name__ == "__main__": 97 | react(lambda r: ensureDeferred(main(r))) 98 | -------------------------------------------------------------------------------- /integration/test_invite.py: -------------------------------------------------------------------------------- 1 | """ 2 | Test invite + join workflow 3 | """ 4 | 5 | from eliot.twisted import ( 6 | inline_callbacks, 7 | ) 8 | import pytest_twisted 9 | 10 | from .util import ( 11 | await_file_contents, 12 | ) 13 | 14 | 15 | @inline_callbacks 16 | @pytest_twisted.ensureDeferred 17 | async def test_invite_join(request, reactor, temp_filepath, alice, bob, wormhole): 18 | """ 19 | - alice creates a new folder 20 | - alice invites bob via wormhole 21 | - bob accepts the invite 22 | - we observe a file added by alice arriving at bob 23 | """ 24 | magic_a = temp_filepath.child("inviter_magic") 25 | magic_a.makedirs() 26 | await alice.add(request, "inviter", magic_a.path, scan_interval=1, poll_interval=1) 27 | print("added alice") 28 | 29 | code, magic_proto, process_transport = await alice.invite("inviter", "bobby") 30 | print(code, magic_proto, process_transport) 31 | 32 | magic_b = temp_filepath.child("invited_magic") 33 | magic_b.makedirs() 34 | await bob.join(code, "invited", magic_b.path, "bob", poll_interval=1, scan_interval=1) 35 | 36 | def cleanup_bob(): 37 | pytest_twisted.blockon(bob.leave("invited")) 38 
| request.addfinalizer(cleanup_bob) 39 | 40 | await magic_proto.exited 41 | print("bob invited to alice's folder") 42 | 43 | # confirm that the folders are paired: 44 | 45 | # second, add something to bob and it should appear in alice 46 | content1 = b"from bobby\n" * 1000 47 | magic_b.child("file_from_bob").setContent(content1) 48 | 49 | await await_file_contents( 50 | magic_a.child("file_from_bob").path, 51 | content1, 52 | ) 53 | 54 | # first, add something to alice and it should appear in bob 55 | content0 = b"from alice\n" * 1000 56 | magic_a.child("file_from_alice").setContent(content0) 57 | 58 | await await_file_contents( 59 | magic_b.child("file_from_alice").path, 60 | content0, 61 | ) 62 | 63 | 64 | @inline_callbacks 65 | @pytest_twisted.ensureDeferred 66 | async def test_invite_then_cancel(request, reactor, temp_filepath, alice, wormhole): 67 | """ 68 | - alice creates a new folder 69 | - alice invites bob via wormhole 70 | - alice cancels the invite 71 | """ 72 | magic_a = temp_filepath.child("cancel_magic") 73 | magic_a.makedirs() 74 | await alice.add(request, "eniac", magic_a.path, scan_interval=1, poll_interval=1) 75 | print("added alice") 76 | 77 | code, magic_proto, process_transport = await alice.invite("eniac", "kay") 78 | print(code, magic_proto, process_transport) 79 | 80 | # ensure we can see it 81 | invites = await alice.list_invites("eniac") 82 | assert len(invites) == 1 83 | 84 | # delete / cancel the invite 85 | await alice.cancel_invite("eniac", invites[0]["id"]) 86 | 87 | # the invite should be gone now 88 | invites = await alice.list_invites("eniac") 89 | assert len(invites) == 0 90 | -------------------------------------------------------------------------------- /src/magic_folder/test/test_util_database.py: -------------------------------------------------------------------------------- 1 | # Copyright 2020 The Magic-Folder Developers 2 | # See COPYING for details. 3 | 4 | """ 5 | Utilties for dealing with sqlite. 
6 | """ 7 | 8 | import sqlite3 9 | 10 | import attr 11 | from testtools import ExpectedException 12 | from testtools.matchers import Equals 13 | 14 | from ..util.database import ( 15 | ClosedDatabase, 16 | LockableDatabase, 17 | RecusiveTransaction, 18 | WithCursorGenerator, 19 | with_cursor, 20 | ) 21 | from .common import SyncTestCase 22 | 23 | 24 | @attr.s 25 | class WithDatabase(object): 26 | _database = attr.ib(converter=LockableDatabase) 27 | 28 | 29 | class LockableDatabaseTests(SyncTestCase): 30 | def test_transaction_when_closed(self): 31 | """ 32 | Trying to call :py:`LockableDatabase.transaction` after the database has 33 | been closed raises py:`ClosedDatabase`. 34 | """ 35 | database = LockableDatabase(sqlite3.connect(":memory:")) 36 | database.close() 37 | 38 | with self.assertRaises(ClosedDatabase): 39 | with database.transaction(): 40 | pass 41 | 42 | 43 | class WithCursorTests(SyncTestCase): 44 | def test_recursive(self): 45 | """ 46 | Trying to call a :py:`with_cursor` decorated function from another such 47 | function raises :py:`RecusiveTransaction`. 48 | """ 49 | 50 | class Config(WithDatabase): 51 | @with_cursor 52 | def inner(self, cursor): 53 | pass 54 | 55 | @with_cursor 56 | def outer(self, cursor): 57 | self.inner() 58 | 59 | config = Config(sqlite3.connect(":memory:")) 60 | 61 | with ExpectedException( 62 | RecusiveTransaction, ".*when calling 'inner'.*from 'outer'.*" 63 | ): 64 | config.outer() 65 | 66 | def test_excption_rollback(self): 67 | """ 68 | Raising an exception from a :py:`with_cursor` decorated function rolls 69 | back the transaction. 
70 | """ 71 | 72 | class Config(WithDatabase): 73 | @with_cursor 74 | def f(self, cursor): 75 | cursor.execute("INSERT INTO [table] VALUES (1)") 76 | raise Exception() 77 | 78 | database = sqlite3.connect(":memory:") 79 | database.execute("CREATE TABLE [table] (value BOOL NOT NULL)") 80 | config = Config(database) 81 | with ExpectedException(Exception): 82 | config.f() 83 | 84 | self.assertThat( 85 | database.execute("SELECT * FROM [table]").fetchall(), 86 | Equals([]), 87 | ) 88 | 89 | def test_generator_function(self): 90 | """ 91 | Decorating a generator function with :py:`with_cursor` fails. 92 | """ 93 | with self.assertRaises(WithCursorGenerator): 94 | 95 | @with_cursor 96 | def f(): 97 | yield 98 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | #! /usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | 4 | from __future__ import ( 5 | absolute_import, 6 | division, 7 | print_function, 8 | ) 9 | 10 | import os 11 | from setuptools import find_packages, setup 12 | 13 | 14 | def load_requirements(filename): 15 | basedir = os.path.dirname(os.path.abspath(__file__)) 16 | with open(os.path.join(basedir, "requirements", filename), "r") as f: 17 | return [ 18 | line.rstrip("\n") 19 | for line in f.readlines() 20 | if not line.startswith(("#", "-r")) and line.rstrip("\n") 21 | ] 22 | 23 | 24 | install_requires = load_requirements("base.in") 25 | test_requires = load_requirements("test.in") 26 | build_requires = load_requirements("build.in") 27 | 28 | 29 | trove_classifiers = [ 30 | "Development Status :: 5 - Production/Stable", 31 | "Environment :: Console", 32 | "Environment :: Web Environment", 33 | "License :: OSI Approved :: GNU General Public License (GPL)", 34 | "License :: DFSG approved", 35 | "License :: Other/Proprietary License", 36 | "Intended Audience :: Developers", 37 | "Intended Audience :: End Users/Desktop", 38 | "Intended 
Audience :: System Administrators", 39 | "Operating System :: Microsoft", 40 | "Operating System :: Microsoft :: Windows", 41 | "Operating System :: Unix", 42 | "Operating System :: POSIX :: Linux", 43 | "Operating System :: POSIX", 44 | "Operating System :: MacOS :: MacOS X", 45 | "Operating System :: OS Independent", 46 | "Natural Language :: English", 47 | "Programming Language :: Python", 48 | "Topic :: Utilities", 49 | "Topic :: System :: Systems Administration", 50 | "Topic :: System :: Filesystems", 51 | "Topic :: System :: Distributed Computing", 52 | "Topic :: Software Development :: Libraries", 53 | "Topic :: System :: Archiving :: Mirroring", 54 | "Topic :: System :: Archiving", 55 | ] 56 | 57 | 58 | setup( 59 | name="magic-folder", 60 | # no version= because setuptools_scm 61 | description="Tahoe-LAFS-based file synchronization", 62 | long_description=open("README.rst", "r").read(), 63 | author="the Tahoe-LAFS developers, the Magic-Folder developers", 64 | author_email="tahoe-dev@tahoe-lafs.org", 65 | url="https://github.com/LeastAuthority/magic-folder/", 66 | license="GNU GPL", # see README.rst -- there is an alternative licence 67 | package_dir={"": "src"}, 68 | packages=find_packages("src") + ["twisted.plugins", "magic_folder.test.plugins"], 69 | classifiers=trove_classifiers, 70 | install_requires=install_requires, 71 | extras_require={ 72 | "test": test_requires, 73 | "build": build_requires, 74 | }, 75 | include_package_data=True, 76 | entry_points={ 77 | "console_scripts": [ 78 | "magic-folder = magic_folder.cli:_entry", 79 | "magic-folder-api = magic_folder.api_cli:_entry", 80 | ], 81 | }, 82 | ) 83 | -------------------------------------------------------------------------------- /src/magic_folder/migrate.py: -------------------------------------------------------------------------------- 1 | # Copyright 2020 Least Authority TFA GmbH 2 | # See COPYING for details. 3 | 4 | """ 5 | Implements the 'magic-folder migrate' command. 
6 | """ 7 | 8 | from twisted.python.filepath import ( 9 | FilePath, 10 | ) 11 | from twisted.internet.defer import ( 12 | succeed, 13 | ) 14 | 15 | from .util.encoding import load_yaml 16 | from .util.capabilities import ( 17 | Capability, 18 | ) 19 | 20 | from .config import ( 21 | create_global_configuration, 22 | ) 23 | from .snapshot import ( 24 | create_local_author, 25 | ) 26 | from .endpoints import ( 27 | server_endpoint_str_to_client, 28 | ) 29 | 30 | 31 | def magic_folder_migrate(config_dir, listen_endpoint_str, tahoe_node_directory, author_name, 32 | client_endpoint_str): 33 | """ 34 | From an existing Tahoe-LAFS 1.14.0 or earlier configuration we 35 | initialize a new magic-folder using the relevant configuration 36 | found there. This cannot invent a listening-endpoint (hence one 37 | must be passed here). 38 | 39 | :param FilePath config_dir: a non-existant directory in which to put configuration 40 | 41 | :param unicode listen_endpoint_str: a Twisted server-string where we 42 | will listen for REST API requests (e.g. "tcp:1234") 43 | 44 | :param FilePath tahoe_node_directory: existing Tahoe-LAFS 45 | node-directory with at least one configured magic folder. 46 | 47 | :param unicode author_name: the name of our author (will be used 48 | for each magic-folder we create from the "other" config) 49 | 50 | :param unicode client_endpoint_str: Twisted client-string to our API 51 | (or None to autoconvert the listen_endpoint) 52 | 53 | :return Deferred[GlobalConfigDatabase]: the newly migrated 54 | configuration or an exception upon error. 55 | """ 56 | 57 | if client_endpoint_str is None: 58 | client_endpoint_str = server_endpoint_str_to_client(listen_endpoint_str) 59 | 60 | config = create_global_configuration( 61 | config_dir, 62 | listen_endpoint_str, 63 | tahoe_node_directory, 64 | client_endpoint_str, 65 | ) 66 | 67 | # now that we have the global configuration we find all the 68 | # configured magic-folders and migrate them. 
69 | magic_folders = load_yaml( 70 | tahoe_node_directory.child("private").child("magic_folders.yaml").open("r"), 71 | ) 72 | for mf_name, mf_config in magic_folders['magic-folders'].items(): 73 | author = create_local_author(author_name) 74 | 75 | config.create_magic_folder( 76 | mf_name, 77 | FilePath(mf_config[u'directory']), 78 | author, 79 | Capability.from_string(mf_config[u'collective_dircap']), 80 | Capability.from_string(mf_config[u'upload_dircap']), 81 | int(mf_config[u'poll_interval']), # is this always available? 82 | # tahoe-lafs's magic-folder implementation didn't have scan-interval 83 | # so use poll-interval for it as well. 84 | int(mf_config[u'poll_interval']), 85 | ) 86 | 87 | return succeed(config) 88 | -------------------------------------------------------------------------------- /integration/test_general_cli.py: -------------------------------------------------------------------------------- 1 | from os.path import join 2 | from json import ( 3 | loads, 4 | ) 5 | import base64 6 | 7 | import pytest_twisted 8 | from eliot.twisted import ( 9 | inline_callbacks, 10 | ) 11 | 12 | from . import util 13 | 14 | # see "conftest.py" for the fixtures (e.g. 
"magic_folder") 15 | 16 | 17 | @inline_callbacks 18 | @pytest_twisted.ensureDeferred 19 | async def test_daemon_inititialize(request, reactor, temp_filepath): 20 | """ 21 | 'magic-folder init' happy-path works 22 | """ 23 | 24 | node_dir = temp_filepath.child("daemon") 25 | tahoe_dir = temp_filepath.child("tahoe") 26 | tahoe_dir.makedirs() 27 | tahoe_dir.child("tahoe.cfg").setContent(b"# a fake config\n") 28 | tahoe_dir.child("node.url").setContent(b"http://localhost:1234/") 29 | 30 | await util._magic_folder_runner( 31 | reactor, request, "daemon", 32 | [ 33 | "--config", node_dir.path, 34 | "init", 35 | "--listen-endpoint", "tcp:1234", 36 | "--node-directory", tahoe_dir.path, 37 | ], 38 | ) 39 | 40 | output = await util._magic_folder_runner( 41 | reactor, request, "daemon", 42 | [ 43 | "--config", node_dir.path, 44 | "show-config", 45 | ], 46 | ) 47 | config = loads(output) 48 | 49 | assert config["api_endpoint"] == "tcp:1234" 50 | assert config["tahoe_node_directory"] == tahoe_dir.path 51 | assert config["magic_folders"] == dict() 52 | # the API token should at least be base64-decodable and result in 32 bytes of entropy 53 | assert len(base64.urlsafe_b64decode(config["api_token"].encode("utf8"))) == 32 54 | 55 | 56 | @inline_callbacks 57 | @pytest_twisted.ensureDeferred 58 | async def test_daemon_migrate(request, reactor, alice, temp_filepath): 59 | """ 60 | 'magic-folder migrate' happy-path works 61 | """ 62 | 63 | node_dir = temp_filepath.child("test-daemon-migrate") 64 | 65 | # if we're depending on a "new" tahoe (which we should) then 66 | # there's no "tahoe magic-folder" to create "legacy" config for us 67 | # to migrate. So, we create an (empty) config. 
68 | with open(join(alice.node_directory, "private", "magic_folders.yaml"), "w") as f: 69 | f.write("magic-folders: {}\n") 70 | 71 | await util._magic_folder_runner( 72 | reactor, request, "migrate", 73 | [ 74 | "--config", node_dir.path, 75 | "migrate", 76 | "--listen-endpoint", "tcp:1234", 77 | "--node-directory", alice.node_directory, 78 | "--author", "test", 79 | ], 80 | ) 81 | 82 | output = await util._magic_folder_runner( 83 | reactor, request, "migrate", 84 | [ 85 | "--config", node_dir.path, 86 | "show-config", 87 | ], 88 | ) 89 | config = loads(output) 90 | 91 | assert config["api_endpoint"] == "tcp:1234" 92 | assert config["magic_folders"] == dict() 93 | # the API token should at least be base64-decodable and result in 32 bytes of entropy 94 | assert len(base64.urlsafe_b64decode(config["api_token"].encode("utf8"))) == 32 95 | -------------------------------------------------------------------------------- /src/magic_folder/endpoints.py: -------------------------------------------------------------------------------- 1 | """ 2 | Utilities related to Twisted endpoint (and endpoint-strings) 3 | """ 4 | 5 | import re 6 | from typing import Optional 7 | 8 | from twisted.internet.address import IPv4Address, IPv6Address 9 | 10 | from twisted.internet.endpoints import ( 11 | _parse as twisted_endpoint_parse, 12 | ) 13 | from twisted.internet.interfaces import IAddress 14 | 15 | 16 | class CannotConvertEndpointError(Exception): 17 | """ 18 | Failed to convert a server endpoint-string into a corresponding 19 | client one. 20 | """ 21 | 22 | def _quote_endpoint_argument(s): 23 | # type: (str) -> str 24 | """ 25 | Twisted endpoint strings cannot contain colon characters inside 26 | individual pieces of the endpoint string (because they're 27 | :-delimited).
28 | 29 | :returns: `s` with all : characters replaced with backslash-: 30 | """ 31 | return re.sub( 32 | r"[\:]", 33 | lambda m: r"\{}".format(m.group(0)), 34 | s 35 | ) 36 | 37 | def client_endpoint_from_address(address): 38 | # type: (IAddress) -> Optional[str] 39 | """ 40 | Turn certain kinds of IAddress into a Twisted client-style 41 | endpoint string. Supports only TCP on IPv4 or IPv6. 42 | 43 | :returns: str like "tcp:<host>:<port>" for an `address` of 44 | type IPv4Address or IPv6Address. None otherwise. 45 | """ 46 | if isinstance(address, (IPv4Address, IPv6Address)) and address.type == "TCP": 47 | return "tcp:{host}:{port}".format( 48 | host=_quote_endpoint_argument(address.host), 49 | port=address.port, 50 | ) 51 | return None 52 | 53 | 54 | def server_endpoint_str_to_client(server_ep): 55 | """ 56 | Attempt to convert a Twisted server endpoint-string into the 57 | corresponding client-type one. 58 | 59 | :returns: a Twisted client endpoint-string 60 | 61 | :raises: CannotConvertEndpointError upon failure 62 | """ 63 | # so .. we could either re-create the code that splits a Twisted 64 | # client/server string into pieces or: 65 | args, kwargs = twisted_endpoint_parse(server_ep) 66 | # the first arg is the "kind" of endpoint, e.g. tcp, ...
67 | kind = args[0] 68 | args = args[1:] 69 | converters = { 70 | "tcp": _tcp_endpoint_to_client, 71 | "unix": _unix_endpoint_to_client, 72 | } 73 | try: 74 | converter = converters[kind] 75 | except KeyError: 76 | raise CannotConvertEndpointError( 77 | "Cannot covert server endpoint of type '{}' to client".format(kind) 78 | ) 79 | return converter(args, kwargs) 80 | 81 | 82 | def _tcp_endpoint_to_client(args, kwargs): 83 | """ 84 | convert a 'tcp:' server endpoint-string to client 85 | """ 86 | host = kwargs.get(u"interface", None) or u"127.0.0.1" 87 | port = args[0] 88 | if port == "0": 89 | return None 90 | return u"tcp:{}:{}".format(host, port) 91 | 92 | 93 | def _unix_endpoint_to_client(args, kwargs): 94 | """ 95 | convert a 'unix:' server endpoint-string to client 96 | """ 97 | address = args[0] 98 | return u"unix:{}".format(address) 99 | -------------------------------------------------------------------------------- /misc/build_helpers/platform-pins.py: -------------------------------------------------------------------------------- 1 | """ 2 | Post-process pip-compile-multi output to handle platform specific dependencies. 3 | 4 | pip-compile (and thus pip-compile-multi) don't support generating lock files 5 | for platforms other than the current one. They suggest[1] generating a lock 6 | file for each environment seperately. 7 | 8 | However, the only platform-specific (transitive) dependencies are 9 | colorama and pywin32 -- and now several others :(. To avoid having to 10 | maintain separate sets of lock files per-platform, we modify the 11 | generated lock files to the platform specific dependencies (using 12 | environment markers). This is based loosely on an idea from [2]. 13 | 14 | We have a hand-generated platform-specific requirements lockfile with appropriate 15 | environment markers. After generating a lock file with pip-compile-multi, we 16 | add a reference to the platform-specific requirements lockfile. 
17 | 18 | [1] https://github.com/jazzband/pip-tools#cross-environment-usage-of-requirementsinrequirementstxt-and-pip-compile 19 | [2] https://github.com/jazzband/pip-tools/issues/826#issuecomment-748459788 20 | """ 21 | from __future__ import absolute_import, division, print_function, unicode_literals 22 | 23 | from twisted.python import usage 24 | from twisted.python.filepath import FilePath 25 | 26 | 27 | class Options(usage.Options): 28 | synopsis = "Usage: platform-pins.py [--remove] BASE_REQUIREMENTS PLATFORM_REQUIREMENTS" 29 | optFlags = [ 30 | ("remove", "", "Remove platform pins."), 31 | ] 32 | def parseArgs(self, base_requirements, platform_requirements): 33 | self["base_requirements"] = FilePath(base_requirements) 34 | self["platform_requirements"] = FilePath(platform_requirements) 35 | 36 | 37 | HEADER = """ 38 | ### THIS IS A GENERATED FILE 39 | # 40 | # Include pinned platform dependencies as neither pip-compile nor 41 | # pip-compile-multi handles them. 42 | # See https://github.com/jazzband/pip-tools/issues/826#issuecomment-748459788 43 | # Run 'tox -e pin-requirements' to regenerate this file. 
44 | """.strip() 45 | 46 | 47 | def main(base_requirements, platform_requirements, remove): 48 | if base_requirements.parent() != platform_requirements.parent(): 49 | print("ERROR: Requirement files must be in the same directory.") 50 | raise SystemExit(1) 51 | 52 | original_reqs = base_requirements.getContent().decode("utf8") 53 | if remove: 54 | lines = original_reqs.splitlines() 55 | # pip-compile-multi generates files with this header 56 | # strip everything before it 57 | while not lines[0].startswith("# SHA1:"): 58 | lines.pop(0) 59 | new_reqs = "\n".join(lines) + "\n" 60 | else: 61 | new_reqs = "\n".join([HEADER, "-r {}".format(platform_requirements.basename()), original_reqs]) 62 | 63 | with base_requirements.open("w") as f: 64 | f.write(new_reqs.encode("utf8")) 65 | 66 | 67 | if __name__ == "__main__": 68 | import sys 69 | config = Options() 70 | try: 71 | config.parseOptions() 72 | except usage.UsageError as e: 73 | print('{}: {}'.format(sys.argv[0], e)) 74 | raise SystemExit(1) 75 | main(**config) 76 | -------------------------------------------------------------------------------- /integration/test_same_files.py: -------------------------------------------------------------------------------- 1 | """ 2 | Testing synchronizing files between participants 3 | """ 4 | 5 | import json 6 | 7 | import pytest_twisted 8 | from eliot.twisted import ( 9 | inline_callbacks, 10 | ) 11 | 12 | from magic_folder.magicpath import ( 13 | magic2path, 14 | ) 15 | from magic_folder.util.capabilities import ( 16 | Capability, 17 | ) 18 | from .util import ( 19 | twisted_sleep, 20 | ) 21 | 22 | 23 | @inline_callbacks 24 | @pytest_twisted.ensureDeferred 25 | async def test_identical_files(request, reactor, temp_filepath, alice, bob): 26 | """ 27 | Create several copies of a moderately-sized file under different 28 | file-names and synchronize it 29 | """ 30 | 31 | magic = temp_filepath 32 | 33 | KILO_OF_DATA = b"I am JPEG data!!" 
* (1024 // 16) 34 | assert len(KILO_OF_DATA) >= 2**10, "isn't actually a kibibyte" 35 | 36 | def create_random_cat_pic(path, kilobytes): 37 | with path.open("w") as f: 38 | for _ in range(kilobytes): 39 | f.write(KILO_OF_DATA) 40 | 41 | print("creating test data") 42 | cat_names = [ 43 | 'zero.jpeg', 44 | 'one.jpeg', 45 | 'two.jpeg', 46 | 'three.jpeg', 47 | 'four.jpeg', 48 | ] 49 | 50 | for top_level in cat_names: 51 | create_random_cat_pic(magic.child(top_level), 256) 52 | 53 | # add this as a new folder 54 | await alice.add(request, "sames", magic.path) 55 | 56 | # perform a scan, which will create LocalSnapshots for all the 57 | # files we already created in the magic-folder (but _not_ upload 58 | # them, necessarily, yet) 59 | print("start scan") 60 | await alice.scan("sames") 61 | print("scan done") 62 | 63 | def find_uploads(data): 64 | pending = [] 65 | for event in data["events"]: 66 | if event["kind"].startswith("upload-"): 67 | pending.append(event) 68 | return pending 69 | 70 | # wait for a limited time to be complete 71 | uploads = 0 72 | for _ in range(10): 73 | data = json.loads(await alice.status()) 74 | uploads = len(find_uploads(data)) 75 | if uploads == 0: 76 | break 77 | await twisted_sleep(reactor, 1) 78 | assert uploads == 0, "Should be finished uploading" 79 | 80 | errors = [ 81 | evt for evt in data["events"] 82 | if evt["kind"] == "error" 83 | ] 84 | assert errors == [], "Expected zero errors: {}".format(errors) 85 | 86 | recent = await alice.client.recent_changes("sames") 87 | actual_cats = {cat["relpath"] for cat in recent} 88 | expected = set(cat_names) 89 | assert expected == actual_cats, "Data mismatch" 90 | 91 | # confirm that we can navigate Collective -> alice and find the 92 | # correct Snapshots (i.e. 
one for every cat-pic) 93 | folders = await alice.list_(True) 94 | 95 | files = await alice.tahoe_client().list_directory( 96 | Capability.from_string(folders["sames"]["upload_dircap"]) 97 | ) 98 | names = { 99 | magic2path(k) 100 | for k in files.keys() 101 | if k not in {"@metadata"} 102 | } 103 | assert expected == names, "Data mismatch" 104 | -------------------------------------------------------------------------------- /docs/index.rst: -------------------------------------------------------------------------------- 1 | 2 | Magic Folder 3 | ============ 4 | 5 | .. Please view a nicely formatted version of this documentation at 6 | https://magic-folder.readthedocs.io/en/latest/ 7 | 8 | Magic Folder is a `Tahoe-LAFS`_ front-end that synchronizes local 9 | directories on two or more clients. It uses a Tahoe-LAFS grid for 10 | storage. A daemon scans for local changes and polls the Tahoe-LAFS 11 | grid for remote changes. Whenever a file is created or changed under 12 | the local directory of one of the clients, the change is propagated to 13 | the grid and then to the other clients. 14 | 15 | Users of this project must be comfortable with the command-line. Users 16 | wanting a GUI should try `Gridsync`_ (which uses Magic Folder behind 17 | the scenes). 18 | 19 | .. warning:: 20 | 21 | Releases are ongoing but we do not yet commit to a particular 22 | stable API. That said, many parts are in place and used by projects 23 | such as `Gridsync`_ (and we do not expect substantial changes). 24 | 25 | We encourage adventurous users and fellow developers to 26 | experiment. Integration is via an authenticated localhost :ref:`http-api`. 27 | 28 | Other participants to a synchronized folder are invited using `Magic Wormhole`_. 29 | This allows the use of easy-to-transcribe (yet still secure) codes to 30 | facilitate end-to-end encrypted communication between the two 31 | devices. (Note this means contacting a `Mailbox Server`_ run by a 32 | third party).
33 | 34 | 35 | Feedback Please 36 | =============== 37 | 38 | We are very interested in feedback on how well this feature works for 39 | you. We welcome suggestions to improve its usability, functionality, 40 | and reliability. Please file issues you find with Magic Folder at the 41 | `GitHub project`_, or chat with us on IRC in the channel 42 | ``#tahoe-lafs`` on ``irc.freenode.net``. 43 | 44 | 45 | History of Magic Folder 46 | ======================= 47 | 48 | The implementation of the "drop-upload" frontend, on which Magic 49 | Folder is based, was written as a prototype at the First International 50 | Tahoe-LAFS Summit in June 2011. In 2015, with the support of a grant 51 | from the `Open Technology Fund`_, it was redesigned and extended to 52 | support synchronization between clients. It should work on all major 53 | platforms. 54 | 55 | Subsequent to that, Magic Folder was made into a stand-alone project 56 | (and daemon) with substantial changes including an improved datamodel 57 | -- allowing support for robust conflict detection, among other 58 | features. Some of this work was supported by an `Open Technology 59 | Fund`_ grant. 60 | 61 | 62 | .. _`Open Technology Fund`: https://www.opentech.fund/ 63 | .. _`Tahoe-LAFS`: https://tahoe-lafs.org/ 64 | .. _`GitHub project`: https://github.com/LeastAuthority/magic-folder 65 | .. _`Gridsync`: https://github.com/gridsync/gridsync/ 66 | .. _`Magic Wormhole`: https://github.com/magic-wormhole/magic-wormhole 67 | .. _`Mailbox Server`: https://github.com/magic-wormhole/magic-wormhole-mailbox-server 68 | 69 | 70 | Contents 71 | -------- 72 | 73 | .. 
toctree:: 74 | :maxdepth: 1 75 | 76 | CODE_OF_CONDUCT 77 | usage 78 | invites 79 | limitations 80 | releases 81 | backdoors 82 | development 83 | interface 84 | config 85 | snapshots 86 | datamodel 87 | downloader 88 | release-process 89 | leif-design 90 | proposed/index 91 | -------------------------------------------------------------------------------- /src/magic_folder/test/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) Least Authority TFA GmbH. 2 | # See COPYING.* for details. 3 | 4 | """ 5 | The unit test package for Magic Folder. 6 | 7 | This also does some test-only related setup. The expectation is that this 8 | code will never be loaded under real usage. 9 | """ 10 | 11 | from sys import ( 12 | stderr, 13 | ) 14 | 15 | from foolscap.logging.incident import IncidentQualifier 16 | 17 | 18 | class NonQualifier(IncidentQualifier, object): 19 | def check_event(self, ev): 20 | return False 21 | 22 | def disable_foolscap_incidents(): 23 | # Foolscap-0.2.9 (at least) uses "trailing delay" in its default incident 24 | # reporter: after a severe log event is recorded (thus triggering an 25 | # "incident" in which recent events are dumped to a file), a few seconds 26 | # of subsequent events are also recorded in the incident file. The timer 27 | # that this leaves running will cause "Unclean Reactor" unit test 28 | # failures. The simplest workaround is to disable this timer. Note that 29 | # this disables the timer for the entire process: do not call this from 30 | # regular runtime code; only use it for unit tests that are running under 31 | # Trial. 32 | #IncidentReporter.TRAILING_DELAY = None 33 | # 34 | # Also, using Incidents more than doubles the test time. So we just 35 | # disable them entirely. 36 | from foolscap.logging.log import theLogger 37 | iq = NonQualifier() 38 | theLogger.setIncidentQualifier(iq) 39 | 40 | # we disable incident reporting for all unit tests. 
41 | disable_foolscap_incidents() 42 | 43 | 44 | def _configure_hypothesis(): 45 | from os import environ 46 | 47 | from hypothesis import ( 48 | HealthCheck, 49 | settings, 50 | ) 51 | 52 | # if you add more profiles here, note that profile names aren't 53 | # namespaced in any way and Hypothesis allows profile name 54 | # collisions to pass silently, then more or less randomly picks 55 | # one definition to use when you try to activate the name. So 56 | # please prefix any other profiles in here with "magic-folder-" 57 | # for a somewhat lower chance of collision. 58 | 59 | settings.register_profile( 60 | "magic-folder-fast", 61 | max_examples=1, 62 | # see magic-folder-ci profile below for justification 63 | suppress_health_check=[ 64 | HealthCheck.too_slow, 65 | ], 66 | deadline=60*10*1000, # _some_ number that's not "forever" (milliseconds) 67 | ) 68 | 69 | settings.register_profile( 70 | "magic-folder-ci", 71 | suppress_health_check=[ 72 | # CPU resources available to CI builds typically varies 73 | # significantly from run to run making it difficult to determine 74 | # if "too slow" data generation is a result of the code or the 75 | # execution environment. Prevent these checks from 76 | # (intermittently) failing tests that are otherwise fine. 77 | HealthCheck.too_slow, 78 | ], 79 | # With the same reasoning, disable the test deadline. 
80 | deadline=60*10*1000, # _some_ number that's not "forever" (milliseconds) 81 | ) 82 | 83 | profile_name = environ.get("MAGIC_FOLDER_HYPOTHESIS_PROFILE", "default") 84 | print("Loading Hypothesis profile {}".format(profile_name), file=stderr) 85 | settings.load_profile(profile_name) 86 | _configure_hypothesis() 87 | 88 | from eliot import to_file 89 | to_file(open("eliot.log", "w", encoding="utf8")) 90 | -------------------------------------------------------------------------------- /requirements/platform.txt: -------------------------------------------------------------------------------- 1 | # This file contains pinned platform specific requirements of magic-folder. 2 | # We add a reference to it to the generated requirement file. 3 | # See https://github.com/jazzband/pip-tools/issues/826#issuecomment-748459788 4 | # The hashes here were generated by running pip-compile on windows, with a file 5 | # containing: 6 | # colorama==0.4.4 7 | # pywin32=228 8 | # colorama is a transitive dependency of tqdm (which, itself, is a dependency of 9 | # magic-wormhole) while pywin32 228 is the last py2 supporting verison of pywin32. 10 | # We add the platform marker manually so we can include this file unconditionally. 
11 | 12 | # in order to figure out the pins, you can use the "hashin" tool with 13 | # the specific packages required -- that is, all the windows-only or 14 | # macos-only ones, essentially: 15 | # 16 | # hashin -r requirements/platform.txt 'pywin32 ; sys_platform == "win32"' 17 | # hashin -r requirements/platform.txt 'colorama ; sys_platform == "win32"' 18 | # hashin -r requirements/platform.txt 'twisted-iocpsupport ; sys_platform == "win32"' 19 | # hashin -r requirements/platform.txt 'atomicwrites ; sys_platform == "win32"' 20 | # 21 | # ...will put the stuff you need in _this_ file 22 | 23 | pywin32==303; sys_platform == "win32" \ 24 | --hash=sha256:2a09632916b6bb231ba49983fe989f2f625cea237219530e81a69239cd0c4559 \ 25 | --hash=sha256:51cb52c5ec6709f96c3f26e7795b0bf169ee0d8395b2c1d7eb2c029a5008ed51 \ 26 | --hash=sha256:5f9ec054f5a46a0f4dfd72af2ce1372f3d5a6e4052af20b858aa7df2df7d355b \ 27 | --hash=sha256:6fed4af057039f309263fd3285d7b8042d41507343cd5fa781d98fcc5b90e8bb \ 28 | --hash=sha256:793bf74fce164bcffd9d57bb13c2c15d56e43c9542a7b9687b4fccf8f8a41aba \ 29 | --hash=sha256:79cbb862c11b9af19bcb682891c1b91942ec2ff7de8151e2aea2e175899cda34 \ 30 | --hash=sha256:7d3271c98434617a11921c5ccf74615794d97b079e22ed7773790822735cc352 \ 31 | --hash=sha256:aad484d52ec58008ca36bd4ad14a71d7dd0a99db1a4ca71072213f63bf49c7d9 \ 32 | --hash=sha256:b1675d82bcf6dbc96363fca747bac8bff6f6e4a447a4287ac652aa4b9adc796e \ 33 | --hash=sha256:c268040769b48a13367221fced6d4232ed52f044ffafeda247bd9d2c6bdc29ca \ 34 | --hash=sha256:d9b5d87ca944eb3aa4cd45516203ead4b37ab06b8b777c54aedc35975dec0dee \ 35 | --hash=sha256:fcf44032f5b14fcda86028cdf49b6ebdaea091230eb0a757282aa656e4732439 36 | twisted-iocpsupport==1.0.2; sys_platform == "win32" \ 37 | --hash=sha256:306becd6e22ab6e8e4f36b6bdafd9c92e867c98a5ce517b27fdd27760ee7ae41 \ 38 | --hash=sha256:3c61742cb0bc6c1ac117a7e5f422c129832f0c295af49e01d8a6066df8cfc04d \ 39 | --hash=sha256:72068b206ee809c9c596b57b5287259ea41ddb4774d86725b19f35bf56aa32a9 \ 40 | 
--hash=sha256:7d972cfa8439bdcb35a7be78b7ef86d73b34b808c74be56dfa785c8a93b851bf \ 41 | --hash=sha256:81b3abe3527b367da0220482820cb12a16c661672b7bcfcde328902890d63323 \ 42 | --hash=sha256:851b3735ca7e8102e661872390e3bce88f8901bece95c25a0c8bb9ecb8a23d32 \ 43 | --hash=sha256:985c06a33f5c0dae92c71a036d1ea63872ee86a21dd9b01e1f287486f15524b4 \ 44 | --hash=sha256:9dbb8823b49f06d4de52721b47de4d3b3026064ef4788ce62b1a21c57c3fff6f \ 45 | --hash=sha256:b435857b9efcbfc12f8c326ef0383f26416272260455bbca2cd8d8eca470c546 \ 46 | --hash=sha256:b76b4eed9b27fd63ddb0877efdd2d15835fdcb6baa745cb85b66e5d016ac2878 \ 47 | --hash=sha256:b9fed67cf0f951573f06d560ac2f10f2a4bbdc6697770113a2fc396ea2cb2565 \ 48 | --hash=sha256:bf4133139d77fc706d8f572e6b7d82871d82ec7ef25d685c2351bdacfb701415 49 | atomicwrites==1.4.0; sys_platform == "win32" \ 50 | --hash=sha256:6d1784dea7c0c8d4a5172b6c620f40b6e4cbfdf96d783691f2e1302a7b88e197 \ 51 | --hash=sha256:ae70396ad1a434f9c7046fd2dd196fc04b12f9e91ffb859164193be8b6168a7a 52 | -------------------------------------------------------------------------------- /docs/CODE_OF_CONDUCT.rst: -------------------------------------------------------------------------------- 1 | Contributor Covenant Code of Conduct 2 | ==================================== 3 | 4 | Our Pledge 5 | ---------- 6 | 7 | In the interest of fostering an open and welcoming environment, we as 8 | contributors and maintainers pledge to making participation in our 9 | project and our community a harassment-free experience for everyone, 10 | regardless of age, body size, disability, ethnicity, sex 11 | characteristics, gender identity and expression, level of experience, 12 | education, socio-economic status, nationality, personal appearance, 13 | race, religion, or sexual identity and orientation. 
14 | 15 | Our Standards 16 | ------------- 17 | 18 | Examples of behavior that contributes to creating a positive environment 19 | include: 20 | 21 | - Using welcoming and inclusive language 22 | - Being respectful of differing viewpoints and experiences 23 | - Gracefully accepting constructive criticism 24 | - Focusing on what is best for the community 25 | - Showing empathy towards other community members 26 | 27 | Examples of unacceptable behavior by participants include: 28 | 29 | - The use of sexualized language or imagery and unwelcome sexual 30 | attention or advances 31 | - Trolling, insulting/derogatory comments, and personal or political 32 | attacks 33 | - Public or private harassment 34 | - Publishing others’ private information, such as a physical or 35 | electronic address, without explicit permission 36 | - Other conduct which could reasonably be considered inappropriate in a 37 | professional setting 38 | 39 | Our Responsibilities 40 | -------------------- 41 | 42 | Project maintainers are responsible for clarifying the standards of 43 | acceptable behavior and are expected to take appropriate and fair 44 | corrective action in response to any instances of unacceptable behavior. 45 | 46 | Project maintainers have the right and responsibility to remove, edit, 47 | or reject comments, commits, code, wiki edits, issues, and other 48 | contributions that are not aligned to this Code of Conduct, or to ban 49 | temporarily or permanently any contributor for other behaviors that they 50 | deem inappropriate, threatening, offensive, or harmful. 51 | 52 | Scope 53 | ----- 54 | 55 | This Code of Conduct applies both within project spaces and in public 56 | spaces when an individual is representing the project or its community. 57 | Examples of representing a project or community include using an 58 | official project e-mail address, posting via an official social media 59 | account, or acting as an appointed representative at an online or 60 | offline event. 
Representation of a project may be further defined and 61 | clarified by project maintainers. 62 | 63 | Enforcement 64 | ----------- 65 | 66 | Instances of abusive, harassing, or otherwise unacceptable behavior may 67 | be reported by contacting the project team at 68 | mf-conduct@leastauthority.com. All complaints will be reviewed and 69 | investigated and will result in a response that is deemed necessary and 70 | appropriate to the circumstances. The project team is obligated to 71 | maintain confidentiality with regard to the reporter of an incident. 72 | Further details of specific enforcement policies may be posted 73 | separately. 74 | 75 | Project maintainers who do not follow or enforce the Code of Conduct in 76 | good faith may face temporary or permanent repercussions as determined 77 | by other members of the project’s leadership. 78 | 79 | Attribution 80 | ----------- 81 | 82 | This Code of Conduct is adapted from the `Contributor 83 | Covenant `__, version 1.4, 84 | available at 85 | https://www.contributor-covenant.org/version/1/4/code-of-conduct.html 86 | 87 | For answers to common questions about this code of conduct, see 88 | https://www.contributor-covenant.org/faq 89 | -------------------------------------------------------------------------------- /integration/test_kitties.py: -------------------------------------------------------------------------------- 1 | """ 2 | Testing synchronizing files between participants 3 | """ 4 | 5 | import json 6 | import random 7 | 8 | import pytest_twisted 9 | from eliot.twisted import ( 10 | inline_callbacks, 11 | ) 12 | 13 | from magic_folder.magicpath import ( 14 | magic2path, 15 | ) 16 | from magic_folder.util.capabilities import ( 17 | Capability, 18 | ) 19 | from .util import ( 20 | twisted_sleep, 21 | ) 22 | 23 | 24 | @inline_callbacks 25 | @pytest_twisted.ensureDeferred 26 | async def test_kittens(request, reactor, temp_filepath, alice): 27 | """ 28 | Create a series of large files -- including in 
sub-directories -- 29 | for an initial, new magic-folder. (This simulates the 'Cat Pics' 30 | test data collection used by GridSync). 31 | """ 32 | 33 | magic = temp_filepath 34 | 35 | KILO_OF_DATA = b"I am JPEG data!!" * (1024 // 16) 36 | assert len(KILO_OF_DATA) >= 2**10, "isn't actually a kibibyte" 37 | 38 | def create_random_cat_pic(path, kilobytes): 39 | with path.open("w") as f: 40 | for _ in range(kilobytes): 41 | f.write(KILO_OF_DATA) 42 | 43 | print("creating test data") 44 | cat_names = [ 45 | 'Garfield.jpeg', 46 | 'Cheshire.jpeg', 47 | 'Grumpy.jpeg', 48 | 'lolcat.jpeg', 49 | 'Waffles.jpeg', 50 | ] 51 | 52 | for top_level in cat_names: 53 | size = random.randrange(200, 356) 54 | create_random_cat_pic(magic.child(top_level), size) 55 | print(" {} {}KiB".format(top_level, size)) 56 | 57 | magic.child("subdir").makedirs() 58 | for sub_level in cat_names: 59 | size = random.randrange(60, 200) 60 | create_random_cat_pic(magic.child("subdir").child(sub_level), size) 61 | print(" subdir/{} {}KiB".format(sub_level, size)) 62 | 63 | # add this as a new folder 64 | await alice.add(request, "kitties", magic.path) 65 | 66 | # perform a scan, which will create LocalSnapshots for all the 67 | # files we already created in the magic-folder (but _not_ upload 68 | # them, necessarily, yet) 69 | print("start scan") 70 | await alice.scan("kitties") 71 | print("scan done") 72 | 73 | def find_uploads(data): 74 | pending = [] 75 | for event in data["events"]: 76 | if event["kind"].startswith("upload-"): 77 | pending.append(event) 78 | return pending 79 | 80 | # wait for a limited time to be complete 81 | uploads = 0 82 | for _ in range(10): 83 | st = await alice.status() 84 | data = json.loads(st.strip()) 85 | uploads = len(find_uploads(data)) 86 | if uploads == 0: 87 | break 88 | await twisted_sleep(reactor, 1) 89 | assert uploads == 0, "Should be finished uploading" 90 | 91 | errors = [ 92 | evt for evt in data["events"] 93 | if evt["kind"] == "error" 94 | ] 95 | assert 
errors == [], "Expected zero errors: {}".format(errors) 96 | 97 | recent = await alice.client.recent_changes("kitties") 98 | actual_cats = {cat["relpath"] for cat in recent} 99 | expected = set(cat_names + ["subdir/{}".format(n) for n in cat_names]) 100 | assert expected == actual_cats, "Data mismatch" 101 | 102 | # confirm that we can navigate Collective -> alice and find the 103 | # correct Snapshots (i.e. one for every cat-pic) 104 | folders = await alice.list_(True) 105 | 106 | files = await alice.tahoe_client().list_directory( 107 | Capability.from_string(folders["kitties"]["upload_dircap"]) 108 | ) 109 | names = { 110 | magic2path(k) 111 | for k in files.keys() 112 | if k not in {"@metadata"} 113 | } 114 | assert expected == names, "Data mismatch" 115 | -------------------------------------------------------------------------------- /.github/workflows/macos.yaml: -------------------------------------------------------------------------------- 1 | name: MacOS Testing 2 | 3 | on: 4 | push: 5 | branches: [ main ] 6 | pull_request: 7 | branches: [ main ] 8 | 9 | jobs: 10 | unit-tests: 11 | runs-on: "macos-14" 12 | strategy: 13 | matrix: 14 | python-version: 15 | - "3.9" 16 | 17 | steps: 18 | - uses: actions/checkout@v2 19 | with: 20 | # Get enough history for the tags we get next to be meaningful. 0 21 | # means all history. 22 | fetch-depth: "0" 23 | # Checkout head of the branch of the PR, or the exact revision 24 | # specified for non-PR builds. 
25 | ref: "${{ github.event.pull_request.head.sha || github.sha }}" 26 | 27 | - name: Set up Python ${{ matrix.python-version }} 28 | uses: actions/setup-python@v2 29 | with: 30 | python-version: ${{ matrix.python-version }} 31 | 32 | - name: Install dependencies 33 | run: | 34 | python -m pip install --upgrade pip 35 | pip install wheel tox eliot-tree cuvner 36 | 37 | - name: Test with tox 38 | env: 39 | MAGIC_FOLDER_HYPOTHESIS_PROFILE: "magic-folder-ci" 40 | run: | 41 | tox -e py39-coverage 42 | 43 | - name: Coverage report 44 | shell: bash 45 | run: git diff origin/main.. | cuv diff - 46 | continue-on-error: true 47 | 48 | - name: Generate eliot-tree. 49 | if: ${{ always() }} 50 | run: | 51 | if [[ -f eliot.log ]]; then 52 | eliot-tree --field-limit=0 eliot.log > eliot-tree.txt 53 | fi 54 | 55 | - name: Upload eliot.log in case of failure 56 | uses: "actions/upload-artifact@v4" 57 | if: ${{ always() }} 58 | with: 59 | if-no-files-found: "warn" 60 | name: unit-test-${{ matrix.os.runs-on }}-${{ matrix.python-version }} 61 | path: "eliot*" 62 | 63 | - uses: codecov/codecov-action@v1 64 | with: 65 | token: "322d708d-8283-4827-b605-ccf02bfecf70" 66 | file: "./coverage.xml" 67 | 68 | integration-tests: 69 | runs-on: "macos-14" 70 | strategy: 71 | matrix: 72 | python-version: 73 | - "3.9" 74 | tahoe-version: 75 | - "1_17" 76 | - "1_18" 77 | - "1_19" 78 | # - "master" 79 | 80 | steps: 81 | - uses: actions/checkout@v2 82 | with: 83 | fetch-depth: "0" 84 | # Checkout head of the branch of the PR, or the exact revision 85 | # specified for non-PR builds. 86 | ref: "${{ github.event.pull_request.head.sha || github.sha }}" 87 | # Get tags not fetched by the checkout action, needed for auto-versioning. 
88 | - run: "git fetch origin +refs/tags/*:refs/tags/*" 89 | - name: Set up Python ${{ matrix.python-version }} 90 | uses: actions/setup-python@v2 91 | with: 92 | python-version: ${{ matrix.python-version }} 93 | - name: Install dependencies 94 | run: | 95 | python -m pip install --upgrade pip 96 | pip install wheel tox eliot-tree 97 | - name: Test with tox 98 | run: | 99 | tox -e integration-${{ matrix.tahoe-version }} 100 | 101 | - name: Generate eliot-tree. 102 | if: ${{ always() }} 103 | run: | 104 | if [[ -f eliot.log ]]; then 105 | eliot-tree --field-limit=0 eliot.log > eliot-tree.txt 106 | fi 107 | 108 | - name: Upload eliot.log in case of failure 109 | uses: "actions/upload-artifact@v4" 110 | if: ${{ always() }} 111 | with: 112 | if-no-files-found: "warn" 113 | name: integration-${{ matrix.os.runs-on }}-${{ matrix.python-version }}-tahoe-${{ matrix.tahoe-version }} 114 | path: "eliot*" 115 | 116 | - uses: codecov/codecov-action@v2 117 | with: 118 | token: "322d708d-8283-4827-b605-ccf02bfecf70" 119 | file: "./coverage.xml" 120 | flags: "integration" 121 | -------------------------------------------------------------------------------- /.github/workflows/windows.yml: -------------------------------------------------------------------------------- 1 | name: Windows Testing 2 | 3 | on: 4 | push: 5 | branches: [ main ] 6 | pull_request: 7 | branches: [ main ] 8 | 9 | jobs: 10 | unit-tests: 11 | 12 | runs-on: "windows-2022" 13 | strategy: 14 | matrix: 15 | python-version: 16 | - "3.9" 17 | 18 | steps: 19 | - name: Check out source code 20 | uses: actions/checkout@v2 21 | with: 22 | # Get enough history for the tags we get next to be meaningful. 0 23 | # means all history. 24 | fetch-depth: "0" 25 | # Checkout head of the branch of the PR, or the exact revision 26 | # specified for non-PR builds. 
27 | ref: "${{ github.event.pull_request.head.sha || github.sha }}" 28 | 29 | - name: Set up Python ${{ matrix.python-version }} 30 | uses: actions/setup-python@v2 31 | with: 32 | python-version: ${{ matrix.python-version }} 33 | 34 | - name: Install dependencies 35 | run: | 36 | python -m pip install --upgrade pip 37 | pip install wheel tox eliot-tree 38 | 39 | - name: Test with tox 40 | run: tox -e py39-coverage 41 | env: 42 | MAGIC_FOLDER_HYPOTHESIS_PROFILE: "magic-folder-ci" 43 | 44 | - name: Upload eliot.log in case of failure 45 | uses: "actions/upload-artifact@v4" 46 | with: 47 | if-no-files-found: "warn" 48 | name: unit-test-${{ matrix.os.runs-on }}-${{ matrix.python-version }} 49 | path: "eliot*" 50 | 51 | - name: Upload coverage report 52 | uses: codecov/codecov-action@v1 53 | with: 54 | token: "322d708d-8283-4827-b605-ccf02bfecf70" 55 | file: "./coverage.xml" 56 | 57 | integration-tests: 58 | 59 | runs-on: "windows-2022" 60 | strategy: 61 | matrix: 62 | python-version: 63 | - "3.9" 64 | 65 | tahoe-version: 66 | - "1_17" 67 | - "1_18" 68 | - "1_19" 69 | # - "master" 70 | 71 | steps: 72 | - name: Check out source code 73 | uses: actions/checkout@v2 74 | with: 75 | # Get enough history for the tags we get next to be meaningful. 0 76 | # means all history. 77 | fetch-depth: "0" 78 | # Checkout head of the branch of the PR, or the exact revision 79 | # specified for non-PR builds. 80 | ref: "${{ github.event.pull_request.head.sha || github.sha }}" 81 | # Get tags not fetched by the checkout action, needed for auto-versioning. 
82 | - run: "git fetch origin +refs/tags/*:refs/tags/*" 83 | 84 | - name: Set up Python ${{ matrix.python-version }} 85 | uses: actions/setup-python@v2 86 | with: 87 | python-version: ${{ matrix.python-version }} 88 | 89 | - name: Install dependencies 90 | run: | 91 | python -m pip install --upgrade pip 92 | pip install wheel tox eliot-tree cuvner 93 | 94 | - name: Test with tox 95 | run: tox -e integration-${{ matrix.tahoe-version }} 96 | 97 | - name: Coverage report 98 | shell: bash 99 | run: git diff origin/main.. | cuv diff - 100 | continue-on-error: true 101 | 102 | - name: Generate eliot-tree. 103 | if: ${{ always() }} 104 | shell: bash 105 | continue-on-error: true 106 | run: | 107 | if [[ -f eliot.log ]]; then 108 | eliot-tree --field-limit=0 eliot.log > eliot-tree.txt 109 | fi 110 | 111 | - name: Upload eliot.log in case of failure 112 | uses: "actions/upload-artifact@v4" 113 | if: ${{ always() }} 114 | with: 115 | if-no-files-found: "warn" 116 | name: integration-${{ matrix.os.runs-on }}-${{ matrix.python-version }}-tahoe-${{ matrix.tahoe-version }} 117 | path: "eliot*" 118 | 119 | - uses: codecov/codecov-action@v2 120 | with: 121 | token: "322d708d-8283-4827-b605-ccf02bfecf70" 122 | file: "./coverage.xml" 123 | flags: "integration" 124 | -------------------------------------------------------------------------------- /docs/proposed/conflict-api.rst: -------------------------------------------------------------------------------- 1 | .. -*- coding: utf-8 -*- 2 | 3 | .. _conflicts: 4 | 5 | Conflict API 6 | ============ 7 | 8 | Audience 9 | -------- 10 | 11 | This document is aimed at programmers working on magic-folder. It is a proposed design. 12 | 13 | 14 | Motivation 15 | ---------- 16 | 17 | Under normal operation, magic-folders scans other participants' Snapshots and reflects those changes locally; sometimes, these changes can conflict. 18 | 19 | It is desirable to have an explicit API for noticing and resolving conflicts. 
20 | The core of magic-folders operation with users is the filesystem. 21 | This makes the filesystem also an API and conflicts must be part of that API. 22 | The command-line interface shall have sub-commands for listing and resolving conficts; these commands will use the HTTP APIs. 23 | 24 | 25 | Filesystem API for Conflicts and Resolution 26 | ------------------------------------------- 27 | 28 | When a Snapshot is found to conflict with a particluar local file (say ``foo``) a "conflict file" is written beside it, reflecting the other participants' content (``foo.conflict-laptop``). 29 | That is, ``foo.conflict-laptop`` indicates that the participant "``laptop``" has a conflicting update 30 | The file ``foo.conflict-laptop`` will contain the downloaded ``.content`` of "``laptop``"'s Snapshot. 31 | The content of ``foo`` remains what it was when the conflict was detected. 32 | (Note that when multiple participants exist it's possible to have multiple ``*.confict-*`` files pertaining to a single local file). 33 | 34 | List Conflicts 35 | ~~~~~~~~~~~~~~ 36 | 37 | One can use normal directory-browsing tools such as ``ls`` to notice conflict files. 38 | 39 | 40 | Resolve a Conflict 41 | ~~~~~~~~~~~~~~~~~~ 42 | 43 | When _all_ ``.confict-*`` files for a given root are deleted, the conflict is deemed resolved. 44 | The resolution is whatever the contents of ```` are currently. 45 | 46 | So, to resolve a conflict as "take theirs", one could run: ``mv foo.conflict-laptop foo`` if there was a single conflict from participant "``laptop``". 47 | 48 | To resolve a conflict as "take mine", one simply deletes ``foo.conflict-laptop`` if there was a single conflict from participant "``laptop``". 
49 | 50 | 51 | 52 | HTTP API for Conflicts and Resolution 53 | ------------------------------------- 54 | 55 | 56 | ``GET /v1/magic-folder//conflicts`` 57 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 58 | 59 | Returns a list (possibly empty) of local filesystem paths corresponding to each Snapshot that is currently in a Conflict state in the given magic-folder. 60 | 61 | Our content is in the path itself. 62 | The conflicting "other" content is in ``.conflict-`` where ```` is the petname of the participant who is provided the conflicted content. 63 | 64 | This endpoint returns a JSON dict mapping any local conflicted ``relpath`` to a list of authors. 65 | Following this example:: 66 | 67 | { 68 | "foo": ["laptop"] 69 | } 70 | 71 | This indicates that a single file ``foo`` has a conflict with a single other participant ``laptop``. 72 | 73 | 74 | 75 | ``POST /v1/magic-folder//resolve-conflict`` 76 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 77 | 78 | A JSON body is passed to this endpoint, following this example:: 79 | 80 | { 81 | "relpath": "local/path", 82 | "resolution": "author" 83 | } 84 | 85 | The ``relpath`` key is required. 86 | It must be a filesystem path relative to the selected magic-folder. 87 | 88 | The ``resolution`` key is required. 89 | It must be the name of a current participant in the given magic-folder. 90 | 91 | It is an error if the given ``relpath`` in the given magic-folder is not currently in a conflicted state. 92 | 93 | If the ``resolution`` is our user-name then all conflict files are deleted new (local) Snapshot is created (with parents corresponding to all conflicting participants). 94 | 95 | If instead the resolution is some other participant, then the content of ``.conflict-`` is moved to ```` and any other conflict files are deleted. 96 | Then a new (local) Snapshot is created (with parents corresponding to all conflicting participants). 
97 | 98 | The response is delayed until the local state tracking the new Snapshot has been created. 99 | 100 | The response code is **CREATED** and the **Content-Type** is ``application/json``. 101 | 102 | The response body follows the form of this example:: 103 | 104 | {} 105 | -------------------------------------------------------------------------------- /misc/build_helpers/run-deprecations.py: -------------------------------------------------------------------------------- 1 | from __future__ import ( 2 | absolute_import, 3 | division, 4 | print_function, 5 | ) 6 | 7 | import sys, os, io, re 8 | from twisted.internet import reactor, protocol, task, defer 9 | from twisted.python.procutils import which 10 | from twisted.python import usage 11 | 12 | # run the command with python's deprecation warnings turned on, capturing 13 | # stderr. When done, scan stderr for warnings, write them to a separate 14 | # logfile (so the buildbot can see them), and return rc=1 if there were any. 15 | 16 | class Options(usage.Options): 17 | optParameters = [ 18 | ["warnings", None, None, "file to write warnings into at end of test run"], 19 | ["package", None, None, "Python package to which to restrict warning collection"] 20 | ] 21 | 22 | def parseArgs(self, command, *args): 23 | self["command"] = command 24 | self["args"] = list(args) 25 | 26 | description = """Run as: 27 | python run-deprecations.py [--warnings=STDERRFILE] [--package=PYTHONPACKAGE ] COMMAND ARGS.. 28 | """ 29 | 30 | class RunPP(protocol.ProcessProtocol): 31 | def outReceived(self, data): 32 | self.stdout.write(data) 33 | sys.stdout.write(data) 34 | def errReceived(self, data): 35 | self.stderr.write(data) 36 | sys.stderr.write(data) 37 | def processEnded(self, reason): 38 | signal = reason.value.signal 39 | rc = reason.value.exitCode 40 | self.d.callback((signal, rc)) 41 | 42 | 43 | def make_matcher(options): 44 | """ 45 | Make a function that matches a line with a relevant deprecation. 
46 | 47 | A deprecation warning line looks something like this:: 48 | 49 | somepath/foo/bar/baz.py:43: DeprecationWarning: Foo is deprecated, try bar instead. 50 | 51 | Sadly there is no guarantee warnings begin at the beginning of a line 52 | since they are written to output without coordination with whatever other 53 | Python code is running in the process. 54 | 55 | :return: A one-argument callable that accepts a string and returns 56 | ``True`` if it contains an interesting warning and ``False`` 57 | otherwise. 58 | """ 59 | pattern = r".*\.py[oc]?:\d+: (Pending)?DeprecationWarning: .*" 60 | if options["package"]: 61 | pattern = r".*/{}/".format( 62 | re.escape(options["package"]), 63 | ) + pattern 64 | expression = re.compile(pattern) 65 | def match(line): 66 | return expression.match(line) is not None 67 | return match 68 | 69 | 70 | @defer.inlineCallbacks 71 | def run_command(main): 72 | config = Options() 73 | config.parseOptions() 74 | 75 | command = config["command"] 76 | if "/" in command: 77 | # don't search 78 | exe = command 79 | else: 80 | executables = which(command) 81 | if not executables: 82 | raise ValueError("unable to find '%s' in PATH (%s)" % 83 | (command, os.environ.get("PATH"))) 84 | exe = executables[0] 85 | 86 | env = os.environ.copy() 87 | env["PYTHONWARNINGS"] = "default::DeprecationWarning" 88 | 89 | pp = RunPP() 90 | pp.d = defer.Deferred() 91 | pp.stdout = io.BytesIO() 92 | pp.stderr = io.BytesIO() 93 | reactor.spawnProcess(pp, exe, [exe] + config["args"], env=env) 94 | (signal, rc) = yield pp.d 95 | 96 | match = make_matcher(config) 97 | 98 | # maintain ordering, but ignore duplicates (for some reason, either the 99 | # 'warnings' module or twisted.python.deprecate isn't quashing them) 100 | already = set() 101 | warnings = [] 102 | def add(line): 103 | if line in already: 104 | return 105 | already.add(line) 106 | warnings.append(line) 107 | 108 | pp.stdout.seek(0) 109 | for line in pp.stdout.readlines(): 110 | if match(line): 111 
| add(line) # includes newline 112 | 113 | pp.stderr.seek(0) 114 | for line in pp.stderr.readlines(): 115 | if match(line): 116 | add(line) 117 | 118 | if warnings: 119 | if config["warnings"]: 120 | with open(config["warnings"], "wb") as f: 121 | print("".join(warnings), file=f) 122 | print("ERROR: %d deprecation warnings found" % len(warnings)) 123 | sys.exit(1) 124 | 125 | print("no deprecation warnings") 126 | if signal: 127 | sys.exit(signal) 128 | sys.exit(rc) 129 | 130 | 131 | task.react(run_command) 132 | -------------------------------------------------------------------------------- /integration/test_add.py: -------------------------------------------------------------------------------- 1 | import pytest_twisted 2 | from twisted.internet.defer import ( 3 | DeferredList, 4 | ) 5 | from eliot.twisted import ( 6 | inline_callbacks, 7 | ) 8 | 9 | from magic_folder.util.capabilities import ( 10 | Capability, 11 | ) 12 | 13 | from .util import await_file_contents, ensure_file_not_created 14 | 15 | 16 | @inline_callbacks 17 | @pytest_twisted.ensureDeferred 18 | async def test_add(request, reactor, alice): 19 | """ 20 | 'magic-folder add' happy-path works 21 | """ 22 | 23 | await alice.add( 24 | request, 25 | "test", 26 | alice.magic_directory, 27 | author="laptop", 28 | ) 29 | 30 | config = await alice.show_config() 31 | 32 | assert "test" in config["magic_folders"] 33 | mf_config = config["magic_folders"]["test"] 34 | assert mf_config["name"] == "test" 35 | assert mf_config["author_name"] == "laptop" 36 | expected_keys = ["stash_path", "author_private_key"] 37 | assert all( 38 | k in mf_config 39 | for k in expected_keys 40 | ) 41 | 42 | 43 | @inline_callbacks 44 | @pytest_twisted.ensureDeferred 45 | async def test_leave(request, reactor, temp_filepath, alice, bob): 46 | """ 47 | After leaving a magic folder, its contents are no longer 48 | synced. 
49 | """ 50 | magic = temp_filepath 51 | original_folder = magic.child("cats") 52 | recover_folder = magic.child("cats_recover") 53 | original_folder.makedirs() 54 | recover_folder.makedirs() 55 | 56 | # add our magic-folder and re-start 57 | await alice.add(request, "original", original_folder.path) 58 | alice_folders = await alice.list_(True) 59 | 60 | content0 = b"zero\n" * 1000 61 | original_folder.child("grumpy").setContent(content0) 62 | await alice.add_snapshot("original", "grumpy") 63 | 64 | await bob.add(request, "recovery", recover_folder.path, cleanup=False) 65 | 66 | # add the 'original' magic-folder as a participant in the 67 | # 'recovery' folder 68 | alice_cap = Capability.from_string(alice_folders["original"]["upload_dircap"]).to_readonly().danger_real_capability_string() 69 | await bob.add_participant("recovery", "alice", alice_cap) 70 | 71 | await await_file_contents( 72 | recover_folder.child("grumpy").path, 73 | content0, 74 | timeout=25, 75 | ) 76 | 77 | await bob.leave("recovery") 78 | 79 | content1 = b"one\n" * 1000 80 | original_folder.child("sylvester").setContent(content1) 81 | await alice.add_snapshot("original", "sylvester") 82 | 83 | await ensure_file_not_created( 84 | recover_folder.child("sylvester").path, 85 | timeout=25, 86 | ) 87 | 88 | 89 | @inline_callbacks 90 | @pytest_twisted.ensureDeferred 91 | async def test_leave_many(request, reactor, temp_filepath, alice): 92 | """ 93 | Many magic-folders can be added and left in rapid succession 94 | 95 | See also https://github.com/LeastAuthority/magic-folder/issues/587 96 | """ 97 | existing = await alice.list_() 98 | assert len(existing) == 0, "why there folders?" 
99 | names = [ 100 | "folder_{}".format(x) 101 | for x in range(10) 102 | ] 103 | 104 | for name in names: 105 | folder = temp_filepath.child(name) 106 | folder.makedirs() 107 | 108 | await alice.add(request, name, folder.path, cleanup=False) 109 | 110 | alice_folders = await alice.list_(True) 111 | assert set(alice_folders.keys()) == set(names) 112 | 113 | # try and ensure that the folders are "doing some work" by adding 114 | # files to them all (sizes are in KiB) 115 | fake_files = ( 116 | ('zero', 100), # 100K 117 | ('one', 10000), # 10M 118 | ) 119 | for fname, size in fake_files: 120 | for name in names: 121 | with temp_filepath.child(name).child(fname).open("wb") as f: 122 | for _ in range(size): 123 | f.write(b"xxxxxxx\n" * (1024 // 8)) 124 | 125 | # initiate a scan on them all 126 | scans = await DeferredList([ 127 | alice.scan_folder(name) 128 | for name in names 129 | ]) 130 | assert all(ok for ok, _ in scans), "at least one scan failed" 131 | 132 | leaves = await DeferredList([ 133 | alice.leave(name) 134 | for name in names 135 | ]) 136 | assert all(ok for ok, _ in leaves), "at least one leave() failed" 137 | 138 | alice_folders = await alice.list_(True) 139 | assert not alice_folders, "should be zero folders" 140 | -------------------------------------------------------------------------------- /src/magic_folder/pid.py: -------------------------------------------------------------------------------- 1 | # Copyright 2022 Least Authority TFA GmbH 2 | # See COPYING for details. 
3 | 4 | import os 5 | import psutil 6 | from contextlib import ( 7 | contextmanager, 8 | ) 9 | # the docs are a little misleading, but this is either WindowsFileLock 10 | # or UnixFileLock depending upon the platform we're currently on 11 | from filelock import FileLock, Timeout 12 | 13 | 14 | class ProcessInTheWay(Exception): 15 | """ 16 | our pidfile points at a running process 17 | """ 18 | 19 | 20 | class InvalidPidFile(Exception): 21 | """ 22 | our pidfile isn't well-formed 23 | """ 24 | 25 | 26 | class CannotRemovePidFile(Exception): 27 | """ 28 | something went wrong removing the pidfile 29 | """ 30 | 31 | 32 | def _pidfile_to_lockpath(pidfile): 33 | """ 34 | internal helper. 35 | :returns FilePath: a path to use for file-locking the given pidfile 36 | """ 37 | return pidfile.sibling("{}.lock".format(pidfile.basename())) 38 | 39 | 40 | def parse_pidfile(pidfile): 41 | """ 42 | :param FilePath pidfile: 43 | :returns tuple: 2-tuple of pid, creation-time as int, float 44 | :raises InvalidPidFile: on error 45 | """ 46 | with pidfile.open("r") as f: 47 | content = f.read().decode("utf8").strip() 48 | try: 49 | pid, starttime = content.split() 50 | pid = int(pid) 51 | starttime = float(starttime) 52 | except ValueError: 53 | raise InvalidPidFile( 54 | "found invalid PID file in {}".format( 55 | pidfile 56 | ) 57 | ) 58 | return pid, starttime 59 | 60 | 61 | @contextmanager 62 | def check_pid_process(pidfile, log, find_process=None): 63 | """ 64 | If another instance appears to be running already, raise an 65 | exception. Otherwise, write our PID + start time to the pidfile 66 | and arrange to delete it upon exit. 67 | 68 | :param FilePath pidfile: the file to read/write our PID from. 
69 | 70 | :param Logger log: a way to tell the user things 71 | 72 | :param Callable find_process: None, or a custom way to get a 73 | Process objet (usually for tests) 74 | """ 75 | find_process = psutil.Process if find_process is None else find_process 76 | lock_path = _pidfile_to_lockpath(pidfile) 77 | 78 | try: 79 | with FileLock(lock_path.path, timeout=2): 80 | # check if we have another instance running already 81 | if pidfile.exists(): 82 | pid, starttime = parse_pidfile(pidfile) 83 | try: 84 | # if any other process is running at that PID, let the 85 | # user decide if this is another magic-older 86 | # instance. Automated programs may use the start-time to 87 | # help decide this (if the PID is merely recycled, the 88 | # start-time won't match). 89 | find_process(pid) 90 | raise Exception( 91 | "A process is already running as PID {}".format(pid) 92 | ) 93 | except psutil.NoSuchProcess: 94 | log.info( 95 | "'{pidpath}' refers to {pid} that isn't running", 96 | pidpath=pidfile.path, 97 | pid=pid, 98 | ) 99 | # nothing is running at that PID so it must be a stale file 100 | pidfile.remove() 101 | 102 | # write our PID + start-time to the pid-file 103 | pid = os.getpid() 104 | starttime = find_process(pid).create_time() 105 | with pidfile.open("w") as f: 106 | f.write("{} {}\n".format(pid, starttime).encode("utf8")) 107 | 108 | except Timeout: 109 | # this "except" matches trying to acquire the lock 110 | raise ProcessInTheWay( 111 | "Another process is still locking {}".format(pidfile.path) 112 | ) 113 | 114 | yield # setup completed, await cleanup 115 | 116 | log.debug("Removing {pidpath}", pidpath=pidfile.path) 117 | try: 118 | with FileLock(lock_path.path, timeout=2): 119 | try: 120 | pidfile.remove() 121 | except Exception as e: 122 | raise CannotRemovePidFile( 123 | "Couldn't remove '{pidfile}': {err}.".format( 124 | pidfile=pidfile.path, 125 | err=e, 126 | ) 127 | ) 128 | 129 | except Timeout: 130 | raise ProcessInTheWay( 131 | "Another process is 
@inline_callbacks
@pytest_twisted.ensureDeferred
async def test_list_tahoe_objects(request, reactor, tahoe_venv, base_dir, introducer_furl,
                                  flog_gatherer, wormhole):
    """
    the 'tahoe-objects' API works concurrently
    (see also ticket #570)
    """

    # one storage-enabled node with both a Tahoe and a magic-folder
    # web port; all folders below live on this single node
    yolandi = await util.MagicFolderEnabledNode.create(
        reactor,
        tahoe_venv,
        request,
        base_dir,
        introducer_furl,
        flog_gatherer,
        name="yolandi",
        tahoe_web_port="tcp:9983:interface=localhost",
        magic_folder_web_port="tcp:19983:interface=localhost",
        wormhole_url=wormhole.url,
        storage=True,
    )
    number_of_folders = 20
    folder_names = ["workstuff{}".format(n) for n in range(number_of_folders)]

    # make a bunch of folders
    for folder_name in folder_names:
        magic_dir = FilePath(base_dir).child(folder_name)
        magic_dir.makedirs()

        await yolandi.client.add_folder(
            folder_name,
            author_name="yolandi",
            local_path=magic_dir,
            poll_interval=10,
            scan_interval=10,
        )

    # concurrently put 1 file into each folder and immediately create
    # a snapshot for it via an API call
    files = []
    for folder_num, folder_name in enumerate(folder_names):
        magic_dir = FilePath(base_dir).child(folder_name)
        with magic_dir.child("a_file_name").open("w") as f:
            # 100 repetitions of a distinct per-folder line, so every
            # folder's file has the same size but different content
            f.write("data {:02d}\n".format(folder_num).encode("utf8") * 100)
        files.append(
            yolandi.client.add_snapshot(
                folder_name,
                "a_file_name",
            )
        )
    # NOTE(review): the add_snapshot Deferreds collected in `files` are
    # never awaited here -- presumably completion is observed indirectly
    # via the polling loop below; confirm none are left dangling at
    # teardown.

    # Each folder should produce [416, 800, 190] for the sizes -- this
    # is (Snapshot-size, content-size and metadata-size) for the one
    # file we've put in. .. except the first one depends on
    # Snapshot's implementation and the last one depends on metadata
    # details, so we only want to assert that they're all the same.
    # expected_results = [[416, 800, 190]] * number_of_folders

    # The "if res else None" clauses below are because we use this in
    # the loop (to potentially succeed early), and some of the results
    # may be empty for a few iterations / seconds
    matches_expected_results = MatchesAll(
        # this says that all the content capabilities (2nd item)
        # should be size 800
        AfterPreprocessing(
            lambda results: [res[1] if res else None for res in results],
            AllMatch(Equals(800))
        ),
        # this says that there should be exactly one thing in the set
        # of all the pairs of the Snapshot (1st item) and metadata
        # (3rd item) sizes .. that is, that all the Snapshot sizes are
        # the same and all the metadata sizes are the same.
        AfterPreprocessing(
            lambda results: {(res[0], res[2]) if res else None for res in results},
            HasLength(1)
        )
    )

    # try for 15 seconds to get what we expect. we're waiting for each
    # of the magic-folders to upload their single "a_file_name" items
    # so that they each have one Snapshot in Tahoe-LAFS
    for _ in range(15):
        await util.twisted_sleep(reactor, 1)
        # query all folders at once; DeferredList yields a list of
        # (ok, result-or-failure) pairs, one per folder
        results = await DeferredList([
            yolandi.client.tahoe_objects(folder_name)
            for folder_name in folder_names
        ])
        # if any of the queries fail, we fail the test
        errors = [
            fail
            for ok, fail in results
            if not ok
        ]
        assert errors == [], "At least one /tahoe-objects query failed"

        actual_results = [
            result
            for ok, result in results
            if ok
        ]
        # exit early if we'll pass the test
        if matches_expected_results.match(actual_results) is None:
            break

    # check the results (the final iteration's actual_results if we
    # never broke out early)
    assert_that(actual_results, matches_expected_results)
30 | """ 31 | yield super(ScanMagicFolder, self).setUp() 32 | 33 | self.magic_path = FilePath(self.mktemp()) 34 | self.magic_path.makedirs() 35 | self.folder_name = "default" 36 | folders = { 37 | self.folder_name: { 38 | "magic-path": self.magic_path, 39 | "author-name": "author", 40 | "poll-interval": 60, 41 | "scan-interval": None, 42 | "admin": True, 43 | } 44 | } 45 | 46 | self.config_dir = FilePath(self.mktemp()) 47 | self.node = MagicFolderNode.create( 48 | reactor, 49 | self.config_dir, 50 | folders=folders, 51 | start_folder_services=False, 52 | ) 53 | 54 | self.folder_config = self.node.global_config.get_magic_folder(self.folder_name) 55 | self.folder_service = self.node.global_service.get_folder_service( 56 | self.folder_name 57 | ) 58 | 59 | self.folder_service.local_snapshot_service.startService() 60 | self.addCleanup(self.folder_service.local_snapshot_service.stopService) 61 | 62 | self.folder_service.uploader_service.startService() 63 | self.addCleanup(self.folder_service.uploader_service.stopService) 64 | 65 | def clean(): 66 | folder = self.node.global_service.get_folder_service(self.folder_name) 67 | return folder.scanner_service._file_factory.finish() 68 | self.addCleanup(clean) 69 | 70 | @inline_callbacks 71 | def test_scan_magic_folder(self): 72 | """ 73 | Scanning a magic folder creates a snapshot of new files. 74 | """ 75 | relpath = "file" 76 | local = self.magic_path.child(relpath) 77 | local.setContent(b"content") 78 | 79 | outcome = yield self.api_cli( 80 | [ 81 | u"scan", 82 | u"--folder", 83 | self.folder_name, 84 | ], 85 | ) 86 | self.assertThat( 87 | outcome.succeeded(), 88 | Equals(True), 89 | ) 90 | 91 | snapshot_paths = self.folder_config.get_all_snapshot_paths() 92 | self.assertThat( 93 | snapshot_paths, 94 | Equals({relpath}), 95 | ) 96 | 97 | @inline_callbacks 98 | def test_scan_magic_folder_missing_name(self): 99 | """ 100 | If a folder is not passed to ``magic-folder-api scan``, 101 | an error is returned. 
102 | """ 103 | relpath = "file" 104 | local = self.magic_path.child(relpath) 105 | local.setContent(b"content") 106 | 107 | outcome = yield self.api_cli( 108 | [ 109 | u"scan", 110 | ], 111 | ) 112 | self.assertThat( 113 | outcome.succeeded(), 114 | Equals(False), 115 | ) 116 | self.assertIn( 117 | "--folder / -n is required", 118 | outcome.stderr, 119 | ) 120 | 121 | @inline_callbacks 122 | def test_poll_magic_folder(self): 123 | """ 124 | Polling a magic folder causes the remote to be downloaded 125 | """ 126 | 127 | outcome = yield self.api_cli( 128 | [ 129 | u"poll", 130 | u"--folder", 131 | self.folder_name, 132 | ], 133 | ) 134 | self.assertThat( 135 | outcome.succeeded(), 136 | Equals(True), 137 | ) 138 | 139 | @inline_callbacks 140 | def test_poll_magic_folder_missing_name(self): 141 | """ 142 | If a folder is not passed to ``magic-folder-api poll``, 143 | an error is returned. 144 | """ 145 | relpath = "file" 146 | local = self.magic_path.child(relpath) 147 | local.setContent(b"content") 148 | 149 | outcome = yield self.api_cli( 150 | [ 151 | u"poll", 152 | ], 153 | ) 154 | self.assertThat( 155 | outcome.succeeded(), 156 | Equals(False), 157 | ) 158 | self.assertIn( 159 | "--folder / -n is required", 160 | outcome.stderr, 161 | ) 162 | -------------------------------------------------------------------------------- /src/magic_folder/test/test_util_file.py: -------------------------------------------------------------------------------- 1 | # Copyright 2020 The Magic-Folder Developers 2 | # See COPYING for details. 3 | 4 | """ 5 | Utilties for dealing with files. 
6 | """ 7 | 8 | import os 9 | import time 10 | 11 | from testtools.matchers import ( 12 | AfterPreprocessing, 13 | Equals, 14 | IsInstance, 15 | LessThan, 16 | MatchesAll, 17 | MatchesStructure, 18 | Not, 19 | ) 20 | from twisted.python import runtime 21 | from twisted.python.filepath import FilePath 22 | 23 | from ..util.file import PathInfo, PathState, get_pathinfo, ns_to_seconds 24 | from .common import SyncTestCase, skipIf 25 | 26 | 27 | class PathInfoTests(SyncTestCase): 28 | def test_dir(self): 29 | """ 30 | :py:`get_pathinfo` returns a :py:`PathInfo` when given a directory. 31 | """ 32 | path = FilePath(self.mktemp()) 33 | path.createDirectory() 34 | path_info = get_pathinfo(path) 35 | self.assertThat( 36 | path_info, 37 | MatchesStructure.byEquality( 38 | is_dir=True, 39 | is_file=False, 40 | is_link=False, 41 | exists=True, 42 | state=None, 43 | ), 44 | ) 45 | 46 | @skipIf( 47 | runtime.platformType == "win32", "windows does not have unprivileged symlinks" 48 | ) 49 | def test_symlink(self): 50 | """ 51 | :py:`get_pathinfo` returns a :py:`PathInfo` when given a symlink. 52 | """ 53 | dest = FilePath(self.mktemp()) 54 | dest.setContent(b"content") 55 | path = FilePath(self.mktemp()) 56 | dest.linkTo(path) 57 | path_info = get_pathinfo(path) 58 | self.assertThat( 59 | path_info, 60 | MatchesStructure.byEquality( 61 | is_dir=False, 62 | is_file=False, 63 | is_link=True, 64 | exists=True, 65 | state=None, 66 | ), 67 | ) 68 | 69 | @skipIf(runtime.platformType == "win32", "windows does not have named pipe files") 70 | def test_fifo(self): 71 | """ 72 | :py:`get_pathinfo` returns a :py:`PathInfo` when given a named pipe. 
73 | """ 74 | path = FilePath(self.mktemp()) 75 | os.mkfifo(path.path) 76 | path_info = get_pathinfo(path) 77 | self.assertThat( 78 | path_info, 79 | MatchesAll( 80 | IsInstance(PathInfo), 81 | MatchesStructure.byEquality( 82 | is_dir=False, 83 | is_file=False, 84 | is_link=False, 85 | exists=True, 86 | state=None, 87 | ), 88 | ), 89 | ) 90 | 91 | def test_non_existant(self): 92 | """ 93 | :py:`get_pathinfo` returns a :py:`PathInfo` when given path that does 94 | not exist. 95 | """ 96 | path = FilePath(self.mktemp()) 97 | path_info = get_pathinfo(path) 98 | self.assertThat( 99 | path_info, 100 | MatchesAll( 101 | IsInstance(PathInfo), 102 | MatchesStructure.byEquality( 103 | is_dir=False, 104 | is_file=False, 105 | is_link=False, 106 | exists=False, 107 | state=None, 108 | ), 109 | ), 110 | ) 111 | 112 | def test_file(self): 113 | """ 114 | :py:`get_pathinfo` returns a :py:`PathInfo` when given regulare file. 115 | """ 116 | # this fails sometimes with int(time.time()) .. I believe 117 | # because there isn't enough resolution to always "round down" 118 | # to the right second when the time is right near the 119 | # boundary. 
This document describes both the process of gathering the information to back up and the process of using it to re-create a similar magic-folder.
It will also contain a write-capability corresponding to the Personal DMD.
Let us assume you have saved the JSON for a single Magic Folder (as returned from the ``magic-folder list`` incantation as above) which looks like::
We use the same name ``"laptop"`` here, but you could choose something different (as long as it's not the name of our local participant, which is ``"desktop"`` in this example).
      - name: Use pip cache
        uses: actions/cache@v4
        with:
          # this job runs on ubuntu; pip's cache lives under
          # ~/.cache/pip (the previous value was a Windows AppData
          # path, so nothing was ever cached here)
          path: ~/.cache/pip
          key: ${{ runner.os }}-pip-${{ hashFiles('**/setup.py') }}
          restore-keys: |
            ${{ runner.os }}-pip-
      - name: Upload eliot.log in case of failure
        uses: "actions/upload-artifact@v4"
        # without always() this step only runs when the job succeeded,
        # so the log was never uploaded on failure (the integration job
        # below already does this correctly)
        if: ${{ always() }}
        with:
          if-no-files-found: "warn"
          name: unit-test-${{ matrix.os.runs-on }}-${{ matrix.python-version }}
          path: "eliot*"
      - name: Use pip cache
        uses: actions/cache@v4
        with:
          # this job runs on ubuntu; pip's cache lives under
          # ~/.cache/pip (the previous value was a Windows AppData
          # path, so nothing was ever cached here)
          path: ~/.cache/pip
          key: ${{ runner.os }}-pip-${{ hashFiles('**/setup.py') }}
          restore-keys: |
            ${{ runner.os }}-pip-
3 | 4 | import re 5 | import sys 6 | from subprocess import ( 7 | Popen, 8 | PIPE, 9 | ) 10 | 11 | from twisted.logger import ( 12 | Logger, 13 | ) 14 | from twisted.python.filepath import ( 15 | FilePath, 16 | ) 17 | from testtools.matchers import ( 18 | Always, 19 | Equals, 20 | Contains, 21 | ContainsDict, 22 | AllMatch, 23 | HasLength, 24 | ) 25 | from twisted.internet.testing import ( 26 | EventLoggingObserver, 27 | ) 28 | from hypothesis import ( 29 | given, 30 | assume, 31 | ) 32 | from hypothesis.strategies import ( 33 | text, 34 | ) 35 | 36 | from .common import ( 37 | SyncTestCase, 38 | ) 39 | from ..pid import ( 40 | check_pid_process, 41 | InvalidPidFile, 42 | ProcessInTheWay, 43 | _pidfile_to_lockpath, 44 | ) 45 | 46 | 47 | class _FakeProcess: 48 | """ 49 | Enough of psutil.Process to test check_pid_process 50 | """ 51 | running = True 52 | 53 | def __init__(self, pid): 54 | self.pid = pid 55 | 56 | def create_time(self): 57 | return 123.4 58 | 59 | def terminate(self): 60 | self.running = False 61 | 62 | 63 | class TestPidObserver(SyncTestCase): 64 | """ 65 | Confirm operation of magic_folder.pid functions 66 | """ 67 | 68 | def test_happy(self): 69 | """ 70 | normal operation of pid-file writing 71 | """ 72 | pidfile = FilePath(self.mktemp()) 73 | log = Logger() 74 | with check_pid_process(pidfile, log, find_process=_FakeProcess): 75 | self.assertThat( 76 | pidfile.exists(), 77 | Equals(True), 78 | ) 79 | self.assertThat( 80 | pidfile.exists(), 81 | Equals(False), 82 | ) 83 | 84 | def test_not_running(self): 85 | """ 86 | a pid-file refers to a non-running process 87 | """ 88 | pidfile = FilePath(self.mktemp()) 89 | pidfile.setContent(b"65537 1234.5") # "impossible" process-id .. right? 
90 | obs = EventLoggingObserver() 91 | log = Logger() 92 | log.observer = obs 93 | with check_pid_process(pidfile, log): 94 | pass 95 | 96 | events = list(obs) 97 | 98 | # both logged events should have a "pidpath" kwarg 99 | self.assertThat(events, HasLength(2)) 100 | self.assertThat( 101 | events, 102 | AllMatch( 103 | ContainsDict({ 104 | "pidpath": Always(), 105 | }), 106 | ) 107 | ) 108 | 109 | def test_existing(self): 110 | """ 111 | a pid-file refers to a running process so we should exit 112 | """ 113 | pidfile = FilePath(self.mktemp()) 114 | pidfile.setContent(b"0 0.0\n") 115 | obs = EventLoggingObserver() 116 | log = Logger() 117 | log.observer = obs 118 | 119 | with self.assertRaises(Exception) as ctx: 120 | with check_pid_process(pidfile, log, find_process=_FakeProcess): 121 | pass 122 | self.assertThat( 123 | str(ctx.exception), 124 | Contains("already running") 125 | ) 126 | 127 | good_file_content_re = re.compile(r"\w[0-9]*\w[0-9]*\w") 128 | 129 | @given(text()) 130 | def test_invalid_pidfile(self, bad_content): 131 | """ 132 | an invalid PID file produces and error 133 | """ 134 | assume(not self.good_file_content_re.match(bad_content)) 135 | bad_content = b"not pids" 136 | pidfile = FilePath("pidfile") 137 | pidfile.setContent(bad_content) 138 | 139 | with self.assertRaises(InvalidPidFile): 140 | with check_pid_process(pidfile, Logger()): 141 | pass 142 | 143 | 144 | class PidFileLocking(SyncTestCase): 145 | """ 146 | The pidfile locking implementation prevents multiple processes 147 | from opening the file at once 148 | """ 149 | 150 | def test_locking(self): 151 | """ 152 | Fail to create a pidfile if another process has the lock already. 153 | """ 154 | # this can't just be "our" process because the locking library 155 | # allows the same process to acquire a lock multiple times. 
156 | pidfile = FilePath(self.mktemp()) 157 | lockfile = _pidfile_to_lockpath(pidfile) 158 | 159 | with open("other_lock.py", "w") as f: 160 | f.write( 161 | "\n".join([ 162 | "import filelock, time, sys", 163 | "with filelock.FileLock(sys.argv[1], timeout=1):", 164 | " sys.stdout.write('.\\n')", 165 | " sys.stdout.flush()", 166 | " time.sleep(10)", 167 | ]) 168 | ) 169 | proc = Popen( 170 | [sys.executable, "other_lock.py", lockfile.path], 171 | stdout=PIPE, 172 | stderr=PIPE, 173 | ) 174 | # make sure our subprocess has had time to acquire the lock 175 | # for sure (from the "." it prints) 176 | proc.stdout.read(2) 177 | 178 | # acquiring the same lock should fail; it is locked by the subprocess 179 | with self.assertRaises(ProcessInTheWay): 180 | with check_pid_process(pidfile, Logger()): 181 | pass 182 | proc.terminate() 183 | -------------------------------------------------------------------------------- /src/magic_folder/test/test_common.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) Least Authority TFA GmbH. 2 | # See COPYING.* for details. 3 | 4 | """ 5 | Tests for ``magic_folder.test.common``. 6 | """ 7 | 8 | import os 9 | import os.path 10 | from sys import ( 11 | modules, 12 | ) 13 | from stat import ( 14 | S_IWUSR, 15 | S_IWGRP, 16 | S_IWOTH, 17 | ) 18 | 19 | from unittest import ( 20 | TestCase, 21 | ) 22 | 23 | from testtools import ( 24 | ExpectedException, 25 | ) 26 | from testtools.matchers import ( 27 | Not, 28 | Contains, 29 | Equals, 30 | ) 31 | 32 | from .common import ( 33 | SyncTestCase, 34 | skipIf 35 | ) 36 | from ..common import ( 37 | atomic_makedirs, 38 | ) 39 | 40 | from twisted.python.filepath import ( 41 | Permissions, 42 | FilePath, 43 | ) 44 | 45 | from twisted.python import runtime 46 | 47 | 48 | class SyncTestCaseTests(TestCase): 49 | """ 50 | Tests for ``magic_folder.test.common.SyncTestCase``. 
51 | """ 52 | def setUp(self): 53 | self.case = SyncTestCase() 54 | 55 | def test_mktemp_str(self): 56 | """ 57 | ``SyncTestCase.mktemp`` returns ``str``. 58 | """ 59 | tmp = self.case.mktemp() 60 | self.assertTrue( 61 | isinstance(tmp, str), 62 | "Expected str but {!r} is instance of {}".format( 63 | tmp, 64 | type(tmp), 65 | ), 66 | ) 67 | 68 | def test_mktemp_identifies_case(self): 69 | """ 70 | ``SyncTestCase.mktemp`` returns a path associated with the selected test. 71 | """ 72 | tmp = self.case.mktemp() 73 | actual_segments = tmp.split(os.sep) 74 | case_segments = [ 75 | "magic_folder", 76 | "test", 77 | "common", 78 | ] 79 | self.assertTrue( 80 | is_sublist(case_segments, actual_segments), 81 | "Expected to find {!r} in temporary path {!r}".format( 82 | case_segments, 83 | actual_segments, 84 | ), 85 | ) 86 | 87 | def test_parent_is_directory(self): 88 | """ 89 | ``SyncTestCase.mktemp`` returns a path the parent of which exists and is a 90 | directory. 91 | """ 92 | tmp = self.case.mktemp() 93 | self.assertTrue( 94 | os.path.isdir(os.path.split(tmp)[0]), 95 | "Expected parent of {!r} to be a directory".format(tmp), 96 | ) 97 | 98 | @skipIf(runtime.platformType == "win32", "windows does not have unix-like permissions") 99 | def test_parent_writeable(self): 100 | """ 101 | ``SyncTestCase.mktemp`` returns a path the parent of which is writeable 102 | only by the owner. 103 | """ 104 | tmp = self.case.mktemp() 105 | stat = os.stat(os.path.split(tmp)[0]) 106 | self.assertEqual( 107 | stat.st_mode & (S_IWUSR | S_IWGRP | S_IWOTH), 108 | S_IWUSR, 109 | "Expected parent permissions to allow only owner writes, " 110 | "got {} instead.".format( 111 | Permissions(stat.st_mode), 112 | ), 113 | ) 114 | 115 | def test_does_not_exist(self): 116 | """ 117 | ``SyncTestCase.mktemp`` returns a path which does not exist. 
def is_sublist(needle, haystack):
    """
    Determine if a list exists as a contiguous sublist of another list.

    :param list needle: The list to seek.
    :param list haystack: The list in which to seek.
    :return bool: ``True`` if and only if ``needle`` is a sublist of
        ``haystack``.
    """
    # the last valid start index is len(haystack) - len(needle);
    # range() is exclusive, hence "+ 1" -- without it (the previous
    # off-by-one) a needle sitting at the very end of the haystack
    # was never found
    for i in range(len(haystack) - len(needle) + 1):
        if needle == haystack[i:i + len(needle)]:
            return True
    return False
--------------------------------------------------------------------------------
"""
Tools aimed at the interaction between tests and Eliot.
"""

__all__ = [
    "RUN_TEST",
    "EliotLoggedRunTest",
    "eliot_logged_test",
]

from functools import (
    wraps,
    partial,
)

import attr

from eliot import (
    ActionType,
    Field,
)
from eliot.testing import capture_logging
from eliot.twisted import DeferredContext

from twisted.internet.defer import (
    maybeDeferred,
)

# Eliot field carrying the fully-qualified test identifier (``TestCase.id()``).
_NAME = Field.for_types(
    u"name",
    [str],
    u"The name of the test.",
)

# The Eliot action type wrapped around every test invocation; it carries only
# the test name and has no result fields.
RUN_TEST = ActionType(
    u"run-test",
    [_NAME],
    [],
    u"A test is run.",
)


def eliot_logged_test(f):
    """
    Decorate a test method to run in a dedicated Eliot action context.

    The action will finish after the test is done (after the returned Deferred
    fires, if a Deferred is returned).  It will note the name of the test
    being run.

    All messages emitted by the test will be validated.  They will still be
    delivered to the global logger.
    """
    # A convenient, mutable container into which nested functions can write
    # state to be shared among them.
    class storage(object):
        pass

    @wraps(f)
    def run_and_republish(self, *a, **kw):
        # Unfortunately the only way to get at the global/default logger...
        # This import is delayed here so that we get the *current* default
        # logger at the time the decorated function is run.
        from eliot._output import _DEFAULT_LOGGER as default_logger

        def republish():
            # The memory logger captures both the raw messages and the
            # serializer.  We pass them both to the global logger, so that
            # the expected serialization is preserved.
            logger = storage.logger
            for msg, serializer in zip(logger.messages, logger.serializers):
                # NOTE(review): the original comment here spoke of copying
                # the message before the capture_logging cleanup serializes
                # it, but no copy is made -- the message is written to the
                # default logger as-is.  Confirm that this double use of
                # ``msg`` is safe.
                default_logger.write(msg, serializer)

        @capture_logging(None)
        def run(self, logger):
            # Record the MemoryLogger for later message extraction.
            storage.logger = logger
            # Give the test access to the logger as well.  It would be just
            # fine to pass this as a keyword argument to `f` but implementing
            # that now will give me conflict headaches so I'm not doing it.
            self.eliot_logger = logger
            # Arrange for all messages written to the memory logger that
            # `capture_logging` installs to be re-written to the global/default
            # logger so they might end up in a log file somewhere, if someone
            # wants. This has to be done *before* the cleanup function capture_logging
            # runs, as that function can serialize all, some or none of the captured
            # messages.
            self.addCleanup(republish)
            return f(self, *a, **kw)


        # Begin an action that should comprise all messages from the decorated
        # test method.
        with RUN_TEST(name=self.id()).context() as action:
            # When the test method Deferred fires, the RUN_TEST action is
            # done.  However, we won't have re-published the MemoryLogger
            # messages into the global/default logger when this Deferred
            # fires.  So we need to delay finishing the action until that has
            # happened.  Record the action so we can do that.
            storage.action = action

            # Support both Deferred-returning and non-Deferred-returning
            # tests.
            return DeferredContext(maybeDeferred(run, self)).addActionFinish()

    return run_and_republish


@attr.s
class EliotLoggedRunTest(object):
    """
    A *RunTest* implementation which surrounds test invocation with an
    Eliot-based action.

    This *RunTest* composes with another for convenience.

    :ivar case: The test case to run.

    :ivar handlers: Pass-through for the wrapped *RunTest*.
    :ivar last_resort: Pass-through for the wrapped *RunTest*.

    :ivar _run_tests_with_factory: A factory for the other *RunTest*.
    """
    _run_tests_with_factory = attr.ib()
    case = attr.ib()
    handlers = attr.ib(default=None)
    last_resort = attr.ib(default=None)

    @classmethod
    def make_factory(cls, delegated_run_test_factory):
        """
        Create a *RunTest* factory which wraps the given *RunTest* factory
        with this class's Eliot action behavior.
        """
        return partial(cls, delegated_run_test_factory)

    @property
    def eliot_logger(self):
        # Proxy the logger to the wrapped test case so that
        # ``eliot_logged_test`` (which assigns ``self.eliot_logger``) stores
        # it where the test can see it.
        return self.case.eliot_logger

    @eliot_logger.setter
    def eliot_logger(self, value):
        self.case.eliot_logger = value

    def addCleanup(self, *a, **kw):
        # Pass-through so ``eliot_logged_test`` can register cleanups as if
        # this object were the test case itself.
        return self.case.addCleanup(*a, **kw)

    def id(self):
        # Pass-through: the RUN_TEST action is named after the wrapped
        # case's id.
        return self.case.id()

    @eliot_logged_test
    def run(self, result=None):
        """
        Run the wrapped case with the delegated *RunTest*, inside the
        ``RUN_TEST`` Eliot action applied by the decorator.
        """
        return self._run_tests_with_factory(
            self.case,
            self.handlers,
            self.last_resort,
        ).run(result)
--------------------------------------------------------------------------------
/CREDITS:
--------------------------------------------------------------------------------
This is at least a partial credits-file of people that have contributed to
the Tahoe-LAFS project. It is formatted to allow easy grepping and
beautification by scripts. The fields are: name (N), email (E),
web-address (W), PGP key ID and fingerprint (P), physical location (S),
and description (D).

Thanks.
8 | ---------- 9 | 10 | N: Brian Warner 11 | E: warner@lothar.com 12 | P: A476 E2E6 1188 0C98 5B3C 3A39 0386 E81B 11CA A07A 13 | D: main developer 14 | 15 | N: Zooko 16 | E: zooko@zooko.com 17 | D: main developer 18 | 19 | N: Daira Hopwood 20 | E: daira@jacaranda.org 21 | P: 3D6A 08E9 1262 3E9A 00B2 1BDC 067F 4920 98CF 2762 22 | S: Manchester, UK 23 | D: main developer 24 | 25 | N: Faried Nawaz 26 | E: self@node.pk 27 | W: http://www.hungry.com/~fn/ 28 | P: 0x09ECEC06, 19 41 1B 3E 25 98 F5 0A 0D 50 F9 37 1B 98 1A FF 09 EC EC 06 29 | D: added private publish, added display of file size 30 | 31 | N: Arno Waschk 32 | E: hamamatsu@gmx.de 33 | W: www.arnowaschk.de 34 | D: improve logging, documentation, testing/porting/packaging (cygwin) 35 | 36 | N: Arc O Median 37 | D: bug reports 38 | 39 | N: RobK 40 | D: code 41 | 42 | N: Nathan Wilcox 43 | E: nejucomo@gmail.com 44 | D: unit tests, attack example, documentation, Linux FUSE interface 45 | 46 | N: Mike Booker 47 | D: documentation (README.win32) 48 | 49 | N: David Reid 50 | E: dreid@dreid.org 51 | D: make the provisioning page work in py2exe and py2app packages 52 | 53 | N: Paul Gerhardt 54 | D: don't emit error to stdout when testing for error in make check-deps 55 | 56 | N: Armin Rigo 57 | D: Linux FUSE interface "b" 58 | 59 | N: Justin Boreta 60 | D: user testing and docs 61 | 62 | N: Chris Galvan 63 | E: cgalvan@enthought.com 64 | D: packaging, buildbot 65 | 66 | N: François Deppierraz 67 | E: francois@ctrlaltdel.ch 68 | D: encodings, duplicity, debugging, FUSE, docs, FreeBSD, WUI, ARM, NEWS 69 | 70 | N: Larry Hosken 71 | E: tahoe@lahosken.san-francisco.ca.us 72 | D: make cp -r ignore dangling symlinks 73 | 74 | N: Toby Murray 75 | E: toby.murray@comlab.ox.ac.uk 76 | D: add web.ambient_upload_authority option, remove it 77 | 78 | N: Shawn Willden 79 | E: shawn-tahoe@willden.org 80 | D: mathematical analysis, code review, Win32 documentation 81 | 82 | N: Nils Durner 83 | E: ndurner@googlemail.com 84 | D: security 
bug report, darcsver fix, Windows build docs, ftpd docs, bzr patch 85 | 86 | N: Kevin Reid 87 | E: kpreid@switchb.org 88 | D: security bug report, W3 standards, new improved WUI style 89 | 90 | N: Alberto Berti 91 | E: alberto@metapensiero.it 92 | W: http://www.metapensiero.it 93 | S: via Filatoi, 1 38068 Rovereto (TN), Italy 94 | D: improvements to the CLI: exclude patterns, usage text 95 | 96 | N: DarKnesS_WOlF 97 | D: patch Makefile to build .debs for Ubuntu Intrepid 98 | 99 | N: Kevan Carstensen 100 | E: kevan@isnotajoke.com 101 | D: Tahoe-LAFS Hacker; MDMF, security, other improvements, code review, docs 102 | 103 | N: Marc Tooley 104 | W: http://rune.ca 105 | P: 0xD5A7EE69911DF5CF 106 | D: port to NetBSD, help debugging Crypto++ bug 107 | 108 | N: Sam Mason 109 | D: edited docs/running.rst 110 | 111 | N: Jacob Appelbaum 112 | E: jacob@appelbaum.com 113 | W: http://www.appelbaum.net/ 114 | P: 12E4 04FF D3C9 31F9 3405 2D06 B884 1A91 9D0F ACE4 115 | D: Debian packaging including init scripts 116 | 117 | N: Jeremy Visser 118 | D: Ubuntu packaging, usability testing 119 | 120 | N: Jeremy Fitzhardinge 121 | D: better support for HTTP range queries 122 | 123 | N: Frédéric Marti 124 | E: freestorm77@gmail.com 125 | P: 0xD703AE08, F1 82 35 BB FF D8 96 0B 68 E2 91 2F C4 B8 6A 42 D7 03 AE 08 126 | S: Lausanne - Switzerland 127 | D: fix layout issue and server version numbers in WUI 128 | 129 | N: Jacob Lyles 130 | E: jacob.lyles@gmail.com 131 | D: fixed bug in WUI with Python 2.5 and a system clock set far in the past 132 | 133 | N: Ravi Pinjala 134 | E: ravi@p-static.net 135 | D: converted docs from .txt to .rst 136 | 137 | N: Josh Wilcox 138 | D: docs, return None from get_stats() when there aren't enough samples 139 | 140 | N: Vince_II 141 | D: fix incorrect name of other doc file in docs/configuration.rst 142 | 143 | N: Markus Reichelt 144 | E: mr@mareichelt.com 145 | W: http://mareichelt.com/ 146 | P: DCB3 281F 38B0 711A 41C0 DC20 EE8D 363D 1687 9738 147 | D: 
packaging for Slackware on SlackBuilds.org, bug reports 148 | 149 | N: Peter Le Bek 150 | E: peter@hyperplex.org 151 | P: 0x9BAC3E97, 79CA 34B3 7272 A3CF 82AC 5655 F55A 5B63 9BAC 3E97 152 | D: mtime in ftpd 153 | 154 | N: Andrew Miller 155 | E: amiller@dappervision.com 156 | W: http://soc1024.com 157 | P: 0xE3787A7250538F3F DEB3 132A 7FBA 37A5 03AC A462 E378 7A72 5053 8F3F 158 | D: bugfixes, patches 159 | 160 | N: Frederick B 161 | E: freddy@flashpad ? 162 | D: fix wrong index in tahoe cp --verbose 163 | 164 | N: Patrick McDonald 165 | E: marlowe@antagonism.org 166 | D: documentation 167 | 168 | N: Mark Berger 169 | E: mark.berger.j@gmail.com 170 | D: servers of happiness 171 | 172 | N: Leif Ryge 173 | E: leif@synthesize.us 174 | D: bugfixes, documentation, web UI 175 | 176 | N: Tony Arcieri 177 | E: tony.arcieri@gmail.com 178 | D: web UI improvements 179 | 180 | N: A. Montero 181 | E: amontero@tinet.org 182 | D: documentation 183 | 184 | N: Ramakrishnan Muthukrishnan 185 | E: ram@rkrishnan.org 186 | D: Mac OS X packaging, Debian package maintainer 187 | 188 | N: Loose Cannon 189 | E: lcstyle@gmail.com 190 | D: fix the Download! button on the Welcome page 191 | 192 | N: Jean-Paul Calderone 193 | E: exarkun@twistedmatrix.com 194 | D: support SFTP public key authentication. 195 | 196 | N: David Stainton 197 | E: dstainton415@gmail.com 198 | D: various bug-fixes and features 199 | 200 | N: meejah 201 | E: meejah@meejah.ca 202 | P: 0xC2602803128069A7, 9D5A 2BD5 688E CB88 9DEB CD3F C260 2803 1280 69A7 203 | D: various bug-fixes and features 204 | -------------------------------------------------------------------------------- /README.rst: -------------------------------------------------------------------------------- 1 | Magic Folder 2 | ============ 3 | 4 | Magic Folder for Tahoe-LAFS is a Free and Open file synchronization system. 5 | It detects local changes to files and uploads those changes to a Tahoe-LAFS grid. 
It monitors a Tahoe-LAFS grid and downloads changes to the local filesystem.

|readthedocs| |gha_linux| |gha_macos| |gha_windows| |codecov|

Installing
==========

for integrators
^^^^^^^^^^^^^^^

When packaging magic-folder, please install using our pinned requirements,
which are tested in CI. This should be done in a virtualenv, or other
isolated python environment, so as to not interfere with system or user
python packages::

    git clone https://github.com/LeastAuthority/magic-folder
    cd magic-folder
    pip install --require-hashes -r requirements/base.txt
    pip install --no-deps .


from source
^^^^^^^^^^^
To install from source (either so you can hack on it, or just to run
pre-release code), you should create a virtualenv and install into that::

    git clone https://github.com/LeastAuthority/magic-folder.git
    cd magic-folder
    virtualenv venv
    venv/bin/pip install --upgrade setuptools
    venv/bin/pip install --editable .
    venv/bin/magic-folder --version


Usage
=====

Magic-Folder is configured via the ``magic-folder`` command-line tool.

Magic-Folder configuration is kept in a directory.
The default place for this directory is platform-dependent; on Linux it will be in ``~/.config/magic-folder``.
Inside this directory is a database for global configuration and sub-directories to track state and temporary space for each actual magic-folder including a configuration database.
All databases are SQLite.

A running Magic-Folder needs to have access to a Tahoe-LAFS client that it may use to perform operations in the Tahoe-LAFS Grid.
This is referenced by the "node directory" of the Tahoe-LAFS client although actual operations are performed via the Tahoe-LAFS WebUI.

There are two ways to create a new Magic Folder instance (that is, the configuration required).
54 | Create a fresh one with ``magic-folder init`` or migrate from a Tahoe-LAFS 1.14.0 or earlier instance with ``magic-folder migrate``. 55 | 56 | Once a Magic-Folder is configured, functionality is provided by running a long-lived magic-folder daemon. 57 | This process is run using the ``magic-folder`` command line tool:: 58 | 59 | magic-folder --config run 60 | 61 | As long as this process is running, whatever magic folders are configured will be functional. 62 | The process must be restarted to read configuration changes. 63 | All other interactions are via the HTTP API which listens on a local endpoint according to the configuration. 64 | Other ``magic-folder`` subcommands are typically just thin CLI wrappers around a particular HTTP endpoint. 65 | 66 | 67 | Testing 68 | ======= 69 | 70 | To run the unit test suite: 71 | 72 | * ``tox`` 73 | 74 | You can pass arguments to ``trial`` with an environment variable. For 75 | example, you can run the test suite on multiple cores to speed it up: 76 | 77 | * ``MAGIC_FOLDER_TRIAL_ARGS="-j4" tox`` 78 | 79 | 80 | Documentation 81 | ============= 82 | 83 | Documentation is written as reStructuredText documents and processed 84 | using Sphinx; you will need ``sphinx`` and ``sphinx_rtd_theme``. To 85 | generate HTML version of Magic Folder documents, do: 86 | 87 | * ``cd docs; make html`` 88 | 89 | Resulting HTML files will be under ``docs/_build/html/``. 90 | 91 | 92 | License 93 | ======= 94 | 95 | Copyright 2006-2018 The Tahoe-LAFS Software Foundation 96 | Copyright 2020-2021 The Magic-Folder Developers 97 | 98 | You may use this package under the GNU General Public License, version 2 or, 99 | at your option, any later version. You may use this package under the 100 | Transitive Grace Period Public Licence, version 1.0, or at your option, any 101 | later version. (You may choose to use this package under the terms of either 102 | licence, at your option.) 
See the file `COPYING.GPL`_ for the terms of the 103 | GNU General Public License, version 2. See the file `COPYING.TGPPL`_ for 104 | the terms of the Transitive Grace Period Public Licence, version 1.0. 105 | 106 | See `TGPPL.PDF`_ for why the TGPPL exists, graphically illustrated on three 107 | slides. 108 | 109 | .. _OSPackages: https://tahoe-lafs.org/trac/tahoe-lafs/wiki/OSPackages 110 | .. _Mac: docs/OS-X.rst 111 | .. _pip: https://pip.pypa.io/en/stable/installing/ 112 | .. _COPYING.GPL: https://github.com/tahoe-lafs/tahoe-lafs/blob/master/COPYING.GPL 113 | .. _COPYING.TGPPL: https://github.com/tahoe-lafs/tahoe-lafs/blob/master/COPYING.TGPPL.rst 114 | .. _TGPPL.PDF: https://tahoe-lafs.org/~zooko/tgppl.pdf 115 | 116 | ---- 117 | 118 | .. |readthedocs| image:: http://readthedocs.org/projects/magic-folder/badge/?version=latest 119 | :alt: documentation status 120 | :target: http://magic-folder.readthedocs.io/en/latest/?badge=latest 121 | 122 | .. |gha_linux| image:: https://github.com/leastauthority/magic-folder/actions/workflows/linux.yml/badge.svg 123 | :target: https://github.com/LeastAuthority/magic-folder/actions/workflows/linux.yml 124 | 125 | .. |gha_macos| image:: https://github.com/leastauthority/magic-folder/actions/workflows/macos.yaml/badge.svg 126 | :target: https://github.com/LeastAuthority/magic-folder/actions/workflows/macos.yaml 127 | 128 | .. |gha_windows| image:: https://github.com/leastauthority/magic-folder/actions/workflows/windows.yml/badge.svg 129 | :target: https://github.com/LeastAuthority/magic-folder/actions/workflows/windows.yml 130 | 131 | .. 
|codecov| image:: https://codecov.io/github/leastauthority/magic-folder/coverage.svg?branch=main
    :alt: test coverage percentage
    :target: https://codecov.io/github/leastauthority/magic-folder?branch=main
--------------------------------------------------------------------------------
/src/magic_folder/test/matchers.py:
--------------------------------------------------------------------------------
"""
Testtools-style matchers useful to the Tahoe-LAFS test suite.
"""

import base64
from uuid import (
    UUID,
)
from nacl.exceptions import (
    BadSignatureError,
)

import attr

from testtools.matchers import (
    Mismatch,
    AfterPreprocessing,
    MatchesStructure,
    MatchesAll,
    MatchesPredicate,
    MatchesException,
    ContainsDict,
    Always,
    Equals,
)
from testtools.twistedsupport import (
    succeeded,
)

from treq import (
    content,
)

@attr.s
class MatchesAuthorSignature(object):
    """
    Confirm signatures on a RemoteSnapshot
    """
    snapshot = attr.ib()  # LocalSnapshot
    remote_snapshot = attr.ib()

    def match(self, other):
        # "other" is the RemoteSnapshot's signature.  A ``None`` return (the
        # fall-through on successful verification) means "matched" under the
        # testtools matcher protocol.
        public_key = self.snapshot.author.verify_key
        alleged_sig = base64.b64decode(self.remote_snapshot.signature)
        # Reconstruct the exact byte string that was signed: the content
        # capability and the name, each newline-terminated.
        signed_data = (
            u"{content_capability}\n"
            u"{name}\n"
        ).format(
            content_capability=self.remote_snapshot.content_cap,
            name=self.remote_snapshot.metadata['name'],
        ).encode("utf8")

        try:
            public_key.verify(signed_data, alleged_sig)
        except BadSignatureError:
            return Mismatch("The signature did not verify.")

class MatchesSameElements(object):
    """
    Match if the two-tuple value given contains two elements that are equal to
    each other.
    """
    def match(self, value):
        left, right = value
        return Equals(left).match(right)


def matches_response(code_matcher=Always(), headers_matcher=Always(), body_matcher=Always()):
    """
    Match a Treq response object with certain code and body.

    :param Matcher code_matcher: A matcher to apply to the response code.

    :param Matcher headers_matcher: A matcher to apply to the response headers
        (a ``twisted.web.http_headers.Headers`` instance).

    :param Matcher body_matcher: A matcher to apply to the response body.
        Pass ``None`` to skip reading the body entirely.

    :return: A matcher.
    """
    matchers = [
        MatchesStructure(
            code=code_matcher,
            headers=headers_matcher,
        ),
    ]
    # see comment in test_web.MagicFolderTests.test_method_not_allowed
    # which is one user that wants nothing to try and read the content
    # in some cases..
    if body_matcher is not None:
        matchers.append(
            AfterPreprocessing(
                lambda response: content(response),
                succeeded(body_matcher),
            )
        )
    return MatchesAll(*matchers)

def contained_by(container):
    """
    Match an element in the given container.

    :param container: Anything that supports being the right-hand operand to
        ``in``.

    :return: A matcher.
    """
    return MatchesPredicate(
        lambda element: element in container,
        "%r not found",
    )


def header_contains(header_dict):
    """
    Match a ``twisted.web.http_headers.HTTPHeaders`` containing at least the
    given items.

    :param dict[bytes, Matcher] header_dict: A dictionary mapping header
        names (canonical case) to matchers for the associated values (a list
        of byte-strings).

    :return: A matcher.
    """
    return AfterPreprocessing(
        lambda headers: dict(headers.getAllRawHeaders()),
        ContainsDict(header_dict),
    )


def provides(*interfaces):
    """
    Match an object that provides all of the given interfaces.

    :param InterfaceClass *interfaces: The required interfaces.

    :return: A matcher.
    """
    # The default ``iface=iface`` binds each interface at definition time,
    # avoiding the late-binding-closure pitfall in this generator expression.
    return MatchesAll(*list(
        MatchesPredicate(
            lambda obj, iface=iface: iface.providedBy(obj),
            "%s does not provide {!r}".format(iface),
        )
        for iface
        in interfaces
    ))


def is_hex_uuid():
    """
    Match text strings giving a hex representation of a UUID.

    :return: A matcher.
    """
    def _is_hex_uuid(value):
        if not isinstance(value, str):
            return False
        try:
            UUID(hex=value)
        except ValueError:
            return False
        return True
    return MatchesPredicate(
        _is_hex_uuid,
        "%r is not a UUID hex representation.",
    )


def matches_flushed_traceback(exception, value_re=None):
    """
    Matches an eliot traceback message with the given exception.

    This is expected to be used with :py:`testtools.matchers.MatchesListwise`,
    on the result of :py:`eliot.MemoryLogger.flush_tracebacks`.

    See :py:`testtools.matchers.MatchesException`.
    """
    def as_exc_info_tuple(message):
        return message["exception"], message["reason"], message["traceback"]

    return AfterPreprocessing(
        as_exc_info_tuple, MatchesException(exception, value_re=value_re)
    )

def matches_failure(exception, value_re=None):
    """
    Matches a Twisted :py:`Failure` with the given exception.

    See :py:`testtools.matchers.MatchesException`.
192 | """ 193 | def as_exc_info_tuple(failure): 194 | return failure.type, failure.value, failure.tb 195 | 196 | return AfterPreprocessing( 197 | as_exc_info_tuple, MatchesException(exception, value_re=value_re) 198 | ) 199 | -------------------------------------------------------------------------------- /src/magic_folder/common.py: -------------------------------------------------------------------------------- 1 | # Copyright 2020 Least Authority TFA GmbH 2 | # See COPYING for details. 3 | 4 | """ 5 | Common functions and types used by other modules. 6 | """ 7 | 8 | from contextlib import contextmanager 9 | import unicodedata 10 | 11 | import attr 12 | 13 | from twisted.web import http 14 | 15 | 16 | class BadResponseCode(Exception): 17 | """ 18 | An HTTP request received a response code which does not allow an operation 19 | to progress further. 20 | """ 21 | def __str__(self): 22 | return "Request for {!r} received unexpected response code {!r}:\n{}".format( 23 | self.args[0].to_text(), 24 | self.args[1], 25 | self.args[2], 26 | ) 27 | 28 | class BadMetadataResponse(Exception): 29 | """ 30 | An attempt to load some metadata about something received a response that 31 | cannot be interpreted as that metadata. 32 | """ 33 | 34 | 35 | def does_not_have_keys(*keys): 36 | def validator(inst, attr, value): 37 | invalid_keys = [key for key in keys if key in value] 38 | if invalid_keys: 39 | raise ValueError( 40 | "'{name}' must not have {invalid_keys} keys.".format( 41 | name=attr.name, 42 | invalid_keys=invalid_keys, 43 | ) 44 | ) 45 | return validator 46 | 47 | 48 | # this cannot be frozen=True because of Twisted 49 | @attr.s(auto_exc=True) 50 | class APIError(Exception): 51 | """ 52 | An error to be reported from the API. 53 | 54 | :ivar reason unicode: The message to be returned as the ``reason`` key of 55 | the error response. 56 | :ivar code int: The HTTP status code to use for this error. 
57 | """ 58 | 59 | code = attr.ib( 60 | validator=attr.validators.optional(attr.validators.instance_of(int)), 61 | ) 62 | reason = attr.ib(validator=attr.validators.instance_of(str)) 63 | extra_fields = attr.ib( 64 | default=None, 65 | validator=attr.validators.optional( 66 | attr.validators.and_( 67 | attr.validators.instance_of(dict), 68 | does_not_have_keys("reason"), 69 | ), 70 | ) 71 | ) 72 | 73 | @classmethod 74 | def from_exception(cls, code, exception, prefix=None): 75 | """ 76 | Return an exception with the given code, and reason from the given exception. 77 | 78 | :param code int: The HTTP status code to use for this error. 79 | :param exception Exception: The exception to get the error message from. 80 | :param prefix unicode: A prefix to add to the error message. 81 | 82 | :returns APIError: An error with the given error code and a message that is 83 | ``"{prefix}: {exception message}"``. 84 | """ 85 | if prefix is not None: 86 | reason = u"{}: {}".format(prefix, exception) 87 | else: 88 | reason = u"{}".format(exception) 89 | return cls(code=code, reason=reason) 90 | 91 | def to_json(self): 92 | """ 93 | :return: a representation of this error suitable for JSON encoding. 94 | """ 95 | result = {"reason": self.reason} 96 | if self.extra_fields: 97 | result.update(self.extra_fields) 98 | return result 99 | 100 | def __str__(self): 101 | return self.reason 102 | 103 | 104 | @attr.s(auto_exc=True) 105 | class NoSuchMagicFolder(APIError): 106 | """ 107 | There is not a magic folder of the given name. 
108 | """ 109 | 110 | name = attr.ib(validator=attr.validators.instance_of(str)) 111 | code = attr.ib( 112 | init=False, 113 | default=http.NOT_FOUND, 114 | ) 115 | reason = attr.ib( 116 | init=False, 117 | default=attr.Factory( 118 | lambda self: u"No such magic-folder '{}'".format(self.name), 119 | takes_self=True, 120 | ), 121 | ) 122 | extra_fields = attr.ib(init=False, default=None) 123 | 124 | 125 | @contextmanager 126 | def atomic_makedirs(path): 127 | """ 128 | Call `path.makedirs()` but if an error occurs before this 129 | context-manager exits we will delete the directory. 130 | 131 | :param FilePath path: the directory/ies to create 132 | """ 133 | path.makedirs() 134 | try: 135 | yield path 136 | except Exception: 137 | # on error, clean up our directory 138 | path.remove() 139 | # ...and pass on the error 140 | raise 141 | 142 | 143 | @attr.s(auto_exc=True) 144 | class InvalidMagicFolderName(APIError): 145 | """ 146 | The given magic folder name contains an invalid character. 147 | 148 | See :py:`valid_magic_folder_name` for details. 149 | """ 150 | 151 | message = ( 152 | u"Magic folder names cannot contain '/', '\\', " 153 | u"control characters or unassigned characters." 154 | ) 155 | 156 | name = attr.ib(validator=attr.validators.instance_of(str)) 157 | code = attr.ib( 158 | default=http.BAD_REQUEST, 159 | validator=attr.validators.optional(attr.validators.instance_of(int)), 160 | ) 161 | reason = attr.ib(init=False, default=message) 162 | extra_fields = attr.ib(init=False, default=None) 163 | 164 | 165 | def valid_magic_folder_name(name): 166 | """ 167 | Check if the magic folder name is valid. 168 | 169 | We disallow: 170 | 171 | - ``\0``, ``/``, and ``\\`` as they can cause issues with the HTTP API 172 | - control characters as they are not meant for display 173 | - non-characters (reserved and unassigned) 174 | - isolated surrogate characters as these are likely from invalid unicode 175 | (see PEP 383). 
176 | 177 | :param unicode name: the name of the magic-folder to verify 178 | 179 | :raises ValueError: if this is an invalid magic folder name 180 | """ 181 | if ( 182 | u"\0" in name 183 | or u"/" in name 184 | or u"\\" in name 185 | or any((unicodedata.category(c) in ("Cc", "Cn", "Cs") for c in name)) 186 | ): 187 | raise InvalidMagicFolderName(name) 188 | --------------------------------------------------------------------------------