├── docs ├── _static │ └── .gitkeep ├── changelog.rst ├── license.rst ├── tutorials │ ├── google.rst │ ├── baikal.rst │ ├── nextcloud.rst │ ├── fastmail.rst │ ├── xandikos.rst │ ├── owncloud.rst │ ├── icloud.rst │ ├── radicale.rst │ ├── systemd-timer.rst │ ├── davmail.rst │ ├── index.rst │ ├── todoman.rst │ └── claws-mail.rst ├── contact.rst ├── donations.rst ├── problems.rst ├── index.rst ├── ssl-tutorial.rst ├── when.rst ├── partial-sync.rst ├── keyring.rst ├── conf.py ├── packaging.rst ├── vdir.rst ├── installation.rst ├── contributing.rst ├── Makefile └── make.bat ├── .envrc ├── tests ├── system │ ├── cli │ │ ├── __init__.py │ │ ├── conftest.py │ │ ├── test_utils.py │ │ ├── test_fetchparams.py │ │ └── test_repair.py │ ├── conftest.py │ └── utils │ │ └── test_main.py ├── storage │ ├── servers │ │ ├── __init__.py │ │ ├── davical │ │ │ ├── install.sh │ │ │ └── __init__.py │ │ ├── skip │ │ │ └── __init__.py │ │ ├── xandikos │ │ │ └── __init__.py │ │ ├── radicale │ │ │ └── __init__.py │ │ ├── baikal │ │ │ └── __init__.py │ │ ├── icloud │ │ │ └── __init__.py │ │ └── fastmail │ │ │ └── __init__.py │ ├── dav │ │ ├── test_carddav.py │ │ ├── test_main.py │ │ ├── __init__.py │ │ └── test_caldav.py │ ├── test_memory.py │ ├── test_singlefile.py │ ├── test_http_with_singlefile.py │ ├── conftest.py │ ├── test_filesystem.py │ └── test_http.py ├── unit │ ├── test_exceptions.py │ ├── cli │ │ ├── test_config.py │ │ ├── test_fetchparams.py │ │ └── test_discover.py │ ├── sync │ │ └── test_status.py │ ├── test_repair.py │ ├── test_retry.py │ └── test_metasync.py ├── conftest.py └── __init__.py ├── CODE_OF_CONDUCT.rst ├── .codecov.yml ├── vdirsyncer ├── __main__.py ├── storage │ ├── __init__.py │ ├── google_helpers.py │ ├── memory.py │ ├── http.py │ └── singlefile.py ├── __init__.py ├── sync │ └── exceptions.py ├── exceptions.py ├── metasync.py ├── repair.py ├── cli │ ├── fetchparams.py │ └── tasks.py └── utils.py ├── CONTRIBUTING.rst ├── contrib ├── vdirsyncer.timer ├── 
vdirsyncer.service └── conflict_resolution │ └── resolve_interactively.py ├── .gitignore ├── .readthedocs.yaml ├── MANIFEST.in ├── .coveragerc ├── ISSUE_TEMPLATE.md ├── AUTHORS.rst ├── publish-release.yaml ├── .builds ├── tests-minimal.yml ├── tests-pypi.yml └── archlinux-py313.yml ├── .pre-commit-config.yaml ├── scripts ├── _build_deb_in_container.bash └── release-deb.sh ├── LICENSE ├── Makefile ├── config.example ├── README.rst └── pyproject.toml /docs/_static/.gitkeep: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /.envrc: -------------------------------------------------------------------------------- 1 | layout python3 2 | -------------------------------------------------------------------------------- /tests/system/cli/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tests/storage/servers/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /docs/changelog.rst: -------------------------------------------------------------------------------- 1 | .. include:: ../CHANGELOG.rst 2 | -------------------------------------------------------------------------------- /CODE_OF_CONDUCT.rst: -------------------------------------------------------------------------------- 1 | See `the pimutils CoC `_. 
2 | -------------------------------------------------------------------------------- /tests/storage/servers/davical/install.sh: -------------------------------------------------------------------------------- 1 | pip install pytest-rerunfailures 2 | -------------------------------------------------------------------------------- /.codecov.yml: -------------------------------------------------------------------------------- 1 | comment: false 2 | coverage: 3 | status: 4 | patch: false 5 | -------------------------------------------------------------------------------- /vdirsyncer/__main__.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | if __name__ == "__main__": 4 | from vdirsyncer.cli import app 5 | 6 | app() 7 | -------------------------------------------------------------------------------- /CONTRIBUTING.rst: -------------------------------------------------------------------------------- 1 | Please see `the documentation 2 | `_ for how to 3 | contribute to this project. 4 | -------------------------------------------------------------------------------- /docs/license.rst: -------------------------------------------------------------------------------- 1 | =================== 2 | Credits and License 3 | =================== 4 | 5 | .. include:: ../AUTHORS.rst 6 | 7 | License 8 | ======= 9 | 10 | .. 
include:: ../LICENSE 11 | -------------------------------------------------------------------------------- /contrib/vdirsyncer.timer: -------------------------------------------------------------------------------- 1 | [Unit] 2 | Description=Synchronize vdirs 3 | 4 | [Timer] 5 | OnBootSec=5m 6 | OnUnitActiveSec=15m 7 | AccuracySec=5m 8 | 9 | [Install] 10 | WantedBy=timers.target 11 | -------------------------------------------------------------------------------- /tests/storage/servers/skip/__init__.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import pytest 4 | 5 | 6 | class ServerMixin: 7 | @pytest.fixture 8 | def get_storage_args(self): 9 | pytest.skip("DAV tests disabled.") 10 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | *.pyc 2 | __pycache__ 3 | htmlcov 4 | .coverage 5 | build 6 | env 7 | *.egg-info 8 | .cache 9 | .pytest_cache 10 | .eggs 11 | .egg 12 | .xprocess 13 | dist 14 | docs/_build/ 15 | vdirsyncer/version.py 16 | .hypothesis 17 | coverage.xml 18 | -------------------------------------------------------------------------------- /contrib/vdirsyncer.service: -------------------------------------------------------------------------------- 1 | [Unit] 2 | Description=Synchronize calendars and contacts 3 | Documentation=https://vdirsyncer.readthedocs.org/ 4 | StartLimitBurst=2 5 | 6 | [Service] 7 | ExecStart=/usr/bin/vdirsyncer sync 8 | RuntimeMaxSec=3m 9 | Restart=on-failure 10 | -------------------------------------------------------------------------------- /.readthedocs.yaml: -------------------------------------------------------------------------------- 1 | version: 2 2 | 3 | sphinx: 4 | configuration: docs/conf.py 5 | 6 | build: 7 | os: "ubuntu-22.04" 8 | tools: 9 | python: "3.9" 10 | 11 | python: 12 | install: 13 | - method: pip 14 
| path: . 15 | extra_requirements: 16 | - docs 17 | -------------------------------------------------------------------------------- /docs/tutorials/google.rst: -------------------------------------------------------------------------------- 1 | ====== 2 | Google 3 | ====== 4 | 5 | Using vdirsyncer with Google Calendar is possible as of 0.10, but it is not 6 | tested frequently. You can use :storage:`google_contacts` and 7 | :storage:`google_calendar`. 8 | 9 | For more information see :gh:`202` and :gh:`8`. 10 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | # setuptools-scm includes everything tracked by git 2 | prune docker 3 | prune scripts 4 | prune tests/storage/servers 5 | recursive-include tests/storage/servers/radicale * 6 | recursive-include tests/storage/servers/skip * 7 | 8 | prune docs/_build 9 | global-exclude *.py[cdo] __pycache__ *.so *.pyd 10 | -------------------------------------------------------------------------------- /vdirsyncer/storage/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | There are storage classes which control the access to one vdir-collection and 3 | offer basic CRUD-ish methods for modifying those collections. The exact 4 | interface is described in `vdirsyncer.storage.base`, the `Storage` class should 5 | be a superclass of all storage classes. 6 | """ 7 | -------------------------------------------------------------------------------- /docs/tutorials/baikal.rst: -------------------------------------------------------------------------------- 1 | ====== 2 | Baikal 3 | ====== 4 | 5 | Vdirsyncer is continuously tested against the latest version of Baikal_. 6 | 7 | - Baikal up to ``0.2.7`` also uses an old version of SabreDAV, with the same 8 | issue as ownCloud, see :gh:`160`. This issue is fixed in later versions. 9 | 10 | .. 
_Baikal: http://sabre.io/baikal/ 11 | -------------------------------------------------------------------------------- /tests/storage/dav/test_carddav.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import pytest 4 | 5 | from vdirsyncer.storage.dav import CardDAVStorage 6 | 7 | from . import DAVStorageTests 8 | 9 | 10 | class TestCardDAVStorage(DAVStorageTests): 11 | storage_class = CardDAVStorage 12 | 13 | @pytest.fixture(params=["VCARD"]) 14 | def item_type(self, request): 15 | return request.param 16 | -------------------------------------------------------------------------------- /tests/unit/test_exceptions.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | from vdirsyncer import exceptions 4 | 5 | 6 | def test_user_error_problems(): 7 | e = exceptions.UserError( 8 | "A few problems occurred", 9 | problems=["Problem one", "Problem two", "Problem three"], 10 | ) 11 | 12 | assert "one" in str(e) 13 | assert "two" in str(e) 14 | assert "three" in str(e) 15 | assert "problems occurred" in str(e) 16 | -------------------------------------------------------------------------------- /tests/storage/test_memory.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import pytest 4 | 5 | from vdirsyncer.storage.memory import MemoryStorage 6 | 7 | from . 
import StorageTests 8 | 9 | 10 | class TestMemoryStorage(StorageTests): 11 | storage_class = MemoryStorage 12 | supports_collections = False 13 | 14 | @pytest.fixture 15 | def get_storage_args(self): 16 | async def inner(**args): 17 | return args 18 | 19 | return inner 20 | -------------------------------------------------------------------------------- /docs/contact.rst: -------------------------------------------------------------------------------- 1 | =================== 2 | Support and Contact 3 | =================== 4 | 5 | * The ``#pimutils`` `IRC channel on Libera.Chat `_ 6 | might be active, depending on your timezone. Use it for support and general 7 | (including off-topic) discussion. 8 | 9 | * Open `a GitHub issue `_ for 10 | concrete bug reports and feature requests. 11 | 12 | * For security issues, contact ``contact@pimutils.org``. 13 | -------------------------------------------------------------------------------- /.coveragerc: -------------------------------------------------------------------------------- 1 | [run] 2 | branch = True 3 | 4 | [paths] 5 | source = vdirsyncer/ 6 | 7 | [report] 8 | exclude_lines = 9 | # Have to re-enable the standard pragma 10 | pragma: no cover 11 | 12 | # Don't complain about missing debug-only code: 13 | def __repr__ 14 | if self\.debug 15 | 16 | # Don't complain if tests don't hit defensive assertion code: 17 | raise AssertionError 18 | raise NotImplementedError 19 | 20 | # Don't complain if non-runnable code isn't run: 21 | if 0: 22 | if __name__ == .__main__.: 23 | -------------------------------------------------------------------------------- /docs/donations.rst: -------------------------------------------------------------------------------- 1 | ========= 2 | Donations 3 | ========= 4 | 5 | vdirsyncer is and will always be free and open source software. We appreciate 6 | sponsors willing to fund our continued work on it. 7 | 8 | If you found my work useful, please consider donating. Thank you! 
9 | 10 | - Bitcoin: ``13p42uWDL62bNRH3KWA6cSpSgvnHy1fs2E``. 11 | - Sponsor via one-time tips or recurring donations `via Ko-fi`_. 12 | - Sponsor via recurring donations `via liberapay`_. 13 | 14 | .. _via Ko-fi: https://ko-fi.com/whynothugo 15 | .. _via liberapay: https://liberapay.com/WhyNotHugo/ 16 | -------------------------------------------------------------------------------- /ISSUE_TEMPLATE.md: -------------------------------------------------------------------------------- 1 | Before you submit bug reports: https://vdirsyncer.pimutils.org/en/stable/contributing.html 2 | 3 | Things to include in your bugreport: 4 | 5 | * Your vdirsyncer version 6 | * If applicable, which server software (and which version) you're using 7 | * Your Python version 8 | * Your operating system 9 | * Your config file 10 | * Use `vdirsyncer -vdebug` for debug output. The output is sensitive, but 11 | please attach at least the last few lines before the error (if applicable), 12 | censored as necessary. This is almost always the most useful information. 13 | -------------------------------------------------------------------------------- /docs/tutorials/nextcloud.rst: -------------------------------------------------------------------------------- 1 | ========= 2 | nextCloud 3 | ========= 4 | 5 | Vdirsyncer is continuously tested against the latest version of nextCloud_:: 6 | 7 | [storage cal] 8 | type = "caldav" 9 | url = "https://nextcloud.example.com/" 10 | username = "..." 11 | password = "..." 12 | 13 | [storage card] 14 | type = "carddav" 15 | url = "https://nextcloud.example.com/" 16 | 17 | - WebCAL-subscriptions can't be discovered by vdirsyncer. See `this relevant 18 | issue `_. 19 | 20 | .. 
_nextCloud: https://nextcloud.com/ 21 | -------------------------------------------------------------------------------- /AUTHORS.rst: -------------------------------------------------------------------------------- 1 | Contributors 2 | ============ 3 | 4 | In alphabetical order: 5 | 6 | - Ben Boeckel 7 | - Bleala 8 | - Christian Geier 9 | - Clément Mondon 10 | - Corey Hinshaw 11 | - Kai Herlemann 12 | - Hugo Osvaldo Barrera 13 | - Jason Cox 14 | - Julian Mehne 15 | - Malte Kiefer 16 | - Marek Marczykowski-Górecki 17 | - Markus Unterwaditzer 18 | - Michael Adler 19 | - rEnr3n 20 | - Thomas Weißschuh 21 | - Witcher01 22 | - samm81 23 | 24 | Special thanks goes to: 25 | 26 | * `FastMail `_ sponsors a 27 | paid account for testing their servers. 28 | * `Packagecloud `_ provide repositories for 29 | vdirsyncer's Debian packages. 30 | -------------------------------------------------------------------------------- /tests/storage/test_singlefile.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import pytest 4 | 5 | from vdirsyncer.storage.singlefile import SingleFileStorage 6 | 7 | from . 
import StorageTests 8 | 9 | 10 | class TestSingleFileStorage(StorageTests): 11 | storage_class = SingleFileStorage 12 | supports_metadata = False 13 | 14 | @pytest.fixture 15 | def get_storage_args(self, tmpdir): 16 | async def inner(collection="test"): 17 | rv = {"path": str(tmpdir.join("%s.txt")), "collection": collection} 18 | if collection is not None: 19 | rv = await self.storage_class.create_collection(**rv) 20 | return rv 21 | 22 | return inner 23 | -------------------------------------------------------------------------------- /docs/tutorials/fastmail.rst: -------------------------------------------------------------------------------- 1 | ======== 2 | FastMail 3 | ======== 4 | 5 | Vdirsyncer is continuously tested against FastMail_, thanks to them for 6 | providing a free account for this purpose. There are no known issues with it. 7 | `FastMail's support pages 8 | `_ provide 9 | the settings to use:: 10 | 11 | [storage cal] 12 | type = "caldav" 13 | url = "https://caldav.fastmail.com/" 14 | username = "..." 15 | password = "..." 16 | 17 | [storage card] 18 | type = "carddav" 19 | url = "https://carddav.fastmail.com/" 20 | username = "..." 21 | password = "..." 22 | 23 | .. _FastMail: https://www.fastmail.com/ 24 | -------------------------------------------------------------------------------- /docs/tutorials/xandikos.rst: -------------------------------------------------------------------------------- 1 | ======== 2 | Xandikos 3 | ======== 4 | 5 | Xandikos_ is a lightweight, yet complete CalDAV and CardDAV server, backed by 6 | git. Vdirsyncer is continuously tested against its latest version. 7 | 8 | After running ``./bin/xandikos --defaults -d $HOME/dav``, you should be able to 9 | point vdirsyncer against the root of Xandikos like this:: 10 | 11 | [storage cal] 12 | type = "caldav" 13 | url = "https://xandikos.example.com/" 14 | username = "..." 15 | password = "..." 
16 | 17 | [storage card] 18 | type = "carddav" 19 | url = "https://xandikos.example.com/" 20 | username = "..." 21 | password = "..." 22 | 23 | .. _Xandikos: https://github.com/jelmer/xandikos 24 | -------------------------------------------------------------------------------- /tests/system/conftest.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import ssl 4 | 5 | import pytest 6 | import trustme 7 | 8 | 9 | @pytest.fixture(scope="session") 10 | def ca(): 11 | return trustme.CA() 12 | 13 | 14 | @pytest.fixture(scope="session") 15 | def localhost_cert(ca): 16 | return ca.issue_cert("localhost") 17 | 18 | 19 | @pytest.fixture(scope="session") 20 | def httpserver_ssl_context(localhost_cert): 21 | context = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER) 22 | 23 | crt = localhost_cert.cert_chain_pems[0] 24 | key = localhost_cert.private_key_pem 25 | with crt.tempfile() as crt_file, key.tempfile() as key_file: 26 | context.load_cert_chain(crt_file, key_file) 27 | 28 | return context 29 | -------------------------------------------------------------------------------- /publish-release.yaml: -------------------------------------------------------------------------------- 1 | # Push new version to PyPI. 2 | # 3 | # Usage: hut builds submit publish-release.yaml --follow 4 | 5 | image: alpine/edge 6 | packages: 7 | - py3-build 8 | - py3-pip 9 | - py3-setuptools 10 | - py3-setuptools_scm 11 | - py3-wheel 12 | - twine 13 | sources: 14 | - https://github.com/pimutils/vdirsyncer 15 | secrets: 16 | - a36c8ba3-fba0-4338-b402-6aea0fbe771e # PyPI token. 17 | environment: 18 | CI: true 19 | tasks: 20 | - check-tag: | 21 | cd vdirsyncer 22 | git fetch --tags 23 | 24 | # Stop here unless this is a tag. 
25 | git describe --exact-match --tags || complete-build 26 | - publish: | 27 | cd vdirsyncer 28 | python -m build --no-isolation 29 | twine upload --non-interactive dist/* 30 | -------------------------------------------------------------------------------- /docs/problems.rst: -------------------------------------------------------------------------------- 1 | ============== 2 | Known Problems 3 | ============== 4 | 5 | For any unanswered questions or problems, see :doc:`contact`. 6 | 7 | .. _debian-urllib3: 8 | 9 | Requests-related ImportErrors 10 | ----------------------------- 11 | 12 | ImportError: No module named packages.urllib3.poolmanager 13 | 14 | ImportError: cannot import name iter_field_objects 15 | 16 | Debian and nowadays even other distros make modifications to the ``requests`` 17 | package that don't play well with packages assuming a normal ``requests``. This 18 | is due to stubbornness on both sides. 19 | 20 | See :gh:`82` and :gh:`140` for past discussions. You have one option to work 21 | around this, that is, to install vdirsyncer in a virtual environment, see 22 | :ref:`manual-installation`. 
23 | -------------------------------------------------------------------------------- /tests/storage/servers/xandikos/__init__.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import pytest 4 | 5 | 6 | class ServerMixin: 7 | @pytest.fixture 8 | def get_storage_args( 9 | self, 10 | request, 11 | tmpdir, 12 | slow_create_collection, 13 | xandikos_server, 14 | aio_connector, 15 | ): 16 | async def inner(collection="test"): 17 | url = "http://127.0.0.1:8000/" 18 | args = {"url": url, "connector": aio_connector} 19 | 20 | if collection is not None: 21 | args = await slow_create_collection( 22 | self.storage_class, 23 | args, 24 | collection, 25 | ) 26 | 27 | return args 28 | 29 | return inner 30 | -------------------------------------------------------------------------------- /docs/tutorials/owncloud.rst: -------------------------------------------------------------------------------- 1 | .. _owncloud_setup: 2 | 3 | ======== 4 | ownCloud 5 | ======== 6 | 7 | Vdirsyncer is continuously tested against the latest version of ownCloud_:: 8 | 9 | [storage cal] 10 | type = "caldav" 11 | url = "https://example.com/remote.php/dav/" 12 | username = ... 13 | password = ... 14 | 15 | [storage card] 16 | type = "carddav" 17 | url = "https://example.com/remote.php/dav/" 18 | username = ... 19 | password = ... 20 | 21 | - *Versions older than 7.0.0:* ownCloud uses SabreDAV, which had problems 22 | detecting collisions and race-conditions. The problems were reported and are 23 | fixed in SabreDAV's repo, and the corresponding fix is also in ownCloud since 24 | 7.0.0. See :gh:`16` for more information. 25 | 26 | .. 
_ownCloud: https://owncloud.org/ 27 | -------------------------------------------------------------------------------- /tests/unit/cli/test_config.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import os 4 | 5 | from vdirsyncer.cli.config import _resolve_conflict_via_command 6 | from vdirsyncer.vobject import Item 7 | 8 | 9 | def test_conflict_resolution_command(): 10 | def check_call(command): 11 | command, a_tmp, b_tmp = command 12 | assert command == os.path.expanduser("~/command") 13 | with open(a_tmp) as f: 14 | assert f.read() == a.raw 15 | with open(b_tmp) as f: 16 | assert f.read() == b.raw 17 | 18 | with open(b_tmp, "w") as f: 19 | f.write(a.raw) 20 | 21 | a = Item("UID:AAAAAAA") 22 | b = Item("UID:BBBBBBB") 23 | assert ( 24 | _resolve_conflict_via_command( 25 | a, b, ["~/command"], "a", "b", _check_call=check_call 26 | ).raw 27 | == a.raw 28 | ) 29 | -------------------------------------------------------------------------------- /vdirsyncer/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Vdirsyncer synchronizes calendars and contacts. 3 | """ 4 | 5 | from __future__ import annotations 6 | 7 | PROJECT_HOME = "https://github.com/pimutils/vdirsyncer" 8 | BUGTRACKER_HOME = PROJECT_HOME + "/issues" 9 | DOCS_HOME = "https://vdirsyncer.pimutils.org/en/stable" 10 | 11 | try: 12 | from .version import version as __version__ 13 | except ImportError: # pragma: no cover 14 | raise ImportError( 15 | "Failed to find (autogenerated) version.py. " 16 | "This might be because you are installing from GitHub's tarballs, " 17 | "use the PyPI ones." 
18 | ) 19 | 20 | __all__ = ["__version__"] 21 | 22 | 23 | def _check_python_version(): 24 | import sys 25 | 26 | if sys.version_info < (3, 9, 0): # noqa: UP036 27 | print("vdirsyncer requires at least Python 3.9.") 28 | sys.exit(1) 29 | 30 | 31 | _check_python_version() 32 | del _check_python_version 33 | -------------------------------------------------------------------------------- /tests/storage/servers/radicale/__init__.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import pytest 4 | 5 | 6 | class ServerMixin: 7 | @pytest.fixture 8 | def get_storage_args( 9 | self, 10 | request, 11 | tmpdir, 12 | slow_create_collection, 13 | radicale_server, 14 | aio_connector, 15 | ): 16 | async def inner(collection="test"): 17 | url = "http://127.0.0.1:8001/" 18 | args = { 19 | "url": url, 20 | "username": "radicale", 21 | "password": "radicale", 22 | "connector": aio_connector, 23 | } 24 | 25 | if collection is not None: 26 | args = await slow_create_collection( 27 | self.storage_class, 28 | args, 29 | collection, 30 | ) 31 | return args 32 | 33 | return inner 34 | -------------------------------------------------------------------------------- /tests/system/cli/conftest.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | from textwrap import dedent 4 | 5 | import pytest 6 | from click.testing import CliRunner 7 | 8 | import vdirsyncer.cli as cli 9 | 10 | 11 | class _CustomRunner: 12 | def __init__(self, tmpdir): 13 | self.tmpdir = tmpdir 14 | self.cfg = tmpdir.join("config") 15 | self.runner = CliRunner() 16 | 17 | def invoke(self, args, env=None, **kwargs): 18 | env = env or {} 19 | env.setdefault("VDIRSYNCER_CONFIG", str(self.cfg)) 20 | return self.runner.invoke(cli.app, args, env=env, **kwargs) 21 | 22 | def write_with_general(self, data): 23 | self.cfg.write( 24 | dedent( 25 | """ 26 | [general] 27 | 
status_path = "{}/status/" 28 | """ 29 | ).format(str(self.tmpdir)) 30 | ) 31 | self.cfg.write(data, mode="a") 32 | 33 | 34 | @pytest.fixture 35 | def runner(tmpdir): 36 | return _CustomRunner(tmpdir) 37 | -------------------------------------------------------------------------------- /docs/tutorials/icloud.rst: -------------------------------------------------------------------------------- 1 | .. _icloud_setup: 2 | 3 | ====== 4 | iCloud 5 | ====== 6 | 7 | Vdirsyncer is regularly tested against iCloud_. 8 | 9 | :: 10 | 11 | [storage cal] 12 | type = "caldav" 13 | url = "https://caldav.icloud.com/" 14 | username = "..." 15 | password = "..." 16 | 17 | [storage card] 18 | type = "carddav" 19 | url = "https://contacts.icloud.com/" 20 | username = "..." 21 | password = "..." 22 | 23 | Problems: 24 | 25 | - Vdirsyncer can't do two-factor auth with iCloud (there doesn't seem to be a 26 | way to do two-factor auth over the DAV APIs) You'll need to use `app-specific 27 | passwords `_ instead. 28 | - iCloud has a few special requirements when creating collections. In principle 29 | vdirsyncer can do it, but it is recommended to create them from an Apple 30 | client (or the iCloud web interface). 31 | 32 | - iCloud requires a minimum length of collection names. 33 | - Calendars created by vdirsyncer cannot be used as tasklists. 34 | 35 | .. 
_iCloud: https://www.icloud.com/ 36 | -------------------------------------------------------------------------------- /tests/system/cli/test_utils.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import pytest 4 | 5 | from vdirsyncer import exceptions 6 | from vdirsyncer.cli.utils import handle_cli_error 7 | from vdirsyncer.cli.utils import storage_instance_from_config 8 | from vdirsyncer.cli.utils import storage_names 9 | 10 | 11 | def test_handle_cli_error(capsys): 12 | try: 13 | raise exceptions.InvalidResponse("ayy lmao") 14 | except BaseException: 15 | handle_cli_error() 16 | 17 | _out, err = capsys.readouterr() 18 | assert "returned something vdirsyncer doesn't understand" in err 19 | assert "ayy lmao" in err 20 | 21 | 22 | @pytest.mark.asyncio 23 | async def test_storage_instance_from_config(monkeypatch, aio_connector): 24 | class Dummy: 25 | def __init__(self, **kw): 26 | assert kw == {"foo": "bar", "baz": 1} 27 | 28 | monkeypatch.setitem(storage_names._storages, "lol", Dummy) 29 | config = {"type": "lol", "foo": "bar", "baz": 1} 30 | storage = await storage_instance_from_config(config, connector=aio_connector) 31 | assert isinstance(storage, Dummy) 32 | -------------------------------------------------------------------------------- /tests/storage/servers/baikal/__init__.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import pytest 4 | 5 | 6 | class ServerMixin: 7 | @pytest.fixture 8 | def get_storage_args( 9 | self, 10 | request, 11 | tmpdir, 12 | slow_create_collection, 13 | baikal_server, 14 | aio_connector, 15 | ): 16 | async def inner(collection="test"): 17 | base_url = "http://127.0.0.1:8002/" 18 | args = { 19 | "url": base_url, 20 | "username": "baikal", 21 | "password": "baikal", 22 | "connector": aio_connector, 23 | } 24 | 25 | if self.storage_class.fileext == ".vcf": 26 | 
args["url"] = base_url + "card.php/" 27 | else: 28 | args["url"] = base_url + "cal.php/" 29 | 30 | if collection is not None: 31 | args = await slow_create_collection( 32 | self.storage_class, 33 | args, 34 | collection, 35 | ) 36 | return args 37 | 38 | return inner 39 | -------------------------------------------------------------------------------- /.builds/tests-minimal.yml: -------------------------------------------------------------------------------- 1 | # Run tests using oldest available dependency versions. 2 | # 3 | # TODO: It might make more sense to test with an older Ubuntu or Fedora version 4 | # here, and consider that our "oldest suppported environment". 5 | 6 | image: alpine/3.19 # python 3.11 7 | packages: 8 | - docker 9 | - docker-cli 10 | - docker-compose 11 | - py3-pip 12 | - python3-dev 13 | sources: 14 | - https://github.com/pimutils/vdirsyncer 15 | environment: 16 | BUILD: test 17 | CI: true 18 | CODECOV_TOKEN: b834a3c5-28fa-4808-9bdb-182210069c79 19 | DAV_SERVER: radicale xandikos 20 | REQUIREMENTS: minimal 21 | tasks: 22 | - venv: | 23 | python3 -m venv $HOME/venv 24 | echo "export PATH=$HOME/venv/bin:$PATH" >> $HOME/.buildenv 25 | - docker: | 26 | sudo addgroup $(whoami) docker 27 | sudo service docker start 28 | - setup: | 29 | cd vdirsyncer 30 | # Hack, no idea why it's needed 31 | sudo ln -s /usr/include/python3.11/cpython/longintrepr.h /usr/include/python3.11/longintrepr.h 32 | make -e install-dev 33 | - test: | 34 | cd vdirsyncer 35 | make -e ci-test 36 | make -e ci-test-storage 37 | -------------------------------------------------------------------------------- /docs/tutorials/radicale.rst: -------------------------------------------------------------------------------- 1 | ======== 2 | Radicale 3 | ======== 4 | 5 | Radicale_ is a very lightweight server, however, it intentionally doesn't 6 | implement the CalDAV and CardDAV standards completely, which might lead to 7 | issues even with very well-written clients. 
class ServerMixin:
    """Mixin configuring storage tests to run against Apple's iCloud servers."""

    @pytest.fixture
    def get_storage_args(self, item_type, slow_create_collection):
        """Return an async factory producing storage constructor kwargs.

        Credentials are read from the ICLOUD_USERNAME/ICLOUD_PASSWORD
        environment variables; the DAV endpoint is chosen from the storage
        class's file extension (.ics -> CalDAV, .vcf -> CardDAV).
        """
        if item_type != "VEVENT":
            # iCloud collections can either be calendars or task lists.
            # See https://github.com/pimutils/vdirsyncer/pull/593#issuecomment-285941615
            pytest.skip("iCloud doesn't support anything else than VEVENT")

        async def inner(collection="test"):
            args = {
                "username": os.environ["ICLOUD_USERNAME"],
                "password": os.environ["ICLOUD_PASSWORD"],
            }

            if self.storage_class.fileext == ".ics":
                args["url"] = "https://caldav.icloud.com/"
            elif self.storage_class.fileext == ".vcf":
                args["url"] = "https://contacts.icloud.com/"
            else:
                # Only calendar/contact storages are meaningful here.
                raise RuntimeError

            if collection is not None:
                # BUG FIX: slow_create_collection is a coroutine function (the
                # sibling Fastmail/Baikal mixins await it); without "await",
                # `args` would become a coroutine object instead of a dict.
                args = await slow_create_collection(
                    self.storage_class, args, collection
                )
            return args

        return inner
37 | [ -f ~/fastmail-secrets ] || complete-build 38 | - extra-storages: | 39 | set +x 40 | source ~/fastmail-secrets 41 | set -x 42 | 43 | cd vdirsyncer 44 | export PATH=$PATH:~/.local/bin/ 45 | DAV_SERVER=fastmail pytest tests/storage 46 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | repos: 2 | - repo: https://github.com/pre-commit/pre-commit-hooks 3 | rev: v5.0.0 4 | hooks: 5 | - id: trailing-whitespace 6 | args: [--markdown-linebreak-ext=md] 7 | - id: end-of-file-fixer 8 | - id: check-toml 9 | - id: check-added-large-files 10 | - id: debug-statements 11 | - repo: https://github.com/pre-commit/mirrors-mypy 12 | rev: "v1.15.0" 13 | hooks: 14 | - id: mypy 15 | files: vdirsyncer/.* 16 | additional_dependencies: 17 | - types-setuptools 18 | - types-docutils 19 | - types-requests 20 | - repo: https://github.com/charliermarsh/ruff-pre-commit 21 | rev: 'v0.11.4' 22 | hooks: 23 | - id: ruff 24 | args: [--fix, --exit-non-zero-on-fix] 25 | - id: ruff-format 26 | - repo: local 27 | hooks: 28 | - id: typos-syncroniz 29 | name: typos-syncroniz 30 | language: system 31 | # Not how you spell "synchronise" 32 | entry: sh -c "git grep -i syncroniz" 33 | files: ".*/.*" 34 | - id: typos-text-icalendar 35 | name: typos-text-icalendar 36 | language: system 37 | # It's "text/calendar", no "i". 38 | entry: sh -c "git grep -i 'text/icalendar'" 39 | files: ".*/.*" 40 | -------------------------------------------------------------------------------- /.builds/archlinux-py313.yml: -------------------------------------------------------------------------------- 1 | # Run tests using the packaged dependencies on ArchLinux. 
2 | 3 | image: archlinux 4 | packages: 5 | - docker 6 | - docker-compose 7 | # Build dependencies: 8 | - python-wheel 9 | - python-build 10 | - python-installer 11 | - python-setuptools-scm 12 | # Runtime dependencies: 13 | - python-click 14 | - python-click-log 15 | - python-click-threading 16 | - python-requests 17 | - python-aiohttp-oauthlib 18 | - python-tenacity 19 | # Test dependencies: 20 | - python-hypothesis 21 | - python-pytest-cov 22 | - python-pytest-httpserver 23 | - python-trustme 24 | - python-pytest-asyncio 25 | - python-aiohttp 26 | - python-aiostream 27 | - python-aioresponses 28 | sources: 29 | - https://github.com/pimutils/vdirsyncer 30 | environment: 31 | BUILD: test 32 | CI: true 33 | CODECOV_TOKEN: b834a3c5-28fa-4808-9bdb-182210069c79 34 | DAV_SERVER: radicale xandikos 35 | REQUIREMENTS: release 36 | # TODO: ETESYNC_TESTS 37 | tasks: 38 | - check-python: 39 | python --version | grep 'Python 3.13' 40 | - docker: | 41 | sudo systemctl start docker 42 | - setup: | 43 | cd vdirsyncer 44 | python -m build --wheel --skip-dependency-check --no-isolation 45 | sudo python -m installer dist/*.whl 46 | - test: | 47 | cd vdirsyncer 48 | make -e ci-test 49 | make -e ci-test-storage 50 | -------------------------------------------------------------------------------- /docs/index.rst: -------------------------------------------------------------------------------- 1 | ========== 2 | vdirsyncer 3 | ========== 4 | 5 | - `Documentation `_ 6 | - `Source code `_ 7 | 8 | Vdirsyncer is a command-line tool for synchronizing calendars and addressbooks 9 | between a variety of servers and the local filesystem. The most popular usecase 10 | is to synchronize a server with a local folder and use a set of other 11 | :doc:`programs ` to change the local events and contacts. 12 | Vdirsyncer can then synchronize those changes back to the server. 13 | 14 | However, vdirsyncer is not limited to synchronizing between clients and 15 | servers. 
It can also be used to synchronize calendars and/or addressbooks 16 | between two servers directly. 17 | 18 | It aims to be for calendars and contacts what `OfflineIMAP 19 | `_ is for emails. 20 | 21 | .. toctree:: 22 | :caption: Users 23 | :maxdepth: 1 24 | 25 | when 26 | installation 27 | tutorial 28 | ssl-tutorial 29 | keyring 30 | partial-sync 31 | config 32 | tutorials/index 33 | problems 34 | 35 | .. toctree:: 36 | :caption: Developers 37 | :maxdepth: 1 38 | 39 | contributing 40 | vdir 41 | 42 | .. toctree:: 43 | :caption: General 44 | :maxdepth: 1 45 | 46 | packaging 47 | contact 48 | changelog 49 | license 50 | donations 51 | -------------------------------------------------------------------------------- /tests/unit/sync/test_status.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import contextlib 4 | 5 | import hypothesis.strategies as st 6 | from hypothesis import assume 7 | from hypothesis import given 8 | 9 | from vdirsyncer.sync.status import SqliteStatus 10 | 11 | status_dict_strategy = st.dictionaries( 12 | st.text(), 13 | st.tuples( 14 | *( 15 | st.fixed_dictionaries( 16 | {"href": st.text(), "hash": st.text(), "etag": st.text()} 17 | ) 18 | for _ in range(2) 19 | ) 20 | ), 21 | ) 22 | 23 | 24 | @given(status_dict=status_dict_strategy) 25 | def test_legacy_status(status_dict): 26 | hrefs_a = {meta_a["href"] for meta_a, meta_b in status_dict.values()} 27 | hrefs_b = {meta_b["href"] for meta_a, meta_b in status_dict.values()} 28 | assume(len(hrefs_a) == len(status_dict) == len(hrefs_b)) 29 | with contextlib.closing(SqliteStatus()) as status: 30 | status.load_legacy_status(status_dict) 31 | assert dict(status.to_legacy_status()) == status_dict 32 | 33 | for ident, (meta_a, meta_b) in status_dict.items(): 34 | ident_a, meta2_a = status.get_by_href_a(meta_a["href"]) 35 | ident_b, meta2_b = status.get_by_href_b(meta_b["href"]) 36 | assert meta2_a.to_status() == meta_a 37 
class ServerMixin:
    """Mixin configuring storage tests to run against Fastmail's DAV servers."""

    @pytest.fixture
    def get_storage_args(self, slow_create_collection, aio_connector, request):
        """Return an async factory producing storage constructor kwargs.

        Credentials are read from the FASTMAIL_USERNAME/FASTMAIL_PASSWORD
        environment variables; the DAV endpoint is chosen from the storage
        class's file extension (.ics -> CalDAV, .vcf -> CardDAV).
        """
        # item_type is only requested conditionally, so check the test's
        # fixture names before resolving it.
        if (
            "item_type" in request.fixturenames
            and request.getfixturevalue("item_type") == "VTODO"
        ):
            # Fastmail has non-standard support for TODOs
            # See https://github.com/pimutils/vdirsyncer/issues/824
            pytest.skip("Fastmail has non-standard VTODO support.")

        async def inner(collection="test"):
            args = {
                "username": os.environ["FASTMAIL_USERNAME"],
                "password": os.environ["FASTMAIL_PASSWORD"],
                "connector": aio_connector,
            }

            if self.storage_class.fileext == ".ics":
                args["url"] = "https://caldav.fastmail.com/"
            elif self.storage_class.fileext == ".vcf":
                args["url"] = "https://carddav.fastmail.com/"
            else:
                # Only calendar/contact storages are meaningful here.
                raise RuntimeError

            if collection is not None:
                # Creating collections on the live server is slow, hence the
                # shared slow_create_collection helper fixture.
                args = await slow_create_collection(
                    self.storage_class,
                    args,
                    collection,
                )

            return args

        return inner
# This script is meant to be run inside a dedicated container,
# and not interactively.
5 | 6 | set -ex 7 | 8 | export DEBIAN_FRONTEND=noninteractive 9 | 10 | apt-get update 11 | apt-get install -y build-essential fakeroot debhelper git 12 | apt-get install -y python3-all python3-pip python3-venv 13 | apt-get install -y ruby ruby-dev 14 | 15 | pip3 install virtualenv virtualenv-tools3 16 | virtualenv -p python3 /vdirsyncer/env/ 17 | 18 | gem install fpm 19 | 20 | # See https://github.com/jordansissel/fpm/issues/1106#issuecomment-461678970 21 | pip3 uninstall -y virtualenv 22 | echo 'python3 -m venv "$@"' > /usr/local/bin/virtualenv 23 | chmod +x /usr/local/bin/virtualenv 24 | 25 | cp -r /source/ /vdirsyncer/vdirsyncer/ 26 | cd /vdirsyncer/vdirsyncer/ || exit 2 27 | mkdir /vdirsyncer/pkgs/ 28 | 29 | basename -- *.tar.gz .tar.gz | cut -d'-' -f2 | sed -e 's/\.dev/~/g' | tee version 30 | # XXX: Do I really not want google support included? 31 | (echo -n *.tar.gz; echo '[google]') | tee requirements.txt 32 | fpm --verbose \ 33 | --input-type virtualenv \ 34 | --output-type deb \ 35 | --name "vdirsyncer-latest" \ 36 | --version "$(cat version)" \ 37 | --prefix /opt/venvs/vdirsyncer-latest \ 38 | --depends python3 \ 39 | requirements.txt 40 | 41 | mv /vdirsyncer/vdirsyncer/*.deb /vdirsyncer/pkgs/ 42 | 43 | cd /vdirsyncer/pkgs/ 44 | dpkg -i -- *.deb 45 | 46 | # Check that it works: 47 | LC_ALL=C.UTF-8 LANG=C.UTF-8 /opt/venvs/vdirsyncer-latest/bin/vdirsyncer --version 48 | 49 | cp -- *.deb /source/ 50 | -------------------------------------------------------------------------------- /tests/system/cli/test_fetchparams.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | from textwrap import dedent 4 | 5 | 6 | def test_get_password_from_command(tmpdir, runner): 7 | runner.write_with_general( 8 | dedent( 9 | f""" 10 | [pair foobar] 11 | a = "foo" 12 | b = "bar" 13 | collections = ["a", "b", "c"] 14 | 15 | [storage foo] 16 | type.fetch = ["shell", "echo filesystem"] 17 | path = 
#!/bin/sh
#
# Build a vdirsyncer .deb inside a container and publish it to packagecloud.
#
# Usage: release-deb.sh DISTRO DISTROVER
# Requires: docker, curl, jq, and a PACKAGECLOUD_TOKEN environment variable.

set -xeu

SCRIPT_PATH=$(realpath "$0")
SCRIPT_DIR=$(dirname "$SCRIPT_PATH")

# E.g.: debian, ubuntu
#
# BUG FIX: these previously read `${DISTRO:1}` / `${DISTROVER:2}`, which is
# substring expansion (it strips leading characters, or errors under `set -u`
# when unset). The intent -- per the usage comments -- is the script's
# positional arguments; `:?` makes a missing argument fail loudly.
DISTRO=${1:?usage: release-deb.sh DISTRO DISTROVER}
# E.g.: bullseye, bookworm
DISTROVER=${2:?usage: release-deb.sh DISTRO DISTROVER}
CONTAINER_NAME="vdirsyncer-${DISTRO}-${DISTROVER}"
CONTEXT="$(mktemp -d)"

DEST_DIR="$SCRIPT_DIR/../$DISTRO-$DISTROVER"

# Remove the temporary build context on exit.
cleanup() {
    rm -rf "$CONTEXT"
}
trap cleanup EXIT

# Prepare files.
cp scripts/_build_deb_in_container.bash "$CONTEXT"
python setup.py sdist -d "$CONTEXT"

docker run -it \
    --name "$CONTAINER_NAME" \
    --volume "$CONTEXT:/source" \
    "$DISTRO:$DISTROVER" \
    bash /source/_build_deb_in_container.bash

# Keep around the package filename.
PACKAGE=$(ls "$CONTEXT"/*.deb)
PACKAGE=$(basename "$PACKAGE")

# Save the built deb files.
mkdir -p "$DEST_DIR"
cp "$CONTEXT"/*.deb "$DEST_DIR"

echo Build complete! 🤖

# Packagecloud uses some internal IDs for each distro.
# Extract the one for the distro we're publishing.
DISTRO_ID=$(
    curl -s \
        https://"$PACKAGECLOUD_TOKEN":@packagecloud.io/api/v1/distributions.json | \
        jq '.deb | .[] | select(.index_name=="'"$DISTRO"'") | .versions | .[] | select(.index_name=="'"$DISTROVER"'") | .id'
)

# Actually push the package.
curl \
    -F "package[distro_version_id]=$DISTRO_ID" \
    -F "package[package_file]=@$DEST_DIR/$PACKAGE" \
    https://"$PACKAGECLOUD_TOKEN":@packagecloud.io/api/v1/repos/pimutils/vdirsyncer/packages.json

echo Done! ✨
try:
    caldav_args = {
        # Those credentials are configured through the Travis UI
        "username": os.environ["DAVICAL_USERNAME"].strip(),
        "password": os.environ["DAVICAL_PASSWORD"].strip(),
        "url": "https://brutus.lostpackets.de/davical-test/caldav.php/",
    }
except KeyError as e:
    # Skip the whole module when credentials are absent (e.g. on forks/PRs).
    pytestmark = pytest.mark.skip(f"Missing envkey: {e!s}")


@pytest.mark.flaky(reruns=5)
class ServerMixin:
    """Mixin configuring storage tests to run against a shared DAViCal server."""

    @pytest.fixture
    def davical_args(self):
        # The test DAViCal instance only offers CalDAV; CardDAV is skipped.
        if self.storage_class.fileext == ".ics":
            return dict(caldav_args)
        elif self.storage_class.fileext == ".vcf":
            pytest.skip("No carddav")
        else:
            raise RuntimeError

    @pytest.fixture
    def get_storage_args(self, davical_args, request):
        """Return an async factory producing storage constructor kwargs.

        The server is shared, so each run appends a random UUID to the
        collection name and retries up to four times to find an empty one.
        """

        async def inner(collection="test"):
            if collection is None:
                return davical_args

            assert collection.startswith("test")

            for _ in range(4):
                # NOTE(review): the sibling DAV mixins await collection
                # creation, while create_collection/list are invoked
                # synchronously here -- confirm this storage API is sync.
                args = self.storage_class.create_collection(
                    collection + str(uuid.uuid4()), **davical_args
                )
                s = self.storage_class(**args)
                if not list(s.list()):
                    # Delete the collection again at teardown.
                    # See: https://stackoverflow.com/a/33984811
                    request.addfinalizer(lambda x=s: x.session.request("DELETE", ""))
                    return args

            raise RuntimeError("Failed to find free collection.")

        return inner
settings.load_profile("ci") 57 | else: 58 | settings.load_profile("dev") 59 | 60 | 61 | @pytest_asyncio.fixture 62 | async def aio_session(): 63 | async with aiohttp.ClientSession() as session: 64 | yield session 65 | 66 | 67 | @pytest_asyncio.fixture 68 | async def aio_connector(): 69 | async with aiohttp.TCPConnector(limit_per_host=16) as conn: 70 | yield conn 71 | -------------------------------------------------------------------------------- /vdirsyncer/storage/google_helpers.py: -------------------------------------------------------------------------------- 1 | # SPDX-License-Identifier: Apache-2.0 2 | # 3 | # Based on: 4 | # https://github.com/googleapis/google-auth-library-python-oauthlib/blob/1fb16be1bad9050ee29293541be44e41e82defd7/google_auth_oauthlib/flow.py#L513 5 | from __future__ import annotations 6 | 7 | import logging 8 | import wsgiref.simple_server 9 | import wsgiref.util 10 | from collections.abc import Iterable 11 | from typing import Any 12 | from typing import Callable 13 | 14 | logger = logging.getLogger(__name__) 15 | 16 | 17 | class _WSGIRequestHandler(wsgiref.simple_server.WSGIRequestHandler): 18 | """Custom WSGIRequestHandler.""" 19 | 20 | def log_message(self, format, *args): 21 | # (format is the argument name defined in the superclass.) 22 | logger.info(format, *args) 23 | 24 | 25 | class _RedirectWSGIApp: 26 | """WSGI app to handle the authorization redirect. 27 | 28 | Stores the request URI and displays the given success message. 29 | """ 30 | 31 | last_request_uri: str | None 32 | 33 | def __init__(self, success_message: str): 34 | """ 35 | :param success_message: The message to display in the web browser the 36 | authorization flow is complete. 37 | """ 38 | self.last_request_uri = None 39 | self._success_message = success_message 40 | 41 | def __call__( 42 | self, 43 | environ: dict[str, Any], 44 | start_response: Callable[[str, list], None], 45 | ) -> Iterable[bytes]: 46 | """WSGI Callable. 
47 | 48 | :param environ: The WSGI environment. 49 | :param start_response: The WSGI start_response callable. 50 | :returns: The response body. 51 | """ 52 | start_response("200 OK", [("Content-type", "text/plain; charset=utf-8")]) 53 | self.last_request_uri = wsgiref.util.request_uri(environ) 54 | return [self._success_message.encode("utf-8")] 55 | -------------------------------------------------------------------------------- /tests/storage/dav/test_main.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import pytest 4 | 5 | from vdirsyncer.storage.dav import _BAD_XML_CHARS 6 | from vdirsyncer.storage.dav import _merge_xml 7 | from vdirsyncer.storage.dav import _normalize_href 8 | from vdirsyncer.storage.dav import _parse_xml 9 | 10 | 11 | def test_xml_utilities(): 12 | x = _parse_xml( 13 | b""" 14 | 15 | 16 | 17 | HTTP/1.1 404 Not Found 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | """ 32 | ) 33 | 34 | response = x.find("{DAV:}response") 35 | props = _merge_xml(response.findall("{DAV:}propstat/{DAV:}prop")) 36 | assert props.find("{DAV:}resourcetype/{DAV:}collection") is not None 37 | assert props.find("{DAV:}getcontenttype") is not None 38 | 39 | 40 | @pytest.mark.parametrize("char", range(32)) 41 | def test_xml_specialchars(char): 42 | x = _parse_xml( 43 | '' 44 | f"ye{chr(char)}s\r\n" 45 | "hello".encode("ascii") 46 | ) 47 | 48 | if char in _BAD_XML_CHARS: 49 | assert x.text == "yes\nhello" 50 | 51 | 52 | @pytest.mark.parametrize( 53 | "href", 54 | [ 55 | "/dav/calendars/user/testuser/123/UID%253A20210609T084907Z-@synaps-web-54fddfdf7-7kcfm%250A.ics", 56 | ], 57 | ) 58 | def test_normalize_href(href): 59 | assert href == _normalize_href("https://example.com", href) 60 | -------------------------------------------------------------------------------- /tests/storage/dav/__init__.py: 
class DAVStorageTests(ServerMixin, StorageTests):
    """Shared storage test suite specialized for DAV servers.

    The concrete server mixin is selected at import time from the DAV_SERVER
    environment variable (defaulting to a skip-everything mixin).
    """

    dav_server = dav_server

    @pytest.mark.skipif(dav_server == "radicale", reason="Radicale is very tolerant.")
    @pytest.mark.asyncio
    async def test_dav_broken_item(self, s):
        """Uploading syntactically invalid data must fail and store nothing."""
        item = Item("HAHA:YES")
        with pytest.raises((exceptions.Error, aiohttp.ClientResponseError)):
            await s.upload(item)
        assert not await aiostream.stream.list(s.list())

    @pytest.mark.asyncio
    async def test_dav_empty_get_multi_performance(self, s, monkeypatch):
        """get_multi([]) must not issue any HTTP request at all."""

        def breakdown(*a, **kw):
            raise AssertionError("Expected not to be called.")

        monkeypatch.setattr("requests.sessions.Session.request", breakdown)

        try:
            assert list(await aiostream.stream.list(s.get_multi([]))) == []
        finally:
            # Make sure monkeypatch doesn't interfere with DAV server teardown
            monkeypatch.undo()

    @pytest.mark.asyncio
    async def test_dav_unicode_href(self, s, get_item, monkeypatch):
        """A non-ASCII UID should round-trip through upload/get unchanged."""
        if self.dav_server == "radicale":
            pytest.skip("Radicale is unable to deal with unicode hrefs")

        # Force the href to be derived from the (unicode) ident.
        monkeypatch.setattr(s, "_get_href", lambda item: item.ident + s.fileext)
        item = get_item(uid="град сатану" + str(uuid.uuid4()))
        href, _etag = await s.upload(item)
        item2, _etag2 = await s.get(href)
        assert_item_equals(item, item2)
_Remove-DuplicateAppointments.ps1: https://blogs.msdn.microsoft.com/emeamsgdev/2015/02/12/powershell-remove-duplicate-calendar-appointments/ 42 | -------------------------------------------------------------------------------- /docs/tutorials/index.rst: -------------------------------------------------------------------------------- 1 | =============== 2 | Other tutorials 3 | =============== 4 | 5 | The following section contains tutorials not explicitly about any particular 6 | core function of vdirsyncer. They usually show how to integrate vdirsyncer with 7 | third-party software. Because of that, it may be that the information regarding 8 | that other software only applies to specific versions of them. 9 | 10 | .. note:: 11 | Please :doc:`contribute ` your own tutorials too! Pages are 12 | often only stubs and are lacking full examples. 13 | 14 | Client applications 15 | =================== 16 | 17 | .. toctree:: 18 | :maxdepth: 1 19 | 20 | claws-mail 21 | systemd-timer 22 | todoman 23 | 24 | Further applications, with missing pages: 25 | 26 | - khal_, a CLI calendar application supporting :doc:`vdir `. You can use 27 | :storage:`filesystem` with it. 28 | - Many graphical calendar apps such as dayplanner_, Orage_ or rainlendar_ save 29 | a calendar in a single ``.ics`` file. You can use :storage:`singlefile` with 30 | those. 31 | - khard_, a commandline addressbook supporting :doc:`vdir `. You can use 32 | :storage:`filesystem` with it. 33 | - contactquery.c_, a small program explicitly written for querying vdirs from 34 | mutt. 35 | - mates_, a commandline addressbook supporting :doc:`vdir `. 36 | - vdirel_, access :doc:`vdir ` contacts from Emacs. 37 | 38 | .. _khal: http://lostpackets.de/khal/ 39 | .. _dayplanner: http://www.day-planner.org/ 40 | .. _Orage: https://gitlab.xfce.org/apps/orage 41 | .. _rainlendar: http://www.rainlendar.net/ 42 | .. _khard: https://github.com/scheibler/khard/ 43 | .. 
_contactquery.c: https://github.com/t-8ch/snippets/blob/master/contactquery.c 44 | .. _mates: https://github.com/pimutils/mates.rs 45 | .. _vdirel: https://github.com/DamienCassou/vdirel 46 | 47 | .. _supported-servers: 48 | 49 | Servers 50 | ======= 51 | 52 | .. toctree:: 53 | :maxdepth: 1 54 | 55 | baikal 56 | davmail 57 | fastmail 58 | google 59 | icloud 60 | nextcloud 61 | owncloud 62 | radicale 63 | xandikos 64 | -------------------------------------------------------------------------------- /vdirsyncer/sync/exceptions.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | from vdirsyncer import exceptions 4 | 5 | 6 | class SyncError(exceptions.Error): 7 | """Errors related to synchronization.""" 8 | 9 | 10 | class SyncConflict(SyncError): 11 | """ 12 | Two items changed since the last sync, they now have different contents and 13 | no conflict resolution method was given. 14 | 15 | :param ident: The ident of the item. 16 | :param href_a: The item's href on side A. 17 | :param href_b: The item's href on side B. 18 | """ 19 | 20 | ident = None 21 | href_a = None 22 | href_b = None 23 | 24 | 25 | class IdentConflict(SyncError): 26 | """ 27 | Multiple items on the same storage have the same UID. 28 | 29 | :param storage: The affected storage. 30 | :param hrefs: List of affected hrefs on `storage`. 31 | """ 32 | 33 | storage = None 34 | _hrefs = None 35 | 36 | @property 37 | def hrefs(self): 38 | return self._hrefs 39 | 40 | @hrefs.setter 41 | def hrefs(self, val): 42 | new_val = set(val) 43 | assert len(new_val) > 1, val 44 | self._hrefs = new_val 45 | 46 | 47 | class StorageEmpty(SyncError): 48 | """ 49 | One storage unexpectedly got completely empty between two synchronizations. 50 | The first argument is the empty storage. 51 | 52 | :param empty_storage: The empty 53 | :py:class:`vdirsyncer.storage.base.Storage`. 
54 | """ 55 | 56 | empty_storage = None 57 | 58 | 59 | class BothReadOnly(SyncError): 60 | """ 61 | Both storages are marked as read-only. Synchronization is therefore not 62 | possible. 63 | """ 64 | 65 | 66 | class PartialSync(SyncError): 67 | """ 68 | Attempted change on read-only storage. 69 | """ 70 | 71 | storage = None 72 | 73 | 74 | class IdentAlreadyExists(SyncError): 75 | """Like IdentConflict, but for internal state. If this bubbles up, we don't 76 | have a data race, but a bug.""" 77 | 78 | old_href = None 79 | new_href = None 80 | 81 | def to_ident_conflict(self, storage): 82 | return IdentConflict(storage=storage, hrefs=[self.old_href, self.new_href]) 83 | -------------------------------------------------------------------------------- /vdirsyncer/exceptions.py: -------------------------------------------------------------------------------- 1 | """ 2 | Contains exception classes used by vdirsyncer. Not all exceptions are here, 3 | only the most commonly used ones. 4 | """ 5 | 6 | from __future__ import annotations 7 | 8 | 9 | class Error(Exception): 10 | """Baseclass for all errors.""" 11 | 12 | def __init__(self, *args, **kwargs): 13 | for key, value in kwargs.items(): 14 | if getattr(self, key, object()) is not None: # pragma: no cover 15 | raise TypeError(f"Invalid argument: {key}") 16 | setattr(self, key, value) 17 | 18 | super().__init__(*args) 19 | 20 | 21 | class UserError(Error, ValueError): 22 | """Wrapper exception to be used to signify the traceback should not be 23 | shown to the user.""" 24 | 25 | problems = None 26 | 27 | def __str__(self): 28 | msg = Error.__str__(self) 29 | for problem in self.problems or (): 30 | msg += f"\n - {problem}" 31 | 32 | return msg 33 | 34 | 35 | class CollectionNotFound(Error): 36 | """Collection not found""" 37 | 38 | 39 | class PairNotFound(Error): 40 | """Pair not found""" 41 | 42 | pair_name = None 43 | 44 | 45 | class PreconditionFailed(Error): 46 | """ 47 | - The item doesn't exist although it should 
48 | - The item exists although it shouldn't 49 | - The etags don't match. 50 | 51 | Due to CalDAV we can't actually say which error it is. 52 | This error may indicate race conditions. 53 | """ 54 | 55 | 56 | class NotFoundError(PreconditionFailed): 57 | """Item not found""" 58 | 59 | 60 | class AlreadyExistingError(PreconditionFailed): 61 | """Item already exists.""" 62 | 63 | existing_href = None 64 | 65 | 66 | class WrongEtagError(PreconditionFailed): 67 | """Wrong etag""" 68 | 69 | 70 | class ReadOnlyError(Error): 71 | """Storage is read-only.""" 72 | 73 | 74 | class InvalidResponse(Error, ValueError): 75 | """The backend returned an invalid result.""" 76 | 77 | 78 | class UnsupportedMetadataError(Error, NotImplementedError): 79 | """The storage doesn't support this type of metadata.""" 80 | 81 | 82 | class CollectionRequired(Error): 83 | """`collection = null` is not allowed.""" 84 | -------------------------------------------------------------------------------- /docs/tutorials/todoman.rst: -------------------------------------------------------------------------------- 1 | ======= 2 | Todoman 3 | ======= 4 | 5 | The iCalendar format also supports saving tasks in form of ``VTODO``-entries, 6 | with the same file extension as normal events: ``.ics``. Many CalDAV servers 7 | support synchronizing tasks, vdirsyncer does too. 8 | 9 | todoman_ is a CLI task manager supporting :doc:`vdir `. Its interface is 10 | similar to the ones of Taskwarrior or the todo.txt CLI app. You can use 11 | :storage:`filesystem` with it. 12 | 13 | .. _todoman: http://todoman.readthedocs.io/ 14 | 15 | Setting up vdirsyncer 16 | ===================== 17 | 18 | For this tutorial we will use NextCloud. 
19 | 20 | Assuming a config like this:: 21 | 22 | [general] 23 | status_path = "~/.vdirsyncer/status/" 24 | 25 | [pair calendars] 26 | conflict_resolution = "b wins" 27 | a = "calendars_local" 28 | b = "calendars_dav" 29 | collections = ["from b"] 30 | metadata = ["color", "displayname"] 31 | 32 | [storage calendars_local] 33 | type = "filesystem" 34 | path = "~/.calendars/" 35 | fileext = ".ics" 36 | 37 | [storage calendars_dav] 38 | type = "caldav" 39 | url = "https://nextcloud.example.net/" 40 | username = "..." 41 | password = "..." 42 | 43 | ``vdirsyncer sync`` will then synchronize the calendars of your NextCloud_ 44 | instance to subfolders of ``~/.calendar/``. 45 | 46 | .. _NextCloud: https://nextcloud.com/ 47 | 48 | Setting up todoman 49 | ================== 50 | 51 | Write this to ``~/.config/todoman/config.py``:: 52 | 53 | path = "~/.calendars/*" 54 | 55 | The glob_ pattern in ``path`` will match all subfolders in ``~/.calendars/``, 56 | which is exactly the tasklists we want. Now you can use ``todoman`` as 57 | described in its documentation_ and run ``vdirsyncer sync`` to synchronize the changes to NextCloud. 58 | 59 | .. _glob: https://en.wikipedia.org/wiki/Glob_(programming) 60 | .. _documentation: http://todoman.readthedocs.io/ 61 | 62 | Other clients 63 | ============= 64 | 65 | The following client applications also synchronize over CalDAV: 66 | 67 | - The Tasks-app found on iOS 68 | - `OpenTasks for Android `_ 69 | - The `Tasks `_-app for NextCloud's web UI 70 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | # See the documentation on how to run the tests: 2 | # https://vdirsyncer.pimutils.org/en/stable/contributing.html 3 | 4 | # Which DAV server to run the tests against (radicale, xandikos, skip, owncloud, nextcloud, ...) 
5 | export DAV_SERVER := skip 6 | 7 | # release (install release versions of dependencies) 8 | # development (install development versions of some of vdirsyncer's dependencies) 9 | # or minimal (install oldest version of each dependency that is supported by vdirsyncer) 10 | export REQUIREMENTS := release 11 | 12 | # Set this to true if you run vdirsyncer's test as part of e.g. packaging. 13 | export DETERMINISTIC_TESTS := false 14 | 15 | # Assume to run in CI. Don't use this outside of a virtual machine. It will 16 | # heavily "pollute" your system, such as attempting to install a new Python 17 | # systemwide. 18 | export CI := false 19 | 20 | # Whether to generate coverage data while running tests. 21 | export COVERAGE := $(CI) 22 | 23 | # Variables below this line are not very interesting for getting started. 24 | 25 | CODECOV_PATH = /tmp/codecov.sh 26 | 27 | all: 28 | $(error Take a look at https://vdirsyncer.pimutils.org/en/stable/tutorial.html#installation) 29 | 30 | ci-test: 31 | curl -s https://codecov.io/bash > $(CODECOV_PATH) 32 | pytest --cov vdirsyncer --cov-append tests/unit/ tests/system/ 33 | bash $(CODECOV_PATH) -c 34 | 35 | ci-test-storage: 36 | curl -s https://codecov.io/bash > $(CODECOV_PATH) 37 | set -ex; \ 38 | for server in $(DAV_SERVER); do \ 39 | DAV_SERVER=$$server pytest --cov vdirsyncer --cov-append tests/storage; \ 40 | done 41 | bash $(CODECOV_PATH) -c 42 | 43 | check: 44 | ruff check 45 | ruff format --diff 46 | #mypy vdirsyncer 47 | 48 | release-deb: 49 | sh scripts/release-deb.sh debian jessie 50 | sh scripts/release-deb.sh debian stretch 51 | sh scripts/release-deb.sh ubuntu trusty 52 | sh scripts/release-deb.sh ubuntu xenial 53 | sh scripts/release-deb.sh ubuntu zesty 54 | 55 | install-dev: 56 | pip install -U pip setuptools wheel 57 | pip install -e '.[test,check,docs]' 58 | set -xe && if [ "$(REQUIREMENTS)" = "minimal" ]; then \ 59 | pip install pyproject-dependencies && \ 60 | pip install -U --force-reinstall 
$$(pyproject-dependencies . | sed 's/>/=/'); \ 61 | fi 62 | 63 | .PHONY: docs 64 | -------------------------------------------------------------------------------- /vdirsyncer/metasync.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import logging 4 | 5 | from . import exceptions 6 | from .storage.base import normalize_meta_value 7 | 8 | logger = logging.getLogger(__name__) 9 | 10 | 11 | class MetaSyncError(exceptions.Error): 12 | pass 13 | 14 | 15 | class MetaSyncConflict(MetaSyncError): 16 | key = None 17 | 18 | 19 | def status_set_key(status, key, value): 20 | if value is None: 21 | status.pop(key, None) 22 | else: 23 | status[key] = value 24 | 25 | 26 | async def metasync(storage_a, storage_b, status, keys, conflict_resolution=None): 27 | async def _a_to_b(): 28 | logger.info(f"Copying {key} to {storage_b}") 29 | await storage_b.set_meta(key, a) 30 | status_set_key(status, key, a) 31 | 32 | async def _b_to_a(): 33 | logger.info(f"Copying {key} to {storage_a}") 34 | await storage_a.set_meta(key, b) 35 | status_set_key(status, key, b) 36 | 37 | async def _resolve_conflict(): 38 | if a == b: 39 | status_set_key(status, key, a) 40 | elif conflict_resolution == "a wins": 41 | await _a_to_b() 42 | elif conflict_resolution == "b wins": 43 | await _b_to_a() 44 | else: 45 | if callable(conflict_resolution): 46 | logger.warning("Custom commands don't work on metasync.") 47 | elif conflict_resolution is not None: 48 | raise exceptions.UserError("Invalid conflict resolution setting.") 49 | raise MetaSyncConflict(key) 50 | 51 | for key in keys: 52 | a = await storage_a.get_meta(key) 53 | b = await storage_b.get_meta(key) 54 | s = normalize_meta_value(status.get(key)) 55 | logger.debug(f"Key: {key}") 56 | logger.debug(f"A: {a}") 57 | logger.debug(f"B: {b}") 58 | logger.debug(f"S: {s}") 59 | 60 | if (a != s and b != s) or storage_a.read_only or storage_b.read_only: 61 | await 
_resolve_conflict() 62 | elif a != s and b == s: 63 | await _a_to_b() 64 | elif a == s and b != s: 65 | await _b_to_a() 66 | else: 67 | assert a == b 68 | 69 | for key in set(status) - set(keys): 70 | del status[key] 71 | -------------------------------------------------------------------------------- /vdirsyncer/repair.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import logging 4 | from os.path import basename 5 | 6 | import aiostream 7 | 8 | from .utils import generate_href 9 | from .utils import href_safe 10 | 11 | logger = logging.getLogger(__name__) 12 | 13 | 14 | class IrreparableItem(Exception): 15 | pass 16 | 17 | 18 | async def repair_storage(storage, repair_unsafe_uid): 19 | seen_uids = set() 20 | all_hrefs = await aiostream.stream.list(storage.list()) 21 | for i, (href, _) in enumerate(all_hrefs): 22 | item, etag = await storage.get(href) 23 | logger.info(f"[{i}/{len(all_hrefs)}] Processing {href}") 24 | 25 | try: 26 | new_item = repair_item(href, item, seen_uids, repair_unsafe_uid) 27 | except IrreparableItem: 28 | logger.error( 29 | f"Item {href!r} is malformed beyond repair. " 30 | "The PRODID property may indicate which software " 31 | "created this item." 
32 | ) 33 | logger.error(f"Item content: {item.raw!r}") 34 | continue 35 | 36 | seen_uids.add(new_item.uid) 37 | if new_item.raw != item.raw: 38 | if new_item.uid != item.uid: 39 | await storage.upload(new_item) 40 | await storage.delete(href, etag) 41 | else: 42 | await storage.update(href, new_item, etag) 43 | 44 | 45 | def repair_item(href, item, seen_uids, repair_unsafe_uid): 46 | if item.parsed is None: 47 | raise IrreparableItem 48 | 49 | new_item = item 50 | 51 | if not item.uid: 52 | logger.warning("No UID, assigning random UID.") 53 | new_item = item.with_uid(generate_href()) 54 | elif item.uid in seen_uids: 55 | logger.warning("Duplicate UID, assigning random UID.") 56 | new_item = item.with_uid(generate_href()) 57 | elif not href_safe(item.uid) or not href_safe(basename(href)): 58 | if not repair_unsafe_uid: 59 | logger.warning("UID may cause problems, add --repair-unsafe-uid to repair.") 60 | else: 61 | logger.warning("UID or href is unsafe, assigning random UID.") 62 | new_item = item.with_uid(generate_href()) 63 | 64 | if not new_item.uid: 65 | raise IrreparableItem 66 | 67 | return new_item 68 | -------------------------------------------------------------------------------- /tests/system/cli/test_repair.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | from textwrap import dedent 4 | 5 | import pytest 6 | 7 | 8 | @pytest.fixture 9 | def storage(tmpdir, runner): 10 | runner.write_with_general( 11 | dedent( 12 | """ 13 | [storage foo] 14 | type = "filesystem" 15 | path = "{base}/foo/" 16 | fileext = ".txt" 17 | """ 18 | ).format(base=str(tmpdir)) 19 | ) 20 | 21 | return tmpdir.mkdir("foo") 22 | 23 | 24 | @pytest.mark.parametrize("collection", [None, "foocoll"]) 25 | def test_basic(storage, runner, collection): 26 | if collection is not None: 27 | storage = storage.mkdir(collection) 28 | collection_arg = f"foo/{collection}" 29 | else: 30 | collection_arg = "foo" 31 | 32 | 
argv = ["repair", collection_arg] 33 | 34 | result = runner.invoke(argv, input="y") 35 | assert not result.exception 36 | 37 | storage.join("item.txt").write("BEGIN:VCARD\nEND:VCARD") 38 | storage.join("toobroken.txt").write("") 39 | 40 | result = runner.invoke(argv, input="y") 41 | assert not result.exception 42 | assert "No UID" in result.output 43 | assert "'toobroken.txt' is malformed beyond repair" in result.output 44 | (new_fname,) = (x for x in storage.listdir() if "toobroken" not in str(x)) 45 | assert "UID:" in new_fname.read() 46 | 47 | 48 | @pytest.mark.parametrize("repair_uids", [None, True, False]) 49 | def test_repair_uids(storage, runner, repair_uids): 50 | f = storage.join("baduid.txt") 51 | orig_f = "BEGIN:VCARD\nUID:!!!!!\nEND:VCARD" 52 | f.write(orig_f) 53 | 54 | if repair_uids is None: 55 | opt = [] 56 | elif repair_uids: 57 | opt = ["--repair-unsafe-uid"] 58 | else: 59 | opt = ["--no-repair-unsafe-uid"] 60 | 61 | result = runner.invoke(["repair", *opt, "foo"], input="y") 62 | assert not result.exception 63 | 64 | if repair_uids: 65 | assert "UID or href is unsafe, assigning random UID" in result.output 66 | assert not f.exists() 67 | (new_f,) = storage.listdir() 68 | s = new_f.read() 69 | 70 | assert s.startswith("BEGIN:VCARD") 71 | assert s.endswith("END:VCARD") 72 | assert s != orig_f 73 | else: 74 | assert ( 75 | "UID may cause problems, add --repair-unsafe-uid to repair." 76 | in result.output 77 | ) 78 | assert f.read() == orig_f 79 | -------------------------------------------------------------------------------- /config.example: -------------------------------------------------------------------------------- 1 | # An example configuration for vdirsyncer. 2 | # 3 | # Move it to ~/.vdirsyncer/config or ~/.config/vdirsyncer/config and edit it. 4 | # Run `vdirsyncer --help` for CLI usage. 5 | # 6 | # Optional parameters are commented out. 
7 | # This file doesn't document all available parameters, see 8 | # http://vdirsyncer.pimutils.org/ for the rest of them. 9 | 10 | [general] 11 | # A folder where vdirsyncer can store some metadata about each pair. 12 | status_path = "~/.vdirsyncer/status/" 13 | 14 | # CARDDAV 15 | [pair bob_contacts] 16 | # A `[pair ]` block defines two storages `a` and `b` that should be 17 | # synchronized. The definition of these storages follows in `[storage ]` 18 | # blocks. This is similar to accounts in OfflineIMAP. 19 | a = "bob_contacts_local" 20 | b = "bob_contacts_remote" 21 | 22 | # Synchronize all collections that can be found. 23 | # You need to run `vdirsyncer discover` if new calendars/addressbooks are added 24 | # on the server. 25 | 26 | collections = ["from a", "from b"] 27 | 28 | # Synchronize the "display name" property into a local file (~/.contacts/displayname). 29 | metadata = ["displayname"] 30 | 31 | # To resolve a conflict the following values are possible: 32 | # `null` - abort when collisions occur (default) 33 | # `"a wins"` - assume a's items to be more up-to-date 34 | # `"b wins"` - assume b's items to be more up-to-date 35 | #conflict_resolution = null 36 | 37 | [storage bob_contacts_local] 38 | # A storage references actual data on a remote server or on the local disk. 39 | # Similar to repositories in OfflineIMAP. 40 | type = "filesystem" 41 | path = "~/.contacts/" 42 | fileext = ".vcf" 43 | 44 | [storage bob_contacts_remote] 45 | type = "carddav" 46 | url = "https://owncloud.example.com/remote.php/carddav/" 47 | #username = 48 | # The password can also be fetched from the system password storage, netrc or a 49 | # custom command. 
See http://vdirsyncer.pimutils.org/en/stable/keyring.html 50 | #password = 51 | 52 | # CALDAV 53 | [pair bob_calendar] 54 | a = "bob_calendar_local" 55 | b = "bob_calendar_remote" 56 | collections = ["from a", "from b"] 57 | 58 | # Calendars also have a color property 59 | metadata = ["displayname", "color"] 60 | 61 | [storage bob_calendar_local] 62 | type = "filesystem" 63 | path = "~/.calendars/" 64 | fileext = ".ics" 65 | 66 | [storage bob_calendar_remote] 67 | type = "caldav" 68 | url = "https://owncloud.example.com/remote.php/caldav/" 69 | #username = 70 | #password = 71 | -------------------------------------------------------------------------------- /docs/ssl-tutorial.rst: -------------------------------------------------------------------------------- 1 | .. _ssl-tutorial: 2 | 3 | ============================== 4 | SSL and certificate validation 5 | ============================== 6 | 7 | All SSL configuration is done per-storage. 8 | 9 | Pinning by fingerprint 10 | ---------------------- 11 | 12 | To pin the certificate by fingerprint:: 13 | 14 | [storage foo] 15 | type = "caldav" 16 | ... 17 | verify_fingerprint = "6D:83:EA:32:6C:39:BA:08:ED:EB:C9:BC:BE:12:BB:BF:0F:D9:83:00:CC:89:7E:C7:32:05:94:96:CA:C5:59:5E" 18 | 19 | SHA256-Fingerprints must be used, MD5 and SHA-1 are insecure and not supported. 20 | CA validation is disabled when pinning a fingerprint. 21 | 22 | You can use the following command for obtaining a SHA256 fingerprint:: 23 | 24 | echo -n | openssl s_client -connect unterwaditzer.net:443 | openssl x509 -noout -fingerprint -sha256 25 | 26 | However, please consider using `Let's Encrypt `_ such 27 | that you can forget about all of that. It is easier to deploy a free 28 | certificate from them than configuring all of your clients to accept the 29 | self-signed certificate. 30 | 31 | .. 
_ssl-cas: 32 | 33 | Custom root CAs 34 | --------------- 35 | 36 | To point vdirsyncer to a custom set of root CAs:: 37 | 38 | [storage foo] 39 | type = "caldav" 40 | ... 41 | verify = "/path/to/cert.pem" 42 | 43 | Vdirsyncer uses the aiohttp_ library, which uses the default `ssl.SSLContext 44 | https://docs.python.org/3/library/ssl.html#ssl.SSLContext`_ by default. 45 | 46 | There are cases where certificate validation fails even though you can access 47 | the server fine through e.g. your browser. This usually indicates that your 48 | installation of ``python`` or the ``aiohttp`` or library is somehow broken. In 49 | such cases, it makes sense to explicitly set ``verify`` or 50 | ``verify_fingerprint`` as shown above. 51 | 52 | .. _aiohttp: https://docs.aiohttp.org/en/stable/index.html 53 | 54 | .. _ssl-client-certs: 55 | 56 | Client Certificates 57 | ------------------- 58 | 59 | Client certificates may be specified with the ``auth_cert`` parameter. If the 60 | key and certificate are stored in the same file, it may be a string:: 61 | 62 | [storage foo] 63 | type = "caldav" 64 | ... 65 | auth_cert = "/path/to/certificate.pem" 66 | 67 | If the key and certificate are separate, a list may be used:: 68 | 69 | [storage foo] 70 | type = "caldav" 71 | ... 72 | auth_cert = ["/path/to/certificate.crt", "/path/to/key.key"] 73 | -------------------------------------------------------------------------------- /vdirsyncer/storage/memory.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import random 4 | 5 | from vdirsyncer import exceptions 6 | from vdirsyncer.vobject import Item 7 | 8 | from .base import Storage 9 | from .base import normalize_meta_value 10 | 11 | 12 | def _random_string(): 13 | return f"{random.random():.9f}" 14 | 15 | 16 | class MemoryStorage(Storage): 17 | storage_name = "memory" 18 | 19 | """ 20 | Saves data in RAM, only useful for testing. 
21 | """ 22 | 23 | def __init__(self, fileext="", **kwargs): 24 | if kwargs.get("collection") is not None: 25 | raise exceptions.UserError("MemoryStorage does not support collections.") 26 | self.items = {} # href => (etag, item) 27 | self.metadata = {} 28 | self.fileext = fileext 29 | super().__init__(**kwargs) 30 | 31 | def _get_href(self, item): 32 | return item.ident + self.fileext 33 | 34 | async def list(self): 35 | for href, (etag, _item) in self.items.items(): 36 | yield href, etag 37 | 38 | async def get(self, href) -> tuple[Item, str]: 39 | etag, item = self.items[href] 40 | return item, etag 41 | 42 | async def has(self, href): 43 | return href in self.items 44 | 45 | async def upload(self, item): 46 | href = self._get_href(item) 47 | if href in self.items: 48 | raise exceptions.AlreadyExistingError(existing_href=href) 49 | etag = _random_string() 50 | self.items[href] = (etag, item) 51 | return href, etag 52 | 53 | async def update(self, href, item, etag): 54 | if href not in self.items: 55 | raise exceptions.NotFoundError(href) 56 | actual_etag, _ = self.items[href] 57 | if etag != actual_etag: 58 | raise exceptions.WrongEtagError(etag, actual_etag) 59 | 60 | new_etag = _random_string() 61 | self.items[href] = (new_etag, item) 62 | return new_etag 63 | 64 | async def delete(self, href, etag): 65 | if not await self.has(href): 66 | raise exceptions.NotFoundError(href) 67 | if etag != self.items[href][0]: 68 | raise exceptions.WrongEtagError(etag) 69 | del self.items[href] 70 | 71 | async def get_meta(self, key): 72 | return normalize_meta_value(self.metadata.get(key)) 73 | 74 | async def set_meta(self, key, value): 75 | if value is None: 76 | self.metadata.pop(key, None) 77 | else: 78 | self.metadata[key] = normalize_meta_value(value) 79 | -------------------------------------------------------------------------------- /contrib/conflict_resolution/resolve_interactively.py: -------------------------------------------------------------------------------- 
#!/usr/bin/env python3
"""Ask user to resolve a vdirsyncer sync conflict interactively.

Needs a way to ask the user.
The use of https://apps.kde.org/kdialog/ for GNU/Linux is hardcoded.

Depends on python>3.5 and KDialog.

Usage:
    Ensure the file executable and use it in the vdirsyncer.conf file, e.g.

    conflict_resolution = ["command", "/home/bern/vdirsyncer/resolve_interactively.py"]

This file is Free Software under the following license:
SPDX-License-Identifier: BSD-3-Clause
SPDX-FileCopyrightText: 2021 Intevation GmbH
Author:
"""

import re
import subprocess
import sys
from pathlib import Path

KDIALOG = "/usr/bin/kdialog"

SUMMARY_PATTERN = re.compile("^(SUMMARY:.*)$", re.MULTILINE)


def get_summary(icalendar_text: str) -> str:
    """Return the first SUMMARY: line from an iCalendar text.

    Do not care about the line being continued.  Returns a placeholder
    string when the item has no SUMMARY property at all.
    """
    match = SUMMARY_PATTERN.search(icalendar_text)
    if match is None:
        # Fix: items without a SUMMARY (e.g. some VTODOs) used to crash
        # here with ``TypeError: 'NoneType' object is not subscriptable``.
        return "(no SUMMARY found)"
    return match[1]


def main(ical1_filename: Path, ical2_filename: Path) -> None:
    """Ask via kdialog which entry to keep and copy it over the other file.

    Cancelling changes nothing, which leaves the conflict unresolved.
    """
    ical1 = ical1_filename.read_text()
    ical2 = ical2_filename.read_text()

    additional_args = ["--yes-label", "take first"]  # return code == 0
    additional_args += ["--no-label", "take second"]  # return code == 1
    additional_args += ["--cancel-label", "do not resolve"]  # return code == 2

    r = subprocess.run(
        args=[
            KDIALOG,
            "--warningyesnocancel",
            "There was a sync conflict, do you prefer the first entry: \n"
            f"{get_summary(ical1)}...\n(full contents: {ical1_filename})\n\n"
            "or the second entry:\n"
            f"{get_summary(ical2)}...\n(full contents: {ical2_filename})?",
            *additional_args,
        ]
    )

    if r.returncode == 2:
        # cancel was pressed
        return  # shall lead to items not changed, because not copied

    if r.returncode == 0:
        # we want to take the first item, so overwrite the second
        ical2_filename.write_text(ical1)
    else:  # r.returncode == 1, we want the second item, so overwrite the first
        ical1_filename.write_text(ical2)


# Fix: guard the dispatch so importing this module has no side effects;
# previously it ran (and could print the module doc) at import time.
if __name__ == "__main__":
    if len(sys.argv) != 3:
        sys.stdout.write(__doc__)
    else:
        main(Path(sys.argv[1]), Path(sys.argv[2]))
However, when they're faced with the question of how to synchronize that
data across multiple devices, they seem to have reached a dead end with
their novel idea: "Let's just use Dropbox".
Vdirsyncer, on the other 52 | hand, synchronizes with CardDAV/CalDAV servers, which can be accessed with 53 | e.g. DAVx⁵_ or other apps bundled with smartphones. 54 | 55 | .. _DAVx⁵: https://www.davx5.com/ 56 | -------------------------------------------------------------------------------- /docs/partial-sync.rst: -------------------------------------------------------------------------------- 1 | .. _partial_sync_tutorial: 2 | 3 | =============================== 4 | Syncing with read-only storages 5 | =============================== 6 | 7 | If you want to subscribe to a public, read-only `WebCAL 8 | `_-calendar but neither your server nor 9 | your calendar apps support that (or support it insufficiently), vdirsyncer can 10 | be used to synchronize such a public calendar ``A`` with a new calendar ``B`` 11 | of your own and keep ``B`` updated. 12 | 13 | Step 1: Create the target calendar 14 | ================================== 15 | 16 | First you need to create the calendar you want to sync the WebCAL-calendar 17 | with. Most servers offer a web interface for this. You then need to note the 18 | CalDAV URL of your calendar. Note that this URL should directly point to the 19 | calendar you just created, which means you would have one such URL for each 20 | calendar you have. 21 | 22 | Step 2: Creating the config 23 | =========================== 24 | 25 | Paste this into your vdirsyncer config:: 26 | 27 | [pair holidays] 28 | a = "holidays_public" 29 | b = "holidays_private" 30 | collections = null 31 | 32 | [storage holidays_public] 33 | type = "http" 34 | # The URL to your iCalendar file. 35 | url = "..." 36 | 37 | [storage holidays_private] 38 | type = "caldav" 39 | # The direct URL to your calendar. 40 | url = "..." 41 | # The credentials to your CalDAV server 42 | username = "..." 43 | password = "..." 44 | 45 | Then run ``vdirsyncer discover holidays`` and ``vdirsyncer sync holidays``, and 46 | your previously created calendar should be filled with events. 
47 | 48 | Step 3: The partial_sync parameter 49 | ================================== 50 | 51 | .. versionadded:: 0.14 52 | 53 | You may get into a situation where you want to hide or modify some events from 54 | your ``holidays`` calendar. If you try to do that at this point, you'll notice 55 | that vdirsyncer will revert any changes you've made after a few times of 56 | running ``sync``. This is because vdirsyncer wants to keep everything in sync, 57 | and it can't synchronize changes to the public holidays-calendar because it 58 | doesn't have the rights to do so. 59 | 60 | For such purposes you can set the ``partial_sync`` parameter to ``ignore``:: 61 | 62 | [pair holidays] 63 | a = "holidays_public" 64 | b = "holidays_private" 65 | collections = null 66 | partial_sync = ignore 67 | 68 | See :ref:`the config docs ` for more information. 69 | 70 | .. _nextCloud: https://nextcloud.com/ 71 | .. _Baikal: http://sabre.io/baikal/ 72 | .. _DAViCal: http://www.davical.org/ 73 | -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Test suite for vdirsyncer. 3 | """ 4 | 5 | from __future__ import annotations 6 | 7 | import hypothesis.strategies as st 8 | import urllib3.exceptions 9 | 10 | from vdirsyncer.vobject import normalize_item 11 | 12 | urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) 13 | 14 | 15 | def blow_up(*a, **kw): 16 | raise AssertionError("Did not expect to be called.") 17 | 18 | 19 | def assert_item_equals(a, b): 20 | assert normalize_item(a) == normalize_item(b) 21 | 22 | 23 | VCARD_TEMPLATE = """BEGIN:VCARD 24 | VERSION:3.0 25 | FN:Cyrus Daboo 26 | N:Daboo;Cyrus;;; 27 | ADR;TYPE=POSTAL:;2822 Email HQ;Suite 2821;RFCVille;PA;15213;USA 28 | EMAIL;TYPE=PREF:cyrus@example.com 29 | NICKNAME:me 30 | NOTE:Example VCard. 
31 | ORG:Self Employed 32 | TEL;TYPE=VOICE:412 605 0499 33 | TEL;TYPE=FAX:412 605 0705 34 | URL;VALUE=URI:http://www.example.com 35 | X-SOMETHING:{r} 36 | UID:{uid} 37 | END:VCARD""" 38 | 39 | TASK_TEMPLATE = """BEGIN:VCALENDAR 40 | VERSION:2.0 41 | PRODID:-//dmfs.org//mimedir.icalendar//EN 42 | BEGIN:VTODO 43 | CREATED:20130721T142233Z 44 | DTSTAMP:20130730T074543Z 45 | LAST-MODIFIED;VALUE=DATE-TIME:20140122T151338Z 46 | SEQUENCE:2 47 | SUMMARY:Book: Kowlani - Tödlicher Staub 48 | X-SOMETHING:{r} 49 | UID:{uid} 50 | END:VTODO 51 | END:VCALENDAR""" 52 | 53 | 54 | BARE_EVENT_TEMPLATE = """BEGIN:VEVENT 55 | DTSTART:19970714T170000Z 56 | DTEND:19970715T035959Z 57 | SUMMARY:Bastille Day Party 58 | X-SOMETHING:{r} 59 | UID:{uid} 60 | END:VEVENT""" 61 | 62 | 63 | EVENT_TEMPLATE = ( 64 | """BEGIN:VCALENDAR 65 | VERSION:2.0 66 | PRODID:-//hacksw/handcal//NONSGML v1.0//EN 67 | """ 68 | + BARE_EVENT_TEMPLATE 69 | + """ 70 | END:VCALENDAR""" 71 | ) 72 | 73 | EVENT_WITH_TIMEZONE_TEMPLATE = ( 74 | """BEGIN:VCALENDAR 75 | BEGIN:VTIMEZONE 76 | TZID:Europe/Rome 77 | X-LIC-LOCATION:Europe/Rome 78 | BEGIN:DAYLIGHT 79 | TZOFFSETFROM:+0100 80 | TZOFFSETTO:+0200 81 | TZNAME:CEST 82 | DTSTART:19700329T020000 83 | RRULE:FREQ=YEARLY;BYDAY=-1SU;BYMONTH=3 84 | END:DAYLIGHT 85 | BEGIN:STANDARD 86 | TZOFFSETFROM:+0200 87 | TZOFFSETTO:+0100 88 | TZNAME:CET 89 | DTSTART:19701025T030000 90 | RRULE:FREQ=YEARLY;BYDAY=-1SU;BYMONTH=10 91 | END:STANDARD 92 | END:VTIMEZONE 93 | """ 94 | + BARE_EVENT_TEMPLATE 95 | + """ 96 | END:VCALENDAR""" 97 | ) 98 | 99 | 100 | SIMPLE_TEMPLATE = """BEGIN:FOO 101 | UID:{uid} 102 | X-SOMETHING:{r} 103 | HAHA:YES 104 | END:FOO""" 105 | 106 | printable_characters_strategy = st.text(st.characters(exclude_categories=("Cc", "Cs"))) 107 | 108 | uid_strategy = st.text( 109 | st.characters(exclude_categories=("Zs", "Zl", "Zp", "Cc", "Cs")), min_size=1 110 | ).filter(lambda x: x.strip() == x) 111 | 
-------------------------------------------------------------------------------- /docs/keyring.rst: -------------------------------------------------------------------------------- 1 | ================= 2 | Storing passwords 3 | ================= 4 | 5 | .. versionchanged:: 0.7.0 6 | 7 | Password configuration got completely overhauled. 8 | 9 | Vdirsyncer can fetch passwords from several sources other than the config file. 10 | 11 | Command 12 | ======= 13 | 14 | Say you have the following configuration:: 15 | 16 | [storage foo] 17 | type = "caldav" 18 | url = ... 19 | username = "foo" 20 | password = "bar" 21 | 22 | But it bugs you that the password is stored in cleartext in the config file. 23 | You can do this:: 24 | 25 | [storage foo] 26 | type = "caldav" 27 | url = ... 28 | username = "foo" 29 | password.fetch = ["command", "~/get-password.sh", "more", "args"] 30 | 31 | You can fetch the username as well:: 32 | 33 | [storage foo] 34 | type = "caldav" 35 | url = ... 36 | username.fetch = ["command", "~/get-username.sh"] 37 | password.fetch = ["command", "~/get-password.sh"] 38 | 39 | Or really any kind of parameter in a storage section. 40 | 41 | You can also pass the command as a string to be executed in a shell:: 42 | 43 | [storage foo] 44 | ... 45 | password.fetch = ["shell", "~/.local/bin/get-my-password | head -n1"] 46 | 47 | With pass_ for example, you might find yourself writing something like this in 48 | your configuration file:: 49 | 50 | password.fetch = ["command", "pass", "caldav"] 51 | 52 | .. _pass: https://www.passwordstore.org/ 53 | 54 | Accessing the system keyring 55 | ---------------------------- 56 | 57 | As shown above, you can use the ``command`` strategy to fetch your credentials 58 | from arbitrary sources. A very common usecase is to fetch your password from 59 | the system keyring. 60 | 61 | The keyring_ Python package contains a command-line utility for fetching 62 | passwords from the OS's password store. 
Installation:: 63 | 64 | pip install keyring 65 | 66 | Basic usage:: 67 | 68 | password.fetch = ["command", "keyring", "get", "example.com", "foouser"] 69 | 70 | .. _keyring: https://github.com/jaraco/keyring/ 71 | 72 | Password Prompt 73 | =============== 74 | 75 | You can also simply prompt for the password:: 76 | 77 | [storage foo] 78 | type = "caldav" 79 | username = "myusername" 80 | password.fetch = ["prompt", "Password for CalDAV"] 81 | 82 | Environment variable 83 | ==================== 84 | 85 | To read the password from an environment variable:: 86 | 87 | [storage foo] 88 | type = "caldav" 89 | username = "myusername" 90 | password.fetch = ["command", "printenv", "DAV_PW"] 91 | 92 | This is especially handy if you use the same password multiple times 93 | (say, for a CardDAV and a CalDAV storage). 94 | On bash, you can read and export the password without printing:: 95 | 96 | read -s -p "DAV Password: " DAV_PW && export DAV_PW 97 | -------------------------------------------------------------------------------- /README.rst: -------------------------------------------------------------------------------- 1 | ========== 2 | vdirsyncer 3 | ========== 4 | 5 | .. image:: https://builds.sr.ht/~whynothugo/vdirsyncer.svg 6 | :target: https://builds.sr.ht/~whynothugo/vdirsyncer 7 | :alt: CI status 8 | 9 | .. image:: https://codecov.io/github/pimutils/vdirsyncer/coverage.svg?branch=main 10 | :target: https://codecov.io/github/pimutils/vdirsyncer?branch=main 11 | :alt: Codecov coverage report 12 | 13 | .. image:: https://readthedocs.org/projects/vdirsyncer/badge/ 14 | :target: https://vdirsyncer.rtfd.org/ 15 | :alt: documentation 16 | 17 | .. image:: https://img.shields.io/pypi/v/vdirsyncer.svg 18 | :target: https://pypi.python.org/pypi/vdirsyncer 19 | :alt: version on pypi 20 | 21 | .. image:: https://img.shields.io/badge/deb-packagecloud.io-844fec.svg 22 | :target: https://packagecloud.io/pimutils/vdirsyncer 23 | :alt: Debian packages 24 | 25 | ..
image:: https://img.shields.io/pypi/l/vdirsyncer.svg 26 | :target: https://github.com/pimutils/vdirsyncer/blob/main/LICENCE 27 | :alt: licence: BSD 28 | 29 | - `Documentation `_ 30 | - `Source code `_ 31 | 32 | Vdirsyncer is a command-line tool for synchronizing calendars and addressbooks 33 | between a variety of servers and the local filesystem. The most popular usecase 34 | is to synchronize a server with a local folder and use a set of other programs_ 35 | to change the local events and contacts. Vdirsyncer can then synchronize those 36 | changes back to the server. 37 | 38 | However, vdirsyncer is not limited to synchronizing between clients and 39 | servers. It can also be used to synchronize calendars and/or addressbooks 40 | between two servers directly. 41 | 42 | It aims to be for calendars and contacts what `OfflineIMAP 43 | `_ is for emails. 44 | 45 | .. _programs: https://vdirsyncer.pimutils.org/en/latest/tutorials/ 46 | 47 | Links of interest 48 | ================= 49 | 50 | * Check out `the tutorial 51 | `_ for basic 52 | usage. 53 | 54 | * `Contact information 55 | `_ 56 | 57 | * `How to contribute to this project 58 | `_ 59 | 60 | * `Donations `_ 61 | 62 | Dockerized 63 | ================= 64 | If you want to run `Vdirsyncer `_ in a 65 | Docker environment, you can check out the following GitHub Repository: 66 | 67 | * `Vdirsyncer DOCKERIZED `_ 68 | 69 | Note: This is an unofficial Docker build, it is maintained by `Bleala `_. 70 | 71 | License 72 | ======= 73 | 74 | Licensed under the 3-clause BSD license, see ``LICENSE``. 
75 | -------------------------------------------------------------------------------- /tests/unit/test_repair.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import aiostream 4 | import pytest 5 | from hypothesis import HealthCheck 6 | from hypothesis import given 7 | from hypothesis import settings 8 | 9 | from tests import uid_strategy 10 | from vdirsyncer.repair import IrreparableItem 11 | from vdirsyncer.repair import repair_item 12 | from vdirsyncer.repair import repair_storage 13 | from vdirsyncer.storage.memory import MemoryStorage 14 | from vdirsyncer.utils import href_safe 15 | from vdirsyncer.vobject import Item 16 | 17 | 18 | @given(uid=uid_strategy) 19 | # Using the random module for UIDs: 20 | @settings(suppress_health_check=list(HealthCheck)) 21 | @pytest.mark.asyncio 22 | async def test_repair_uids(uid): 23 | s = MemoryStorage() 24 | s.items = { 25 | "one": ("asdf", Item(f"BEGIN:VCARD\nFN:Hans\nUID:{uid}\nEND:VCARD")), 26 | "two": ("asdf", Item(f"BEGIN:VCARD\nFN:Peppi\nUID:{uid}\nEND:VCARD")), 27 | } 28 | 29 | uid1, uid2 = [(await s.get(href))[0].uid async for href, etag in s.list()] 30 | assert uid1 == uid2 31 | 32 | await repair_storage(s, repair_unsafe_uid=False) 33 | 34 | uid1, uid2 = [ 35 | (await s.get(href))[0].uid 36 | for href, etag in await aiostream.stream.list(s.list()) 37 | ] 38 | assert uid1 != uid2 39 | 40 | 41 | @given(uid=uid_strategy.filter(lambda x: not href_safe(x))) 42 | # Using the random module for UIDs: 43 | @settings(suppress_health_check=list(HealthCheck)) 44 | @pytest.mark.asyncio 45 | async def test_repair_unsafe_uids(uid): 46 | s = MemoryStorage() 47 | item = Item(f"BEGIN:VCARD\nUID:{uid}\nEND:VCARD") 48 | href, _etag = await s.upload(item) 49 | assert (await s.get(href))[0].uid == uid 50 | assert not href_safe(uid) 51 | 52 | await repair_storage(s, repair_unsafe_uid=True) 53 | 54 | new_href = (await aiostream.stream.list(s.list()))[0][0] 
55 | assert href_safe(new_href) 56 | newuid = (await s.get(new_href))[0].uid 57 | assert href_safe(newuid) 58 | 59 | 60 | @pytest.mark.parametrize( 61 | ("uid", "href"), [("b@dh0mbr3", "perfectly-fine"), ("perfectly-fine", "b@dh0mbr3")] 62 | ) 63 | def test_repair_unsafe_href(uid, href): 64 | item = Item(f"BEGIN:VCARD\nUID:{uid}\nEND:VCARD") 65 | new_item = repair_item(href, item, set(), True) 66 | assert new_item.raw != item.raw 67 | assert new_item.uid != item.uid 68 | assert href_safe(new_item.uid) 69 | 70 | 71 | def test_repair_do_nothing(): 72 | item = Item("BEGIN:VCARD\nUID:justfine\nEND:VCARD") 73 | assert repair_item("fine", item, set(), True) is item 74 | assert repair_item("@@@@/fine", item, set(), True) is item 75 | 76 | 77 | @pytest.mark.parametrize( 78 | "raw", ["AYYY", "", "@@@@", "BEGIN:VCARD", "BEGIN:FOO\nEND:FOO"] 79 | ) 80 | def test_repair_irreparable(raw): 81 | with pytest.raises(IrreparableItem): 82 | repair_item("fine", Item(raw), set(), True) 83 | -------------------------------------------------------------------------------- /docs/tutorials/claws-mail.rst: -------------------------------------------------------------------------------- 1 | .. _claws-mail-tutorial: 2 | 3 | Vdirsyncer with Claws Mail 4 | ========================== 5 | 6 | First of all, Claws-Mail only supports **read-only** functions for vCards. It 7 | can only read contacts, but there's no editor. 8 | 9 | Preparation 10 | ----------- 11 | 12 | We need to install vdirsyncer, for that look :doc:`here `. Then 13 | we need to create some folders:: 14 | 15 | mkdir ~/.vdirsyncer 16 | mkdir ~/.contacts 17 | 18 | Configuration 19 | ------------- 20 | 21 | Now we create the configuration for vdirsyncer. Open 22 | ``~/.vdirsyncer/config`` with a text editor. The config should look like 23 | this: 24 | 25 | .. 
code:: ini 26 | 27 | [general] 28 | status_path = "~/.vdirsyncer/status/" 29 | 30 | [storage local] 31 | type = "singlefile" 32 | path = "~/.contacts/%s.vcf" 33 | 34 | [storage online] 35 | type = "carddav" 36 | url = "CARDDAV_LINK" 37 | username = "USERNAME" 38 | password = "PASSWORD" 39 | read_only = true 40 | 41 | [pair contacts] 42 | a = "local" 43 | b = "online" 44 | collections = ["from a", "from b"] 45 | conflict_resolution = "b wins" 46 | 47 | - In the general section, we define the status folder path, for discovered 48 | collections and generally stuff that needs to persist between syncs. 49 | - In the local section we define that all contacts should be synced in a single 50 | file and the path for the contacts. 51 | - In the online section you must change the url, username and password to your 52 | setup. We also set the storage to read-only such that no changes get 53 | synchronized back. Claws-Mail should not be able to do any changes anyway, 54 | but this is one extra safety step in case files get corrupted or vdirsyncer 55 | behaves erratically. You can leave that part out if you want to be able to 56 | edit those files locally. 57 | - In the last section we configure that online contacts win in a conflict 58 | situation. Configure this part however you like. A correct value depends on 59 | which side is most likely to be up-to-date. 60 | 61 | Sync 62 | ---- 63 | 64 | Now we discover and sync our contacts:: 65 | 66 | vdirsyncer discover contacts 67 | vdirsyncer sync contacts 68 | 69 | Claws Mail 70 | ---------- 71 | 72 | Open Claws-Mail. Go to **Tools** => **Addressbook**. 73 | 74 | Click on **Addressbook** => **New vCard**. Choose a name for the book. 75 | 76 | Then search for the vCard in the folder **~/.contacts/**. Click 77 | ok, and you will see your contacts. 78 | 79 | .. note:: 80 | 81 | Claws-Mail shows only contacts that have a mail address.
82 | 83 | Crontab 84 | ------- 85 | 86 | On the end we create a crontab, so that vdirsyncer syncs automatically 87 | every 30 minutes our contacts:: 88 | 89 | crontab -e 90 | 91 | On the end of that file enter this line:: 92 | 93 | */30 * * * * /usr/local/bin/vdirsyncer sync > /dev/null 94 | 95 | And you're done! 96 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | # Vdirsyncer synchronizes calendars and contacts. 2 | # 3 | # Please refer to https://vdirsyncer.pimutils.org/en/stable/packaging.html for 4 | # how to package vdirsyncer. 5 | 6 | [build-system] 7 | requires = ["setuptools>=64", "setuptools_scm>=8"] 8 | build-backend = "setuptools.build_meta" 9 | 10 | [project] 11 | name = "vdirsyncer" 12 | authors = [ 13 | {name = "Markus Unterwaditzer", email = "markus@unterwaditzer.net"}, 14 | ] 15 | description = "Synchronize calendars and contacts" 16 | readme = "README.rst" 17 | requires-python = ">=3.9" 18 | keywords = ["todo", "task", "icalendar", "cli"] 19 | license = "BSD-3-Clause" 20 | license-files = ["LICENSE"] 21 | classifiers = [ 22 | "Development Status :: 4 - Beta", 23 | "Environment :: Console", 24 | "Operating System :: POSIX", 25 | "Programming Language :: Python :: 3", 26 | "Programming Language :: Python :: 3.10", 27 | "Programming Language :: Python :: 3.11", 28 | "Programming Language :: Python :: 3.12", 29 | "Programming Language :: Python :: 3.13", 30 | "Programming Language :: Python :: 3.9", 31 | "Topic :: Internet", 32 | "Topic :: Office/Business :: Scheduling", 33 | "Topic :: Utilities", 34 | ] 35 | dependencies = [ 36 | "click>=5.0,<9.0", 37 | "click-log>=0.3.0,<0.5.0", 38 | "requests>=2.20.0", 39 | "aiohttp>=3.8.2,<4.0.0", 40 | "aiostream>=0.4.3,<0.8.0", 41 | "tenacity>=9.0.0", 42 | ] 43 | dynamic = ["version"] 44 | 45 | [project.optional-dependencies] 46 | google = ["aiohttp-oauthlib"] 47 | test = [ 48 | 
"hypothesis>=6.72.0,<7.0.0", 49 | "pytest", 50 | "pytest-cov", 51 | "pytest-httpserver", 52 | "trustme", 53 | "pytest-asyncio", 54 | "aioresponses", 55 | ] 56 | docs = [ 57 | "sphinx!=1.4.7", 58 | "sphinx_rtd_theme", 59 | "setuptools_scm", 60 | ] 61 | check = [ 62 | "mypy", 63 | "ruff", 64 | "types-docutils", 65 | "types-requests", 66 | "types-setuptools", 67 | ] 68 | 69 | [project.scripts] 70 | vdirsyncer = "vdirsyncer.cli:app" 71 | 72 | [tool.ruff.lint] 73 | extend-select = [ 74 | "B0", 75 | "C4", 76 | "E", 77 | "I", 78 | "RSE", 79 | "SIM", 80 | "TID", 81 | "UP", 82 | "W", 83 | ] 84 | 85 | [tool.ruff.lint.isort] 86 | force-single-line = true 87 | required-imports = ["from __future__ import annotations"] 88 | 89 | [tool.pytest.ini_options] 90 | addopts = """ 91 | --tb=short 92 | --cov-config .coveragerc 93 | --cov=vdirsyncer 94 | --cov-report=term-missing:skip-covered 95 | --no-cov-on-fail 96 | --color=yes 97 | """ 98 | # filterwarnings=error 99 | asyncio_default_fixture_loop_scope = "function" 100 | 101 | [tool.mypy] 102 | ignore_missing_imports = true 103 | 104 | [tool.coverage.report] 105 | exclude_lines = [ 106 | "if TYPE_CHECKING:", 107 | ] 108 | 109 | [tool.setuptools.packages.find] 110 | include = ["vdirsyncer*"] 111 | 112 | [tool.setuptools_scm] 113 | write_to = "vdirsyncer/version.py" 114 | version_scheme = "no-guess-dev" 115 | -------------------------------------------------------------------------------- /docs/conf.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import datetime 4 | import os 5 | 6 | from pkg_resources import get_distribution 7 | 8 | extensions = ["sphinx.ext.autodoc"] 9 | 10 | templates_path = ["_templates"] 11 | 12 | source_suffix = ".rst" 13 | master_doc = "index" 14 | 15 | project = "vdirsyncer" 16 | copyright = "2014-{}, Markus Unterwaditzer & contributors".format( 17 | datetime.date.today().strftime("%Y") 18 | ) 19 | 20 | release = 
get_distribution("vdirsyncer").version 21 | version = ".".join(release.split(".")[:2]) # The short X.Y version. 22 | 23 | rst_epilog = f".. |vdirsyncer_version| replace:: {release}" 24 | 25 | exclude_patterns = ["_build"] 26 | 27 | pygments_style = "sphinx" 28 | 29 | on_rtd = os.environ.get("READTHEDOCS", None) == "True" 30 | 31 | try: 32 | import sphinx_rtd_theme 33 | 34 | html_theme = "sphinx_rtd_theme" 35 | html_theme_path = [sphinx_rtd_theme.get_html_theme_path()] 36 | except ImportError: 37 | html_theme = "default" 38 | if not on_rtd: 39 | print("-" * 74) 40 | print("Warning: sphinx-rtd-theme not installed, building with default theme.") 41 | print("-" * 74) 42 | 43 | html_static_path = ["_static"] 44 | htmlhelp_basename = "vdirsyncerdoc" 45 | 46 | latex_elements = {} 47 | latex_documents = [ 48 | ( 49 | "index", 50 | "vdirsyncer.tex", 51 | "vdirsyncer Documentation", 52 | "Markus Unterwaditzer", 53 | "manual", 54 | ), 55 | ] 56 | 57 | man_pages = [ 58 | ("index", "vdirsyncer", "vdirsyncer Documentation", ["Markus Unterwaditzer"], 1) 59 | ] 60 | 61 | texinfo_documents = [ 62 | ( 63 | "index", 64 | "vdirsyncer", 65 | "vdirsyncer Documentation", 66 | "Markus Unterwaditzer", 67 | "vdirsyncer", 68 | "Synchronize calendars and contacts.", 69 | "Miscellaneous", 70 | ), 71 | ] 72 | 73 | 74 | def github_issue_role(name, rawtext, text, lineno, inliner, options=None, content=()): 75 | options = options or {} 76 | try: 77 | issue_num = int(text) 78 | if issue_num <= 0: 79 | raise ValueError 80 | except ValueError: 81 | msg = inliner.reporter.error(f"Invalid GitHub issue: {text}", line=lineno) 82 | prb = inliner.problematic(rawtext, rawtext, msg) 83 | return [prb], [msg] 84 | 85 | from docutils import nodes 86 | 87 | PROJECT_HOME = "https://github.com/pimutils/vdirsyncer" 88 | link = "{}/{}/{}".format( 89 | PROJECT_HOME, "issues" if name == "gh" else "pull", issue_num 90 | ) 91 | linktext = ("issue #{}" if name == "gh" else "pull request #{}").format(issue_num) 92 | node 
= nodes.reference(rawtext, linktext, refuri=link, **options) 93 | return [node], [] 94 | 95 | 96 | def setup(app): 97 | from sphinx.domains.python import PyObject 98 | 99 | app.add_object_type( 100 | "storage", 101 | "storage", 102 | "pair: %s; storage", 103 | doc_field_types=PyObject.doc_field_types, 104 | ) 105 | app.add_role("gh", github_issue_role) 106 | app.add_role("ghpr", github_issue_role) 107 | -------------------------------------------------------------------------------- /tests/storage/test_http_with_singlefile.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import aiostream 4 | import pytest 5 | from aioresponses import CallbackResult 6 | from aioresponses import aioresponses 7 | 8 | import vdirsyncer.storage.http 9 | from vdirsyncer.storage.base import Storage 10 | from vdirsyncer.storage.singlefile import SingleFileStorage 11 | 12 | from . import StorageTests 13 | 14 | 15 | class CombinedStorage(Storage): 16 | """A subclass of HttpStorage to make testing easier. 
It supports writes via 17 | SingleFileStorage.""" 18 | 19 | _repr_attributes = ("url", "path") 20 | storage_name = "http_and_singlefile" 21 | 22 | def __init__(self, url, path, *, connector, **kwargs): 23 | if kwargs.get("collection") is not None: 24 | raise ValueError 25 | 26 | super().__init__(**kwargs) 27 | self.url = url 28 | self.path = path 29 | self._reader = vdirsyncer.storage.http.HttpStorage(url=url, connector=connector) 30 | self._reader._ignore_uids = False 31 | self._writer = SingleFileStorage(path=path) 32 | 33 | async def list(self, *a, **kw): 34 | async for item in self._reader.list(*a, **kw): 35 | yield item 36 | 37 | async def get(self, *a, **kw): 38 | await aiostream.stream.list(self.list()) 39 | return await self._reader.get(*a, **kw) 40 | 41 | async def upload(self, *a, **kw): 42 | return await self._writer.upload(*a, **kw) 43 | 44 | async def update(self, *a, **kw): 45 | return await self._writer.update(*a, **kw) 46 | 47 | async def delete(self, *a, **kw): 48 | return await self._writer.delete(*a, **kw) 49 | 50 | 51 | class TestHttpStorage(StorageTests): 52 | storage_class = CombinedStorage 53 | supports_collections = False 54 | supports_metadata = False 55 | 56 | @pytest.fixture(autouse=True) 57 | def setup_tmpdir(self, tmpdir, monkeypatch): 58 | self.tmpfile = str(tmpdir.ensure("collection.txt")) 59 | 60 | def callback(url, headers, **kwargs): 61 | """Read our tmpfile at request time. 62 | 63 | We can't just read this during test setup since the file get written to 64 | during test execution. 65 | 66 | It might make sense to actually run a server serving the local file. 
67 | """ 68 | assert headers["User-Agent"].startswith("vdirsyncer/") 69 | 70 | with open(self.tmpfile) as f: 71 | body = f.read() 72 | 73 | return CallbackResult( 74 | status=200, 75 | body=body, 76 | headers={"Content-Type": "text/calendar; charset=utf-8"}, 77 | ) 78 | 79 | with aioresponses() as m: 80 | m.get("http://localhost:123/collection.txt", callback=callback, repeat=True) 81 | yield 82 | 83 | @pytest.fixture 84 | def get_storage_args(self, aio_connector): 85 | async def inner(collection=None): 86 | assert collection is None 87 | return { 88 | "url": "http://localhost:123/collection.txt", 89 | "path": self.tmpfile, 90 | "connector": aio_connector, 91 | } 92 | 93 | return inner 94 | -------------------------------------------------------------------------------- /vdirsyncer/cli/fetchparams.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import logging 4 | 5 | import click 6 | 7 | from vdirsyncer import exceptions 8 | from vdirsyncer.utils import expand_path 9 | from vdirsyncer.utils import synchronized 10 | 11 | from . 
import AppContext 12 | 13 | SUFFIX = ".fetch" 14 | 15 | logger = logging.getLogger(__name__) 16 | 17 | 18 | def expand_fetch_params(config): 19 | config = dict(config) 20 | for key in list(config): 21 | if not key.endswith(SUFFIX): 22 | continue 23 | 24 | newkey = key[: -len(SUFFIX)] 25 | if newkey in config: 26 | raise ValueError(f"Can't set {key} and {newkey}.") 27 | config[newkey] = _fetch_value(config[key], key) 28 | del config[key] 29 | 30 | return config 31 | 32 | 33 | @synchronized() 34 | def _fetch_value(opts, key): 35 | if not isinstance(opts, list): 36 | raise ValueError(f"Invalid value for {key}: Expected a list, found {opts!r}.") 37 | if not opts: 38 | raise ValueError("Expected list of length > 0.") 39 | 40 | try: 41 | ctx = click.get_current_context().find_object(AppContext) 42 | if ctx is None: 43 | raise RuntimeError 44 | password_cache = ctx.fetched_params 45 | except RuntimeError: 46 | password_cache = {} 47 | 48 | cache_key = tuple(opts) 49 | if cache_key in password_cache: 50 | rv = password_cache[cache_key] 51 | logger.debug(f"Found cached value for {opts!r}.") 52 | if isinstance(rv, BaseException): 53 | raise rv 54 | return rv 55 | 56 | strategy = opts[0] 57 | try: 58 | strategy_fn = STRATEGIES[strategy] 59 | except KeyError: 60 | raise exceptions.UserError(f"Unknown strategy: {strategy}") 61 | 62 | logger.debug(f"Fetching value for {key} with {strategy} strategy.") 63 | try: 64 | rv = strategy_fn(*opts[1:]) 65 | except (click.Abort, KeyboardInterrupt) as e: 66 | password_cache[cache_key] = e 67 | raise 68 | else: 69 | if not rv: 70 | raise exceptions.UserError( 71 | f"Empty value for {key}, this most likely indicates an error." 72 | ) 73 | password_cache[cache_key] = rv 74 | return rv 75 | 76 | 77 | def _strategy_command(*command: str, shell: bool = False): 78 | """Execute a user-specified command and return its output.""" 79 | import subprocess 80 | 81 | # Normalize path of every path member. 
82 | # If there is no path specified then nothing will happen. 83 | # Makes this a list to avoid it being exhausted on the first iteration. 84 | expanded_command = list(map(expand_path, command)) 85 | 86 | try: 87 | stdout = subprocess.check_output(expanded_command, text=True, shell=shell) 88 | return stdout.strip("\n") 89 | except OSError as e: 90 | cmd = " ".join(expanded_command) 91 | raise exceptions.UserError(f"Failed to execute command: {cmd}\n{e!s}") 92 | 93 | 94 | def _strategy_shell(*command: str): 95 | """Execute a user-specified command string in a shell and return its output.""" 96 | return _strategy_command(*command, shell=True) 97 | 98 | 99 | def _strategy_prompt(text): 100 | return click.prompt(text, hide_input=True) 101 | 102 | 103 | STRATEGIES = { 104 | "command": _strategy_command, 105 | "shell": _strategy_shell, 106 | "prompt": _strategy_prompt, 107 | } 108 | -------------------------------------------------------------------------------- /tests/storage/conftest.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import asyncio 4 | import contextlib 5 | import subprocess 6 | import time 7 | import uuid 8 | 9 | import aiostream 10 | import pytest 11 | import pytest_asyncio 12 | import requests 13 | 14 | 15 | def wait_for_container(url): 16 | """Wait for a container to initialise. 17 | 18 | Polls a URL every 100ms until the server responds. 19 | """ 20 | # give the server 5 seconds to settle 21 | for _ in range(50): 22 | print(_) 23 | 24 | try: 25 | response = requests.get(url) 26 | response.raise_for_status() 27 | except requests.ConnectionError: 28 | pass 29 | else: 30 | return 31 | 32 | time.sleep(0.1) 33 | 34 | pytest.exit( 35 | "Server did not initialise in 5 seconds.\n" 36 | "WARNING: There may be a stale docker container still running." 
37 | ) 38 | 39 | 40 | @contextlib.contextmanager 41 | def dockerised_server(name, container_port, exposed_port): 42 | """Run a dockerised DAV server as a context manager.""" 43 | container_id = None 44 | url = f"http://127.0.0.1:{exposed_port}/" 45 | 46 | try: 47 | # Hint: This will block while the pull happens, and only return once 48 | # the container has actually started. 49 | output = subprocess.check_output( 50 | [ 51 | "docker", 52 | "run", 53 | "--rm", 54 | "--detach", 55 | "--publish", 56 | f"{exposed_port}:{container_port}", 57 | f"whynothugo/vdirsyncer-devkit-{name}", 58 | ] 59 | ) 60 | 61 | container_id = output.decode().strip() 62 | wait_for_container(url) 63 | 64 | yield url 65 | finally: 66 | if container_id: 67 | subprocess.check_output(["docker", "kill", container_id]) 68 | 69 | 70 | @pytest.fixture(scope="session") 71 | def baikal_server(): 72 | with dockerised_server("baikal", "80", "8002"): 73 | yield 74 | 75 | 76 | @pytest.fixture(scope="session") 77 | def radicale_server(): 78 | with dockerised_server("radicale", "8001", "8001"): 79 | yield 80 | 81 | 82 | @pytest.fixture(scope="session") 83 | def xandikos_server(): 84 | with dockerised_server("xandikos", "8000", "8000"): 85 | yield 86 | 87 | 88 | @pytest_asyncio.fixture 89 | async def slow_create_collection(request, aio_connector): 90 | # We need to properly clean up because otherwise we might run into 91 | # storage limits. 92 | to_delete = [] 93 | 94 | async def inner(cls: type, args: dict, collection_name: str) -> dict: 95 | """Create a collection 96 | 97 | Returns args necessary to create a Storage instance pointing to it.
98 | """ 99 | assert collection_name.startswith("test") 100 | 101 | # Make each name unique 102 | collection_name = f"{collection_name}-vdirsyncer-ci-{uuid.uuid4()}" 103 | 104 | # Create the collection: 105 | args = await cls.create_collection(collection_name, **args) 106 | collection = cls(**args) 107 | 108 | # Keep collection in a list to be deleted once tests end: 109 | to_delete.append(collection) 110 | 111 | assert not await aiostream.stream.list(collection.list()) 112 | return args 113 | 114 | yield inner 115 | 116 | await asyncio.gather(*(c.session.request("DELETE", "") for c in to_delete)) 117 | -------------------------------------------------------------------------------- /docs/packaging.rst: -------------------------------------------------------------------------------- 1 | ==================== 2 | Packaging guidelines 3 | ==================== 4 | 5 | Thank you very much for packaging vdirsyncer! The following guidelines should 6 | help you to avoid some common pitfalls. 7 | 8 | If you find yourself needing to patch anything, or going in a different direction, 9 | please open an issue so we can address it in a way that works for everyone. Otherwise 10 | we get bug reports for code or scenarios that don't exist in upstream vdirsyncer. 11 | 12 | Obtaining the source code 13 | ========================= 14 | 15 | The main distribution channel is `PyPI 16 | `_, and source tarballs can be 17 | obtained there. We mirror the same package tarball and wheel as GitHub 18 | releases. Please do not confuse these with the auto-generated GitHub "Source 19 | Code" tarball. Those are missing some important metadata and your build will fail. 20 | 21 | We give each release a tag in the git repo. If you want to get notified of new 22 | releases, `GitHub's feed 23 | `_ is a good way. 24 | 25 | Tags will be signed by the maintainer who is doing the release (starting with 26 | 0.16.8), and generation of the tarball and wheel is done by CI.
Hence, only the 27 | tag itself is signed. 28 | 29 | Dependency versions 30 | =================== 31 | 32 | As with most Python packages, ``setup.py`` denotes the dependencies of 33 | vdirsyncer. It also contains lower-bound versions of each dependency. Older 34 | versions will be rejected by the testsuite. 35 | 36 | Testing 37 | ======= 38 | 39 | Everything testing-related goes through the ``Makefile`` in the root of the 40 | repository or PyPI package. Trying to e.g. run ``pytest`` directly will 41 | require a lot of environment variables to be set (for configuration) and you 42 | probably don't want to deal with that. 43 | 44 | You can install the all development dependencies with:: 45 | 46 | make install-dev 47 | 48 | You probably don't want this since it will use pip to download the 49 | dependencies. Alternatively test dependencies are listed as ``test`` optional 50 | dependencies in ``pyproject.toml``, again with lower-bound version 51 | requirements. 52 | 53 | You also have to have vdirsyncer fully installed at this point. Merely 54 | ``cd``-ing into the tarball will not be sufficient. 55 | 56 | Running the tests happens with:: 57 | 58 | pytest 59 | 60 | Hypothesis will randomly generate test input. If you care about deterministic 61 | tests, set the ``DETERMINISTIC_TESTS`` variable to ``"true"``:: 62 | 63 | make DETERMINISTIC_TESTS=true test 64 | 65 | There are a lot of additional variables that allow you to test vdirsyncer 66 | against a particular server. Those variables are not "stable" and may change 67 | drastically between minor versions. Just don't use them, you are unlikely to 68 | find bugs that vdirsyncer's CI hasn't found. 69 | 70 | Documentation 71 | ============= 72 | 73 | Using Sphinx_ you can generate the documentation you're reading right now in a 74 | variety of formats, such as HTML, PDF, or even as a manpage. That said, I only 75 | take care of the HTML docs' formatting. 
76 | 77 | You can find a list of dependencies in ``pyproject.toml``, in the 78 | ``project.optional-dependencies`` section as ``docs``. Again, you can install 79 | those using pip with:: 80 | 81 | pip install '.[docs]' 82 | 83 | Then change into the ``docs/`` directory and build whatever format you want 84 | using the ``Makefile`` in there (run ``make`` for the formats you can build). 85 | 86 | .. _Sphinx: www.sphinx-doc.org/ 87 | 88 | Contrib files 89 | ============= 90 | 91 | Reference ``systemd.service`` and ``systemd.timer`` unit files are provided. It 92 | is recommended to install this if your distribution is systemd-based. 93 | -------------------------------------------------------------------------------- /tests/unit/cli/test_fetchparams.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | from contextlib import contextmanager 4 | from unittest.mock import patch 5 | 6 | import hypothesis.strategies as st 7 | import pytest 8 | from hypothesis import given 9 | 10 | from vdirsyncer import exceptions 11 | from vdirsyncer.cli.fetchparams import STRATEGIES 12 | from vdirsyncer.cli.fetchparams import expand_fetch_params 13 | 14 | 15 | @pytest.fixture 16 | def mystrategy(monkeypatch): 17 | def strategy(x): 18 | calls.append(x) 19 | return x 20 | 21 | calls = [] 22 | monkeypatch.setitem(STRATEGIES, "mystrategy", strategy) 23 | return calls 24 | 25 | 26 | @contextmanager 27 | def dummy_strategy(): 28 | def strategy(x): 29 | calls.append(x) 30 | return x 31 | 32 | calls = [] 33 | with patch.dict(STRATEGIES, {"mystrategy": strategy}): 34 | yield calls 35 | 36 | 37 | @pytest.fixture 38 | def value_cache(monkeypatch): 39 | _cache = {} 40 | 41 | class FakeContext: 42 | fetched_params = _cache 43 | 44 | def find_object(self, _): 45 | return self 46 | 47 | def get_context(*a, **kw): 48 | return FakeContext() 49 | 50 | monkeypatch.setattr("click.get_current_context", get_context) 51 | return 
_cache 52 | 53 | 54 | def test_key_conflict(monkeypatch, mystrategy): 55 | with pytest.raises(ValueError) as excinfo: 56 | expand_fetch_params({"foo": "bar", "foo.fetch": ["mystrategy", "baz"]}) 57 | 58 | assert "Can't set foo.fetch and foo." in str(excinfo.value) 59 | 60 | 61 | @given(s=st.text(), t=st.text(min_size=1)) 62 | def test_fuzzing(s, t): 63 | with dummy_strategy(): 64 | config = expand_fetch_params({f"{s}.fetch": ["mystrategy", t]}) 65 | 66 | assert config[s] == t 67 | 68 | 69 | @pytest.mark.parametrize("value", [[], "lol", 42]) 70 | def test_invalid_fetch_value(mystrategy, value): 71 | with pytest.raises(ValueError) as excinfo: 72 | expand_fetch_params({"foo.fetch": value}) 73 | 74 | assert "Expected a list" in str( 75 | excinfo.value 76 | ) or "Expected list of length > 0" in str(excinfo.value) 77 | 78 | 79 | def test_unknown_strategy(): 80 | with pytest.raises(exceptions.UserError) as excinfo: 81 | expand_fetch_params({"foo.fetch": ["unreal", "asdf"]}) 82 | 83 | assert "Unknown strategy" in str(excinfo.value) 84 | 85 | 86 | def test_caching(monkeypatch, mystrategy, value_cache): 87 | orig_cfg = {"foo.fetch": ["mystrategy", "asdf"]} 88 | 89 | rv = expand_fetch_params(orig_cfg) 90 | assert rv["foo"] == "asdf" 91 | assert mystrategy == ["asdf"] 92 | assert len(value_cache) == 1 93 | 94 | rv = expand_fetch_params(orig_cfg) 95 | assert rv["foo"] == "asdf" 96 | assert mystrategy == ["asdf"] 97 | assert len(value_cache) == 1 98 | 99 | value_cache.clear() 100 | rv = expand_fetch_params(orig_cfg) 101 | assert rv["foo"] == "asdf" 102 | assert mystrategy == ["asdf"] * 2 103 | assert len(value_cache) == 1 104 | 105 | 106 | def test_failed_strategy(monkeypatch, value_cache): 107 | calls = [] 108 | 109 | def strategy(x): 110 | calls.append(x) 111 | raise KeyboardInterrupt 112 | 113 | monkeypatch.setitem(STRATEGIES, "mystrategy", strategy) 114 | 115 | orig_cfg = {"foo.fetch": ["mystrategy", "asdf"]} 116 | 117 | for _ in range(2): 118 | with 
pytest.raises(KeyboardInterrupt): 119 | expand_fetch_params(orig_cfg) 120 | 121 | assert len(value_cache) == 1 122 | assert len(calls) == 1 123 | 124 | 125 | def test_empty_value(monkeypatch, mystrategy): 126 | with pytest.raises(exceptions.UserError) as excinfo: 127 | expand_fetch_params({"foo.fetch": ["mystrategy", ""]}) 128 | 129 | assert "Empty value for foo.fetch, this most likely indicates an error" in str( 130 | excinfo.value 131 | ) 132 | -------------------------------------------------------------------------------- /tests/system/utils/test_main.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import logging 4 | 5 | import aiohttp 6 | import click_log 7 | import pytest 8 | from cryptography import x509 9 | from cryptography.hazmat.primitives import hashes 10 | 11 | from vdirsyncer import http 12 | from vdirsyncer import utils 13 | 14 | 15 | @pytest.fixture(autouse=True) 16 | def no_debug_output(request): 17 | logger = click_log.basic_config("vdirsyncer") 18 | logger.setLevel(logging.WARNING) 19 | 20 | 21 | def test_get_storage_init_args(): 22 | from vdirsyncer.storage.memory import MemoryStorage 23 | 24 | all, required = utils.get_storage_init_args(MemoryStorage) 25 | assert all == {"fileext", "collection", "read_only", "instance_name", "no_delete"} 26 | assert not required 27 | 28 | 29 | @pytest.mark.asyncio 30 | async def test_request_ssl(): 31 | async with aiohttp.ClientSession() as session: 32 | with pytest.raises( 33 | aiohttp.ClientConnectorCertificateError, 34 | match="certificate verify failed", 35 | ): 36 | await http.request( 37 | "GET", 38 | "https://self-signed.badssl.com/", 39 | session=session, 40 | ) 41 | 42 | 43 | @pytest.mark.xfail(reason="feature not implemented") 44 | @pytest.mark.asyncio 45 | async def test_request_unsafe_ssl(): 46 | async with aiohttp.ClientSession() as session: 47 | await http.request( 48 | "GET", 49 | 
"https://self-signed.badssl.com/", 50 | verify=False, 51 | session=session, 52 | ) 53 | 54 | 55 | def fingerprint_of_cert(cert, hash=hashes.SHA256) -> str: 56 | return x509.load_pem_x509_certificate(cert.bytes()).fingerprint(hash()).hex() 57 | 58 | 59 | @pytest.mark.parametrize("hash_algorithm", [hashes.SHA256]) 60 | @pytest.mark.asyncio 61 | async def test_request_ssl_leaf_fingerprint( 62 | httpserver, 63 | localhost_cert, 64 | hash_algorithm, 65 | aio_session, 66 | ): 67 | fingerprint = fingerprint_of_cert(localhost_cert.cert_chain_pems[0], hash_algorithm) 68 | bogus = "".join(reversed(fingerprint)) 69 | 70 | # We have to serve something: 71 | httpserver.expect_request("/").respond_with_data("OK") 72 | url = f"https://127.0.0.1:{httpserver.port}/" 73 | 74 | ssl = http.prepare_verify(None, fingerprint) 75 | await http.request("GET", url, ssl=ssl, session=aio_session) 76 | 77 | ssl = http.prepare_verify(None, bogus) 78 | with pytest.raises(aiohttp.ServerFingerprintMismatch): 79 | await http.request("GET", url, ssl=ssl, session=aio_session) 80 | 81 | 82 | @pytest.mark.xfail(reason="Not implemented") 83 | @pytest.mark.parametrize("hash_algorithm", [hashes.SHA256]) 84 | @pytest.mark.asyncio 85 | async def test_request_ssl_ca_fingerprints(httpserver, ca, hash_algorithm, aio_session): 86 | fingerprint = fingerprint_of_cert(ca.cert_pem) 87 | bogus = "".join(reversed(fingerprint)) 88 | 89 | # We have to serve something: 90 | httpserver.expect_request("/").respond_with_data("OK") 91 | url = f"https://127.0.0.1:{httpserver.port}/" 92 | 93 | await http.request( 94 | "GET", 95 | url, 96 | verify=False, 97 | verify_fingerprint=fingerprint, 98 | session=aio_session, 99 | ) 100 | 101 | with pytest.raises(aiohttp.ServerFingerprintMismatch): 102 | http.request( 103 | "GET", 104 | url, 105 | verify=False, 106 | verify_fingerprint=bogus, 107 | session=aio_session, 108 | ) 109 | 110 | 111 | def test_open_graphical_browser(monkeypatch): 112 | import webbrowser 113 | 114 | # Just 
assert that this internal attribute still exists and behaves the way 115 | # expected 116 | assert webbrowser._tryorder is None 117 | 118 | monkeypatch.setattr("webbrowser._tryorder", []) 119 | 120 | with pytest.raises(RuntimeError) as excinfo: 121 | utils.open_graphical_browser("http://example.com") 122 | 123 | assert "No graphical browser found" in str(excinfo.value) 124 | -------------------------------------------------------------------------------- /vdirsyncer/storage/http.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import logging 4 | import subprocess 5 | import urllib.parse as urlparse 6 | 7 | import aiohttp 8 | 9 | from vdirsyncer import exceptions 10 | from vdirsyncer.http import USERAGENT 11 | from vdirsyncer.http import prepare_auth 12 | from vdirsyncer.http import prepare_client_cert 13 | from vdirsyncer.http import prepare_verify 14 | from vdirsyncer.http import request 15 | from vdirsyncer.vobject import Item 16 | from vdirsyncer.vobject import split_collection 17 | 18 | from .base import Storage 19 | 20 | logger = logging.getLogger(__name__) 21 | 22 | 23 | class HttpStorage(Storage): 24 | storage_name = "http" 25 | read_only = True 26 | _repr_attributes = ("username", "url") 27 | _items = None 28 | 29 | # Required for tests. 
30 | _ignore_uids = True 31 | 32 | def __init__( 33 | self, 34 | url, 35 | username="", 36 | password="", 37 | verify=None, 38 | auth=None, 39 | useragent=USERAGENT, 40 | verify_fingerprint=None, 41 | auth_cert=None, 42 | filter_hook=None, 43 | *, 44 | connector, 45 | **kwargs, 46 | ) -> None: 47 | super().__init__(**kwargs) 48 | 49 | self._settings = { 50 | "cert": prepare_client_cert(auth_cert), 51 | "latin1_fallback": False, 52 | } 53 | auth = prepare_auth(auth, username, password) 54 | if auth: 55 | self._settings["auth"] = auth 56 | 57 | ssl = prepare_verify(verify, verify_fingerprint) 58 | if ssl: 59 | self._settings["ssl"] = ssl 60 | 61 | self.username, self.password = username, password 62 | self.useragent = useragent 63 | assert connector is not None 64 | self.connector = connector 65 | self._filter_hook = filter_hook 66 | 67 | collection = kwargs.get("collection") 68 | if collection is not None: 69 | url = urlparse.urljoin(url, collection) 70 | self.url = url 71 | self.parsed_url = urlparse.urlparse(self.url) 72 | 73 | def _default_headers(self): 74 | return {"User-Agent": self.useragent} 75 | 76 | def _run_filter_hook(self, raw_item): 77 | try: 78 | result = subprocess.run( 79 | [self._filter_hook], 80 | input=raw_item, 81 | capture_output=True, 82 | encoding="utf-8", 83 | ) 84 | return result.stdout 85 | except OSError as e: 86 | logger.warning(f"Error executing external command: {e!s}") 87 | return raw_item 88 | 89 | async def list(self): 90 | async with aiohttp.ClientSession( 91 | connector=self.connector, 92 | connector_owner=False, 93 | trust_env=True, 94 | # TODO use `raise_for_status=true`, though this needs traces first, 95 | ) as session: 96 | r = await request( 97 | "GET", 98 | self.url, 99 | headers=self._default_headers(), 100 | session=session, 101 | **self._settings, 102 | ) 103 | self._items = {} 104 | 105 | for raw_item in split_collection((await r.read()).decode("utf-8")): 106 | if self._filter_hook: 107 | raw_item = 
self._run_filter_hook(raw_item) 108 | if not raw_item: 109 | continue 110 | 111 | item = Item(raw_item) 112 | if self._ignore_uids: 113 | item = item.with_uid(item.hash) 114 | 115 | self._items[item.ident] = item, item.hash 116 | 117 | for href, (_, etag) in self._items.items(): 118 | yield href, etag 119 | 120 | async def get(self, href) -> tuple[Item, str]: 121 | if self._items is None: 122 | async for _ in self.list(): 123 | pass 124 | 125 | assert self._items is not None # type assertion 126 | try: 127 | return self._items[href] 128 | except KeyError: 129 | raise exceptions.NotFoundError(href) 130 | -------------------------------------------------------------------------------- /docs/vdir.rst: -------------------------------------------------------------------------------- 1 | ======================= 2 | The Vdir Storage Format 3 | ======================= 4 | 5 | This document describes a standard for storing calendars and contacts on a 6 | filesystem, with the main goal of being easy to implement. 7 | 8 | Vdirsyncer synchronizes to vdirs via :storage:`filesystem`. Each vdir 9 | (basically just a directory with some files in it) represents a calendar or 10 | addressbook. 11 | 12 | Basic Structure 13 | =============== 14 | 15 | The main folder (root) contains an arbitrary number of subfolders 16 | (collections), which contain only files (items). Synonyms for "collection" may 17 | be "addressbook" or "calendar". 18 | 19 | An item is: 20 | 21 | - A vCard_ file, in which case the file extension *must* be `.vcf`, *or* 22 | - An iCalendar_ file, in which case the file extension *must* be `.ics`. 23 | 24 | An item *should* contain a ``UID`` property as described by the vCard and 25 | iCalendar standards. If it contains more than one ``UID`` property, the values 26 | of those *must* not differ. 27 | 28 | The file *must* contain exactly one event, task or contact. 
In most cases this 29 | also implies only one ``VEVENT``/``VTODO``/``VCARD`` component per file, but 30 | e.g. recurrence exceptions would require multiple ``VEVENT`` components per 31 | event. 32 | 33 | The filename should have similar properties as the ``UID`` of the file content. 34 | However, there is no requirement for these two to be the same. Programs may 35 | choose to store additional metadata in that filename, however, at the same time 36 | they *must not* assume that the metadata they included will be preserved by 37 | other programs. 38 | 39 | .. _vCard: https://tools.ietf.org/html/rfc6350 40 | .. _iCalendar: https://tools.ietf.org/html/rfc5545 41 | .. _CardDAV: http://tools.ietf.org/html/rfc6352 42 | .. _CalDAV: http://tools.ietf.org/search/rfc4791 43 | 44 | Metadata 45 | ======== 46 | 47 | Any of the below metadata files may be absent. None of the files listed below 48 | have any file extensions. 49 | 50 | - A file called ``color`` inside the vdir indicates the vdir's color, a 51 | property that is only relevant in UI design. 52 | 53 | Its content is an ASCII-encoded hex-RGB value of the form ``#RRGGBB``. For 54 | example, a file content of ``#FF0000`` indicates that the vdir has a red 55 | (user-visible) color. No short forms or informal values such as ``red`` (as 56 | known from CSS, for example) are allowed. The prefixing ``#`` must be 57 | present. 58 | 59 | - Files called ``displayname`` and ``description`` contain a UTF-8 encoded label/ 60 | description, that may be used to represent the vdir in UIs. 61 | 62 | - A file called ``order`` inside the vdir includes the relative order 63 | of the calendar, a property that is only relevant in UI design. 64 | 65 | Writing to vdirs 66 | ================ 67 | 68 | Creating and modifying items or metadata files *should* happen atomically_. 69 | 70 | Writing to a temporary file on the same physical device, and then moving it to 71 | the appropriate location is usually a very effective solution. 
For this 72 | purpose, files with the extension ``.tmp`` may be created inside collections. 73 | 74 | When changing an item, the original filename *must* be used. 75 | 76 | .. _atomically: https://en.wikipedia.org/wiki/Atomicity_%28programming%29 77 | 78 | Reading from vdirs 79 | ================== 80 | 81 | - Any file ending with the ``.tmp`` or no file extension *must not* be treated 82 | as an item. 83 | 84 | - The ``ident`` part of the filename *should not* be parsed to improve the 85 | speed of item lookup. 86 | 87 | Considerations 88 | ============== 89 | 90 | The primary reason this format was chosen is due to its compatibility with the 91 | CardDAV_ and CalDAV_ standards. 92 | 93 | Performance 94 | ----------- 95 | 96 | Currently, vdirs suffer from a rather major performance problem, one which 97 | current implementations try to mitigate by building up indices of the 98 | collections for faster search and lookup. 99 | 100 | The reason items' filenames don't contain any extra information is simple: The 101 | solutions presented induced duplication of data, where one duplicate might 102 | become out of date because of bad implementations. As it stands right now, an 103 | index format could be formalized separately though. 104 | 105 | vdirsyncer doesn't really have to bother about efficient item lookup, because 106 | its synchronization algorithm needs to fetch the whole list of items anyway. 107 | Detecting changes is easily implemented by checking the files' modification 108 | time. 109 | -------------------------------------------------------------------------------- /docs/installation.rst: -------------------------------------------------------------------------------- 1 | .. 
_installation: 2 | 3 | ============ 4 | Installation 5 | ============ 6 | 7 | OS/distro packages 8 | ------------------ 9 | 10 | The following packages are community-contributed and were up-to-date at the 11 | time of writing: 12 | 13 | - `Arch Linux `_ 14 | - `Ubuntu and Debian, x86_64-only 15 | `_ (packages also exist 16 | in the official repositories but may be out of date) 17 | - `GNU Guix `_ 18 | - `macOS (homebrew) `_ 19 | - `NetBSD `_ 20 | - `OpenBSD `_ 21 | - `Slackware (SlackBuild at Slackbuilds.org) `_ 22 | 23 | We only support the latest version of vdirsyncer, which is at the time of this 24 | writing |vdirsyncer_version|. Please **do not file bugs if you use an older 25 | version**. 26 | 27 | Some distributions have multiple release channels. Debian and Fedora for 28 | example have a "stable" release channel that ships an older version of 29 | vdirsyncer. Those versions aren't supported either. 30 | 31 | If there is no suitable package for your distribution, you'll need to 32 | :ref:`install vdirsyncer manually `. There is an easy 33 | command to copy-and-paste for this as well, but you should be aware of its 34 | consequences. 35 | 36 | .. _manual-installation: 37 | 38 | Manual installation 39 | ------------------- 40 | 41 | If your distribution doesn't provide a package for vdirsyncer, you still can 42 | use Python's package manager "pip". First, you'll have to check that the 43 | following things are installed: 44 | 45 | - Python 3.9 to 3.13 and pip. 46 | - ``libxml`` and ``libxslt`` 47 | - ``zlib`` 48 | - Linux or macOS. **Windows is not supported**, see :gh:`535`. 49 | 50 | On Linux systems, using the distro's package manager is the best 51 | way to do this, for example, using Ubuntu:: 52 | 53 | sudo apt-get install libxml2 libxslt1.1 zlib1g python3 54 | 55 | Then you have several options. The following text applies for most Python 56 | software by the way. 
57 | 58 | pipx: The clean, easy way 59 | ~~~~~~~~~~~~~~~~~~~~~~~~~ 60 | 61 | pipx_ is a new package manager for Python-based software that automatically 62 | sets up a virtual environment for each program it installs. Please note that 63 | installing via pipx will not include manual pages nor systemd services. 64 | 65 | pipx will install vdirsyncer into ``~/.local/pipx/venvs/vdirsyncer`` 66 | 67 | Assuming that pipx is installed, vdirsyncer can be installed with:: 68 | 69 | pipx install vdirsyncer 70 | 71 | It can later be updated to the latest version with:: 72 | 73 | pipx upgrade vdirsyncer 74 | 75 | And can be uninstalled with:: 76 | 77 | pipx uninstall vdirsyncer 78 | 79 | This last command will remove vdirsyncer and any dependencies installed into 80 | the above location. 81 | 82 | .. _pipx: https://github.com/pipxproject/pipx 83 | 84 | The dirty, easy way 85 | ~~~~~~~~~~~~~~~~~~~ 86 | 87 | If pipx is not available on your distribution, the easiest way to install 88 | vdirsyncer at this point would be to run:: 89 | 90 | pip install --ignore-installed vdirsyncer 91 | 92 | - ``--ignore-installed`` is to work around Debian's potentially broken packages 93 | (see :ref:`debian-urllib3`). 94 | 95 | This method has a major flaw though: Pip doesn't keep track of the files it 96 | installs. Vdirsyncer's files would be located somewhere in 97 | ``~/.local/lib/python*``, but you can't possibly know which packages were 98 | installed as dependencies of vdirsyncer and which ones were not, should you 99 | decide to uninstall it. In other words, using pip that way would pollute your 100 | home directory. 101 | 102 | The clean, hard way 103 | ~~~~~~~~~~~~~~~~~~~ 104 | 105 | There is a way to install Python software without scattering stuff across 106 | your filesystem: virtualenv_. 
There are a lot of resources on how to use it, 107 | the simplest possible way would look something like:: 108 | 109 | virtualenv ~/vdirsyncer_env 110 | ~/vdirsyncer_env/bin/pip install vdirsyncer 111 | alias vdirsyncer="~/vdirsyncer_env/bin/vdirsyncer" 112 | 113 | You'll have to put the last line into your ``.bashrc`` or ``.bash_profile``. 114 | 115 | This method has two advantages: 116 | 117 | - It separately installs all Python packages into ``~/vdirsyncer_env/``, 118 | without relying on the system packages. This works around OS- or 119 | distro-specific issues. 120 | - You can delete ``~/vdirsyncer_env/`` to uninstall vdirsyncer entirely. 121 | 122 | .. _virtualenv: https://virtualenv.readthedocs.io/ 123 | -------------------------------------------------------------------------------- /tests/unit/test_retry.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import json 4 | from unittest.mock import AsyncMock 5 | from unittest.mock import Mock 6 | 7 | import aiohttp 8 | import pytest 9 | 10 | from vdirsyncer.http import UsageLimitReached 11 | from vdirsyncer.http import request 12 | 13 | 14 | async def _create_mock_response(status: int, body: str | dict): 15 | raw_body = body 16 | text_body = json.dumps(body) if isinstance(body, dict) else body 17 | 18 | mock_response = AsyncMock() 19 | mock_response.status = status 20 | mock_response.ok = 200 <= status < 300 21 | mock_response.reason = "OK" if mock_response.ok else "Forbidden" 22 | mock_response.headers = ( 23 | {"Content-Type": "application/json"} 24 | if isinstance(raw_body, dict) 25 | else {"Content-Type": "text/plain"} 26 | ) 27 | mock_response.text.return_value = text_body 28 | if isinstance(raw_body, dict): 29 | mock_response.json.return_value = raw_body 30 | else: 31 | mock_response.json.side_effect = ValueError("Not JSON") 32 | mock_response.raise_for_status = Mock( 33 | side_effect=( 34 | aiohttp.ClientResponseError( 35 
| request_info=AsyncMock(), 36 | history=(), 37 | status=status, 38 | message=mock_response.reason, 39 | headers=mock_response.headers, 40 | ) 41 | if not mock_response.ok 42 | else None 43 | ) 44 | ) 45 | 46 | return mock_response 47 | 48 | 49 | @pytest.mark.asyncio 50 | async def test_request_retry_on_usage_limit(): 51 | url = "http://example.com/api" 52 | max_retries = 5 # As configured in the @retry decorator 53 | 54 | mock_session = AsyncMock() 55 | 56 | # Simulate (max_retries - 1) 403 errors and then a 200 OK 57 | mock_session.request.side_effect = [ 58 | await _create_mock_response( 59 | 403, 60 | { 61 | "error": { 62 | "errors": [{"domain": "usageLimits", "reason": "quotaExceeded"}] 63 | } 64 | }, 65 | ) 66 | for _ in range(max_retries - 1) 67 | ] + [await _create_mock_response(200, "OK")] 68 | 69 | async with ( 70 | aiohttp.ClientSession() 71 | ): # Dummy session. Will be replaced by mock_session at call 72 | response = await request("GET", url, mock_session) 73 | 74 | assert response.status == 200 75 | assert mock_session.request.call_count == max_retries 76 | 77 | 78 | @pytest.mark.asyncio 79 | async def test_request_retry_exceeds_max_attempts(): 80 | url = "http://example.com/api" 81 | max_retries = 5 # As configured in the @retry decorator 82 | 83 | mock_session = AsyncMock() 84 | # Simulate max_retries 403 errors and then a 200 OK 85 | mock_session.request.side_effect = [ 86 | await _create_mock_response( 87 | 403, 88 | { 89 | "error": { 90 | "errors": [{"domain": "usageLimits", "reason": "quotaExceeded"}] 91 | } 92 | }, 93 | ) 94 | for _ in range(max_retries) 95 | ] 96 | 97 | async with ( 98 | aiohttp.ClientSession() 99 | ): # Dummy session. 
Will be replaced by mock_session at call 100 | with pytest.raises(UsageLimitReached): 101 | await request("GET", url, mock_session) 102 | assert mock_session.request.call_count == max_retries 103 | 104 | 105 | @pytest.mark.asyncio 106 | async def test_request_no_retry_on_generic_403_json(): 107 | url = "http://example.com/api" 108 | 109 | mock_session = AsyncMock() 110 | # Generic non-Google 403 error payload (e.g., GitHub-style) 111 | mock_session.request.side_effect = [ 112 | await _create_mock_response(403, {"message": "API rate limit exceeded"}) 113 | ] 114 | 115 | async with aiohttp.ClientSession(): 116 | with pytest.raises(aiohttp.ClientResponseError): 117 | await request("GET", url, mock_session) 118 | # Should not retry because it's not the Google quotaExceeded shape 119 | assert mock_session.request.call_count == 1 120 | 121 | 122 | @pytest.mark.asyncio 123 | async def test_request_no_retry_on_generic_403_text(): 124 | url = "http://example.com/api" 125 | 126 | mock_session = AsyncMock() 127 | # Plain-text 403 body mentioning rate limits, but not structured as Google error 128 | mock_session.request.side_effect = [ 129 | await _create_mock_response(403, "Rate limit exceeded") 130 | ] 131 | 132 | async with aiohttp.ClientSession(): 133 | with pytest.raises(aiohttp.ClientResponseError): 134 | await request("GET", url, mock_session) 135 | # Should not retry because the JSON shape is not Google quotaExceeded 136 | assert mock_session.request.call_count == 1 137 | -------------------------------------------------------------------------------- /tests/storage/test_filesystem.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import subprocess 4 | 5 | import aiostream 6 | import pytest 7 | 8 | from vdirsyncer.storage.filesystem import FilesystemStorage 9 | from vdirsyncer.vobject import Item 10 | 11 | from . 
import StorageTests 12 | 13 | 14 | class TestFilesystemStorage(StorageTests): 15 | storage_class = FilesystemStorage 16 | 17 | @pytest.fixture 18 | def get_storage_args(self, tmpdir): 19 | async def inner(collection="test"): 20 | rv = {"path": str(tmpdir), "fileext": ".txt", "collection": collection} 21 | if collection is not None: 22 | rv = await self.storage_class.create_collection(**rv) 23 | return rv 24 | 25 | return inner 26 | 27 | def test_is_not_directory(self, tmpdir): 28 | with pytest.raises(OSError): 29 | f = tmpdir.join("hue") 30 | f.write("stub") 31 | self.storage_class(str(tmpdir) + "/hue", ".txt") 32 | 33 | @pytest.mark.asyncio 34 | async def test_broken_data(self, tmpdir): 35 | s = self.storage_class(str(tmpdir), ".txt") 36 | 37 | class BrokenItem: 38 | raw = "Ц, Ш, Л, ж, Д, З, Ю".encode() 39 | uid = "jeezus" 40 | ident = uid 41 | 42 | with pytest.raises(TypeError): 43 | await s.upload(BrokenItem) 44 | assert not tmpdir.listdir() 45 | 46 | @pytest.mark.asyncio 47 | async def test_ident_with_slash(self, tmpdir): 48 | s = self.storage_class(str(tmpdir), ".txt") 49 | await s.upload(Item("UID:a/b/c")) 50 | (item_file,) = tmpdir.listdir() 51 | assert "/" not in item_file.basename 52 | assert item_file.isfile() 53 | 54 | @pytest.mark.asyncio 55 | async def test_ignore_tmp_files(self, tmpdir): 56 | """Test that files with .tmp suffix beside .ics files are ignored.""" 57 | s = self.storage_class(str(tmpdir), ".ics") 58 | await s.upload(Item("UID:xyzxyz")) 59 | (item_file,) = tmpdir.listdir() 60 | item_file.copy(item_file.new(ext="tmp")) 61 | assert len(tmpdir.listdir()) == 2 62 | assert len(await aiostream.stream.list(s.list())) == 1 63 | 64 | @pytest.mark.asyncio 65 | async def test_ignore_tmp_files_empty_fileext(self, tmpdir): 66 | """Test that files with .tmp suffix are ignored with empty fileext.""" 67 | s = self.storage_class(str(tmpdir), "") 68 | await s.upload(Item("UID:xyzxyz")) 69 | (item_file,) = tmpdir.listdir() 70 | 
item_file.copy(item_file.new(ext="tmp")) 71 | assert len(tmpdir.listdir()) == 2 72 | # assert False, tmpdir.listdir() # enable to see the created filename 73 | assert len(await aiostream.stream.list(s.list())) == 1 74 | 75 | @pytest.mark.asyncio 76 | async def test_ignore_files_typical_backup(self, tmpdir): 77 | """Test file-name ignorance with typical backup ending ~.""" 78 | ignorext = "~" # without dot 79 | 80 | storage = self.storage_class(str(tmpdir), "", fileignoreext=ignorext) 81 | await storage.upload(Item("UID:xyzxyz")) 82 | (item_file,) = tmpdir.listdir() 83 | item_file.copy(item_file.new(basename=item_file.basename + ignorext)) 84 | 85 | assert len(tmpdir.listdir()) == 2 86 | assert len(await aiostream.stream.list(storage.list())) == 1 87 | 88 | @pytest.mark.asyncio 89 | async def test_too_long_uid(self, tmpdir): 90 | storage = self.storage_class(str(tmpdir), ".txt") 91 | item = Item("UID:" + "hue" * 600) 92 | 93 | href, _etag = await storage.upload(item) 94 | assert item.uid not in href 95 | 96 | @pytest.mark.asyncio 97 | async def test_post_hook_inactive(self, tmpdir, monkeypatch): 98 | def check_call_mock(*args, **kwargs): 99 | raise AssertionError 100 | 101 | monkeypatch.setattr(subprocess, "call", check_call_mock) 102 | 103 | s = self.storage_class(str(tmpdir), ".txt", post_hook=None) 104 | await s.upload(Item("UID:a/b/c")) 105 | 106 | @pytest.mark.asyncio 107 | async def test_post_hook_active(self, tmpdir, monkeypatch): 108 | calls = [] 109 | exe = "foo" 110 | 111 | def check_call_mock(call, *args, **kwargs): 112 | calls.append(True) 113 | assert len(call) == 2 114 | assert call[0] == exe 115 | 116 | monkeypatch.setattr(subprocess, "call", check_call_mock) 117 | 118 | s = self.storage_class(str(tmpdir), ".txt", post_hook=exe) 119 | await s.upload(Item("UID:a/b/c")) 120 | assert calls 121 | 122 | @pytest.mark.asyncio 123 | async def test_ignore_git_dirs(self, tmpdir): 124 | tmpdir.mkdir(".git").mkdir("foo") 125 | tmpdir.mkdir("a") 126 | 
tmpdir.mkdir("b") 127 | 128 | expected = {"a", "b"} 129 | actual = { 130 | c["collection"] async for c in self.storage_class.discover(str(tmpdir)) 131 | } 132 | assert actual == expected 133 | -------------------------------------------------------------------------------- /docs/contributing.rst: -------------------------------------------------------------------------------- 1 | ============================ 2 | Contributing to this project 3 | ============================ 4 | 5 | .. note:: 6 | 7 | - Please read :doc:`contact` for questions and support requests. 8 | 9 | - All participants must follow the `pimutils Code of Conduct 10 | `_. 11 | 12 | The issue tracker 13 | ================= 14 | 15 | We use `GitHub issues `_ for 16 | organizing bug reports and feature requests. 17 | 18 | The following `labels `_ are of 19 | interest: 20 | 21 | * "Planning" is for issues that are still undecided, but where at least some 22 | discussion exists. 23 | 24 | * "Blocked" is for issues that can't be worked on at the moment because some 25 | other unsolved problem exists. This problem may be a bug in some software 26 | dependency, for instance. 27 | 28 | * "Ready" contains issues that are ready to work on. 29 | 30 | If you just want to get started with contributing, the "ready" issues are an 31 | option. Issues that are still in "Planning" are also an option, but require 32 | more upfront thinking and may turn out to be impossible to solve, or at least 33 | harder than anticipated. On the flip side those tend to be the more interesting 34 | issues as well, depending on how one looks at it. 35 | 36 | All of those labels are also available as a kanban board on `waffle.io 37 | `_. It is really just an alternative 38 | overview over all issues, but might be easier to comprehend. 39 | 40 | Feel free to :doc:`contact ` me or comment on the relevant issues for 41 | further information. 
42 | 43 | Reporting bugs 44 | -------------- 45 | 46 | * Make sure your problem isn't already listed in :doc:`problems`. 47 | 48 | * Make sure you have the absolutely latest version of vdirsyncer. For users of 49 | some Linux distributions such as Debian or Fedora this may not be the version 50 | that your distro offers. In those cases please file a bug against the distro 51 | package, not against upstream vdirsyncer. 52 | 53 | * Use ``--verbosity=DEBUG`` when including output from vdirsyncer. 54 | 55 | Suggesting features 56 | ------------------- 57 | 58 | If you're suggesting a feature, keep in mind that vdirsyncer tries not to be a 59 | full calendar or contacts client, but rather just the piece of software that 60 | synchronizes all the data. :doc:`Take a look at the documentation for software 61 | working with vdirsyncer `. 62 | 63 | Submitting patches, pull requests 64 | ================================= 65 | 66 | * **Discuss everything in the issue tracker first** (or contact me somehow 67 | else) before implementing it. 68 | 69 | * Make sure the tests pass. See below for running them. 70 | 71 | * But not because you wrote too few tests. 72 | 73 | * Add yourself to ``AUTHORS.rst``, and add a note to ``CHANGELOG.rst`` too. 74 | 75 | Running tests, how to set up your development environment 76 | --------------------------------------------------------- 77 | 78 | For many patches, it might suffice to just let CI run the tests. However, 79 | CI is slow, so you might want to run them locally too. 
For this, set up a 80 | virtualenv_ and run this inside of it:: 81 | 82 | # Install development dependencies, including: 83 | # - vdirsyncer from the repo into the virtualenv 84 | # - style checks and formatting (ruff) 85 | make install-dev 86 | 87 | # Install git commit hook for some extra linting and checking 88 | pre-commit install 89 | 90 | Then you can run:: 91 | 92 | pytest # The normal testsuite 93 | pre-commit run --all # Run all linters (which also run via pre-commit) 94 | make -C docs html # Build the HTML docs, output is at docs/_build/html/ 95 | make -C docs linkcheck # Check docs for any broken links 96 | 97 | The ``Makefile`` has a lot of options that allow you to control which tests are 98 | run, and which servers are tested. Take a look at its code where they are all 99 | initialized and documented. 100 | 101 | To test against a specific DAV server, use ``DAV_SERVER``:: 102 | 103 | make DAV_SERVER=xandikos test 104 | 105 | The server will be initialised in a docker container and terminated at the end 106 | of the test suite. 107 | 108 | If you have any questions, feel free to open issues about it. 109 | 110 | Structure of the testsuite 111 | -------------------------- 112 | 113 | Within ``tests/``, there are three main folders: 114 | 115 | - ``system`` contains system- and also integration tests. A rough rule is: If 116 | the test is using temporary files, put it here. 117 | 118 | - ``unit``, where each testcase tests a single class or function. 119 | 120 | - ``storage`` runs a generic storage testsuite against all storages. 121 | 122 | The reason for this separation is: We are planning to generate separate 123 | coverage reports for each of those testsuites. Ideally ``unit`` would generate 124 | palatable coverage of the entire codebase *on its own*, and the *combination* 125 | of ``system`` and ``storage`` as well. 126 | 127 | ..
_virtualenv: http://virtualenv.readthedocs.io/ 128 | -------------------------------------------------------------------------------- /vdirsyncer/cli/tasks.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import json 4 | 5 | import aiohttp 6 | 7 | from vdirsyncer import exceptions 8 | from vdirsyncer import sync 9 | 10 | from .config import CollectionConfig 11 | from .discover import DiscoverResult 12 | from .discover import collections_for_pair 13 | from .discover import storage_instance_from_config 14 | from .utils import JobFailed 15 | from .utils import cli_logger 16 | from .utils import get_status_name 17 | from .utils import handle_cli_error 18 | from .utils import load_status 19 | from .utils import manage_sync_status 20 | from .utils import save_status 21 | 22 | 23 | async def prepare_pair(pair_name, collections, config, *, connector): 24 | pair = config.get_pair(pair_name) 25 | 26 | all_collections = dict( 27 | await collections_for_pair( 28 | status_path=config.general["status_path"], 29 | pair=pair, 30 | connector=connector, 31 | ) 32 | ) 33 | 34 | for collection_name in collections or all_collections: 35 | try: 36 | config_a, config_b = all_collections[collection_name] 37 | except KeyError: 38 | raise exceptions.UserError( 39 | f"Pair {pair_name}: Collection {json.dumps(collection_name)} not found." 
40 | f"These are the configured collections:\n{list(all_collections)}" 41 | ) 42 | 43 | collection = CollectionConfig(pair, collection_name, config_a, config_b) 44 | yield collection, config.general 45 | 46 | 47 | async def sync_collection( 48 | collection, 49 | general, 50 | force_delete, 51 | *, 52 | connector: aiohttp.TCPConnector, 53 | ): 54 | pair = collection.pair 55 | status_name = get_status_name(pair.name, collection.name) 56 | 57 | try: 58 | cli_logger.info(f"Syncing {status_name}") 59 | 60 | a = await storage_instance_from_config(collection.config_a, connector=connector) 61 | b = await storage_instance_from_config(collection.config_b, connector=connector) 62 | 63 | sync_failed = False 64 | 65 | def error_callback(e): 66 | nonlocal sync_failed 67 | sync_failed = True 68 | handle_cli_error(status_name, e) 69 | 70 | with manage_sync_status( 71 | general["status_path"], pair.name, collection.name 72 | ) as status: 73 | await sync.sync( 74 | a, 75 | b, 76 | status, 77 | conflict_resolution=pair.conflict_resolution, 78 | force_delete=force_delete, 79 | error_callback=error_callback, 80 | partial_sync=pair.partial_sync, 81 | ) 82 | 83 | if sync_failed: 84 | raise JobFailed 85 | except JobFailed: 86 | raise 87 | except BaseException: 88 | handle_cli_error(status_name) 89 | raise JobFailed 90 | 91 | 92 | async def discover_collections(pair, **kwargs): 93 | rv = await collections_for_pair(pair=pair, **kwargs) 94 | collections = [c for c, (a, b) in rv] 95 | if collections == [None]: 96 | collections = None 97 | cli_logger.info(f"Saved for {pair.name}: collections = {json.dumps(collections)}") 98 | 99 | 100 | async def repair_collection( 101 | config, 102 | collection, 103 | repair_unsafe_uid, 104 | *, 105 | connector: aiohttp.TCPConnector, 106 | ): 107 | from vdirsyncer.repair import repair_storage 108 | 109 | storage_name, collection = collection, None 110 | if "/" in storage_name: 111 | storage_name, collection = storage_name.split("/") 112 | 113 | config = 
config.get_storage_args(storage_name) 114 | # If storage type has a slash, ignore it and anything after it. 115 | storage_type = config["type"].split("/")[0] 116 | 117 | if collection is not None: 118 | cli_logger.info("Discovering collections (skipping cache).") 119 | get_discovered = DiscoverResult(config, connector=connector) 120 | discovered = await get_discovered.get_self() 121 | for config in discovered.values(): 122 | if config["collection"] == collection: 123 | break 124 | else: 125 | raise exceptions.UserError( 126 | f"Couldn't find collection {collection} for storage {storage_name}." 127 | ) 128 | 129 | config["type"] = storage_type 130 | storage = await storage_instance_from_config(config, connector=connector) 131 | 132 | cli_logger.info(f"Repairing {storage_name}/{collection}") 133 | cli_logger.warning("Make sure no other program is talking to the server.") 134 | await repair_storage(storage, repair_unsafe_uid=repair_unsafe_uid) 135 | 136 | 137 | async def metasync_collection(collection, general, *, connector: aiohttp.TCPConnector): 138 | from vdirsyncer.metasync import metasync 139 | 140 | pair = collection.pair 141 | status_name = get_status_name(pair.name, collection.name) 142 | 143 | try: 144 | cli_logger.info(f"Metasyncing {status_name}") 145 | 146 | status = load_status( 147 | general["status_path"], 148 | pair.name, 149 | collection.name, 150 | data_type="metadata", 151 | ) 152 | 153 | a = await storage_instance_from_config(collection.config_a, connector=connector) 154 | b = await storage_instance_from_config(collection.config_b, connector=connector) 155 | 156 | await metasync( 157 | a, 158 | b, 159 | status, 160 | conflict_resolution=pair.conflict_resolution, 161 | keys=pair.metadata, 162 | ) 163 | except BaseException: 164 | handle_cli_error(status_name) 165 | raise JobFailed 166 | 167 | save_status( 168 | base_path=general["status_path"], 169 | pair=pair.name, 170 | data_type="metadata", 171 | data=status, 172 | collection=collection.name, 173 
| ) 174 | -------------------------------------------------------------------------------- /tests/unit/cli/test_discover.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import aiostream 4 | import pytest 5 | 6 | from vdirsyncer.cli.discover import expand_collections 7 | 8 | missing = object() 9 | 10 | 11 | @pytest.mark.parametrize( 12 | ("shortcuts", "expected"), 13 | [ 14 | ( 15 | ["from a"], 16 | [ 17 | ( 18 | "c1", 19 | ( 20 | {"type": "fooboo", "custom_arg": "a1", "collection": "c1"}, 21 | {"type": "fooboo", "custom_arg": "b1", "collection": "c1"}, 22 | ), 23 | ), 24 | ( 25 | "c2", 26 | ( 27 | {"type": "fooboo", "custom_arg": "a2", "collection": "c2"}, 28 | {"type": "fooboo", "custom_arg": "b2", "collection": "c2"}, 29 | ), 30 | ), 31 | ( 32 | "a3", 33 | ( 34 | {"type": "fooboo", "custom_arg": "a3", "collection": "a3"}, 35 | missing, 36 | ), 37 | ), 38 | ], 39 | ), 40 | ( 41 | ["from b"], 42 | [ 43 | ( 44 | "c1", 45 | ( 46 | {"type": "fooboo", "custom_arg": "a1", "collection": "c1"}, 47 | {"type": "fooboo", "custom_arg": "b1", "collection": "c1"}, 48 | ), 49 | ), 50 | ( 51 | "c2", 52 | ( 53 | {"type": "fooboo", "custom_arg": "a2", "collection": "c2"}, 54 | {"type": "fooboo", "custom_arg": "b2", "collection": "c2"}, 55 | ), 56 | ), 57 | ( 58 | "b3", 59 | ( 60 | missing, 61 | {"type": "fooboo", "custom_arg": "b3", "collection": "b3"}, 62 | ), 63 | ), 64 | ], 65 | ), 66 | ( 67 | ["from a", "from b"], 68 | [ 69 | ( 70 | "c1", 71 | ( 72 | {"type": "fooboo", "custom_arg": "a1", "collection": "c1"}, 73 | {"type": "fooboo", "custom_arg": "b1", "collection": "c1"}, 74 | ), 75 | ), 76 | ( 77 | "c2", 78 | ( 79 | {"type": "fooboo", "custom_arg": "a2", "collection": "c2"}, 80 | {"type": "fooboo", "custom_arg": "b2", "collection": "c2"}, 81 | ), 82 | ), 83 | ( 84 | "a3", 85 | ( 86 | {"type": "fooboo", "custom_arg": "a3", "collection": "a3"}, 87 | missing, 88 | ), 89 | ), 90 | ( 91 | "b3", 92 
| ( 93 | missing, 94 | {"type": "fooboo", "custom_arg": "b3", "collection": "b3"}, 95 | ), 96 | ), 97 | ], 98 | ), 99 | ( 100 | [["c12", "c1", "c2"]], 101 | [ 102 | ( 103 | "c12", 104 | ( 105 | {"type": "fooboo", "custom_arg": "a1", "collection": "c1"}, 106 | {"type": "fooboo", "custom_arg": "b2", "collection": "c2"}, 107 | ), 108 | ), 109 | ], 110 | ), 111 | ( 112 | None, 113 | [ 114 | ( 115 | None, 116 | ( 117 | {"type": "fooboo", "storage_side": "a", "collection": None}, 118 | {"type": "fooboo", "storage_side": "b", "collection": None}, 119 | ), 120 | ) 121 | ], 122 | ), 123 | ( 124 | [None], 125 | [ 126 | ( 127 | None, 128 | ( 129 | {"type": "fooboo", "storage_side": "a", "collection": None}, 130 | {"type": "fooboo", "storage_side": "b", "collection": None}, 131 | ), 132 | ) 133 | ], 134 | ), 135 | ], 136 | ) 137 | @pytest.mark.asyncio 138 | async def test_expand_collections(shortcuts, expected): 139 | config_a = {"type": "fooboo", "storage_side": "a"} 140 | 141 | config_b = {"type": "fooboo", "storage_side": "b"} 142 | 143 | async def get_discovered_a(): 144 | return { 145 | "c1": {"type": "fooboo", "custom_arg": "a1", "collection": "c1"}, 146 | "c2": {"type": "fooboo", "custom_arg": "a2", "collection": "c2"}, 147 | "a3": {"type": "fooboo", "custom_arg": "a3", "collection": "a3"}, 148 | } 149 | 150 | async def get_discovered_b(): 151 | return { 152 | "c1": {"type": "fooboo", "custom_arg": "b1", "collection": "c1"}, 153 | "c2": {"type": "fooboo", "custom_arg": "b2", "collection": "c2"}, 154 | "b3": {"type": "fooboo", "custom_arg": "b3", "collection": "b3"}, 155 | } 156 | 157 | async def handle_not_found(config, collection): 158 | return missing 159 | 160 | assert sorted( 161 | await aiostream.stream.list( 162 | expand_collections( 163 | shortcuts, 164 | config_a, 165 | config_b, 166 | get_discovered_a, 167 | get_discovered_b, 168 | handle_not_found, 169 | ) 170 | ) 171 | ) == sorted(expected) 172 | 
-------------------------------------------------------------------------------- /tests/storage/test_http.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import aiohttp 4 | import pytest 5 | from aioresponses import CallbackResult 6 | from aioresponses import aioresponses 7 | 8 | from tests import normalize_item 9 | from vdirsyncer.exceptions import UserError 10 | from vdirsyncer.http import BasicAuthMethod 11 | from vdirsyncer.http import DigestAuthMethod 12 | from vdirsyncer.http import UsageLimitReached 13 | from vdirsyncer.http import request 14 | from vdirsyncer.storage.http import HttpStorage 15 | from vdirsyncer.storage.http import prepare_auth 16 | 17 | 18 | @pytest.mark.asyncio 19 | async def test_list(aio_connector): 20 | collection_url = "http://127.0.0.1/calendar/collection.ics" 21 | 22 | items = [ 23 | ( 24 | "BEGIN:VEVENT\n" 25 | "SUMMARY:Eine Kurzinfo\n" 26 | "DESCRIPTION:Beschreibung des Termines\n" 27 | "END:VEVENT" 28 | ), 29 | ( 30 | "BEGIN:VEVENT\n" 31 | "SUMMARY:Eine zweite Küèrzinfo\n" 32 | "DESCRIPTION:Beschreibung des anderen Termines\n" 33 | "BEGIN:VALARM\n" 34 | "ACTION:AUDIO\n" 35 | "TRIGGER:19980403T120000\n" 36 | "ATTACH;FMTTYPE=audio/basic:http://host.com/pub/ssbanner.aud\n" 37 | "REPEAT:4\n" 38 | "DURATION:PT1H\n" 39 | "END:VALARM\n" 40 | "END:VEVENT" 41 | ), 42 | ] 43 | 44 | responses = ["\n".join(["BEGIN:VCALENDAR", *items, "END:VCALENDAR"])] * 2 45 | 46 | def callback(url, headers, **kwargs): 47 | assert headers["User-Agent"].startswith("vdirsyncer/") 48 | assert responses 49 | 50 | return CallbackResult( 51 | status=200, 52 | body=responses.pop().encode("utf-8"), 53 | headers={"Content-Type": "text/calendar; charset=iso-8859-1"}, 54 | ) 55 | 56 | with aioresponses() as m: 57 | m.get(collection_url, callback=callback, repeat=True) 58 | 59 | s = HttpStorage(url=collection_url, connector=aio_connector) 60 | 61 | found_items = {} 62 | 63 | async for 
href, etag in s.list(): 64 | item, etag2 = await s.get(href) 65 | assert item.uid is not None 66 | assert etag2 == etag 67 | found_items[normalize_item(item)] = href 68 | 69 | expected = { 70 | normalize_item("BEGIN:VCALENDAR\n" + x + "\nEND:VCALENDAR") for x in items 71 | } 72 | 73 | assert set(found_items) == expected 74 | 75 | async for href, etag in s.list(): 76 | item, etag2 = await s.get(href) 77 | assert item.uid is not None 78 | assert etag2 == etag 79 | assert found_items[normalize_item(item)] == href 80 | 81 | 82 | def test_readonly_param(aio_connector): 83 | """The ``readonly`` param cannot be ``False``.""" 84 | 85 | url = "http://example.com/" 86 | with pytest.raises(ValueError): 87 | HttpStorage(url=url, read_only=False, connector=aio_connector) 88 | 89 | a = HttpStorage(url=url, read_only=True, connector=aio_connector) 90 | b = HttpStorage(url=url, read_only=None, connector=aio_connector) 91 | 92 | assert a.read_only is b.read_only is True 93 | 94 | 95 | def test_prepare_auth(): 96 | assert prepare_auth(None, "", "") is None 97 | 98 | assert prepare_auth(None, "user", "pwd") == BasicAuthMethod("user", "pwd") 99 | assert prepare_auth("basic", "user", "pwd") == BasicAuthMethod("user", "pwd") 100 | 101 | with pytest.raises(ValueError) as excinfo: 102 | assert prepare_auth("basic", "", "pwd") 103 | assert "you need to specify username and password" in str(excinfo.value).lower() 104 | 105 | assert isinstance(prepare_auth("digest", "user", "pwd"), DigestAuthMethod) 106 | 107 | with pytest.raises(ValueError) as excinfo: 108 | prepare_auth("ladida", "user", "pwd") 109 | 110 | assert "unknown authentication method" in str(excinfo.value).lower() 111 | 112 | 113 | def test_prepare_auth_guess(): 114 | # guess auth is currently not supported 115 | with pytest.raises(UserError) as excinfo: 116 | prepare_auth("guess", "usr", "pwd") 117 | 118 | assert "not supported" in str(excinfo.value).lower() 119 | 120 | 121 | def test_verify_false_disallowed(aio_connector): 122 
| with pytest.raises(ValueError) as excinfo: 123 | HttpStorage(url="http://example.com", verify=False, connector=aio_connector) 124 | 125 | assert "must be a path to a pem-file." in str(excinfo.value).lower() 126 | 127 | 128 | @pytest.mark.asyncio 129 | async def test_403_usage_limit_exceeded(aio_connector): 130 | url = "http://127.0.0.1/test_403" 131 | error_body = { 132 | "error": { 133 | "errors": [ 134 | { 135 | "domain": "usageLimits", 136 | "message": "Calendar usage limits exceeded.", 137 | "reason": "quotaExceeded", 138 | } 139 | ], 140 | "code": 403, 141 | "message": "Calendar usage limits exceeded.", 142 | } 143 | } 144 | 145 | async with aiohttp.ClientSession(connector=aio_connector) as session: 146 | with aioresponses() as m: 147 | m.get(url, status=403, payload=error_body, repeat=True) 148 | with pytest.raises(UsageLimitReached): 149 | await request("GET", url, session) 150 | 151 | 152 | @pytest.mark.asyncio 153 | async def test_403_without_usage_limits_domain(aio_connector): 154 | """A 403 JSON error without the Google 'usageLimits' domain should not be 155 | treated as UsageLimitReached and should surface as ClientResponseError. 
156 | """ 157 | url = "http://127.0.0.1/test_403_no_usage_limits" 158 | 159 | async with aiohttp.ClientSession(connector=aio_connector) as session: 160 | with aioresponses() as m: 161 | m.get(url, status=403, repeat=True) 162 | with pytest.raises(aiohttp.ClientResponseError): 163 | await request("GET", url, session) 164 | -------------------------------------------------------------------------------- /tests/unit/test_metasync.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import asyncio 4 | 5 | import hypothesis.strategies as st 6 | import pytest 7 | import pytest_asyncio 8 | from hypothesis import example 9 | from hypothesis import given 10 | 11 | from tests import blow_up 12 | from vdirsyncer.exceptions import UserError 13 | from vdirsyncer.metasync import MetaSyncConflict 14 | from vdirsyncer.metasync import logger 15 | from vdirsyncer.metasync import metasync 16 | from vdirsyncer.storage.base import normalize_meta_value 17 | from vdirsyncer.storage.memory import MemoryStorage 18 | 19 | 20 | @pytest.mark.asyncio 21 | async def test_irrelevant_status(): 22 | a = MemoryStorage() 23 | b = MemoryStorage() 24 | status = {"foo": "bar"} 25 | 26 | await metasync(a, b, status, keys=()) 27 | assert not status 28 | 29 | 30 | @pytest.mark.asyncio 31 | async def test_basic(monkeypatch): 32 | a = MemoryStorage() 33 | b = MemoryStorage() 34 | status = {} 35 | 36 | await a.set_meta("foo", None) 37 | await metasync(a, b, status, keys=["foo"]) 38 | assert await a.get_meta("foo") is None 39 | assert await b.get_meta("foo") is None 40 | 41 | await a.set_meta("foo", "bar") 42 | await metasync(a, b, status, keys=["foo"]) 43 | assert await a.get_meta("foo") == await b.get_meta("foo") == "bar" 44 | 45 | await a.set_meta("foo", "baz") 46 | await metasync(a, b, status, keys=["foo"]) 47 | assert await a.get_meta("foo") == await b.get_meta("foo") == "baz" 48 | 49 | monkeypatch.setattr(a, "set_meta", 
blow_up) 50 | monkeypatch.setattr(b, "set_meta", blow_up) 51 | await metasync(a, b, status, keys=["foo"]) 52 | assert await a.get_meta("foo") == await b.get_meta("foo") == "baz" 53 | monkeypatch.undo() 54 | monkeypatch.undo() 55 | 56 | await b.set_meta("foo", None) 57 | await metasync(a, b, status, keys=["foo"]) 58 | assert not await a.get_meta("foo") 59 | assert not await b.get_meta("foo") 60 | 61 | 62 | @pytest_asyncio.fixture 63 | async def conflict_state(request): 64 | a = MemoryStorage() 65 | b = MemoryStorage() 66 | status = {} 67 | await a.set_meta("foo", "bar") 68 | await b.set_meta("foo", "baz") 69 | 70 | async def do_cleanup(): 71 | assert await a.get_meta("foo") == "bar" 72 | assert await b.get_meta("foo") == "baz" 73 | assert not status 74 | 75 | request.addfinalizer(lambda: asyncio.run(do_cleanup())) 76 | 77 | return a, b, status 78 | 79 | 80 | @pytest_asyncio.fixture 81 | async def test_conflict(conflict_state): 82 | a, b, status = conflict_state 83 | 84 | with pytest.raises(MetaSyncConflict): 85 | await metasync(a, b, status, keys=["foo"]) 86 | 87 | 88 | @pytest.mark.asyncio 89 | async def test_invalid_conflict_resolution(conflict_state): 90 | a, b, status = conflict_state 91 | 92 | with pytest.raises(UserError) as excinfo: 93 | await metasync(a, b, status, keys=["foo"], conflict_resolution="foo") 94 | 95 | assert "Invalid conflict resolution setting" in str(excinfo.value) 96 | 97 | 98 | @pytest.mark.asyncio 99 | async def test_warning_on_custom_conflict_commands(conflict_state, monkeypatch): 100 | a, b, status = conflict_state 101 | warnings = [] 102 | monkeypatch.setattr(logger, "warning", warnings.append) 103 | 104 | with pytest.raises(MetaSyncConflict): 105 | await metasync( 106 | a, 107 | b, 108 | status, 109 | keys=["foo"], 110 | conflict_resolution=lambda *a, **kw: None, 111 | ) 112 | 113 | assert warnings == ["Custom commands don't work on metasync."] 114 | 115 | 116 | @pytest.mark.asyncio 117 | async def test_conflict_same_content(): 118 | a 
= MemoryStorage() 119 | b = MemoryStorage() 120 | status = {} 121 | await a.set_meta("foo", "bar") 122 | await b.set_meta("foo", "bar") 123 | 124 | await metasync(a, b, status, keys=["foo"]) 125 | assert await a.get_meta("foo") == await b.get_meta("foo") == status["foo"] == "bar" 126 | 127 | 128 | @pytest.mark.parametrize("wins", "ab") 129 | @pytest.mark.asyncio 130 | async def test_conflict_x_wins(wins): 131 | a = MemoryStorage() 132 | b = MemoryStorage() 133 | status = {} 134 | await a.set_meta("foo", "bar") 135 | await b.set_meta("foo", "baz") 136 | 137 | await metasync( 138 | a, 139 | b, 140 | status, 141 | keys=["foo"], 142 | conflict_resolution="a wins" if wins == "a" else "b wins", 143 | ) 144 | 145 | assert ( 146 | await a.get_meta("foo") 147 | == await b.get_meta("foo") 148 | == status["foo"] 149 | == ("bar" if wins == "a" else "baz") 150 | ) 151 | 152 | 153 | keys = st.text(min_size=1).filter(lambda x: x.strip() == x) 154 | values = st.text().filter(lambda x: normalize_meta_value(x) == x) 155 | metadata = st.dictionaries(keys, values) 156 | 157 | 158 | @given( 159 | a=metadata, 160 | b=metadata, 161 | status=metadata, 162 | keys=st.sets(keys), 163 | conflict_resolution=st.just("a wins") | st.just("b wins"), 164 | ) 165 | @example( 166 | a={"0": "0"}, b={}, status={"0": "0"}, keys={"0"}, conflict_resolution="a wins" 167 | ) 168 | @example( 169 | a={"0": "0"}, 170 | b={"0": "1"}, 171 | status={"0": "0"}, 172 | keys={"0"}, 173 | conflict_resolution="a wins", 174 | ) 175 | @pytest.mark.asyncio 176 | async def test_fuzzing(a, b, status, keys, conflict_resolution): 177 | def _get_storage(m, instance_name): 178 | s = MemoryStorage(instance_name=instance_name) 179 | s.metadata = m 180 | return s 181 | 182 | a = _get_storage(a, "A") 183 | b = _get_storage(b, "B") 184 | 185 | winning_storage = a if conflict_resolution == "a wins" else b 186 | expected_values = { 187 | key: await winning_storage.get_meta(key) for key in keys if key not in status 188 | } 189 | 190 | 
await metasync(a, b, status, keys=keys, conflict_resolution=conflict_resolution) 191 | 192 | for key in keys: 193 | s = status.get(key) 194 | assert await a.get_meta(key) == await b.get_meta(key) == s 195 | if expected_values.get(key) and s: 196 | assert s == expected_values[key] 197 | -------------------------------------------------------------------------------- /tests/storage/dav/test_caldav.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import contextlib 4 | import datetime 5 | from textwrap import dedent 6 | 7 | import aiohttp 8 | import aiostream 9 | import pytest 10 | from aioresponses import aioresponses 11 | 12 | from tests import EVENT_TEMPLATE 13 | from tests import TASK_TEMPLATE 14 | from tests import VCARD_TEMPLATE 15 | from tests.storage import format_item 16 | from vdirsyncer import exceptions 17 | from vdirsyncer.storage.dav import CalDAVStorage 18 | 19 | from . import DAVStorageTests 20 | from . import dav_server 21 | 22 | 23 | class TestCalDAVStorage(DAVStorageTests): 24 | storage_class = CalDAVStorage 25 | 26 | @pytest.fixture(params=["VTODO", "VEVENT"]) 27 | def item_type(self, request): 28 | return request.param 29 | 30 | @pytest.mark.asyncio 31 | async def test_doesnt_accept_vcard(self, item_type, get_storage_args): 32 | s = self.storage_class(item_types=(item_type,), **await get_storage_args()) 33 | 34 | # Most storages hard-fail, but xandikos doesn't. 
35 | with contextlib.suppress(exceptions.Error, aiohttp.ClientResponseError): 36 | await s.upload(format_item(VCARD_TEMPLATE)) 37 | 38 | assert not await aiostream.stream.list(s.list()) 39 | 40 | # The `arg` param is not named `item_types` because that would hit 41 | # https://bitbucket.org/pytest-dev/pytest/issue/745/ 42 | @pytest.mark.parametrize( 43 | ("arg", "calls_num"), 44 | [ 45 | (("VTODO",), 1), 46 | (("VEVENT",), 1), 47 | (("VTODO", "VEVENT"), 2), 48 | (("VTODO", "VEVENT", "VJOURNAL"), 3), 49 | ((), 1), 50 | ], 51 | ) 52 | @pytest.mark.xfail(dav_server == "baikal", reason="Baikal returns 500.") 53 | @pytest.mark.asyncio 54 | async def test_item_types_performance( 55 | self, get_storage_args, arg, calls_num, monkeypatch 56 | ): 57 | s = self.storage_class(item_types=arg, **await get_storage_args()) 58 | old_parse = s._parse_prop_responses 59 | calls = [] 60 | 61 | def new_parse(*a, **kw): 62 | calls.append(None) 63 | return old_parse(*a, **kw) 64 | 65 | monkeypatch.setattr(s, "_parse_prop_responses", new_parse) 66 | await aiostream.stream.list(s.list()) 67 | assert len(calls) == calls_num 68 | 69 | @pytest.mark.xfail( 70 | dav_server == "radicale", reason="Radicale doesn't support timeranges." 
71 | ) 72 | @pytest.mark.asyncio 73 | async def test_timerange_correctness(self, get_storage_args): 74 | start_date = datetime.datetime(2013, 9, 10) 75 | end_date = datetime.datetime(2013, 9, 13) 76 | s = self.storage_class( 77 | start_date=start_date, end_date=end_date, **await get_storage_args() 78 | ) 79 | 80 | too_old_item = format_item( 81 | dedent( 82 | """ 83 | BEGIN:VCALENDAR 84 | VERSION:2.0 85 | PRODID:-//hacksw/handcal//NONSGML v1.0//EN 86 | BEGIN:VEVENT 87 | DTSTART:19970714T170000Z 88 | DTEND:19970715T035959Z 89 | SUMMARY:Bastille Day Party 90 | X-SOMETHING:{r} 91 | UID:{r} 92 | END:VEVENT 93 | END:VCALENDAR 94 | """ 95 | ).strip() 96 | ) 97 | 98 | too_new_item = format_item( 99 | dedent( 100 | """ 101 | BEGIN:VCALENDAR 102 | VERSION:2.0 103 | PRODID:-//hacksw/handcal//NONSGML v1.0//EN 104 | BEGIN:VEVENT 105 | DTSTART:20150714T170000Z 106 | DTEND:20150715T035959Z 107 | SUMMARY:Another Bastille Day Party 108 | X-SOMETHING:{r} 109 | UID:{r} 110 | END:VEVENT 111 | END:VCALENDAR 112 | """ 113 | ).strip() 114 | ) 115 | 116 | good_item = format_item( 117 | dedent( 118 | """ 119 | BEGIN:VCALENDAR 120 | VERSION:2.0 121 | PRODID:-//hacksw/handcal//NONSGML v1.0//EN 122 | BEGIN:VEVENT 123 | DTSTART:20130911T170000Z 124 | DTEND:20130912T035959Z 125 | SUMMARY:What's with all these Bastille Day Partys 126 | X-SOMETHING:{r} 127 | UID:{r} 128 | END:VEVENT 129 | END:VCALENDAR 130 | """ 131 | ).strip() 132 | ) 133 | 134 | await s.upload(too_old_item) 135 | await s.upload(too_new_item) 136 | expected_href, _ = await s.upload(good_item) 137 | 138 | ((actual_href, _),) = await aiostream.stream.list(s.list()) 139 | assert actual_href == expected_href 140 | 141 | @pytest.mark.asyncio 142 | async def test_invalid_resource(self, monkeypatch, get_storage_args): 143 | args = await get_storage_args(collection=None) 144 | 145 | with aioresponses() as m: 146 | m.add(args["url"], method="PROPFIND", status=200, body="Hello world") 147 | 148 | with pytest.raises(ValueError): 149 | s = 
self.storage_class(**args) 150 | await aiostream.stream.list(s.list()) 151 | 152 | assert len(m.requests) == 1 153 | 154 | @pytest.mark.skipif(dav_server == "icloud", reason="iCloud only accepts VEVENT") 155 | @pytest.mark.skipif( 156 | dav_server == "fastmail", reason="Fastmail has non-standard hadling of VTODOs." 157 | ) 158 | @pytest.mark.xfail(dav_server == "baikal", reason="Baikal returns 500.") 159 | @pytest.mark.asyncio 160 | async def test_item_types_general(self, s): 161 | event = (await s.upload(format_item(EVENT_TEMPLATE)))[0] 162 | task = (await s.upload(format_item(TASK_TEMPLATE)))[0] 163 | s.item_types = ("VTODO", "VEVENT") 164 | 165 | async def hrefs(): 166 | return {href async for href, etag in s.list()} 167 | 168 | assert await hrefs() == {event, task} 169 | s.item_types = ("VTODO",) 170 | assert await hrefs() == {task} 171 | s.item_types = ("VEVENT",) 172 | assert await hrefs() == {event} 173 | s.item_types = () 174 | assert await hrefs() == {event, task} 175 | -------------------------------------------------------------------------------- /vdirsyncer/storage/singlefile.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import collections 4 | import contextlib 5 | import functools 6 | import glob 7 | import logging 8 | import os 9 | from collections.abc import Iterable 10 | 11 | from vdirsyncer import exceptions 12 | from vdirsyncer.utils import atomic_write 13 | from vdirsyncer.utils import checkfile 14 | from vdirsyncer.utils import expand_path 15 | from vdirsyncer.utils import get_etag_from_file 16 | from vdirsyncer.utils import uniq 17 | from vdirsyncer.vobject import Item 18 | from vdirsyncer.vobject import join_collection 19 | from vdirsyncer.vobject import split_collection 20 | 21 | from .base import Storage 22 | 23 | logger = logging.getLogger(__name__) 24 | 25 | 26 | def _writing_op(f): 27 | """Implement at_once for write operations. 
28 | 29 | Wrap an operation which writes to the storage, implementing `at_once` if it has been 30 | requested. Changes are stored in-memory until the at_once block finishes, at which 31 | time they are all written at once. 32 | """ 33 | 34 | @functools.wraps(f) 35 | async def inner(self, *args, **kwargs): 36 | if self._items is None or not self._at_once: 37 | async for _ in self.list(): 38 | pass 39 | assert self._items is not None 40 | rv = await f(self, *args, **kwargs) 41 | if not self._at_once: 42 | self._write() 43 | return rv 44 | 45 | return inner 46 | 47 | 48 | class SingleFileStorage(Storage): 49 | storage_name = "singlefile" 50 | _repr_attributes = ("path",) 51 | 52 | _write_mode = "wb" 53 | _append_mode = "ab" 54 | _read_mode = "rb" 55 | 56 | _items = None 57 | _last_etag = None 58 | 59 | def __init__(self, path, encoding="utf-8", **kwargs): 60 | super().__init__(**kwargs) 61 | path = os.path.abspath(expand_path(path)) 62 | checkfile(path, create=False) 63 | 64 | self.path = path 65 | self.encoding = encoding 66 | self._at_once = False 67 | 68 | @classmethod 69 | async def discover(cls, path, **kwargs): 70 | if kwargs.pop("collection", None) is not None: 71 | raise TypeError("collection argument must not be given.") 72 | 73 | path = os.path.abspath(expand_path(path)) 74 | try: 75 | path_glob = path % "*" 76 | except TypeError: 77 | # If not exactly one '%s' is present, we cannot discover 78 | # collections because we wouldn't know which name to assign. 
79 | raise NotImplementedError 80 | 81 | placeholder_pos = path.index("%s") 82 | 83 | for subpath in glob.iglob(path_glob): 84 | if os.path.isfile(subpath): 85 | args = dict(kwargs) 86 | args["path"] = subpath 87 | 88 | collection_end = ( 89 | placeholder_pos + 2 + len(subpath) - len(path) # length of '%s' 90 | ) 91 | collection = subpath[placeholder_pos:collection_end] 92 | args["collection"] = collection 93 | 94 | yield args 95 | 96 | @classmethod 97 | async def create_collection(cls, collection, **kwargs): 98 | path = os.path.abspath(expand_path(kwargs["path"])) 99 | 100 | if collection is not None: 101 | try: 102 | path = path % (collection,) 103 | except TypeError: 104 | raise ValueError( 105 | "Exactly one %s required in path if collection is not null." 106 | ) 107 | 108 | checkfile(path, create=True) 109 | kwargs["path"] = path 110 | kwargs["collection"] = collection 111 | return kwargs 112 | 113 | async def list(self): 114 | self._items = collections.OrderedDict() 115 | 116 | try: 117 | self._last_etag = get_etag_from_file(self.path) 118 | with open(self.path, self._read_mode) as f: 119 | text = f.read().decode(self.encoding) 120 | except OSError as e: 121 | import errno 122 | 123 | if e.errno != errno.ENOENT: # file not found 124 | raise OSError(e) 125 | text = None 126 | 127 | if text: 128 | for item in split_collection(text): 129 | item = Item(item) 130 | etag = item.hash 131 | href = item.ident 132 | self._items[href] = item, etag 133 | 134 | yield href, etag 135 | 136 | async def get(self, href) -> tuple[Item, str]: 137 | if self._items is None or not self._at_once: 138 | async for _ in self.list(): 139 | pass 140 | 141 | assert self._items is not None # type assertion 142 | try: 143 | return self._items[href] 144 | except KeyError: 145 | raise exceptions.NotFoundError(href) 146 | 147 | async def get_multi(self, hrefs: Iterable[str]): 148 | async with self.at_once(): 149 | for href in uniq(hrefs): 150 | item, etag = await self.get(href) 151 | yield 
href, item, etag 152 | 153 | @_writing_op 154 | async def upload(self, item): 155 | href = item.ident 156 | if href in self._items: 157 | raise exceptions.AlreadyExistingError(existing_href=href) 158 | 159 | self._items[href] = item, item.hash 160 | return href, item.hash 161 | 162 | @_writing_op 163 | async def update(self, href, item, etag): 164 | if href not in self._items: 165 | raise exceptions.NotFoundError(href) 166 | 167 | _, actual_etag = self._items[href] 168 | if etag != actual_etag: 169 | raise exceptions.WrongEtagError(etag, actual_etag) 170 | 171 | self._items[href] = item, item.hash 172 | return item.hash 173 | 174 | @_writing_op 175 | async def delete(self, href, etag): 176 | if href not in self._items: 177 | raise exceptions.NotFoundError(href) 178 | 179 | _, actual_etag = self._items[href] 180 | if etag != actual_etag: 181 | raise exceptions.WrongEtagError(etag, actual_etag) 182 | 183 | del self._items[href] 184 | 185 | def _write(self): 186 | if self._last_etag is not None and self._last_etag != get_etag_from_file( 187 | self.path 188 | ): 189 | raise exceptions.PreconditionFailed( 190 | f"Some other program modified the file {self.path!r}. Re-run the " 191 | "synchronization and make sure absolutely no other program is " 192 | "writing into the same file." 
193 | ) 194 | text = join_collection(item.raw for item, etag in self._items.values()) 195 | try: 196 | with atomic_write(self.path, mode="wb", overwrite=True) as f: 197 | f.write(text.encode(self.encoding)) 198 | finally: 199 | self._items = None 200 | self._last_etag = None 201 | 202 | @contextlib.asynccontextmanager 203 | async def at_once(self): 204 | async for _ in self.list(): 205 | pass 206 | self._at_once = True 207 | try: 208 | yield self 209 | self._write() 210 | finally: 211 | self._at_once = False 212 | -------------------------------------------------------------------------------- /docs/Makefile: -------------------------------------------------------------------------------- 1 | # Makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line. 5 | SPHINXOPTS = 6 | SPHINXBUILD = sphinx-build 7 | PAPER = 8 | BUILDDIR = _build 9 | 10 | # User-friendly check for sphinx-build 11 | ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1) 12 | $(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/) 13 | endif 14 | 15 | # Internal variables. 16 | PAPEROPT_a4 = -D latex_paper_size=a4 17 | PAPEROPT_letter = -D latex_paper_size=letter 18 | ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . 19 | # the i18n builder cannot share the environment and doctrees with the others 20 | I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . 
# docs/Makefile (sphinx-quickstart boilerplate, continued): one phony target
# per Sphinx builder. Every target invokes $(SPHINXBUILD) with -b <builder>
# and $(ALLSPHINXOPTS), writing output under $(BUILDDIR)/<builder>; latexpdf
# and latexpdfja additionally chain into the generated LaTeX Makefile.
21 | 22 | .PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext 23 | 24 | help: 25 | @echo "Please use \`make ' where is one of" 26 | @echo " html to make standalone HTML files" 27 | @echo " dirhtml to make HTML files named index.html in directories" 28 | @echo " singlehtml to make a single large HTML file" 29 | @echo " pickle to make pickle files" 30 | @echo " json to make JSON files" 31 | @echo " htmlhelp to make HTML files and a HTML help project" 32 | @echo " qthelp to make HTML files and a qthelp project" 33 | @echo " devhelp to make HTML files and a Devhelp project" 34 | @echo " epub to make an epub" 35 | @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" 36 | @echo " latexpdf to make LaTeX files and run them through pdflatex" 37 | @echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx" 38 | @echo " text to make text files" 39 | @echo " man to make manual pages" 40 | @echo " texinfo to make Texinfo files" 41 | @echo " info to make Texinfo files and run them through makeinfo" 42 | @echo " gettext to make PO message catalogs" 43 | @echo " changes to make an overview of all changed/added/deprecated items" 44 | @echo " xml to make Docutils-native XML files" 45 | @echo " pseudoxml to make pseudoxml-XML files for display purposes" 46 | @echo " linkcheck to check all external links for integrity" 47 | @echo " doctest to run all doctests embedded in the documentation (if enabled)" 48 | 49 | clean: 50 | rm -rf $(BUILDDIR)/* 51 | 52 | html: 53 | $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html 54 | @echo 55 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." 56 | 57 | dirhtml: 58 | $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml 59 | @echo 60 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." 61 | 62 | singlehtml: 63 | $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml 64 | @echo 65 | @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." 66 | 67 | pickle: 68 | $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle 69 | @echo 70 | @echo "Build finished; now you can process the pickle files." 71 | 72 | json: 73 | $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json 74 | @echo 75 | @echo "Build finished; now you can process the JSON files." 76 | 77 | htmlhelp: 78 | $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp 79 | @echo 80 | @echo "Build finished; now you can run HTML Help Workshop with the" \ 81 | ".hhp project file in $(BUILDDIR)/htmlhelp." 82 | 83 | qthelp: 84 | $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp 85 | @echo 86 | @echo "Build finished; now you can run "qcollectiongenerator" with the" \ 87 | ".qhcp project file in $(BUILDDIR)/qthelp, like this:" 88 | @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/vdirsyncer.qhcp" 89 | @echo "To view the help file:" 90 | @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/vdirsyncer.qhc" 91 | 92 | devhelp: 93 | $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp 94 | @echo 95 | @echo "Build finished." 96 | @echo "To view the help file:" 97 | @echo "# mkdir -p $$HOME/.local/share/devhelp/vdirsyncer" 98 | @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/vdirsyncer" 99 | @echo "# devhelp" 100 | 101 | epub: 102 | $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub 103 | @echo 104 | @echo "Build finished. The epub file is in $(BUILDDIR)/epub." 105 | 106 | latex: 107 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 108 | @echo 109 | @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." 110 | @echo "Run \`make' in that directory to run these through (pdf)latex" \ 111 | "(use \`make latexpdf' here to do that automatically)." 112 | 113 | latexpdf: 114 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 115 | @echo "Running LaTeX files through pdflatex..." 116 | $(MAKE) -C $(BUILDDIR)/latex all-pdf 117 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." 118 | 119 | latexpdfja: 120 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 121 | @echo "Running LaTeX files through platex and dvipdfmx..." 122 | $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja 123 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." 124 | 125 | text: 126 | $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text 127 | @echo 128 | @echo "Build finished. The text files are in $(BUILDDIR)/text." 129 | 130 | man: 131 | $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man 132 | @echo 133 | @echo "Build finished. The manual pages are in $(BUILDDIR)/man." 134 | 135 | texinfo: 136 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo 137 | @echo 138 | @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." 139 | @echo "Run \`make' in that directory to run these through makeinfo" \ 140 | "(use \`make info' here to do that automatically)." 141 | 142 | info: 143 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo 144 | @echo "Running Texinfo files through makeinfo..." 145 | make -C $(BUILDDIR)/texinfo info 146 | @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." 147 | 148 | gettext: 149 | $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale 150 | @echo 151 | @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale." 152 | 153 | changes: 154 | $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes 155 | @echo 156 | @echo "The overview file is in $(BUILDDIR)/changes." 157 | 158 | linkcheck: 159 | $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck 160 | @echo 161 | @echo "Link check complete; look for any errors in the above output " \ 162 | "or in $(BUILDDIR)/linkcheck/output.txt." 
# End of docs/Makefile (doctest/xml/pseudoxml targets), then the head of
# vdirsyncer/utils.py: stdlib imports plus the package-local exceptions module,
# the SAFE_UID_CHARS alphabet used for generated hrefs (deliberately omitting
# `@` for broken servers, per the inline comment), a `_missing` sentinel, and
# the small helpers expand_path (expanduser + normpath), split_dict (partition
# a dict by a key predicate), and the start of uniq (order-preserving dedupe).
163 | 164 | doctest: 165 | $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest 166 | @echo "Testing of doctests in the sources finished, look at the " \ 167 | "results in $(BUILDDIR)/doctest/output.txt." 168 | 169 | xml: 170 | $(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml 171 | @echo 172 | @echo "Build finished. The XML files are in $(BUILDDIR)/xml." 173 | 174 | pseudoxml: 175 | $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml 176 | @echo 177 | @echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml." 178 | -------------------------------------------------------------------------------- /vdirsyncer/utils.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import contextlib 4 | import functools 5 | import os 6 | import sys 7 | import tempfile 8 | import uuid 9 | from inspect import getfullargspec 10 | from typing import Callable 11 | 12 | from . import exceptions 13 | 14 | # This is only a subset of the chars allowed per the spec. In particular `@` is 15 | # not included, because there are some servers that (incorrectly) encode it to 16 | # `%40` when it's part of a URL path, and reject or "repair" URLs that contain 17 | # `@` in the path. So it's better to just avoid it. 18 | SAFE_UID_CHARS = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_.-+" 19 | 20 | 21 | _missing = object() 22 | 23 | 24 | def expand_path(p: str) -> str: 25 | """Expand $HOME in a path and normalise slashes.""" 26 | p = os.path.expanduser(p) 27 | return os.path.normpath(p) 28 | 29 | 30 | def split_dict(d: dict, f: Callable): 31 | """Puts key into first dict if f(key), otherwise in second dict""" 32 | a = {} 33 | b = {} 34 | for k, v in d.items(): 35 | if f(k): 36 | a[k] = v 37 | else: 38 | b[k] = v 39 | return a, b 40 | 41 | 42 | def uniq(s): 43 | """Filter duplicates while preserving order. 
# utils.py continued: tail of uniq, then get_etag_from_file — derives an etag
# from mtime (st_mtime_ns when available) plus inode, flushing (and on win32
# fsyncing) file objects first so the stat is accurate — and
# get_storage_init_specs, which collects getfullargspec(__init__) up the MRO,
# honoring an optional `_traverse_superclass` override, until `stop_at`.
``set`` can almost always be 44 | used instead of this, but preserving order might prove useful for 45 | debugging.""" 46 | d = set() 47 | for x in s: 48 | if x not in d: 49 | d.add(x) 50 | yield x 51 | 52 | 53 | def get_etag_from_file(f): 54 | """Get etag from a filepath or file-like object. 55 | 56 | This function will flush/sync the file as much as necessary to obtain a 57 | correct value. 58 | """ 59 | if hasattr(f, "read"): 60 | f.flush()  # Only this is necessary on Linux 61 | if sys.platform == "win32": 62 | os.fsync(f.fileno())  # Apparently necessary on Windows 63 | stat = os.fstat(f.fileno()) 64 | else: 65 | stat = os.stat(f) 66 | 67 | mtime = getattr(stat, "st_mtime_ns", None) 68 | if mtime is None: 69 | mtime = stat.st_mtime 70 | return f"{mtime:.9f};{stat.st_ino}" 71 | 72 | 73 | def get_storage_init_specs(cls, stop_at=object): 74 | if cls is stop_at: 75 | return () 76 | 77 | spec = getfullargspec(cls.__init__) 78 | traverse_superclass = getattr(cls.__init__, "_traverse_superclass", True) 79 | if traverse_superclass: 80 | if traverse_superclass is True: 81 | supercls = next( 82 | getattr(x.__init__, "__objclass__", x) for x in cls.__mro__[1:] 83 | ) 84 | else: 85 | supercls = traverse_superclass 86 | superspecs = get_storage_init_specs(supercls, stop_at=stop_at) 87 | else: 88 | superspecs = () 89 | 90 | return (spec, *superspecs) 91 | 92 | 93 | def get_storage_init_args(cls, stop_at=object): 94 | """ 95 | Get args which are taken during class initialization. Assumes that all 96 | classes' __init__ calls super().__init__ with the rest of the arguments. 97 | 98 | :param cls: The class to inspect. 99 | :returns: (all, required), where ``all`` is a set of all arguments the 100 | class can take, and ``required`` is the subset of arguments the class 101 | requires. 
102 | """ 103 | all, required = set(), set() 104 | for spec in get_storage_init_specs(cls, stop_at=stop_at): 105 | all.update(spec.args[1:]) 106 | last = -len(spec.defaults) if spec.defaults else len(spec.args) 107 | required.update(spec.args[1:last]) 108 | 109 | return all, required 110 | 111 | 112 | def checkdir(path: str, create: bool = False, mode: int = 0o750) -> None: 113 | """Check whether ``path`` is a directory. 114 | 115 | :param create: Whether to create the directory (and all parent directories) 116 | if it does not exist. 117 | :param mode: Mode to create missing directories with. 118 | """ 119 | 120 | if not os.path.isdir(path): 121 | if os.path.exists(path): 122 | raise OSError(f"{path} is not a directory.") 123 | if create: 124 | os.makedirs(path, mode) 125 | else: 126 | raise exceptions.CollectionNotFound(f"Directory {path} does not exist.") 127 | 128 | 129 | def checkfile(path, create=False) -> None: 130 | """Check whether ``path`` is a file. 131 | 132 | :param create: Whether to create the file's parent directories if they do 133 | not exist. 134 | :raises CollectionNotFound: if path does not exist. 135 | :raises OSError: if path exists but is not a file. 136 | """ 137 | checkdir(os.path.dirname(path), create=create) 138 | if not os.path.isfile(path): 139 | if os.path.exists(path): 140 | raise OSError(f"{path} is not a file.") 141 | if create: 142 | with open(path, "wb"): 143 | pass 144 | else: 145 | raise exceptions.CollectionNotFound(f"File {path} does not exist.") 146 | 147 | 148 | def href_safe(ident, safe=SAFE_UID_CHARS): 149 | return not bool(set(ident) - set(safe)) 150 | 151 | 152 | def generate_href(ident=None, safe=SAFE_UID_CHARS): 153 | """ 154 | Generate a safe identifier, suitable for URLs, storage hrefs or UIDs. 155 | 156 | If the given ident string is safe, it will be returned, otherwise a random 157 | UUID. 
158 | """ 159 | if not ident or not href_safe(ident, safe): 160 | return str(uuid.uuid4()) 161 | return ident 162 | 163 | 164 | def synchronized(lock=None): 165 | if lock is None: 166 | from threading import Lock 167 | 168 | lock = Lock() 169 | 170 | def inner(f): 171 | @functools.wraps(f) 172 | def wrapper(*args, **kwargs): 173 | with lock: 174 | return f(*args, **kwargs) 175 | 176 | return wrapper 177 | 178 | return inner 179 | 180 | 181 | def open_graphical_browser(url, new=0, autoraise=True): 182 | """Open a graphical web browser. 183 | 184 | This is basically like `webbrowser.open`, but without trying to launch CLI 185 | browsers at all. We're excluding those since it's undesirable to launch 186 | those when you're using vdirsyncer on a server. Rather copypaste the URL 187 | into the local browser, or use the URL-yanking features of your terminal 188 | emulator. 189 | """ 190 | import webbrowser 191 | 192 | cli_names = {"www-browser", "links", "links2", "elinks", "lynx", "w3m"} 193 | 194 | if webbrowser._tryorder is None: # Python 3.8 195 | webbrowser.register_standard_browsers() 196 | 197 | for name in webbrowser._tryorder: 198 | if name in cli_names: 199 | continue 200 | 201 | browser = webbrowser.get(name) 202 | if browser.open(url, new, autoraise): 203 | return 204 | 205 | raise RuntimeError("No graphical browser found. 
# utils.py tail: atomic_write() — a context manager that writes to a tempfile
# in the destination directory, unlinks it on error, and on success either
# renames over dest (overwrite=True) or hard-links then unlinks (overwrite
# =False, so an existing dest makes os.link fail instead of clobbering).
# Below the separator: header of docs/make.bat (the Windows counterpart of
# docs/Makefile, also sphinx-quickstart generated).
Please open the URL manually.") 206 | 207 | 208 | @contextlib.contextmanager 209 | def atomic_write(dest, mode="wb", overwrite=False): 210 | if "w" not in mode: 211 | raise RuntimeError("`atomic_write` requires write access") 212 | 213 | fd, src = tempfile.mkstemp(prefix=os.path.basename(dest), dir=os.path.dirname(dest)) 214 | file = os.fdopen(fd, mode=mode) 215 | 216 | try: 217 | yield file 218 | except Exception: 219 | os.unlink(src) 220 | raise 221 | else: 222 | file.flush() 223 | file.close() 224 | 225 | if overwrite: 226 | os.rename(src, dest) 227 | else: 228 | os.link(src, dest) 229 | os.unlink(src) 230 | -------------------------------------------------------------------------------- /docs/make.bat: -------------------------------------------------------------------------------- 1 | @ECHO OFF 2 | 3 | REM Command file for Sphinx documentation 4 | 5 | if "%SPHINXBUILD%" == "" ( 6 | set SPHINXBUILD=sphinx-build 7 | ) 8 | set BUILDDIR=_build 9 | set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% . 10 | set I18NSPHINXOPTS=%SPHINXOPTS% . 11 | if NOT "%PAPER%" == "" ( 12 | set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS% 13 | set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS% 14 | ) 15 | 16 | if "%1" == "" goto help 17 | 18 | if "%1" == "help" ( 19 | :help 20 | echo.Please use `make ^` where ^ is one of 21 | echo.   html       to make standalone HTML files 22 | echo.   dirhtml    to make HTML files named index.html in directories 23 | echo.   singlehtml to make a single large HTML file 24 | echo.   pickle     to make pickle files 25 | echo.   json       to make JSON files 26 | echo.   htmlhelp   to make HTML files and a HTML help project 27 | echo.   qthelp     to make HTML files and a qthelp project 28 | echo.   devhelp    to make HTML files and a Devhelp project 29 | echo.   epub       to make an epub 30 | echo.   latex      to make LaTeX files, you can set PAPER=a4 or PAPER=letter 31 | echo.   text       to make text files 32 | echo.   man        to make manual pages 33 | echo. 
REM docs/make.bat continued: rest of the help text, the clean branch, the
REM sphinx-build availability check (errorlevel 9009 = command not found),
REM and the html/dirhtml/singlehtml/pickle builder branches.
REM Fix: "an overview over all" -> "an overview of all", matching the wording
REM of the equivalent `changes` help line in docs/Makefile.
texinfo to make Texinfo files 34 | echo.   gettext    to make PO message catalogs 35 | echo.   changes    to make an overview of all changed/added/deprecated items 36 | echo.   xml        to make Docutils-native XML files 37 | echo.   pseudoxml  to make pseudoxml-XML files for display purposes 38 | echo.   linkcheck  to check all external links for integrity 39 | echo.   doctest    to run all doctests embedded in the documentation if enabled 40 | goto end 41 | ) 42 | 43 | if "%1" == "clean" ( 44 | for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i 45 | del /q /s %BUILDDIR%\* 46 | goto end 47 | ) 48 | 49 | 50 | %SPHINXBUILD% 2> nul 51 | if errorlevel 9009 ( 52 | echo. 53 | echo.The 'sphinx-build' command was not found. Make sure you have Sphinx 54 | echo.installed, then set the SPHINXBUILD environment variable to point 55 | echo.to the full path of the 'sphinx-build' executable. Alternatively you 56 | echo.may add the Sphinx directory to PATH. 57 | echo. 58 | echo.If you don't have Sphinx installed, grab it from 59 | echo.http://sphinx-doc.org/ 60 | exit /b 1 61 | ) 62 | 63 | if "%1" == "html" ( 64 | %SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html 65 | if errorlevel 1 exit /b 1 66 | echo. 67 | echo.Build finished. The HTML pages are in %BUILDDIR%/html. 68 | goto end 69 | ) 70 | 71 | if "%1" == "dirhtml" ( 72 | %SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml 73 | if errorlevel 1 exit /b 1 74 | echo. 75 | echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml. 76 | goto end 77 | ) 78 | 79 | if "%1" == "singlehtml" ( 80 | %SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml 81 | if errorlevel 1 exit /b 1 82 | echo. 83 | echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml. 84 | goto end 85 | ) 86 | 87 | if "%1" == "pickle" ( 88 | %SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle 89 | if errorlevel 1 exit /b 1 90 | echo. 91 | echo.Build finished; now you can process the pickle files. 
REM docs/make.bat continued: json/htmlhelp/qthelp/devhelp/epub/latex/latexpdf
REM builder branches.
REM Fix: the Qt Assistant example referenced "vdirsyncer.ghc"; the qthelp
REM builder produces a .qhc collection file, and the equivalent line in
REM docs/Makefile correctly says "vdirsyncer.qhc" — corrected to .qhc here.
92 | goto end 93 | ) 94 | 95 | if "%1" == "json" ( 96 | %SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json 97 | if errorlevel 1 exit /b 1 98 | echo. 99 | echo.Build finished; now you can process the JSON files. 100 | goto end 101 | ) 102 | 103 | if "%1" == "htmlhelp" ( 104 | %SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp 105 | if errorlevel 1 exit /b 1 106 | echo. 107 | echo.Build finished; now you can run HTML Help Workshop with the ^ 108 | .hhp project file in %BUILDDIR%/htmlhelp. 109 | goto end 110 | ) 111 | 112 | if "%1" == "qthelp" ( 113 | %SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp 114 | if errorlevel 1 exit /b 1 115 | echo. 116 | echo.Build finished; now you can run "qcollectiongenerator" with the ^ 117 | .qhcp project file in %BUILDDIR%/qthelp, like this: 118 | echo.^> qcollectiongenerator %BUILDDIR%\qthelp\vdirsyncer.qhcp 119 | echo.To view the help file: 120 | echo.^> assistant -collectionFile %BUILDDIR%\qthelp\vdirsyncer.qhc 121 | goto end 122 | ) 123 | 124 | if "%1" == "devhelp" ( 125 | %SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp 126 | if errorlevel 1 exit /b 1 127 | echo. 128 | echo.Build finished. 129 | goto end 130 | ) 131 | 132 | if "%1" == "epub" ( 133 | %SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub 134 | if errorlevel 1 exit /b 1 135 | echo. 136 | echo.Build finished. The epub file is in %BUILDDIR%/epub. 137 | goto end 138 | ) 139 | 140 | if "%1" == "latex" ( 141 | %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex 142 | if errorlevel 1 exit /b 1 143 | echo. 144 | echo.Build finished; the LaTeX files are in %BUILDDIR%/latex. 145 | goto end 146 | ) 147 | 148 | if "%1" == "latexpdf" ( 149 | %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex 150 | cd %BUILDDIR%/latex 151 | make all-pdf 152 | cd %BUILDDIR%/.. 153 | echo. 154 | echo.Build finished; the PDF files are in %BUILDDIR%/latex. 
REM docs/make.bat continued: latexpdfja/text/man/texinfo/gettext/changes/
REM linkcheck branches; each runs %SPHINXBUILD% with the matching builder
REM (gettext uses %I18NSPHINXOPTS% since i18n cannot share doctrees) and the
REM start of the doctest branch.
155 | goto end 156 | ) 157 | 158 | if "%1" == "latexpdfja" ( 159 | %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex 160 | cd %BUILDDIR%/latex 161 | make all-pdf-ja 162 | cd %BUILDDIR%/.. 163 | echo. 164 | echo.Build finished; the PDF files are in %BUILDDIR%/latex. 165 | goto end 166 | ) 167 | 168 | if "%1" == "text" ( 169 | %SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text 170 | if errorlevel 1 exit /b 1 171 | echo. 172 | echo.Build finished. The text files are in %BUILDDIR%/text. 173 | goto end 174 | ) 175 | 176 | if "%1" == "man" ( 177 | %SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man 178 | if errorlevel 1 exit /b 1 179 | echo. 180 | echo.Build finished. The manual pages are in %BUILDDIR%/man. 181 | goto end 182 | ) 183 | 184 | if "%1" == "texinfo" ( 185 | %SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo 186 | if errorlevel 1 exit /b 1 187 | echo. 188 | echo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo. 189 | goto end 190 | ) 191 | 192 | if "%1" == "gettext" ( 193 | %SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale 194 | if errorlevel 1 exit /b 1 195 | echo. 196 | echo.Build finished. The message catalogs are in %BUILDDIR%/locale. 197 | goto end 198 | ) 199 | 200 | if "%1" == "changes" ( 201 | %SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes 202 | if errorlevel 1 exit /b 1 203 | echo. 204 | echo.The overview file is in %BUILDDIR%/changes. 205 | goto end 206 | ) 207 | 208 | if "%1" == "linkcheck" ( 209 | %SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck 210 | if errorlevel 1 exit /b 1 211 | echo. 212 | echo.Link check complete; look for any errors in the above output ^ 213 | or in %BUILDDIR%/linkcheck/output.txt. 214 | goto end 215 | ) 216 | 217 | if "%1" == "doctest" ( 218 | %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest 219 | if errorlevel 1 exit /b 1 220 | echo. 
REM docs/make.bat tail: end of the doctest branch, the xml and pseudoxml
REM branches, and the shared :end label all goto-jumps land on.
221 | echo.Testing of doctests in the sources finished, look at the ^ 222 | results in %BUILDDIR%/doctest/output.txt. 223 | goto end 224 | ) 225 | 226 | if "%1" == "xml" ( 227 | %SPHINXBUILD% -b xml %ALLSPHINXOPTS% %BUILDDIR%/xml 228 | if errorlevel 1 exit /b 1 229 | echo. 230 | echo.Build finished. The XML files are in %BUILDDIR%/xml. 231 | goto end 232 | ) 233 | 234 | if "%1" == "pseudoxml" ( 235 | %SPHINXBUILD% -b pseudoxml %ALLSPHINXOPTS% %BUILDDIR%/pseudoxml 236 | if errorlevel 1 exit /b 1 237 | echo. 238 | echo.Build finished. The pseudo-XML files are in %BUILDDIR%/pseudoxml. 239 | goto end 240 | ) 241 | 242 | :end 243 | --------------------------------------------------------------------------------