├── .flake8 ├── .github └── workflows │ └── ci.yml ├── .gitignore ├── .isort.cfg ├── .travis.yml ├── CHANGELOG.md ├── Dockerfile ├── LICENSE ├── MANIFEST.in ├── Makefile ├── README.md ├── contrib ├── pantalaimon.conf └── pantalaimon.service ├── docs ├── man │ ├── panctl.1 │ ├── panctl.md │ ├── pantalaimon.5 │ ├── pantalaimon.5.md │ ├── pantalaimon.8 │ └── pantalaimon.8.md └── pan.gif ├── pantalaimon ├── __init__.py ├── client.py ├── config.py ├── daemon.py ├── index.py ├── log.py ├── main.py ├── panctl.py ├── store.py ├── thread_messages.py └── ui.py ├── setup.py ├── test-requirements.txt ├── tests ├── conftest.py ├── data │ └── sync.json ├── pan_client_test.py ├── proxy_test.py └── store_test.py └── tox.ini /.flake8: -------------------------------------------------------------------------------- 1 | [flake8] 2 | max-line-length = 80 3 | select = C,E,F,W,B,B950 4 | ignore = E501,W503 5 | -------------------------------------------------------------------------------- /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | name: Build Status 2 | 3 | on: [push, pull_request] 4 | 5 | jobs: 6 | build: 7 | 8 | runs-on: ubuntu-latest 9 | strategy: 10 | matrix: 11 | python-version: ['3.8', '3.9', '3.10'] 12 | 13 | steps: 14 | - uses: actions/checkout@v2 15 | - uses: actions/setup-python@v2 16 | with: 17 | python-version: ${{ matrix.python-version }} 18 | - name: Install Tox and any other packages 19 | run: | 20 | wget https://gitlab.matrix.org/matrix-org/olm/-/archive/master/olm-master.tar.bz2 21 | tar -xvf olm-master.tar.bz2 22 | pushd olm-master && make && sudo make PREFIX="/usr" install && popd 23 | rm -r olm-master 24 | pip install tox 25 | - name: Run Tox 26 | run: tox -e py 27 | 28 | coverage: 29 | runs-on: ubuntu-latest 30 | 31 | steps: 32 | - uses: actions/checkout@v2 33 | - name: Setup Python 34 | uses: actions/setup-python@v2 35 | with: 36 | python-version: "3.10" 37 | - name: Install Tox and any other packages 38 | run: | 39 | wget https://gitlab.matrix.org/matrix-org/olm/-/archive/master/olm-master.tar.bz2 40 | tar -xvf olm-master.tar.bz2 41 | pushd olm-master && make && sudo make PREFIX="/usr" install && popd 42 | rm -r olm-master 43 | pip install tox 44 | - name: Run Tox 45 | run: tox -e coverage 46 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | __pycache__ 2 | dist 3 | pantalaimon.egg-info/ 4 | .tox 5 | .mypy_cache 6 | .coverage 7 | -------------------------------------------------------------------------------- /.isort.cfg: -------------------------------------------------------------------------------- 1 | [settings] 2 | multi_line_output=3 3 | include_trailing_comma=True 4 | force_grid_wrap=0 5 | use_parentheses=True 6 | line_length=88 7 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | language: python 2 | dist: xenial 3 | sudo: false 4 | 5 | addons: 6 | apt: 7 | packages: 8 | - libdbus-1-dev 9 | - libgirepository1.0-dev 10 | 11 | before_install: 12 | - wget https://gitlab.matrix.org/matrix-org/olm/-/archive/master/olm-master.tar.bz2 13 | - tar -xvf olm-master.tar.bz2 14 | - pushd olm-master && make && sudo make PREFIX="/usr" install && popd 15 | - rm -r olm-master 16 | 17 | matrix: 18 | include: 19 | - python: 3.8 20 | env: TOXENV=py38 21 | - python: 3.9 22 | 
env: TOXENV=py39 23 | - python: 3.9 24 | env: TOXENV=coverage 25 | 26 | install: pip install tox-travis aioresponses 27 | script: tox 28 | 29 | after_success: 30 | - bash <(curl -s https://codecov.io/bash) 31 | -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | # Changelog 2 | All notable changes to this project will be documented in this file. 3 | 4 | The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), 5 | and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). 6 | 7 | ## 0.10.5 2022-09-28 8 | 9 | ### Added 10 | 11 | - [[#137]] Proxy the v3 endpoints as well 12 | 13 | ### Fixed 14 | 15 | - [[#130]] Make sure the token variable is declared 16 | 17 | [#137]: https://github.com/matrix-org/pantalaimon/pull/137 18 | [#130]: https://github.com/matrix-org/pantalaimon/pull/130 19 | 20 | ## 0.10.4 2022-02-04 21 | 22 | ### Fixed 23 | 24 | - [[#122]] Fix the GLib import for panctl on some distributions 25 | - [[#120]] Don't use strip to filter Bearer from the auth header 26 | - [[#118]] Don't use the raw path if we need to sanitize filters, fixing room 27 | history fetching for Fractal 28 | 29 | [#122]: https://github.com/matrix-org/pantalaimon/pull/122 30 | [#120]: https://github.com/matrix-org/pantalaimon/pull/120 31 | [#118]: https://github.com/matrix-org/pantalaimon/pull/118 32 | 33 | ## 0.10.3 2021-09-02 34 | 35 | ### Fixed 36 | 37 | - [[#105]] Use the raw_path when forwarding requests, avoiding URL 38 | decoding/encoding issues. 39 | 40 | [#105]: https://github.com/matrix-org/pantalaimon/pull/105 41 | 42 | 43 | ## 0.10.2 2021-07-14 44 | 45 | ### Fixed 46 | 47 | - [[#103]] Prevent E2EE downgrade on failed syncs 48 | 49 | [#103]: https://github.com/matrix-org/pantalaimon/pull/103 50 | 51 | 52 | ## 0.10.1 2021-07-06 53 | 54 | ### Fixed 55 | 56 | - [[#100]] Don't require the rooms dicts in the sync response 57 | - [[#99]] Thumbnails not generating for media uploaded in unencrypted rooms 58 | whole LRU cache when it shouldn't 59 | 60 | [#100]: https://github.com/matrix-org/pantalaimon/pull/100 61 | [#99]: https://github.com/matrix-org/pantalaimon/pull/99 62 | 63 | 64 | ## 0.10.0 2021-05-14 65 | 66 | ### Added 67 | 68 | - [[#98]] Add the ability to remove old room keys 69 | - [[#95]] Encrypt thumbnails uploaded by a client 70 | 71 | ### Fixed 72 | 73 | - [[#96]] Split out the media cache loading logic to avoid returning the 74 | whole LRU cache when it shouldn't 75 | 76 | [#98]: https://github.com/matrix-org/pantalaimon/pull/98 77 | [#96]: https://github.com/matrix-org/pantalaimon/pull/96 78 | [#95]: https://github.com/matrix-org/pantalaimon/pull/95 79 | 80 | ## 0.9.3 2021-05-14 81 | 82 | ### Changed 83 | 84 | - [[#73f68c7]] Bump the allowed nio version 85 | 86 | [73f68c7]: https://github.com/matrix-org/pantalaimon/commit/73f68c76fb05037bd7fe71688ce39eb1f526a385 87 | 88 | ## 0.9.2 2021-03-10 89 | 90 | ### Changed 91 | 92 | - [[#89]] Bump the allowed nio version 93 | 94 | [#89]: https://github.com/matrix-org/pantalaimon/pull/89 95 | 96 | ## 0.9.1 2021-01-19 97 | 98 | ### Changed 99 | 100 | - [[3baae08]] Bump the allowed nio version 101 | 102 | [3baae08]: https://github.com/matrix-org/pantalaimon/commit/3baae08ac36e258632e224b655e177a765a939f3 103 | 104 | ## 0.9.0 2021-01-19 105 | 106 | ### Fixed 107 | 108 | - [[59051c5]] Fix the notification initialization allowing the DBUS thread to 109 | start again 110 | 111 | 
### Added 112 | 113 | - [[#79]] Support media uploads, thanks to @aspacca 114 | 115 | [59051c5]: https://github.com/matrix-org/pantalaimon/commit/59051c530a343a6887ea0f9ccddd6f6964f6d923 116 | [#79]: https://github.com/matrix-org/pantalaimon/pull/79 117 | 118 | ## 0.8.0 2020-09-30 119 | 120 | ### Changed 121 | 122 | - [[#69]] If no password is provided to /login, the daemon will re-use the original login response. 123 | 124 | [#69]: https://github.com/matrix-org/pantalaimon/pull/69 125 | 126 | ## 0.7.0 2020-09-02 127 | 128 | ### Fixed 129 | 130 | - [[#60]] Sanitize the GET /rooms/{room_id}/messages filters as well. 131 | - [[#62]] Store media info when decrypting instead of using a event callback. 132 | 133 | ### Changed 134 | 135 | - [[d425e2d]] Increase the max POST size. 136 | 137 | [#62]: https://github.com/matrix-org/pantalaimon/pull/62 138 | [#60]: https://github.com/matrix-org/pantalaimon/pull/60 139 | [d425e2d]: https://github.com/matrix-org/pantalaimon/commit/d425e2d188aed32c3fe87cac210c0943fd51b085 140 | 141 | ## 0.6.5 2020-07-02 142 | 143 | ### Fixed 144 | 145 | - [[a1ce950]] Allow to send messages using a POST request since Synapse seems to 146 | allow it. 147 | 148 | [a1ce950]: https://github.com/matrix-org/pantalaimon/commit/a1ce95076ecd80c880028691feeced8d28cacad9 149 | 150 | ## 0.6.4 2020-06-21 151 | 152 | ### Changed 153 | - Bump the maximal supported nio version. 154 | 155 | ## 0.6.3 2020-05-28 156 | 157 | ### Fixed 158 | - Fix our dep requirements to avoid incompatibilities between nio and pantalaimon. 159 | 160 | ## 0.6.2 2020-05-27 161 | 162 | ### Fixed 163 | - Don't require exact patch versions for our deps. 164 | 165 | ## 0.6.1 2020-05-12 166 | 167 | ### Fixed 168 | - Bump the version to trigger a docker hub build with the latest nio release. 169 | 170 | ## 0.6.0 2020-05-10 171 | 172 | ### Added 173 | - Add support for Janus 0.5.0. 174 | - Added media endpoint handling to the /media/v1 path. 175 | 176 | ### Fixed 177 | - Modify media events so they contain the unencrypted URL fields as well. 178 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | FROM python:3.8-slim-buster AS builder 2 | 3 | RUN mkdir -p /app 4 | RUN apt-get update && apt-get install -y git gcc clang cmake g++ pkg-config python3-dev wget 5 | 6 | WORKDIR /app 7 | RUN wget https://gitlab.matrix.org/matrix-org/olm/-/archive/master/olm-master.tar.bz2 \ 8 | && tar -xvf olm-master.tar.bz2 \ 9 | && cd olm-master && make && make PREFIX="/usr" install 10 | 11 | RUN pip --no-cache-dir install --upgrade pip setuptools wheel 12 | 13 | COPY . /app 14 | 15 | RUN pip wheel . --wheel-dir /wheels --find-links /wheels 16 | 17 | FROM python:3.8-slim-buster AS run 18 | 19 | COPY --from=builder /usr/lib/libolm* /usr/lib/ 20 | COPY --from=builder /wheels /wheels 21 | WORKDIR /app 22 | 23 | RUN pip --no-cache-dir install --find-links /wheels --no-index pantalaimon 24 | 25 | VOLUME /data 26 | ENTRYPOINT ["pantalaimon"] 27 | CMD ["-c", "/data/pantalaimon.conf", "--data-path", "/data"] 28 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | 2 | Apache License 3 | Version 2.0, January 2004 4 | http://www.apache.org/licenses/ 5 | 6 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 7 | 8 | 1. Definitions. 
9 | 10 | "License" shall mean the terms and conditions for use, reproduction, 11 | and distribution as defined by Sections 1 through 9 of this document. 12 | 13 | "Licensor" shall mean the copyright owner or entity authorized by 14 | the copyright owner that is granting the License. 15 | 16 | "Legal Entity" shall mean the union of the acting entity and all 17 | other entities that control, are controlled by, or are under common 18 | control with that entity. For the purposes of this definition, 19 | "control" means (i) the power, direct or indirect, to cause the 20 | direction or management of such entity, whether by contract or 21 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 22 | outstanding shares, or (iii) beneficial ownership of such entity. 23 | 24 | "You" (or "Your") shall mean an individual or Legal Entity 25 | exercising permissions granted by this License. 26 | 27 | "Source" form shall mean the preferred form for making modifications, 28 | including but not limited to software source code, documentation 29 | source, and configuration files. 30 | 31 | "Object" form shall mean any form resulting from mechanical 32 | transformation or translation of a Source form, including but 33 | not limited to compiled object code, generated documentation, 34 | and conversions to other media types. 35 | 36 | "Work" shall mean the work of authorship, whether in Source or 37 | Object form, made available under the License, as indicated by a 38 | copyright notice that is included in or attached to the work 39 | (an example is provided in the Appendix below). 40 | 41 | "Derivative Works" shall mean any work, whether in Source or Object 42 | form, that is based on (or derived from) the Work and for which the 43 | editorial revisions, annotations, elaborations, or other modifications 44 | represent, as a whole, an original work of authorship. For the purposes 45 | of this License, Derivative Works shall not include works that remain 46 | separable from, or merely link (or bind by name) to the interfaces of, 47 | the Work and Derivative Works thereof. 48 | 49 | "Contribution" shall mean any work of authorship, including 50 | the original version of the Work and any modifications or additions 51 | to that Work or Derivative Works thereof, that is intentionally 52 | submitted to Licensor for inclusion in the Work by the copyright owner 53 | or by an individual or Legal Entity authorized to submit on behalf of 54 | the copyright owner. For the purposes of this definition, "submitted" 55 | means any form of electronic, verbal, or written communication sent 56 | to the Licensor or its representatives, including but not limited to 57 | communication on electronic mailing lists, source code control systems, 58 | and issue tracking systems that are managed by, or on behalf of, the 59 | Licensor for the purpose of discussing and improving the Work, but 60 | excluding communication that is conspicuously marked or otherwise 61 | designated in writing by the copyright owner as "Not a Contribution." 62 | 63 | "Contributor" shall mean Licensor and any individual or Legal Entity 64 | on behalf of whom a Contribution has been received by Licensor and 65 | subsequently incorporated within the Work. 66 | 67 | 2. Grant of Copyright License. 
Subject to the terms and conditions of 68 | this License, each Contributor hereby grants to You a perpetual, 69 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 70 | copyright license to reproduce, prepare Derivative Works of, 71 | publicly display, publicly perform, sublicense, and distribute the 72 | Work and such Derivative Works in Source or Object form. 73 | 74 | 3. Grant of Patent License. Subject to the terms and conditions of 75 | this License, each Contributor hereby grants to You a perpetual, 76 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 77 | (except as stated in this section) patent license to make, have made, 78 | use, offer to sell, sell, import, and otherwise transfer the Work, 79 | where such license applies only to those patent claims licensable 80 | by such Contributor that are necessarily infringed by their 81 | Contribution(s) alone or by combination of their Contribution(s) 82 | with the Work to which such Contribution(s) was submitted. If You 83 | institute patent litigation against any entity (including a 84 | cross-claim or counterclaim in a lawsuit) alleging that the Work 85 | or a Contribution incorporated within the Work constitutes direct 86 | or contributory patent infringement, then any patent licenses 87 | granted to You under this License for that Work shall terminate 88 | as of the date such litigation is filed. 89 | 90 | 4. Redistribution. You may reproduce and distribute copies of the 91 | Work or Derivative Works thereof in any medium, with or without 92 | modifications, and in Source or Object form, provided that You 93 | meet the following conditions: 94 | 95 | (a) You must give any other recipients of the Work or 96 | Derivative Works a copy of this License; and 97 | 98 | (b) You must cause any modified files to carry prominent notices 99 | stating that You changed the files; and 100 | 101 | (c) You must retain, in the Source form of any Derivative Works 102 | that You distribute, all copyright, patent, trademark, and 103 | attribution notices from the Source form of the Work, 104 | excluding those notices that do not pertain to any part of 105 | the Derivative Works; and 106 | 107 | (d) If the Work includes a "NOTICE" text file as part of its 108 | distribution, then any Derivative Works that You distribute must 109 | include a readable copy of the attribution notices contained 110 | within such NOTICE file, excluding those notices that do not 111 | pertain to any part of the Derivative Works, in at least one 112 | of the following places: within a NOTICE text file distributed 113 | as part of the Derivative Works; within the Source form or 114 | documentation, if provided along with the Derivative Works; or, 115 | within a display generated by the Derivative Works, if and 116 | wherever such third-party notices normally appear. The contents 117 | of the NOTICE file are for informational purposes only and 118 | do not modify the License. You may add Your own attribution 119 | notices within Derivative Works that You distribute, alongside 120 | or as an addendum to the NOTICE text from the Work, provided 121 | that such additional attribution notices cannot be construed 122 | as modifying the License. 
123 | 124 | You may add Your own copyright statement to Your modifications and 125 | may provide additional or different license terms and conditions 126 | for use, reproduction, or distribution of Your modifications, or 127 | for any such Derivative Works as a whole, provided Your use, 128 | reproduction, and distribution of the Work otherwise complies with 129 | the conditions stated in this License. 130 | 131 | 5. Submission of Contributions. Unless You explicitly state otherwise, 132 | any Contribution intentionally submitted for inclusion in the Work 133 | by You to the Licensor shall be under the terms and conditions of 134 | this License, without any additional terms or conditions. 135 | Notwithstanding the above, nothing herein shall supersede or modify 136 | the terms of any separate license agreement you may have executed 137 | with Licensor regarding such Contributions. 138 | 139 | 6. Trademarks. This License does not grant permission to use the trade 140 | names, trademarks, service marks, or product names of the Licensor, 141 | except as required for reasonable and customary use in describing the 142 | origin of the Work and reproducing the content of the NOTICE file. 143 | 144 | 7. Disclaimer of Warranty. Unless required by applicable law or 145 | agreed to in writing, Licensor provides the Work (and each 146 | Contributor provides its Contributions) on an "AS IS" BASIS, 147 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 148 | implied, including, without limitation, any warranties or conditions 149 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 150 | PARTICULAR PURPOSE. You are solely responsible for determining the 151 | appropriateness of using or redistributing the Work and assume any 152 | risks associated with Your exercise of permissions under this License. 153 | 154 | 8. Limitation of Liability. In no event and under no legal theory, 155 | whether in tort (including negligence), contract, or otherwise, 156 | unless required by applicable law (such as deliberate and grossly 157 | negligent acts) or agreed to in writing, shall any Contributor be 158 | liable to You for damages, including any direct, indirect, special, 159 | incidental, or consequential damages of any character arising as a 160 | result of this License or out of the use or inability to use the 161 | Work (including but not limited to damages for loss of goodwill, 162 | work stoppage, computer failure or malfunction, or any and all 163 | other commercial damages or losses), even if such Contributor 164 | has been advised of the possibility of such damages. 165 | 166 | 9. Accepting Warranty or Additional Liability. While redistributing 167 | the Work or Derivative Works thereof, You may choose to offer, 168 | and charge a fee for, acceptance of support, warranty, indemnity, 169 | or other liability obligations and/or rights consistent with this 170 | License. However, in accepting such obligations, You may act only 171 | on Your own behalf and on Your sole responsibility, not on behalf 172 | of any other Contributor, and only if You agree to indemnify, 173 | defend, and hold each Contributor harmless for any liability 174 | incurred by, or claims asserted against, such Contributor by reason 175 | of your accepting any such warranty or additional liability. 
176 | 177 | END OF TERMS AND CONDITIONS 178 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include contrib/pantalaimon.service 2 | include doc/man/pantalaimon.8 3 | include doc/man/pantalaimon.5 4 | include doc/man/panctl.1 5 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | test: 2 | python3 -m pytest 3 | python3 -m pytest --black pantalaimon 4 | python3 -m pytest --flake8 pantalaimon 5 | 6 | coverage: 7 | python3 -m pytest --cov=pantalaimon --cov-report term-missing 8 | 9 | typecheck: 10 | mypy --ignore-missing-imports pantalaimon 11 | 12 | run-local: 13 | python -m pantalaimon.main --log-level debug --config ./contrib/pantalaimon.conf 14 | 15 | isort: 16 | isort -y -p pantalaimon 17 | 18 | format: 19 | black pantalaimon/ 20 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | pantalaimon 2 | =========== 3 | 4 | Pantalaimon is an end-to-end encryption aware Matrix reverse proxy daemon. 5 | Pantalaimon acts as a good man in the middle that handles the encryption for you. 6 | 7 | Messages are transparently encrypted and decrypted for clients inside of 8 | pantalaimon. 9 | 10 | ![Pantalaimon in action](docs/pan.gif) 11 | 12 | Installation 13 | ============ 14 | 15 | The [Olm](https://gitlab.matrix.org/matrix-org/olm) C library is required to 16 | be installed before installing pantalaimon. 17 | 18 | If your distribution provides packages for libolm it is best to use those; note 19 | that a recent version of libolm (3.1+) is required. If your distribution doesn't 20 | provide a package, building from source is required. Please refer to the Olm 21 | [readme](https://gitlab.matrix.org/matrix-org/olm/blob/master/README.md) 22 | to see how to build the C library from source. 23 | 24 | Installing pantalaimon works as usual for Python packages: 25 | 26 | python setup.py install 27 | 28 | or you can use `pip` and install it with: 29 | ``` 30 | pip install .[ui] 31 | ``` 32 | 33 | It is recommended that you create a virtual environment first or install the dependencies 34 | via your package manager; the packages are usually prefixed with `python-`. 35 | 36 | Pantalaimon can also be found on PyPI: 37 | 38 | pip install pantalaimon 39 | 40 | Pantalaimon contains a D-Bus based UI that can be used to control the daemon. 41 | The D-Bus based UI is completely optional and needs to be installed with the 42 | daemon: 43 | 44 | pip install pantalaimon[ui] 45 | 46 | Do note that man pages can't be installed with pip.
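As a minimal sketch, a typical installation into a dedicated virtual environment could look like the following (this assumes libolm is already available from your distribution; the environment path is an arbitrary choice):

```bash
# Create and activate an isolated environment so pantalaimon's Python
# dependencies don't interfere with system packages.
python3 -m venv ~/.venvs/pantalaimon
source ~/.venvs/pantalaimon/bin/activate

# Install the released package from PyPI together with the optional D-Bus UI.
pip install "pantalaimon[ui]"

# Sanity check: the daemon should now be on the virtual environment's PATH.
pantalaimon --version
```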
47 | 48 | ### macOS installation 49 | 50 | For instance, on macOS, this means: 51 | 52 | ```bash 53 | brew install dbus 54 | perl -pi -e's#(EXTERNAL)##' $(brew --prefix dbus)/share/dbus-1/session.conf 55 | brew services start dbus 56 | # it may be necessary to restart now to get the whole OS to pick up the 57 | # existence of the dbus daemon 58 | 59 | git clone https://gitlab.matrix.org/matrix-org/olm 60 | (cd olm; make) 61 | git clone https://github.com/matrix-org/pantalaimon 62 | (cd pantalaimon; CFLAGS=-I../olm/include LDFLAGS=-L../olm/build/ python3 setup.py install) 63 | 64 | export DBUS_SESSION_BUS_ADDRESS=unix:path=$(launchctl getenv DBUS_LAUNCHD_SESSION_BUS_SOCKET) 65 | cd pantalaimon 66 | DYLD_LIBRARY_PATH=../olm/build/ pantalaimon -c contrib/pantalaimon.conf 67 | 68 | # for notification center: 69 | git clone https://github.com/fakechris/notification-daemon-mac-py 70 | # if you have django's `foundation` library installed and your filesystem 71 | # is case insensitive (the default) then you will need to `pip uninstall foundation` 72 | # or install PyObjC in a venv... 73 | pip install PyObjC daemon glib dbus-python 74 | cd notification-daemon-mac-py 75 | ./notify.py 76 | ``` 77 | 78 | ### Docker 79 | 80 | An experimental Docker image can be built for Pantalaimon, primarily for use in bots. 81 | 82 | ```bash 83 | docker build -t pantalaimon . 84 | # Create a pantalaimon.conf before running. The directory mentioned in the 85 | # volume below is for where Pantalaimon should dump some data. 86 | docker run -it --rm -v /path/to/pantalaimon/dir:/data -p 8008:8008 pantalaimon 87 | ``` 88 | The Docker image in the above example can alternatively be built straight from any branch or tag without the need to clone the repo, just by using this syntax: 89 | ```bash 90 | docker build -t pantalaimon github.com/matrix-org/pantalaimon#master 91 | ``` 92 | 93 | An example `pantalaimon.conf` for Docker is: 94 | ```conf 95 | [Default] 96 | LogLevel = Debug 97 | SSL = True 98 | 99 | [local-matrix] 100 | Homeserver = https://matrix.org 101 | ListenAddress = 0.0.0.0 102 | ListenPort = 8008 103 | SSL = False 104 | UseKeyring = False 105 | IgnoreVerification = True 106 | ``` 107 | 108 | Usage 109 | ===== 110 | 111 | While pantalaimon is a daemon, it is meant to be run as the same user as the app it is proxying for. It won't 112 | verify devices for you automatically, unless configured to do so, and requires 113 | user interaction to verify, ignore or blacklist devices. A more complete 114 | description of Pantalaimon can be found in the [man page](docs/man/pantalaimon.8.md). 115 | 116 | Pantalaimon requires a configuration file to run. The configuration file 117 | specifies one or more homeservers for pantalaimon to connect to. 118 | 119 | A minimal pantalaimon configuration looks like this: 120 | ```dosini 121 | [local-matrix] 122 | Homeserver = https://localhost:443 123 | ListenAddress = localhost 124 | ListenPort = 8009 125 | ``` 126 | 127 | The configuration file should be placed in `~/.config/pantalaimon/pantalaimon.conf`. 128 | 129 | The full documentation for the pantalaimons configuration can be found in 130 | the [man page](docs/man/pantalaimon.5.md) `pantalaimon(5)`. 131 | 132 | Now that pantalaimon is configured it can be run: 133 | 134 | pantalaimon --log-level debug 135 | 136 | After running the daemon, configure your client to connect to the daemon instead 137 | of your homeserver. The daemon listens by default on localhost and port 8009. 
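As a quick smoke test (a sketch that assumes the default listen address and port from the minimal configuration above), you can send an unauthenticated Matrix client API request through the proxy and check that the homeserver answers:

```bash
# The request is sent to pantalaimon, which forwards it to the configured
# homeserver; the JSON response lists the spec versions the homeserver supports.
curl http://localhost:8009/_matrix/client/versions
```

Matrix clients are pointed at the daemon the same way: use `http://localhost:8009` as the homeserver URL when logging in.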
138 | 139 | Note that logging in to the daemon is required to start a sync loop for a user. 140 | After that clients can connect using any valid access token for the user that 141 | logged in. Multiple users per homeserver are supported. 142 | 143 | For convenience a systemd service file is provided. 144 | 145 | To control the daemon an interactive utility is provided in the form of 146 | `panctl`. 147 | 148 | `panctl` can be used to verify, blacklist or ignore devices, import or export 149 | session keys, or to introspect devices of users that we share encrypted rooms 150 | with. 151 | 152 | ### Setup 153 | This all comes from an excellent comment that you can find [here](https://github.com/matrix-org/pantalaimon/issues/154#issuecomment-1951591191). 154 | 155 | 156 | 157 | 1) Ensure you have an OS keyring installed. In my case I installed `gnome-keyring`. You may also want a GUI like `seahorse` to inspect the keyring. (pantalaimon will work without a keyring but your client will have to log in with the password every time `pantalaimon` is restarted, instead of being able to reuse the access token from the previous successful login.) 158 | 159 | 2) In case you have prior attempts, clean the slate by deleting the `~/.local/share/pantalaimon` directory. 160 | 161 | 3) Start `pantalaimon`. 162 | 163 | 4) Connect a client to the `ListenAddress:ListenPort` you specified in `pantalaimon.conf`, e.g. to `127.0.0.1:8009`, using the same username and password you would've used to log in to your homeserver directly. 164 | 165 | 5) The login should succeed, but at this point all encrypted messages will fail to decrypt. This is fine. 166 | 167 | 6) Start another client that you were already using for your encrypted chats previously. In my case this was `app.element.io`, so the rest of the steps here assume that. 168 | 169 | 7) Run `panctl`. At the prompt, run `start-verification <pan-user> <user-id> <device-id>`. `<user-id>` here is the full user ID like `@arnavion:arnavion.dev`, and `<pan-user>` is the user you logged in with in step 4. If you only have the one Element session, `panctl` will show you the device ID as an autocomplete hint so you don't have to look it up. If you do need to look it up, go to Element -> profile icon -> All Settings -> Sessions, expand the "Current session" item, and the "Session ID" is the device ID. 170 | 171 | 8) In Element you will see a popup "Incoming Verification Request". Click "Continue". It will change to a popup containing some emojis, and `panctl` will print the same emojis. Click the "They match" button. It will now change to a popup like "Waiting for other client to confirm..." 172 | 173 | 9) In `panctl`, run `confirm-verification <pan-user> <user-id> <device-id>`, i.e. the same command as before but with `confirm-verification` instead of `start-verification`. 174 | 175 | 10) At this point, if you look at all your sessions in Element (profile icon -> All Settings -> Sessions), you should see "pantalaimon" in the "Other sessions" list as a "Verified" session. 176 | 177 | 11) Export the E2E room keys that Element was using via profile icon -> Security & Privacy -> Export E2E room keys. Pick any password and then save the file to some path. 178 | 179 | 12) Back in `panctl`, run `import-keys <pan-user> <file> <passphrase>`. After a few seconds, in the output of `pantalaimon`, you should see a log like `INFO: pantalaimon: Successfully imported keys for <pan-user> from <file>`. 180 | 181 | 13) Close and restart the client you had used in step 5, i.e. the one you want to connect to `pantalaimon`. Now, finally, you should be able to see the encrypted chats decrypted. 182 | 183 | 14) Delete the E2E room keys backup file from step 12.
You don't need it any more. 184 | 185 | 186 | 15) If in step 11 you had other unverified sessions from pantalaimon from your prior attempts, you can sign out of them too. 187 | 188 | You will probably have to repeat steps 11-14 any time you start a new encrypted chat in Element. 189 | -------------------------------------------------------------------------------- /contrib/pantalaimon.conf: -------------------------------------------------------------------------------- 1 | [Default] 2 | LogLevel = Debug 3 | SSL = True 4 | Notifications = On 5 | DebugEncryption = True 6 | 7 | [local-matrix] 8 | Homeserver = http://localhost:8008 9 | ListenAddress = localhost 10 | ListenPort = 8009 11 | Proxy = http://localhost:8080 12 | SSL = False 13 | IgnoreVerification = False 14 | UseKeyring = True 15 | DropOldKeys = False 16 | -------------------------------------------------------------------------------- /contrib/pantalaimon.service: -------------------------------------------------------------------------------- 1 | [Unit] 2 | Description=Pantalaimon E2E Matrix reverse proxy 3 | 4 | [Service] 5 | ExecStart=/usr/bin/pantalaimon 6 | 7 | [Install] 8 | WantedBy=default.target 9 | -------------------------------------------------------------------------------- /docs/man/panctl.1: -------------------------------------------------------------------------------- 1 | .Dd May 23, 2019 2 | .Dt PANCTL 1 3 | .Os 4 | .\" --------------------------------------------------------------------------- 5 | .Sh NAME 6 | .Nm panctl 7 | .Nd Control the Matrix reverse proxy daemon pantalaimon. 8 | .\" --------------------------------------------------------------------------- 9 | .Sh DESCRIPTION 10 | .Nm 11 | is a small utility to control and introspect the state of pantalaimon. 12 | .Pp 13 | .Ss Commands 14 | The commands accepted by 15 | .Nm 16 | are as follows: 17 | .Bl -tag -width 3n 18 | .It Cm list-servers 19 | List the configured homeservers and pan users on each homeserver. 20 | .It Cm list-devices Ar pan-user Ar user-id 21 | List the devices of a user that are known to the 22 | .Ar pan-user . 23 | .It Cm start-verification Ar pan-user Ar user-id 24 | Start an interactive key verification between the given pan-user and user. 25 | .It Cm accept-verification Ar pan-user Ar user-id 26 | Accept an interactive key verification that the given user has started with our 27 | given pan-user. 28 | .It Cm cancel-verification Ar pan-user Ar user-id 29 | Cancel an interactive key verification between the given pan-user and user. 30 | .It Cm confirm-verification Ar pan-user Ar user-id 31 | Confirm that the short authentication string of the interactive key verification 32 | with the given pan-user and user is matching. 33 | .It Cm verify-device Ar pan-user Ar user-id Ar device-id 34 | Manually mark the given device as verified. The device will be marked as verified 35 | only for the given pan-user. 36 | .It Cm unverify-device Ar pan-user Ar user-id Ar device-id 37 | Mark a previously verified device of the given user as unverified. 38 | .It Cm blacklist-device Ar pan-user Ar user-id Ar device-id 39 | Manually mark the given device of the given user as blacklisted. 40 | .It Cm unblacklist-device Ar pan-user Ar user-id Ar device-id 41 | Mark a previously blacklisted device of the given user as unblacklisted. 
42 | .It Cm send-anyways Ar pan-user Ar room-id 43 | If a encrypted room contains unverified devices and a connected Matrix client 44 | tries to send an message to such a room 45 | .Nm pantalaimon 46 | will send a notification that the room contains unverified users. Using this 47 | command the user can choose to mark all unverified devices as ignored. Ignored 48 | devices will receive encryption keys but will be left marked as unverified. 49 | The message will be sent away after all devices are marked as ignored. 50 | .It Cm cancel-sending Ar pan-user Ar room-id 51 | In contrast to the 52 | .Cm send-anyways 53 | command this command cancels the sending of a message to an encrypted room with 54 | unverified devices and gives the user the opportunity to verify or blacklist 55 | devices as they see fit. 56 | .It Cm import-keys Ar pan-user Ar file Ar passphrase 57 | Import end-to-end encryption keys from the given file for the given pan-user. 58 | .It Cm export-keys Ar pan-user Ar file Ar passphrase 59 | Export end-to-end encryption keys to the given file for the given pan-user. The 60 | provided passphrase is used to encrypt the file containing the keys. 61 | .El 62 | .\" --------------------------------------------------------------------------- 63 | .Sh EXIT STATUS 64 | .Ex -std 65 | .\" --------------------------------------------------------------------------- 66 | .Sh SEE ALSO 67 | .Xr pantalaimon 8 68 | .Xr pantalaimon 5 69 | .\" --------------------------------------------------------------------------- 70 | .Sh AUTHORS 71 | .Nm 72 | was written by 73 | .An Damir Jelić Aq Mt poljar@termina.org.uk . 74 | -------------------------------------------------------------------------------- /docs/man/panctl.md: -------------------------------------------------------------------------------- 1 | PANCTL(1) - General Commands Manual 2 | 3 | # NAME 4 | 5 | **panctl** - Control the Matrix reverse proxy daemon pantalaimon. 6 | 7 | # DESCRIPTION 8 | 9 | **panctl** 10 | is a small utility to control and introspect the state of pantalaimon. 11 | 12 | ## Commands 13 | 14 | The commands accepted by 15 | **panctl** 16 | are as follows: 17 | 18 | **list-servers** 19 | 20 | > List the configured homeservers and pan users on each homeserver. 21 | 22 | **list-devices** *pan-user* *user-id* 23 | 24 | > List the devices of a user that are known to the 25 | > *pan-user*. 26 | 27 | **start-verification** *pan-user* *user-id* 28 | 29 | > Start an interactive key verification between the given pan-user and user. 30 | 31 | **accept-verification** *pan-user* *user-id* 32 | 33 | > Accept an interactive key verification that the given user has started with our 34 | > given pan-user. 35 | 36 | **cancel-verification** *pan-user* *user-id* 37 | 38 | > Cancel an interactive key verification between the given pan-user and user. 39 | 40 | **confirm-verification** *pan-user* *user-id* 41 | 42 | > Confirm that the short authentication string of the interactive key verification 43 | > with the given pan-user and user is matching. 44 | 45 | **verify-device** *pan-user* *user-id* *device-id* 46 | 47 | > Manually mark the given device as verified. The device will be marked as verified 48 | > only for the given pan-user. 49 | 50 | **unverify-device** *pan-user* *user-id* *device-id* 51 | 52 | > Mark a previously verified device of the given user as unverified. 53 | 54 | **blacklist-device** *pan-user* *user-id* *device-id* 55 | 56 | > Manually mark the given device of the given user as blacklisted. 
57 | 58 | **unblacklist-device** *pan-user* *user-id* *device-id* 59 | 60 | > Mark a previously blacklisted device of the given user as unblacklisted. 61 | 62 | **send-anyways** *pan-user* *room-id* 63 | 64 | > If a encrypted room contains unverified devices and a connected Matrix client 65 | > tries to send an message to such a room 66 | > **pantalaimon** 67 | > will send a notification that the room contains unverified users. Using this 68 | > command the user can choose to mark all unverified devices as ignored. Ignored 69 | > devices will receive encryption keys but will be left marked as unverified. 70 | > The message will be sent away after all devices are marked as ignored. 71 | 72 | **cancel-sending** *pan-user* *room-id* 73 | 74 | > In contrast to the 75 | > **send-anyways** 76 | > command this command cancels the sending of a message to an encrypted room with 77 | > unverified devices and gives the user the opportunity to verify or blacklist 78 | > devices as they see fit. 79 | 80 | **import-keys** *pan-user* *file* *passphrase* 81 | 82 | > Import end-to-end encryption keys from the given file for the given pan-user. 83 | 84 | **export-keys** *pan-user* *file* *passphrase* 85 | 86 | > Export end-to-end encryption keys to the given file for the given pan-user. The 87 | > provided passphrase is used to encrypt the file containing the keys. 88 | 89 | # EXIT STATUS 90 | 91 | The **panctl** utility exits 0 on success, and >0 if an error occurs. 92 | 93 | # SEE ALSO 94 | 95 | pantalaimon(8) 96 | pantalaimon(5) 97 | 98 | # AUTHORS 99 | 100 | **panctl** 101 | was written by 102 | Damir Jelić <[poljar@termina.org.uk](mailto:poljar@termina.org.uk)>. 103 | 104 | Linux 5.1.3-arch2-1-ARCH - May 23, 2019 105 | -------------------------------------------------------------------------------- /docs/man/pantalaimon.5: -------------------------------------------------------------------------------- 1 | .Dd May 08, 2019 2 | .Dt PANTALAIMON.CONF 5 3 | .Os 4 | .\" --------------------------------------------------------------------------- 5 | .Sh NAME 6 | .Nm pantalaimon.conf 7 | .Nd pantalaimon configuration file 8 | .\" --------------------------------------------------------------------------- 9 | .Sh DESCRIPTION 10 | .Xr pantalaimon 1 reads configuration data in the INI file format. 11 | The configuration file is used to configure 12 | .Nm pantalaimon 13 | homeservers. 14 | .Pp 15 | The sections inside the configuration file represent a pantalaimon proxy 16 | instance with the section name enclosed in square brackets representing an user 17 | chosen instance name. 18 | .Pp 19 | The following keys are required in the proxy instance sections: 20 | .Bl -tag -width 3n 21 | .It Cm Homeserver 22 | The URI of the homeserver that the pantalaimon proxy should forward requests to, 23 | without the matrix API path but including the http(s) schema. 24 | .El 25 | .Pp 26 | The following keys are optional in the proxy instance sections: 27 | .Bl -tag -width 3n 28 | .It Cm ListenAddress 29 | The address where the daemon will listen to client connections for this 30 | homeserver. Defaults to "localhost". 31 | .It Cm ListenPort 32 | The port where the daemon will listen to client connections for this 33 | homeserver. Note that the listen address/port combination needs to be unique 34 | between different homeservers. Defaults to "8009". 35 | .It Cm Proxy 36 | An URI of a HTTP proxy that the daemon should use when making requests to the 37 | homeserver. 38 | .Nm pantalaimon 39 | only supports HTTP proxies. 
The default is to make a direct connection to the 40 | homeserver. 41 | .It Cm SSL 42 | A boolean that decides if SSL verification should be enabled for outgoing 43 | connections to the homeserver. Defaults to "True". 44 | .It Cm IgnoreVerification 45 | A boolean that decides if device verification should be enabled. If this is True 46 | devices will be marked as ignored automatically and encryption keys will be 47 | shared with them, if this is False the user needs to verify, blacklist or ignore 48 | devices manually before messages can be sent to a room. Defaults to "False". 49 | .It Cm UseKeyring 50 | This option configures if a proxy instance should use the OS keyring to store 51 | its own access tokens. The access tokens are required for the daemon to resume 52 | operation. If this is set to "No", access tokens are stored in the pantalaimon 53 | database in plaintext. Defaults to "Yes". 54 | .It Cm DropOldKeys 55 | This option configures if a proxy instance should only keep the latest version 56 | of a room key from a certain user around. This effectively means that only newly 57 | incoming messages will be decryptable, the proxy will be unable to decrypt the 58 | room history. Defaults to "No". 59 | .It Cm SearchRequests 60 | This option configures if the proxy should make additional HTTP requests to the 61 | server when clients use the search API endpoint. Some data that is required to 62 | fill out a complete search response is only available on the Homeserver (e.g. 63 | start/end tokens for the event context or room state at a particular point in 64 | time). 65 | 66 | If this option is set to "On" 67 | .Nm pantalaimon 68 | will make additional HTTP requests to fetch the unavailable data from the 69 | Homeserver, note that this will make the search much slower. If this is set to 70 | "Off" 71 | .Nm pantalaimon 72 | will not make any additional HTTP requests and will leave some data fields in 73 | the search response empty. Defaults to "Off". 74 | .It Cm IndexEncryptedOnly 75 | A configuration option to decide if 76 | .Nm pantalaimon 77 | should fetch the history for 78 | unencrytped rooms as well as for encrypted ones. If True, only the history for 79 | encrypted rooms is fetched and indexed. Search requests for non-encrypted 80 | rooms are forwarded to the Homeserver. Defaults to "True". 81 | .It Cm IndexingBatchSize 82 | The number of messages that should be requested from the Homeserver when we 83 | fetch and index messages from the room history. Defaults to 100. 84 | .It Cm HistoryFetchDelay 85 | The amount of time to wait between room message history requests to the 86 | Homeserver in ms. Defaults to 3000. 87 | .El 88 | .Pp 89 | Additional to the homeserver section a special section with the name 90 | .Cm Default 91 | can be used to configure the following values for all homeservers: 92 | .Cm ListenAddress , 93 | .Cm ListenPort , 94 | .Cm Proxy , 95 | .Cm SSL 96 | .Cm IgnoreVerification 97 | .Cm UseKeyring 98 | .Pp 99 | The 100 | .Cm Default 101 | section has the following keys that globally change the behaviour of the daemon: 102 | .Bl -tag -width 3n 103 | .It Cm LogLevel 104 | Set the log level of the daemon, can be one of 105 | .Ar error , 106 | .Ar warning , 107 | .Ar info , 108 | .Ar debug . 109 | Defaults to 110 | .Ar warning . 
111 | .It Cm Notifications 112 | The daemon sends out notifications for some actions that require users to 113 | interfere (unverified devices are in a room, interactive key verification 114 | events), this option enables or disables OS notifications. Can be one of 115 | .Ar On , 116 | .Ar Off . 117 | Defaults to 118 | .Ar On . 119 | 120 | .El 121 | .\" --------------------------------------------------------------------------- 122 | .Sh FILES 123 | .Nm pantalaimon 124 | supports the XDG Base Directory Specification, the default locations can be 125 | overridden using appropriate environment variables. 126 | .Pp 127 | .Bl -tag -width 34 -compact 128 | .It Pa ~/.config/pantalaimon/pantalaimon.conf 129 | Default location of the configuration file. 130 | .El 131 | .\" --------------------------------------------------------------------------- 132 | .Sh EXAMPLES 133 | The following example shows a configured pantalaimon proxy with the name 134 | .Em Clocktown , 135 | the homeserver URL is set to 136 | .Em https://localhost:8448 , 137 | the pantalaimon proxy is listening for client connections on the address 138 | .Em localhost , 139 | and port 140 | .Em 8009 . 141 | The pantalaimon proxy is making connections to the homeserver through the proxy 142 | .Em http://localhost:8009 , 143 | finally, SSL verification is disabled. 144 | .Pp 145 | Additionally to the 146 | .Em Clocktown 147 | section the 148 | .Em Default 149 | section is also listed and the default value for SSL verification is set to 150 | True, OS notifications are enabled and the debug level is set to 151 | .Em Debug . 152 | .Bd -literal -offset indent 153 | [Default] 154 | LogLevel = Debug 155 | SSL = True 156 | Notifications = On 157 | 158 | [Clocktown] 159 | Homeserver = https://localhost:8448 160 | ListenAddress = localhost 161 | ListenPort = 8009 162 | Proxy = http://localhost:8080 163 | SSL = False 164 | .Ed 165 | .\" --------------------------------------------------------------------------- 166 | .Sh SEE ALSO 167 | .Xr pantalaimon 8 168 | .\" --------------------------------------------------------------------------- 169 | .Sh AUTHORS 170 | .Nm 171 | was written by 172 | .An Damir Jelić Aq Mt poljar@termina.org.uk . 173 | -------------------------------------------------------------------------------- /docs/man/pantalaimon.5.md: -------------------------------------------------------------------------------- 1 | PANTALAIMON.CONF(5) - File Formats Manual 2 | 3 | # NAME 4 | 5 | **pantalaimon.conf** - pantalaimon configuration file 6 | 7 | # DESCRIPTION 8 | 9 | pantalaimon(1) reads configuration data in the INI file format. 10 | The configuration file is used to configure 11 | **pantalaimon** 12 | homeservers. 13 | 14 | The sections inside the configuration file represent a pantalaimon proxy 15 | instance with the section name enclosed in square brackets representing an user 16 | chosen instance name. 17 | 18 | The following keys are required in the proxy instance sections: 19 | 20 | **Homeserver** 21 | 22 | > The URI of the homeserver that the pantalaimon proxy should forward requests to, 23 | > without the matrix API path but including the http(s) schema. 24 | 25 | The following keys are optional in the proxy instance sections: 26 | 27 | **ListenAddress** 28 | 29 | > The address where the daemon will listen to client connections for this 30 | > homeserver. Defaults to "localhost". 31 | 32 | **ListenPort** 33 | 34 | > The port where the daemon will listen to client connections for this 35 | > homeserver. 
Note that the listen address/port combination needs to be unique 36 | > between different homeservers. Defaults to "8009". 37 | 38 | **Proxy** 39 | 40 | > An URI of a HTTP proxy that the daemon should use when making requests to the 41 | > homeserver. 42 | > **pantalaimon** 43 | > only supports HTTP proxies. The default is to make a direct connection to the 44 | > homeserver. 45 | 46 | **SSL** 47 | 48 | > A boolean that decides if SSL verification should be enabled for outgoing 49 | > connections to the homeserver. Defaults to "True". 50 | 51 | **IgnoreVerification** 52 | 53 | > A boolean that decides if device verification should be enabled. If this is True 54 | > devices will be marked as ignored automatically and encryption keys will be 55 | > shared with them, if this is False the user needs to verify, blacklist or ignore 56 | > devices manually before messages can be sent to a room. Defaults to "False". 57 | 58 | **UseKeyring** 59 | 60 | > This option configures if a proxy instance should use the OS keyring to store 61 | > its own access tokens. The access tokens are required for the daemon to resume 62 | > operation. If this is set to "No", access tokens are stored in the pantalaimon 63 | > database in plaintext. Defaults to "Yes". 64 | 65 | **DropOldKeys** 66 | 67 | > This option configures if a proxy instance should only keep the latest version 68 | > of a room key from a certain user around. This effectively means that only newly 69 | > incoming messages will be decryptable, the proxy will be unable to decrypt the 70 | > room history. Defaults to "No". 71 | 72 | Additional to the homeserver section a special section with the name 73 | **Default** 74 | can be used to configure the following values for all homeservers: 75 | **ListenAddress**, 76 | **ListenPort**, 77 | **Proxy**, 78 | **SSL** 79 | **IgnoreVerification** 80 | **UseKeyring** 81 | 82 | The 83 | **Default** 84 | section has the following keys that globally change the behaviour of the daemon: 85 | 86 | **LogLevel** 87 | 88 | > Set the log level of the daemon, can be one of 89 | > *error*, 90 | > *warning*, 91 | > *info*, 92 | > *debug*. 93 | > Defaults to 94 | > *warning*. 95 | 96 | **Notifications** 97 | 98 | > The daemon sends out notifications for some actions that require users to 99 | > interfere (unverified devices are in a room, interactive key verification 100 | > events), this option enables or disables OS notifications. Can be one of 101 | > *On*, 102 | > *Off*. 103 | > Defaults to 104 | > *On*. 105 | 106 | # FILES 107 | 108 | **pantalaimon** 109 | supports the XDG Base Directory Specification, the default locations can be 110 | overridden using appropriate environment variables. 111 | 112 | *~/.config/pantalaimon/pantalaimon.conf* 113 | 114 | > Default location of the configuration file. 115 | 116 | # EXAMPLES 117 | 118 | The following example shows a configured pantalaimon proxy with the name 119 | *Clocktown*, 120 | the homeserver URL is set to 121 | *https://localhost:8448*, 122 | the pantalaimon proxy is listening for client connections on the address 123 | *localhost*, 124 | and port 125 | *8009*. 126 | The pantalaimon proxy is making connections to the homeserver through the proxy 127 | *http://localhost:8009*, 128 | finally, SSL verification is disabled. 129 | 130 | Additionally to the 131 | *Clocktown* 132 | section the 133 | *Default* 134 | section is also listed and the default value for SSL verification is set to 135 | True, OS notifications are enabled and the debug level is set to 136 | *Debug*. 
137 | 138 | [Default] 139 | LogLevel = Debug 140 | SSL = True 141 | Notifications = On 142 | 143 | [Clocktown] 144 | Homeserver = https://localhost:8448 145 | ListenAddress = localhost 146 | ListenPort = 8009 147 | Proxy = http://localhost:8080 148 | SSL = False 149 | 150 | # SEE ALSO 151 | 152 | pantalaimon(8) 153 | 154 | # AUTHORS 155 | 156 | **pantalaimon.conf** 157 | was written by 158 | Damir Jelić <[poljar@termina.org.uk](mailto:poljar@termina.org.uk)>. 159 | 160 | Linux 5.11.16-arch1-1 - May 8, 2019 161 | -------------------------------------------------------------------------------- /docs/man/pantalaimon.8: -------------------------------------------------------------------------------- 1 | .Dd Oct 18, 2019 2 | .Dt PANTALAIMON 8 3 | .Os 4 | .\" --------------------------------------------------------------------------- 5 | .Sh NAME 6 | .Nm pantalaimon 7 | .Nd End-to-end encryption aware Matrix reverse proxy daemon. 8 | .\" --------------------------------------------------------------------------- 9 | .Sh SYNOPSIS 10 | .Nm 11 | .Op Fl c Ar config 12 | .Op Fl -log-level Ar level 13 | .Op Fl -data-path Ar path 14 | .Op Fl -version 15 | .Op Fl -help 16 | .\" --------------------------------------------------------------------------- 17 | .Sh DESCRIPTION 18 | .Nm 19 | is a daemon that acts as a reverse proxy between a Matrix homeserver and a 20 | Matrix client. The daemon transparently handles end-to-end encryption tasks on 21 | behalf of the client. 22 | .Pp 23 | .Nm 24 | is supposed to run as your own user and listen to connections on a 25 | non-privileged port. A client needs to log in using the standard Matrix HTTP 26 | calls to register itself to the daemon, such a registered user is called a pan 27 | user and will have it's own sync loop to keep up with the server. Multiple matrix 28 | clients can connect and use the same pan user. 29 | .Pp 30 | If user interaction is required 31 | .Nm 32 | will send out OS notifications which the user can react to. 33 | .Nm 34 | also provides a D-Bus API that is used for encryption related tasks that 35 | require user interference (e.g. device verification). 36 | .Pp 37 | .Nm 38 | requires a homeserver to be configured. Multiple homeservers can be configured, 39 | each configured homeserver needs to listen on a separate port. Each homeserver 40 | can handle end-to-end encryption for multiple users. The configuration file 41 | format is specified in 42 | .Xr pantalaimon 5 , 43 | the default location of the configuration file can be found in the 44 | .Sx FILES 45 | section. 46 | .\" --------------------------------------------------------------------------- 47 | .Ss Options 48 | The command line flags to change the behaviour of 49 | .Nm 50 | are as follows: 51 | .Bl -tag -width Ds 52 | .It Fl c , Fl -config Ar file 53 | Use the supplied 54 | .Ar file 55 | as the configuration file instead of the default one. 56 | .It Fl -log-level Ar level 57 | Set the log level of the daemon, can be one of 58 | .Ar error , 59 | .Ar warning , 60 | .Ar info , 61 | .Ar debug . 62 | Defaults to 63 | .Ar warning . 64 | .It Fl -data-path Ar path 65 | Set the directory for the pantalaimon database. This config option takes 66 | precedence over the XDG environment variables. 67 | .It Fl -version 68 | Display the version number and exit. 69 | .It Fl -help 70 | Display the help and exit. 
71 | .El 72 | .\" --------------------------------------------------------------------------- 73 | .Sh FILES 74 | .Nm 75 | supports the XDG Base Directory Specification, the default locations can be 76 | overridden using appropriate environment variables. 77 | .Pp 78 | .Bl -tag -width DS -compact 79 | .It Pa ~/.config/pantalaimon/pantalaimon.conf 80 | Default location of the configuration file. 81 | The format of the configuration file is described in 82 | .Xr pantalaimon 5 . 83 | .Pp 84 | .It Pa ~/.local/share/pantalaimon/pan.db 85 | Default location of the pantalaimon database. 86 | This file is used to store a sqlite database holding daemon state and encryption 87 | keys. 88 | .El 89 | .\" --------------------------------------------------------------------------- 90 | .Sh EXIT STATUS 91 | .Ex -std 92 | .\" --------------------------------------------------------------------------- 93 | .Sh SEE ALSO 94 | .Xr panctl 1 95 | .Xr pantalaimon 5 96 | .\" --------------------------------------------------------------------------- 97 | .Sh AUTHORS 98 | .Nm 99 | was written by 100 | .An Damir Jelić Aq Mt poljar@termina.org.uk . 101 | -------------------------------------------------------------------------------- /docs/man/pantalaimon.8.md: -------------------------------------------------------------------------------- 1 | PANTALAIMON(8) - System Manager's Manual 2 | 3 | # NAME 4 | 5 | **pantalaimon** - End-to-end encryption aware Matrix reverse proxy daemon. 6 | 7 | # SYNOPSIS 8 | 9 | **pantalaimon** 10 | \[**-c** *config*] 11 | \[**--log-level** *level*] 12 | \[**--data-path** *path*] 13 | \[**--version**] 14 | \[**--help**] 15 | 16 | # DESCRIPTION 17 | 18 | **pantalaimon** 19 | is a daemon that acts as a reverse proxy between a Matrix homeserver and a 20 | Matrix client. The daemon transparently handles end-to-end encryption tasks on 21 | behalf of the client. 22 | 23 | **pantalaimon** 24 | is supposed to run as your own user and listen to connections on a 25 | non-privileged port. A client needs to log in using the standard Matrix HTTP 26 | calls to register itself to the daemon, such a registered user is called a pan 27 | user and will have its own sync loop to keep up with the server. Multiple matrix 28 | clients can connect and use the same pan user. 29 | 30 | If user interaction is required 31 | **pantalaimon** 32 | will send out OS notifications which the user can react to. 33 | **pantalaimon** 34 | also provides a D-Bus API that is used for encryption related tasks that 35 | require user interference (e.g. device verification). 36 | 37 | **pantalaimon** 38 | requires a homeserver to be configured. Multiple homeservers can be configured, 39 | each configured homeserver needs to listen on a separate port. Each homeserver 40 | can handle end-to-end encryption for multiple users. The configuration file 41 | format is specified in 42 | pantalaimon(5), 43 | the default location of the configuration file can be found in the 44 | *FILES* 45 | section. 46 | 47 | ## Options 48 | 49 | The command line flags to change the behaviour of 50 | **pantalaimon** 51 | are as follows: 52 | 53 | **-c**, **--config** *file* 54 | 55 | > Use the supplied 56 | > *file* 57 | > as the configuration file instead of the default one. 58 | 59 | **--log-level** *level* 60 | 61 | > Set the log level of the daemon, can be one of 62 | > *error*, 63 | > *warning*, 64 | > *info*, 65 | > *debug*. 66 | > Defaults to 67 | > *warning*. 
68 | 69 | **--data-path** *path* 70 | 71 | > Set the directory for the pantalaimon database. This config option takes 72 | > precedence over the XDG environment variables. 73 | 74 | **--version** 75 | 76 | > Display the version number and exit. 77 | 78 | **--help** 79 | 80 | > Display the help and exit. 81 | 82 | # FILES 83 | 84 | **pantalaimon** 85 | supports the XDG Base Directory Specification, the default locations can be 86 | overridden using appropriate environment variables. 87 | 88 | *~/.config/pantalaimon/pantalaimon.conf* 89 | 90 | > Default location of the configuration file. 91 | > The format of the configuration file is described in 92 | > pantalaimon(5). 93 | 94 | *~/.local/share/pantalaimon/pan.db* 95 | 96 | > Default location of the pantalaimon database. 97 | > This file is used to store a sqlite database holding daemon state and encryption 98 | > keys. 99 | 100 | # EXIT STATUS 101 | 102 | The **pantalaimon** utility exits 0 on success, and >0 if an error occurs. 103 | 104 | # SEE ALSO 105 | 106 | panctl(1) 107 | pantalaimon(5) 108 | 109 | # AUTHORS 110 | 111 | **pantalaimon** 112 | was written by 113 | Damir Jelić <[poljar@termina.org.uk](mailto:poljar@termina.org.uk)>. 114 | 115 | Linux 5.3.5-arch1-1-ARCH - October 18, 2019 116 | -------------------------------------------------------------------------------- /docs/pan.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/matrix-org/pantalaimon/9fe0e801284b8afe11071443b3143b419fc95d27/docs/pan.gif -------------------------------------------------------------------------------- /pantalaimon/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/matrix-org/pantalaimon/9fe0e801284b8afe11071443b3143b419fc95d27/pantalaimon/__init__.py -------------------------------------------------------------------------------- /pantalaimon/config.py: -------------------------------------------------------------------------------- 1 | # Copyright 2019 The Matrix.org Foundation CIC 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 
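# An illustrative example of the configuration this module parses; the section
# name, homeserver URL and port below are placeholders, and any key that is
# left out falls back to the defaults defined in PanConfigParser below:
#
#     [Default]
#     LogLevel = Debug
#
#     [LocalMatrix]
#     Homeserver = https://localhost:8448
#     ListenAddress = 127.0.0.1
#     ListenPort = 8009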
14 | 15 | import configparser 16 | import os 17 | from ipaddress import IPv4Address, IPv6Address, ip_address 18 | from typing import Union 19 | from urllib.parse import ParseResult, urlparse 20 | 21 | import attr 22 | import logbook 23 | 24 | 25 | class PanConfigParser(configparser.ConfigParser): 26 | def __init__(self): 27 | super().__init__( 28 | default_section="Default", 29 | defaults={ 30 | "SSL": "True", 31 | "IgnoreVerification": "False", 32 | "ListenAddress": "localhost", 33 | "ListenPort": "8009", 34 | "LogLevel": "warning", 35 | "Notifications": "on", 36 | "UseKeyring": "yes", 37 | "SearchRequests": "off", 38 | "IndexEncryptedOnly": "True", 39 | "IndexingBatchSize": "100", 40 | "HistoryFetchDelay": "3000", 41 | "DebugEncryption": "False", 42 | "DropOldKeys": "False", 43 | }, 44 | converters={ 45 | "address": parse_address, 46 | "url": parse_url, 47 | "loglevel": parse_log_level, 48 | }, 49 | ) 50 | 51 | 52 | def parse_address(value): 53 | # type: (str) -> Union[IPv4Address, IPv6Address] 54 | if value == "localhost": 55 | return ip_address("127.0.0.1") 56 | 57 | return ip_address(value) 58 | 59 | 60 | def parse_url(v): 61 | # type: (str) -> ParseResult 62 | value = urlparse(v) 63 | 64 | if value.scheme not in ("http", "https"): 65 | raise ValueError( 66 | f"Invalid URL scheme {value.scheme}. " f"Only HTTP(s) URLs are allowed" 67 | ) 68 | value.port 69 | 70 | return value 71 | 72 | 73 | def parse_log_level(value): 74 | # type: (str) -> logbook 75 | value = value.lower() 76 | 77 | if value == "info": 78 | return logbook.INFO 79 | elif value == "warning": 80 | return logbook.WARNING 81 | elif value == "error": 82 | return logbook.ERROR 83 | elif value == "debug": 84 | return logbook.DEBUG 85 | 86 | return logbook.WARNING 87 | 88 | 89 | class PanConfigError(Exception): 90 | """Pantalaimon configuration error.""" 91 | 92 | pass 93 | 94 | 95 | @attr.s 96 | class ServerConfig: 97 | """Server configuration. 98 | 99 | Args: 100 | name (str): A unique user chosen name that identifies the server. 101 | homeserver (ParseResult): The URL of the Matrix homeserver that we want 102 | to forward requests to. 103 | listen_address (str): The local address where pantalaimon will listen 104 | for connections. 105 | listen_port (int): The port where pantalaimon will listen for 106 | connections. 107 | proxy (ParseResult): 108 | A proxy that the daemon should use when making connections to the 109 | homeserver. 110 | ssl (bool): Enable or disable SSL for the connection between 111 | pantalaimon and the homeserver. 112 | ignore_verification (bool): Enable or disable device verification for 113 | E2E encrypted messages. 114 | keyring (bool): Enable or disable the OS keyring for the storage of 115 | access tokens. 116 | search_requests (bool): Enable or disable additional Homeserver requests 117 | for the search API endpoint. 118 | index_encrypted_only (bool): Enable or disable message indexing fro 119 | non-encrypted rooms. 120 | indexing_batch_size (int): The number of messages that should be 121 | requested from the Homeserver when we fetch and index messages from 122 | the room history. 123 | history_fetch_delay (int): The delay between room history fetching 124 | requests in seconds. 125 | drop_old_keys (bool): Should Pantalaimon only keep the most recent 126 | decryption key around. 
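
    Example:
        A server entry for a locally running homeserver might be constructed
        like this (illustrative values only)::

            ServerConfig(
                "LocalMatrix",
                urlparse("https://localhost:8448"),
                ip_address("127.0.0.1"),
                8009,
            )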
127 | """ 128 | 129 | name = attr.ib(type=str) 130 | homeserver = attr.ib(type=ParseResult) 131 | listen_address = attr.ib( 132 | type=Union[IPv4Address, IPv6Address], default=ip_address("127.0.0.1") 133 | ) 134 | listen_port = attr.ib(type=int, default=8009) 135 | proxy = attr.ib(type=str, default="") 136 | ssl = attr.ib(type=bool, default=True) 137 | ignore_verification = attr.ib(type=bool, default=False) 138 | keyring = attr.ib(type=bool, default=True) 139 | search_requests = attr.ib(type=bool, default=False) 140 | index_encrypted_only = attr.ib(type=bool, default=True) 141 | indexing_batch_size = attr.ib(type=int, default=100) 142 | history_fetch_delay = attr.ib(type=int, default=3) 143 | drop_old_keys = attr.ib(type=bool, default=False) 144 | 145 | 146 | @attr.s 147 | class PanConfig: 148 | """Pantalaimon configuration. 149 | 150 | Args: 151 | config_path (str): The path where we should search for a configuration 152 | file. 153 | filename (str): The name of the file that we should read. 154 | debug_encryption (bool): Should debug logs be enabled for the Matrix 155 | encryption support. 156 | """ 157 | 158 | config_file = attr.ib() 159 | 160 | log_level = attr.ib(default=None) 161 | debug_encryption = attr.ib(type=bool, default=None) 162 | notifications = attr.ib(default=None) 163 | servers = attr.ib(init=False, default=attr.Factory(dict)) 164 | 165 | def read(self): 166 | """Read the configuration file. 167 | 168 | Raises OSError if the file can't be read or PanConfigError if there is 169 | a syntax error with the config file. 170 | """ 171 | config = PanConfigParser() 172 | try: 173 | config.read(os.path.abspath(self.config_file)) 174 | except configparser.Error as e: 175 | raise PanConfigError(e) 176 | 177 | if self.log_level is None: 178 | self.log_level = config["Default"].getloglevel("LogLevel") 179 | 180 | if self.notifications is None: 181 | self.notifications = config["Default"].getboolean("Notifications") 182 | 183 | self.debug_encryption = config["Default"].getboolean("DebugEncryption") 184 | 185 | listen_set = set() 186 | 187 | try: 188 | for section_name, section in config.items(): 189 | if section_name == "Default": 190 | continue 191 | 192 | homeserver = section.geturl("Homeserver") 193 | 194 | if not homeserver: 195 | raise PanConfigError( 196 | f"Homserver is not set for " f"section {section_name}" 197 | ) 198 | 199 | listen_address = section.getaddress("ListenAddress") 200 | listen_port = section.getint("ListenPort") 201 | ssl = section.getboolean("SSL") 202 | ignore_verification = section.getboolean("IgnoreVerification") 203 | keyring = section.getboolean("UseKeyring") 204 | proxy = section.geturl("Proxy") 205 | search_requests = section.getboolean("SearchRequests") 206 | index_encrypted_only = section.getboolean("IndexEncryptedOnly") 207 | 208 | indexing_batch_size = section.getint("IndexingBatchSize") 209 | 210 | if not 1 < indexing_batch_size <= 1000: 211 | raise PanConfigError( 212 | "The indexing batch size needs to be " 213 | "a positive integer between 1 and " 214 | "1000" 215 | ) 216 | 217 | history_fetch_delay = section.getint("HistoryFetchDelay") 218 | 219 | if not 100 < history_fetch_delay <= 10000: 220 | raise PanConfigError( 221 | "The history fetch delay needs to be " 222 | "a positive integer between 100 and " 223 | "10000" 224 | ) 225 | 226 | listen_tuple = (listen_address, listen_port) 227 | 228 | if listen_tuple in listen_set: 229 | raise PanConfigError( 230 | f"The listen address/port combination" 231 | f" for section {section_name} was " 232 | 
f"already defined before." 233 | ) 234 | listen_set.add(listen_tuple) 235 | drop_old_keys = section.getboolean("DropOldKeys") 236 | 237 | server_conf = ServerConfig( 238 | section_name, 239 | homeserver, 240 | listen_address, 241 | listen_port, 242 | proxy, 243 | ssl, 244 | ignore_verification, 245 | keyring, 246 | search_requests, 247 | index_encrypted_only, 248 | indexing_batch_size, 249 | history_fetch_delay / 1000, 250 | drop_old_keys, 251 | ) 252 | 253 | self.servers[section_name] = server_conf 254 | 255 | except ValueError as e: 256 | raise PanConfigError(e) 257 | -------------------------------------------------------------------------------- /pantalaimon/index.py: -------------------------------------------------------------------------------- 1 | # Copyright 2019 The Matrix.org Foundation CIC 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | 15 | 16 | class InvalidQueryError(Exception): 17 | pass 18 | 19 | 20 | if False: 21 | import asyncio 22 | import datetime 23 | import json 24 | import os 25 | from functools import partial 26 | 27 | import attr 28 | import tantivy 29 | from nio import ( 30 | RoomEncryptedMedia, 31 | RoomMessageMedia, 32 | RoomMessageText, 33 | RoomNameEvent, 34 | RoomTopicEvent, 35 | ) 36 | from peewee import ( 37 | SQL, 38 | DateTimeField, 39 | ForeignKeyField, 40 | Model, 41 | SqliteDatabase, 42 | TextField, 43 | ) 44 | 45 | from pantalaimon.store import use_database 46 | 47 | INDEXING_ENABLED = True 48 | 49 | class DictField(TextField): 50 | def python_value(self, value): # pragma: no cover 51 | return json.loads(value) 52 | 53 | def db_value(self, value): # pragma: no cover 54 | return json.dumps(value) 55 | 56 | class StoreUser(Model): 57 | user_id = TextField() 58 | 59 | class Meta: 60 | constraints = [SQL("UNIQUE(user_id)")] 61 | 62 | class Profile(Model): 63 | user_id = TextField() 64 | avatar_url = TextField(null=True) 65 | display_name = TextField(null=True) 66 | 67 | class Meta: 68 | constraints = [SQL("UNIQUE(user_id,avatar_url,display_name)")] 69 | 70 | class Event(Model): 71 | event_id = TextField() 72 | sender = TextField() 73 | date = DateTimeField() 74 | room_id = TextField() 75 | 76 | source = DictField() 77 | 78 | profile = ForeignKeyField(model=Profile, column_name="profile_id") 79 | 80 | class Meta: 81 | constraints = [SQL("UNIQUE(event_id, room_id, sender, profile_id)")] 82 | 83 | class UserMessages(Model): 84 | user = ForeignKeyField(model=StoreUser, column_name="user_id") 85 | event = ForeignKeyField(model=Event, column_name="event_id") 86 | 87 | @attr.s 88 | class MessageStore: 89 | user = attr.ib(type=str) 90 | store_path = attr.ib(type=str) 91 | database_name = attr.ib(type=str) 92 | database = attr.ib(type=SqliteDatabase, init=False) 93 | database_path = attr.ib(type=str, init=False) 94 | 95 | models = [StoreUser, Event, Profile, UserMessages] 96 | 97 | def __attrs_post_init__(self): 98 | self.database_path = os.path.join( 99 | os.path.abspath(self.store_path), self.database_name 100 | ) 101 | 
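            # Open (or create) the per-user sqlite event database and make
            # sure all of its tables exist before the store is used.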
102 | self.database = self._create_database() 103 | self.database.connect() 104 | 105 | with self.database.bind_ctx(self.models): 106 | self.database.create_tables(self.models) 107 | 108 | def _create_database(self): 109 | return SqliteDatabase( 110 | self.database_path, pragmas={"foreign_keys": 1, "secure_delete": 1} 111 | ) 112 | 113 | @use_database 114 | def event_in_store(self, event_id, room_id): 115 | user, _ = StoreUser.get_or_create(user_id=self.user) 116 | query = ( 117 | Event.select() 118 | .join(UserMessages) 119 | .where( 120 | (Event.room_id == room_id) 121 | & (Event.event_id == event_id) 122 | & (UserMessages.user == user) 123 | ) 124 | .execute() 125 | ) 126 | 127 | for _ in query: 128 | return True 129 | 130 | return False 131 | 132 | def save_event(self, event, room_id, display_name=None, avatar_url=None): 133 | user, _ = StoreUser.get_or_create(user_id=self.user) 134 | 135 | profile_id, _ = Profile.get_or_create( 136 | user_id=event.sender, display_name=display_name, avatar_url=avatar_url 137 | ) 138 | 139 | event_source = event.source 140 | event_source["room_id"] = room_id 141 | 142 | event_id = ( 143 | Event.insert( 144 | event_id=event.event_id, 145 | sender=event.sender, 146 | date=datetime.datetime.fromtimestamp(event.server_timestamp / 1000), 147 | room_id=room_id, 148 | source=event_source, 149 | profile=profile_id, 150 | ) 151 | .on_conflict_ignore() 152 | .execute() 153 | ) 154 | 155 | if event_id <= 0: 156 | return None 157 | 158 | _, created = UserMessages.get_or_create(user=user, event=event_id) 159 | 160 | if created: 161 | return event_id 162 | 163 | return None 164 | 165 | def _load_context(self, user, event, before, after): 166 | context = {} 167 | 168 | if before > 0: 169 | query = ( 170 | Event.select() 171 | .join(UserMessages) 172 | .where( 173 | (Event.date <= event.date) 174 | & (Event.room_id == event.room_id) 175 | & (Event.id != event.id) 176 | & (UserMessages.user == user) 177 | ) 178 | .order_by(Event.date.desc()) 179 | .limit(before) 180 | ) 181 | 182 | context["events_before"] = [e.source for e in query] 183 | else: 184 | context["events_before"] = [] 185 | 186 | if after > 0: 187 | query = ( 188 | Event.select() 189 | .join(UserMessages) 190 | .where( 191 | (Event.date >= event.date) 192 | & (Event.room_id == event.room_id) 193 | & (Event.id != event.id) 194 | & (UserMessages.user == user) 195 | ) 196 | .order_by(Event.date) 197 | .limit(after) 198 | ) 199 | 200 | context["events_after"] = [e.source for e in query] 201 | else: 202 | context["events_after"] = [] 203 | 204 | return context 205 | 206 | @use_database 207 | def load_events( 208 | self, 209 | search_result, # type: List[Tuple[int, int]] 210 | include_profile=False, # type: bool 211 | order_by_recent=False, # type: bool 212 | before=0, # type: int 213 | after=0, # type: int 214 | ): 215 | # type: (...) 
-> Dict[Any, Any] 216 | user, _ = StoreUser.get_or_create(user_id=self.user) 217 | 218 | search_dict = {r[1]: r[0] for r in search_result} 219 | columns = list(search_dict.keys()) 220 | 221 | result_dict = {"results": []} 222 | 223 | query = ( 224 | UserMessages.select() 225 | .where( 226 | (UserMessages.user_id == user) & (UserMessages.event.in_(columns)) 227 | ) 228 | .execute() 229 | ) 230 | 231 | for message in query: 232 | event = message.event 233 | 234 | event_dict = { 235 | "rank": 1 if order_by_recent else search_dict[event.id], 236 | "result": event.source, 237 | "context": {}, 238 | } 239 | 240 | if include_profile: 241 | event_profile = event.profile 242 | 243 | event_dict["context"]["profile_info"] = { 244 | event_profile.user_id: { 245 | "display_name": event_profile.display_name, 246 | "avatar_url": event_profile.avatar_url, 247 | } 248 | } 249 | 250 | context = self._load_context(user, event, before, after) 251 | 252 | event_dict["context"]["events_before"] = context["events_before"] 253 | event_dict["context"]["events_after"] = context["events_after"] 254 | 255 | result_dict["results"].append(event_dict) 256 | 257 | return result_dict 258 | 259 | def sanitize_room_id(room_id): 260 | return room_id.replace(":", "/").replace("!", "") 261 | 262 | class Searcher: 263 | def __init__( 264 | self, 265 | index, 266 | body_field, 267 | name_field, 268 | topic_field, 269 | column_field, 270 | room_field, 271 | timestamp_field, 272 | searcher, 273 | ): 274 | self._index = index 275 | self._searcher = searcher 276 | 277 | self.body_field = body_field 278 | self.name_field = topic_field 279 | self.topic_field = name_field 280 | self.column_field = column_field 281 | self.room_field = room_field 282 | self.timestamp_field = timestamp_field 283 | 284 | def search(self, search_term, room=None, max_results=10, order_by_recent=False): 285 | # type (str, str, int, bool) -> List[int, int] 286 | """Search for events in the index. 287 | 288 | Returns the score and the column id for the event. 289 | """ 290 | queryparser = tantivy.QueryParser.for_index( 291 | self._index, 292 | [self.body_field, self.name_field, self.topic_field, self.room_field], 293 | ) 294 | 295 | # This currently supports only a single room since the query parser 296 | # doesn't seem to work with multiple room fields here. 
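            # Illustrative example: search_term="pizza" with
            # room="!abc:example.org" produces the query string
            # "pizza AND room:abc/example.org".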
297 | if room: 298 | query_string = "{} AND room:{}".format( 299 | search_term, sanitize_room_id(room) 300 | ) 301 | else: 302 | query_string = search_term 303 | 304 | try: 305 | query = queryparser.parse_query(query_string) 306 | except ValueError: 307 | raise InvalidQueryError(f"Invalid search term: {search_term}") 308 | 309 | if order_by_recent: 310 | collector = tantivy.TopDocs( 311 | max_results, order_by_field=self.timestamp_field 312 | ) 313 | else: 314 | collector = tantivy.TopDocs(max_results) 315 | 316 | result = self._searcher.search(query, collector) 317 | 318 | retrieved_result = [] 319 | 320 | for score, doc_address in result: 321 | doc = self._searcher.doc(doc_address) 322 | column = doc.get_first(self.column_field) 323 | retrieved_result.append((score, column)) 324 | 325 | return retrieved_result 326 | 327 | class Index: 328 | def __init__(self, path=None, num_searchers=None): 329 | schema_builder = tantivy.SchemaBuilder() 330 | 331 | self.body_field = schema_builder.add_text_field("body") 332 | self.name_field = schema_builder.add_text_field("name") 333 | self.topic_field = schema_builder.add_text_field("topic") 334 | 335 | self.timestamp_field = schema_builder.add_unsigned_field( 336 | "server_timestamp", fast="single" 337 | ) 338 | self.date_field = schema_builder.add_date_field("message_date") 339 | self.room_field = schema_builder.add_facet_field("room") 340 | 341 | self.column_field = schema_builder.add_unsigned_field( 342 | "database_column", indexed=True, stored=True, fast="single" 343 | ) 344 | 345 | schema = schema_builder.build() 346 | 347 | self.index = tantivy.Index(schema, path) 348 | 349 | self.reader = self.index.reader(num_searchers=num_searchers) 350 | self.writer = self.index.writer() 351 | 352 | def add_event(self, column_id, event, room_id): 353 | doc = tantivy.Document() 354 | 355 | room_path = "/{}".format(sanitize_room_id(room_id)) 356 | 357 | room_facet = tantivy.Facet.from_string(room_path) 358 | 359 | doc.add_unsigned(self.column_field, column_id) 360 | doc.add_facet(self.room_field, room_facet) 361 | doc.add_date( 362 | self.date_field, 363 | datetime.datetime.fromtimestamp(event.server_timestamp / 1000), 364 | ) 365 | doc.add_unsigned(self.timestamp_field, event.server_timestamp) 366 | 367 | if isinstance(event, RoomMessageText): 368 | doc.add_text(self.body_field, event.body) 369 | elif isinstance(event, (RoomMessageMedia, RoomEncryptedMedia)): 370 | doc.add_text(self.body_field, event.body) 371 | elif isinstance(event, RoomNameEvent): 372 | doc.add_text(self.name_field, event.name) 373 | elif isinstance(event, RoomTopicEvent): 374 | doc.add_text(self.topic_field, event.topic) 375 | else: 376 | raise ValueError("Invalid event passed.") 377 | 378 | self.writer.add_document(doc) 379 | 380 | def commit(self): 381 | self.writer.commit() 382 | 383 | def searcher(self): 384 | self.reader.reload() 385 | return Searcher( 386 | self.index, 387 | self.body_field, 388 | self.name_field, 389 | self.topic_field, 390 | self.column_field, 391 | self.room_field, 392 | self.timestamp_field, 393 | self.reader.searcher(), 394 | ) 395 | 396 | @attr.s 397 | class StoreItem: 398 | event = attr.ib() 399 | room_id = attr.ib() 400 | display_name = attr.ib(default=None) 401 | avatar_url = attr.ib(default=None) 402 | 403 | @attr.s 404 | class IndexStore: 405 | user = attr.ib(type=str) 406 | index_path = attr.ib(type=str) 407 | store_path = attr.ib(type=str, default=None) 408 | store_name = attr.ib(default="events.db") 409 | 410 | index = attr.ib(type=Index, init=False) 
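    # The sqlite store and the tantivy index are written together: an event is
    # first saved to the store to obtain its column id, and that id is then
    # used to reference the event from the index (see write_events below).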
411 | store = attr.ib(type=MessageStore, init=False) 412 | event_queue = attr.ib(factory=list) 413 | write_lock = attr.ib(factory=asyncio.Lock) 414 | read_semaphore = attr.ib(type=asyncio.Semaphore, init=False) 415 | 416 | def __attrs_post_init__(self): 417 | self.store_path = self.store_path or self.index_path 418 | num_searchers = os.cpu_count() 419 | self.index = Index(self.index_path, num_searchers) 420 | self.read_semaphore = asyncio.Semaphore(num_searchers or 1) 421 | self.store = MessageStore(self.user, self.store_path, self.store_name) 422 | 423 | def add_event(self, event, room_id, display_name, avatar_url): 424 | item = StoreItem(event, room_id, display_name, avatar_url) 425 | self.event_queue.append(item) 426 | 427 | @staticmethod 428 | def write_events(store, index, event_queue): 429 | with store.database.bind_ctx(store.models): 430 | with store.database.atomic(): 431 | for item in event_queue: 432 | column_id = store.save_event(item.event, item.room_id) 433 | 434 | if column_id: 435 | index.add_event(column_id, item.event, item.room_id) 436 | index.commit() 437 | 438 | async def commit_events(self): 439 | loop = asyncio.get_event_loop() 440 | 441 | event_queue = self.event_queue 442 | 443 | if not event_queue: 444 | return 445 | 446 | self.event_queue = [] 447 | 448 | async with self.write_lock: 449 | write_func = partial( 450 | IndexStore.write_events, self.store, self.index, event_queue 451 | ) 452 | await loop.run_in_executor(None, write_func) 453 | 454 | def event_in_store(self, event_id, room_id): 455 | return self.store.event_in_store(event_id, room_id) 456 | 457 | async def search( 458 | self, 459 | search_term, # type: str 460 | room=None, # type: Optional[str] 461 | max_results=10, # type: int 462 | order_by_recent=False, # type: bool 463 | include_profile=False, # type: bool 464 | before_limit=0, # type: int 465 | after_limit=0, # type: int 466 | ): 467 | # type: (...) -> Dict[Any, Any] 468 | """Search the indexstore for an event.""" 469 | loop = asyncio.get_event_loop() 470 | 471 | # Getting a searcher from tantivy may block if there is no searcher 472 | # available. To avoid blocking we set up the number of searchers to be 473 | # the number of CPUs and the semaphore has the same counter value. 474 | async with self.read_semaphore: 475 | searcher = self.index.searcher() 476 | search_func = partial( 477 | searcher.search, 478 | search_term, 479 | room=room, 480 | max_results=max_results, 481 | order_by_recent=order_by_recent, 482 | ) 483 | 484 | result = await loop.run_in_executor(None, search_func) 485 | 486 | load_event_func = partial( 487 | self.store.load_events, 488 | result, 489 | include_profile, 490 | order_by_recent, 491 | before_limit, 492 | after_limit, 493 | ) 494 | 495 | search_result = await loop.run_in_executor(None, load_event_func) 496 | 497 | search_result["count"] = len(search_result["results"]) 498 | search_result["highlights"] = [] 499 | 500 | return search_result 501 | 502 | else: 503 | INDEXING_ENABLED = False 504 | -------------------------------------------------------------------------------- /pantalaimon/log.py: -------------------------------------------------------------------------------- 1 | # Copyright 2019 The Matrix.org Foundation CIC 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 
5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | 15 | import logbook 16 | from logbook import Logger 17 | 18 | logger = Logger("pantalaimon") 19 | logger.level = logbook.WARNING 20 | -------------------------------------------------------------------------------- /pantalaimon/main.py: -------------------------------------------------------------------------------- 1 | # Copyright 2019 The Matrix.org Foundation CIC 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | 15 | import asyncio 16 | import os 17 | import signal 18 | 19 | import click 20 | import janus 21 | import keyring 22 | import logbook 23 | import nio 24 | from aiohttp import web 25 | from platformdirs import user_config_dir, user_data_dir 26 | from logbook import StderrHandler 27 | 28 | from pantalaimon.config import PanConfig, PanConfigError, parse_log_level 29 | from pantalaimon.daemon import ProxyDaemon 30 | from pantalaimon.log import logger 31 | from pantalaimon.store import KeyDroppingSqliteStore 32 | from pantalaimon.thread_messages import DaemonResponse 33 | from pantalaimon.ui import UI_ENABLED 34 | 35 | 36 | def create_dirs(data_dir, conf_dir): 37 | try: 38 | os.makedirs(data_dir) 39 | except OSError: 40 | pass 41 | 42 | try: 43 | os.makedirs(conf_dir) 44 | except OSError: 45 | pass 46 | 47 | 48 | async def init(data_dir, server_conf, send_queue, recv_queue): 49 | """Initialize the proxy and the http server.""" 50 | store_class = KeyDroppingSqliteStore if server_conf.drop_old_keys else None 51 | 52 | proxy = ProxyDaemon( 53 | server_conf.name, 54 | server_conf.homeserver, 55 | server_conf, 56 | data_dir, 57 | send_queue=send_queue.async_q if send_queue else None, 58 | recv_queue=recv_queue.async_q if recv_queue else None, 59 | proxy=server_conf.proxy.geturl() if server_conf.proxy else None, 60 | ssl=None if server_conf.ssl is True else False, 61 | client_store_class=store_class, 62 | ) 63 | 64 | # 100 MB max POST size 65 | app = web.Application(client_max_size=1024**2 * 100) 66 | 67 | app.add_routes( 68 | [ 69 | web.post("/_matrix/client/r0/login", proxy.login), 70 | web.post("/_matrix/client/v3/login", proxy.login), 71 | web.get("/_matrix/client/r0/sync", proxy.sync), 72 | web.get("/_matrix/client/v3/sync", proxy.sync), 73 | web.post("/_matrix/client/r0/createRoom", proxy.createRoom), 74 | web.post("/_matrix/client/v3/createRoom", proxy.createRoom), 75 | web.get("/_matrix/client/r0/rooms/{room_id}/messages", proxy.messages), 76 | web.get("/_matrix/client/v3/rooms/{room_id}/messages", proxy.messages), 77 | web.put( 78 | r"/_matrix/client/r0/rooms/{room_id}/send/{event_type}/{txnid}", 
79 | proxy.send_message, 80 | ), 81 | web.put( 82 | r"/_matrix/client/v3/rooms/{room_id}/send/{event_type}/{txnid}", 83 | proxy.send_message, 84 | ), 85 | web.post( 86 | r"/_matrix/client/r0/rooms/{room_id}/send/{event_type}", 87 | proxy.send_message, 88 | ), 89 | web.post("/_matrix/client/r0/user/{user_id}/filter", proxy.filter), 90 | web.post("/_matrix/client/v3/user/{user_id}/filter", proxy.filter), 91 | web.post("/.well-known/matrix/client", proxy.well_known), 92 | web.get("/.well-known/matrix/client", proxy.well_known), 93 | web.post("/_matrix/client/r0/search", proxy.search), 94 | web.post("/_matrix/client/v3/search", proxy.search), 95 | web.options("/_matrix/client/r0/search", proxy.search_opts), 96 | web.options("/_matrix/client/v3/search", proxy.search_opts), 97 | web.get( 98 | "/_matrix/media/v1/download/{server_name}/{media_id}", proxy.download 99 | ), 100 | web.get( 101 | "/_matrix/media/v3/download/{server_name}/{media_id}", proxy.download 102 | ), 103 | web.get( 104 | "/_matrix/media/v1/download/{server_name}/{media_id}/{file_name}", 105 | proxy.download, 106 | ), 107 | web.get( 108 | "/_matrix/media/v3/download/{server_name}/{media_id}/{file_name}", 109 | proxy.download, 110 | ), 111 | web.get( 112 | "/_matrix/media/r0/download/{server_name}/{media_id}", proxy.download 113 | ), 114 | web.get( 115 | "/_matrix/media/r0/download/{server_name}/{media_id}/{file_name}", 116 | proxy.download, 117 | ), 118 | web.post( 119 | r"/_matrix/media/r0/upload", 120 | proxy.upload, 121 | ), 122 | web.post( 123 | r"/_matrix/media/v3/upload", 124 | proxy.upload, 125 | ), 126 | web.put( 127 | r"/_matrix/client/r0/profile/{userId}/avatar_url", 128 | proxy.profile, 129 | ), 130 | web.put( 131 | r"/_matrix/client/v3/profile/{userId}/avatar_url", 132 | proxy.profile, 133 | ), 134 | ] 135 | ) 136 | app.router.add_route("*", "/" + "{proxyPath:.*}", proxy.router) 137 | app.on_shutdown.append(proxy.shutdown) 138 | 139 | runner = web.AppRunner(app) 140 | await runner.setup() 141 | 142 | site = web.TCPSite(runner, str(server_conf.listen_address), server_conf.listen_port) 143 | 144 | return proxy, runner, site 145 | 146 | 147 | async def message_router(receive_queue, send_queue, proxies): 148 | """Find the recipient of a message and forward it to the right proxy.""" 149 | 150 | def find_proxy_by_user(user): 151 | # type: (str) -> Optional[ProxyDaemon] 152 | for proxy in proxies: 153 | if user in proxy.pan_clients: 154 | return proxy 155 | 156 | return None 157 | 158 | async def send_info(message_id, pan_user, code, string): 159 | message = DaemonResponse(message_id, pan_user, code, string) 160 | await send_queue.put(message) 161 | 162 | while True: 163 | message = await receive_queue.get() 164 | logger.debug(f"Router got message {message}") 165 | 166 | proxy = find_proxy_by_user(message.pan_user) 167 | 168 | if not proxy: 169 | msg = f"No pan client found for {message.pan_user}." 
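            # Nothing can handle this message: log it, report the error back
            # to the sender, and skip the message.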
170 |             logger.warn(msg)
171 |             await send_info(
172 |                 message.message_id, message.pan_user, "m.unknown_client", msg
173 |             )
174 |             continue
175 |         await proxy.receive_message(message)
176 | 
177 | 
178 | async def daemon(context, log_level, debug_encryption, config, data_path):
179 |     loop = asyncio.get_event_loop()
180 | 
181 |     conf_dir = user_config_dir("pantalaimon", "")
182 |     data_dir = user_data_dir("pantalaimon", "")
183 |     create_dirs(data_dir, conf_dir)
184 | 
185 |     config = config or os.path.join(conf_dir, "pantalaimon.conf")
186 |     data_dir = data_path or data_dir
187 | 
188 |     if log_level:
189 |         log_level = parse_log_level(log_level)
190 | 
191 |     pan_conf = PanConfig(config, log_level)
192 | 
193 |     try:
194 |         pan_conf.read()
195 |     except (OSError, PanConfigError) as e:
196 |         context.fail(e)
197 | 
198 |     if not pan_conf.servers:
199 |         context.fail("Homeserver is not configured.")
200 | 
201 |     logger.level = pan_conf.log_level
202 | 
203 |     if pan_conf.debug_encryption or debug_encryption:
204 |         nio.crypto.logger.level = logbook.DEBUG
205 | 
206 |     StderrHandler().push_application()
207 | 
208 |     servers = []
209 |     proxies = []
210 | 
211 |     if UI_ENABLED:
212 |         from pantalaimon.ui import GlibT
213 | 
214 |         pan_queue = janus.Queue()
215 |         ui_queue = janus.Queue()
216 | 
217 |         glib_thread = GlibT(
218 |             pan_queue.sync_q,
219 |             ui_queue.sync_q,
220 |             data_dir,
221 |             pan_conf.servers.values(),
222 |             pan_conf,
223 |         )
224 | 
225 |         glib_fut = loop.run_in_executor(None, glib_thread.run)
226 |         message_router_task = asyncio.create_task(
227 |             message_router(ui_queue.async_q, pan_queue.async_q, proxies)
228 |         )
229 | 
230 |     else:
231 |         glib_thread = None
232 |         glib_fut = None
233 |         pan_queue = None
234 |         ui_queue = None
235 |         message_router_task = None
236 | 
237 |     try:
238 |         for server_conf in pan_conf.servers.values():
239 |             proxy, runner, site = await init(data_dir, server_conf, pan_queue, ui_queue)
240 |             servers.append((proxy, runner, site))
241 |             proxies.append(proxy)
242 | 
243 |     except keyring.errors.KeyringError as e:
244 |         context.fail(f"Error initializing keyring: {e}")
245 | 
246 |     async def wait_for_glib(glib_thread, fut):
247 |         glib_thread.stop()
248 |         await fut
249 | 
250 |     home = os.path.expanduser("~")
251 |     os.chdir(home)
252 | 
253 |     event = asyncio.Event()
254 | 
255 |     def handler(signum, frame):
256 |         raise KeyboardInterrupt
257 | 
258 |     signal.signal(signal.SIGTERM, handler)
259 | 
260 |     try:
261 |         for proxy, _, site in servers:
262 |             click.echo(
263 |                 f"======== Starting daemon for homeserver "
264 |                 f"{proxy.name} on {site.name} ========"
265 |             )
266 |             await site.start()
267 | 
268 |         click.echo("(Press CTRL+C to quit)")
269 |         await event.wait()
270 |     except (KeyboardInterrupt, asyncio.CancelledError):
271 |         for _, runner, _ in servers:
272 |             await runner.cleanup()
273 | 
274 |         if glib_fut:
275 |             await wait_for_glib(glib_thread, glib_fut)
276 | 
277 |         if message_router_task:
278 |             message_router_task.cancel()
279 |             await asyncio.wait({message_router_task})
280 | 
281 |         raise
282 | 
283 | 
284 | @click.command(
285 |     help=(
286 |         "pantalaimon is a reverse proxy for Matrix homeservers that "
287 |         "transparently encrypts and decrypts messages for clients that "
288 |         "connect to pantalaimon."
289 | ) 290 | ) 291 | @click.version_option(version="0.10.5", prog_name="pantalaimon") 292 | @click.option( 293 | "--log-level", 294 | type=click.Choice(["error", "warning", "info", "debug"]), 295 | default=None, 296 | ) 297 | @click.option("--debug-encryption", is_flag=True) 298 | @click.option("-c", "--config", type=click.Path(exists=True)) 299 | @click.option("--data-path", type=click.Path(exists=True)) 300 | @click.pass_context 301 | def main(context, log_level, debug_encryption, config, data_path): 302 | try: 303 | asyncio.run(daemon(context, log_level, debug_encryption, config, data_path)) 304 | except (KeyboardInterrupt, asyncio.CancelledError): 305 | pass 306 | 307 | return 308 | 309 | 310 | if __name__ == "__main__": 311 | main() 312 | -------------------------------------------------------------------------------- /pantalaimon/panctl.py: -------------------------------------------------------------------------------- 1 | # Copyright 2019 The Matrix.org Foundation CIC 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | 15 | """Cli utility to control pantalaimon.""" 16 | 17 | import argparse 18 | import asyncio 19 | import sys 20 | from collections import defaultdict 21 | from itertools import zip_longest 22 | from typing import List 23 | from shlex import split 24 | 25 | import attr 26 | import click 27 | 28 | try: 29 | from gi.repository import GLib 30 | except ModuleNotFoundError: 31 | from pgi.repository import GLib 32 | 33 | from prompt_toolkit import __version__ as ptk_version 34 | from prompt_toolkit import HTML, PromptSession, print_formatted_text 35 | from prompt_toolkit.completion import Completer, Completion, PathCompleter 36 | from prompt_toolkit.document import Document 37 | from prompt_toolkit.patch_stdout import patch_stdout 38 | from pydbus import SessionBus 39 | 40 | PTK2 = ptk_version.startswith("2.") 41 | 42 | if PTK2: 43 | from prompt_toolkit.eventloop.defaults import use_asyncio_event_loop 44 | 45 | use_asyncio_event_loop() 46 | 47 | 48 | class ParseError(Exception): 49 | pass 50 | 51 | 52 | class PanctlArgParse(argparse.ArgumentParser): 53 | def print_usage(self, file=None): 54 | pass 55 | 56 | def error(self, message): 57 | message = f"Error: {message} " f"(see help)" 58 | print(message) 59 | raise ParseError 60 | 61 | 62 | class PanctlParser: 63 | def __init__(self, commands): 64 | self.commands = commands 65 | self.parser = PanctlArgParse() 66 | subparsers = self.parser.add_subparsers(dest="subcommand") 67 | subparsers.add_parser("list-servers") 68 | 69 | help = subparsers.add_parser("help") 70 | help.add_argument("command", choices=self.commands) 71 | 72 | list_devices = subparsers.add_parser("list-devices") 73 | list_devices.add_argument("pan_user", type=str) 74 | list_devices.add_argument("user_id", type=str) 75 | 76 | start = subparsers.add_parser("start-verification") 77 | start.add_argument("pan_user", type=str) 78 | start.add_argument("user_id", type=str) 79 | start.add_argument("device_id", type=str) 80 | 81 | 
cancel = subparsers.add_parser("cancel-verification") 82 | cancel.add_argument("pan_user", type=str) 83 | cancel.add_argument("user_id", type=str) 84 | cancel.add_argument("device_id", type=str) 85 | 86 | accept = subparsers.add_parser("accept-verification") 87 | accept.add_argument("pan_user", type=str) 88 | accept.add_argument("user_id", type=str) 89 | accept.add_argument("device_id", type=str) 90 | 91 | confirm = subparsers.add_parser("confirm-verification") 92 | confirm.add_argument("pan_user", type=str) 93 | confirm.add_argument("user_id", type=str) 94 | confirm.add_argument("device_id", type=str) 95 | 96 | verify = subparsers.add_parser("verify-device") 97 | verify.add_argument("pan_user", type=str) 98 | verify.add_argument("user_id", type=str) 99 | verify.add_argument("device_id", type=str) 100 | 101 | unverify = subparsers.add_parser("unverify-device") 102 | unverify.add_argument("pan_user", type=str) 103 | unverify.add_argument("user_id", type=str) 104 | unverify.add_argument("device_id", type=str) 105 | 106 | blacklist = subparsers.add_parser("blacklist-device") 107 | blacklist.add_argument("pan_user", type=str) 108 | blacklist.add_argument("user_id", type=str) 109 | blacklist.add_argument("device_id", type=str) 110 | 111 | unblacklist = subparsers.add_parser("unblacklist-device") 112 | unblacklist.add_argument("pan_user", type=str) 113 | unblacklist.add_argument("user_id", type=str) 114 | unblacklist.add_argument("device_id", type=str) 115 | 116 | import_keys = subparsers.add_parser("import-keys") 117 | import_keys.add_argument("pan_user", type=str) 118 | import_keys.add_argument("path", type=str) 119 | import_keys.add_argument("passphrase", type=str) 120 | 121 | export_keys = subparsers.add_parser("export-keys") 122 | export_keys.add_argument("pan_user", type=str) 123 | export_keys.add_argument("path", type=str) 124 | export_keys.add_argument("passphrase", type=str) 125 | 126 | send_anyways = subparsers.add_parser("send-anyways") 127 | send_anyways.add_argument("pan_user", type=str) 128 | send_anyways.add_argument("room_id", type=str) 129 | 130 | cancel_sending = subparsers.add_parser("cancel-sending") 131 | cancel_sending.add_argument("pan_user", type=str) 132 | cancel_sending.add_argument("room_id", type=str) 133 | 134 | continue_key_share = subparsers.add_parser("continue-keyshare") 135 | continue_key_share.add_argument("pan_user", type=str) 136 | continue_key_share.add_argument("user_id", type=str) 137 | continue_key_share.add_argument("device_id", type=str) 138 | 139 | cancel_key_share = subparsers.add_parser("cancel-keyshare") 140 | cancel_key_share.add_argument("pan_user", type=str) 141 | cancel_key_share.add_argument("user_id", type=str) 142 | cancel_key_share.add_argument("device_id", type=str) 143 | 144 | def parse_args(self, argv): 145 | return self.parser.parse_args(argv) 146 | 147 | 148 | @attr.s 149 | class PanCompleter(Completer): 150 | """Completer for panctl commands.""" 151 | 152 | commands = attr.ib(type=List[str]) 153 | ctl = attr.ib() 154 | devices = attr.ib() 155 | rooms = attr.ib(init=False, default=attr.Factory(lambda: defaultdict(set))) 156 | path_completer = PathCompleter(expanduser=True) 157 | 158 | def complete_commands(self, last_word): 159 | """Complete the available commands.""" 160 | compl_words = self.filter_words(self.commands, last_word) 161 | for compl_word in compl_words: 162 | yield Completion(compl_word, -len(last_word)) 163 | 164 | def complete_users(self, last_word, pan_user): 165 | devices = self.devices.List(pan_user) 166 | users = 
set(device["user_id"] for device in devices) 167 | compl_words = self.filter_words(users, last_word) 168 | 169 | for compl_word in compl_words: 170 | yield Completion(compl_word, -len(last_word)) 171 | 172 | return "" 173 | 174 | def complete_devices(self, last_word, pan_user, user_id): 175 | devices = self.devices.ListUserDevices(pan_user, user_id) 176 | device_ids = [device["device_id"] for device in devices] 177 | compl_words = self.filter_words(device_ids, last_word) 178 | 179 | for compl_word in compl_words: 180 | yield Completion(compl_word, -len(last_word)) 181 | 182 | return "" 183 | 184 | def filter_words(self, words, last_word): 185 | compl_words = [] 186 | 187 | for word in words: 188 | if last_word in word: 189 | compl_words.append(word) 190 | 191 | return compl_words 192 | 193 | def complete_pan_users(self, last_word): 194 | servers = self.ctl.ListServers() 195 | users = [item[0] for sublist in servers.values() for item in sublist] 196 | compl_words = self.filter_words(users, last_word) 197 | 198 | for compl_word in compl_words: 199 | yield Completion(compl_word, -len(last_word)) 200 | 201 | def complete_verification(self, command, last_word, words): 202 | if len(words) == 2: 203 | return self.complete_pan_users(last_word) 204 | elif len(words) == 3: 205 | pan_user = words[1] 206 | return self.complete_users(last_word, pan_user) 207 | elif len(words) == 4: 208 | pan_user = words[1] 209 | user_id = words[2] 210 | return self.complete_devices(last_word, pan_user, user_id) 211 | 212 | return "" 213 | 214 | def complete_key_file_cmds( 215 | self, document, complete_event, command, last_word, words 216 | ): 217 | if len(words) == 2: 218 | return self.complete_pan_users(last_word) 219 | elif len(words) == 3: 220 | return self.path_completer.get_completions( 221 | Document(last_word), complete_event 222 | ) 223 | 224 | return "" 225 | 226 | def complete_rooms(self, pan_user, last_word, words): 227 | rooms = self.rooms[pan_user] 228 | compl_words = self.filter_words(list(rooms), last_word) 229 | 230 | for compl_word in compl_words: 231 | yield Completion(compl_word, -len(last_word)) 232 | 233 | return "" 234 | 235 | def complete_send_cmds(self, last_word, words): 236 | if len(words) == 2: 237 | return self.complete_pan_users(last_word) 238 | elif len(words) == 3: 239 | pan_user = words[1] 240 | return self.complete_rooms(pan_user, last_word, words) 241 | 242 | return "" 243 | 244 | def complete_list_devices(self, last_word, words): 245 | if len(words) == 2: 246 | return self.complete_pan_users(last_word) 247 | elif len(words) == 3: 248 | pan_user = words[1] 249 | return self.complete_users(last_word, pan_user) 250 | 251 | return "" 252 | 253 | def get_completions(self, document, complete_event): 254 | """Build the completions.""" 255 | text_before_cursor = document.text_before_cursor 256 | text_before_cursor = str(text_before_cursor) 257 | words = text_before_cursor.split(" ") 258 | 259 | last_word = words[-1] 260 | 261 | if len(words) == 1: 262 | return self.complete_commands(last_word) 263 | 264 | if len(words) > 1: 265 | command = words[0] 266 | 267 | if command in [ 268 | "start-verification", 269 | "accept-verification", 270 | "confirm-verification", 271 | "cancel-verification", 272 | "verify-device", 273 | "unverify-device", 274 | "blacklist-device", 275 | "unblacklist-device", 276 | ]: 277 | return self.complete_verification(command, last_word, words) 278 | 279 | elif command in ["export-keys", "import-keys"]: 280 | return self.complete_key_file_cmds( 281 | document, 
complete_event, command, last_word, words 282 | ) 283 | 284 | elif command in ["send-anyways", "cancel-sending"]: 285 | return self.complete_send_cmds(last_word, words) 286 | 287 | elif command == "list-devices": 288 | return self.complete_list_devices(last_word, words) 289 | 290 | elif command == "help": 291 | if len(words) == 2: 292 | return self.complete_commands(last_word) 293 | else: 294 | return "" 295 | 296 | elif command in ["cancel-keyshare", "continue-keyshare"]: 297 | return self.complete_verification(command, last_word, words) 298 | 299 | return "" 300 | 301 | 302 | def grouper(iterable, n, fillvalue=None): 303 | "Collect data into fixed-length chunks or blocks" 304 | # grouper('ABCDEFG', 3, 'x') --> ABC DEF Gxx" 305 | args = [iter(iterable)] * n 306 | return zip_longest(*args, fillvalue=fillvalue) 307 | 308 | 309 | def partition_key(key): 310 | groups = grouper(key, 4, " ") 311 | return " ".join("".join(g) for g in groups) 312 | 313 | 314 | def get_color(string): 315 | def djb2(string): 316 | hash = 5381 317 | for x in string: 318 | hash = ((hash << 5) + hash) + ord(x) 319 | return hash & 0xFFFFFFFF 320 | 321 | colors = [ 322 | "ansiblue", 323 | "ansigreen", 324 | "ansired", 325 | "ansiyellow", 326 | "ansicyan", 327 | "ansimagenta", 328 | ] 329 | 330 | return colors[djb2(string) % 5] 331 | 332 | 333 | @attr.s 334 | class PanCtl: 335 | bus = attr.ib(init=False) 336 | pan_bus = attr.ib(init=False) 337 | ctl = attr.ib(init=False) 338 | devices = attr.ib(init=False) 339 | completer = attr.ib(init=False) 340 | own_message_ids = attr.ib(init=False) 341 | 342 | command_help = { 343 | "help": "Display help about commands.", 344 | "list-servers": ( 345 | "List the configured homeservers and pan users on each homeserver." 346 | ), 347 | "list-devices": ("List the devices of a user that are known to the pan-user."), 348 | "start-verification": ( 349 | "Start an interactive key verification between " 350 | "the given pan-user and user." 351 | ), 352 | "accept-verification": ( 353 | "Accept an interactive key verification that " 354 | "the given user has started with our given " 355 | "pan-user." 356 | ), 357 | "cancel-verification": ( 358 | "Cancel an interactive key verification " 359 | "between the given pan-user and user." 360 | ), 361 | "confirm-verification": ( 362 | "Confirm that the short authentication " 363 | "string of the interactive key verification " 364 | "with the given pan-user and user is " 365 | "matching." 366 | ), 367 | "verify-device": ("Manually mark the given device as verified."), 368 | "unverify-device": ( 369 | "Mark a previously verified device of the given user as unverified." 370 | ), 371 | "blacklist-device": ( 372 | "Manually mark the given device of the given user as blacklisted." 373 | ), 374 | "unblacklist-device": ( 375 | "Mark a previously blacklisted device of the " 376 | "given user as unblacklisted." 377 | ), 378 | "send-anyways": ( 379 | "Send a room message despite having unverified " 380 | "devices in the room and mark the devices as " 381 | "ignored." 382 | ), 383 | "cancel-sending": ( 384 | "Cancel the send of a room message in a room that " 385 | "contains unverified devices" 386 | ), 387 | "import-keys": ( 388 | "Import end-to-end encryption keys from the given " 389 | "file for the given pan-user." 390 | ), 391 | "export-keys": ( 392 | "Export end-to-end encryption keys to the given file " 393 | "for the given pan-user." 
394 | ), 395 | "continue-keyshare": ( 396 | "Export end-to-end encryption keys to the given file " 397 | "for the given pan-user." 398 | ), 399 | "cancel-keyshare": ( 400 | "Export end-to-end encryption keys to the given file " 401 | "for the given pan-user." 402 | ), 403 | } 404 | 405 | commands = list(command_help.keys()) 406 | 407 | def __attrs_post_init__(self): 408 | self.bus = SessionBus() 409 | self.pan_bus = self.bus.get("org.pantalaimon1") 410 | 411 | self.ctl = self.pan_bus["org.pantalaimon1.control"] 412 | self.devices = self.pan_bus["org.pantalaimon1.devices"] 413 | 414 | self.own_message_ids = [] 415 | 416 | self.ctl.Response.connect(self.show_response) 417 | self.ctl.UnverifiedDevices.connect(self.unverified_devices) 418 | 419 | self.completer = PanCompleter(self.commands, self.ctl, self.devices) 420 | 421 | self.devices.VerificationInvite.connect(self.show_sas_invite) 422 | self.devices.VerificationString.connect(self.show_sas) 423 | self.devices.VerificationDone.connect(self.sas_done) 424 | 425 | self.devices.KeyRequest.connect(self.show_key_request) 426 | self.devices.KeyRequestCancel.connect(self.show_key_request_cancel) 427 | 428 | def show_help(self, command): 429 | print(self.command_help[command]) 430 | 431 | def unverified_devices(self, pan_user, room_id, display_name): 432 | self.completer.rooms[pan_user].add(room_id) 433 | print( 434 | f"Error sending message for user {pan_user}, " 435 | f"there are unverified devices in the room {display_name} " 436 | f"({room_id}).\nUse the send-anyways or cancel-sending commands " 437 | f"to ignore the devices or cancel the sending." 438 | ) 439 | 440 | def show_response(self, response_id, pan_user, message): 441 | if response_id not in self.own_message_ids: 442 | return 443 | 444 | self.own_message_ids.remove(response_id) 445 | 446 | print(message["message"]) 447 | 448 | def show_key_request(self, pan_user, user_id, device_id, request_id): 449 | print( 450 | f"{user_id} has requested room keys from our pan " 451 | f"user {pan_user}, but the requesting device " 452 | f"{device_id} is unverified\n" 453 | f"After verifying the device accept the key share request with " 454 | f"the continue-keyshare, alternatively cancel the " 455 | f"request with the cancel-keyshare command." 456 | ) 457 | 458 | def show_key_request_cancel(self, pan_user, user_id, device_id, request_id): 459 | print( 460 | f"{user_id} via {device_id} has " 461 | f"canceled the room key request from our pan user " 462 | f"{pan_user}." 463 | ) 464 | 465 | def sas_done(self, pan_user, user_id, device_id, _): 466 | print( 467 | f"Device {device_id} of user {user_id}" 468 | f" successfully verified for pan user {pan_user}." 469 | ) 470 | 471 | def show_sas_invite(self, pan_user, user_id, device_id, _): 472 | print( 473 | f"{user_id} has started an interactive device " 474 | f"verification for their device {device_id} with pan user " 475 | f"{pan_user}\n" 476 | f"Accept the invitation with the accept-verification command." 477 | ) 478 | 479 | # The emoji printing logic was taken from weechat-matrix and was written by 480 | # dkasak. 481 | def show_sas(self, pan_user, user_id, device_id, _, emoji): 482 | emojis = [x[0] for x in emoji] 483 | descriptions = [x[1] for x in emoji] 484 | 485 | centered_width = 12 486 | 487 | def center_emoji(emoji, width): 488 | # Assume each emoji has width 2 489 | emoji_width = 2 490 | 491 | # These are emojis that need VARIATION-SELECTOR-16 (U+FE0F) so 492 | # that they are rendered with coloured glyphs. 
For these, we 493 | # need to add an extra space after them so that they are 494 | # rendered properly in weechat. 495 | variation_selector_emojis = ["☁️", "❤️", "☂️", "✏️", "✂️", "☎️", "✈️"] 496 | 497 | if emoji in variation_selector_emojis: 498 | emoji += " " 499 | 500 | # This is a trick to account for the fact that emojis are wider 501 | # than other monospace characters. 502 | placeholder = "." * emoji_width 503 | 504 | return placeholder.center(width).replace(placeholder, emoji) 505 | 506 | emoji_str = "".join(center_emoji(e, centered_width) for e in emojis) 507 | desc = "".join(d.center(centered_width) for d in descriptions) 508 | short_string = "\n".join([emoji_str, desc]) 509 | 510 | print( 511 | f"Short authentication string for pan " 512 | f"user {pan_user} from {user_id} via " 513 | f"{device_id}:\n{short_string}" 514 | ) 515 | 516 | def list_servers(self): 517 | """List the daemons users.""" 518 | servers = self.ctl.ListServers() 519 | 520 | print("pantalaimon servers:") 521 | 522 | for server, server_users in servers.items(): 523 | server_c = get_color(server) 524 | 525 | print_formatted_text(HTML(f" - Name: <{server_c}>{server}")) 526 | 527 | user_list = [] 528 | 529 | for user, device in server_users: 530 | user_c = get_color(user) 531 | device_c = get_color(device) 532 | 533 | user_list.append( 534 | f" - <{user_c}>{user} " 535 | f"<{device_c}>{device}" 536 | ) 537 | 538 | if user_list: 539 | print(" - Pan users:") 540 | user_string = "\n".join(user_list) 541 | print_formatted_text(HTML(user_string)) 542 | 543 | def list_devices(self, args): 544 | devices = self.devices.ListUserDevices(args.pan_user, args.user_id) 545 | 546 | print_formatted_text(HTML(f"Devices for user {args.user_id}:")) 547 | 548 | for device in devices: 549 | if device["trust_state"] == "verified": 550 | trust_state = "Verified" 551 | elif device["trust_state"] == "blacklisted": 552 | trust_state = "Blacklisted" 553 | elif device["trust_state"] == "ignored": 554 | trust_state = "Ignored" 555 | else: 556 | trust_state = "Unset" 557 | 558 | key = partition_key(device["ed25519"]) 559 | color = get_color(device["device_id"]) 560 | print_formatted_text( 561 | HTML( 562 | f" - Display name: " 563 | f"{device['device_display_name']}\n" 564 | f" - Device id: " 565 | f"<{color}>{device['device_id']}\n" 566 | f" - Device key: " 567 | f"{key}\n" 568 | f" - Trust state: " 569 | f"{trust_state}" 570 | ) 571 | ) 572 | 573 | async def loop(self): 574 | """Event loop for panctl.""" 575 | promptsession = PromptSession("panctl> ", completer=self.completer) 576 | 577 | while True: 578 | with patch_stdout(): 579 | try: 580 | if PTK2: 581 | result = await promptsession.prompt(async_=True) 582 | else: 583 | result = await promptsession.prompt_async() 584 | except EOFError: 585 | break 586 | 587 | if not result: 588 | continue 589 | 590 | parser = PanctlParser(self.commands) 591 | 592 | try: 593 | args = parser.parse_args(split(result, posix=False)) 594 | except ParseError: 595 | continue 596 | 597 | command = args.subcommand 598 | 599 | if command == "list-servers": 600 | self.list_servers() 601 | 602 | if command == "help": 603 | self.show_help(args.command) 604 | 605 | elif command == "import-keys": 606 | self.own_message_ids.append( 607 | self.ctl.ImportKeys(args.pan_user, args.path, args.passphrase) 608 | ) 609 | 610 | elif command == "export-keys": 611 | self.own_message_ids.append( 612 | self.ctl.ExportKeys(args.pan_user, args.path, args.passphrase) 613 | ) 614 | 615 | elif command == "send-anyways": 616 | 
self.own_message_ids.append( 617 | self.ctl.SendAnyways(args.pan_user, args.room_id) 618 | ) 619 | 620 | elif command == "cancel-sending": 621 | self.own_message_ids.append( 622 | self.ctl.CancelSending(args.pan_user, args.room_id) 623 | ) 624 | 625 | elif command == "list-devices": 626 | self.list_devices(args) 627 | 628 | elif command == "verify-device": 629 | self.own_message_ids.append( 630 | self.devices.Verify(args.pan_user, args.user_id, args.device_id) 631 | ) 632 | 633 | elif command == "unverify-device": 634 | self.own_message_ids.append( 635 | self.devices.Unverify(args.pan_user, args.user_id, args.device_id) 636 | ) 637 | 638 | elif command == "blacklist-device": 639 | self.own_message_ids.append( 640 | self.devices.Blacklist(args.pan_user, args.user_id, args.device_id) 641 | ) 642 | 643 | elif command == "unblacklist-device": 644 | self.own_message_ids.append( 645 | self.devices.Unblacklist( 646 | args.pan_user, args.user_id, args.device_id 647 | ) 648 | ) 649 | 650 | elif command == "start-verification": 651 | self.own_message_ids.append( 652 | self.devices.StartKeyVerification( 653 | args.pan_user, args.user_id, args.device_id 654 | ) 655 | ) 656 | 657 | elif command == "cancel-verification": 658 | self.own_message_ids.append( 659 | self.devices.CancelKeyVerification( 660 | args.pan_user, args.user_id, args.device_id 661 | ) 662 | ) 663 | 664 | elif command == "accept-verification": 665 | self.own_message_ids.append( 666 | self.devices.AcceptKeyVerification( 667 | args.pan_user, args.user_id, args.device_id 668 | ) 669 | ) 670 | 671 | elif command == "confirm-verification": 672 | self.own_message_ids.append( 673 | self.devices.ConfirmKeyVerification( 674 | args.pan_user, args.user_id, args.device_id 675 | ) 676 | ) 677 | 678 | elif command == "continue-keyshare": 679 | self.own_message_ids.append( 680 | self.devices.ContinueKeyShare( 681 | args.pan_user, args.user_id, args.device_id 682 | ) 683 | ) 684 | 685 | elif command == "cancel-keyshare": 686 | self.own_message_ids.append( 687 | self.devices.CancelKeyShare( 688 | args.pan_user, args.user_id, args.device_id 689 | ) 690 | ) 691 | 692 | 693 | @click.command( 694 | help=( 695 | "panctl is a small interactive repl to introspect and control" 696 | "the pantalaimon daemon." 697 | ) 698 | ) 699 | @click.version_option(version="0.10.5", prog_name="panctl") 700 | def main(): 701 | loop = asyncio.new_event_loop() 702 | glib_loop = GLib.MainLoop() 703 | 704 | try: 705 | panctl = PanCtl() 706 | except GLib.Error as e: 707 | print(f"Error, {e}") 708 | sys.exit(-1) 709 | 710 | fut = loop.run_in_executor(None, glib_loop.run) 711 | 712 | try: 713 | loop.run_until_complete(panctl.loop()) 714 | except KeyboardInterrupt: 715 | pass 716 | 717 | GLib.idle_add(glib_loop.quit) 718 | loop.run_until_complete(fut) 719 | 720 | 721 | if __name__ == "__main__": 722 | main() 723 | -------------------------------------------------------------------------------- /pantalaimon/store.py: -------------------------------------------------------------------------------- 1 | # Copyright 2019 The Matrix.org Foundation CIC 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 
5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | 15 | import json 16 | import os 17 | from collections import defaultdict 18 | from typing import Any, Dict 19 | 20 | import attr 21 | from nio.crypto import TrustState, GroupSessionStore 22 | from nio.store import ( 23 | Accounts, 24 | MegolmInboundSessions, 25 | DeviceKeys, 26 | SqliteStore, 27 | DeviceTrustState, 28 | use_database, 29 | use_database_atomic, 30 | ) 31 | from peewee import SQL, DoesNotExist, ForeignKeyField, Model, SqliteDatabase, TextField 32 | from cachetools import LRUCache 33 | 34 | MAX_LOADED_MEDIA = 10000 35 | MAX_LOADED_UPLOAD = 10000 36 | 37 | 38 | @attr.s 39 | class FetchTask: 40 | room_id = attr.ib(type=str) 41 | token = attr.ib(type=str) 42 | 43 | 44 | @attr.s 45 | class MediaInfo: 46 | mxc_server = attr.ib(type=str) 47 | mxc_path = attr.ib(type=str) 48 | key = attr.ib(type=dict) 49 | iv = attr.ib(type=str) 50 | hashes = attr.ib(type=dict) 51 | 52 | def to_content(self, content: Dict, mime_type: str) -> Dict[Any, Any]: 53 | content["file"] = { 54 | "v": "v2", 55 | "key": self.key, 56 | "iv": self.iv, 57 | "hashes": self.hashes, 58 | "url": content["url"], 59 | "mimetype": mime_type, 60 | } 61 | 62 | def to_thumbnail(self, content: Dict, mime_type: str) -> Dict[Any, Any]: 63 | content["info"]["thumbnail_file"] = { 64 | "v": "v2", 65 | "key": self.key, 66 | "iv": self.iv, 67 | "hashes": self.hashes, 68 | "url": content["info"]["thumbnail_url"], 69 | "mimetype": mime_type, 70 | } 71 | 72 | 73 | @attr.s 74 | class UploadInfo: 75 | content_uri = attr.ib(type=str) 76 | filename = attr.ib(type=str) 77 | mimetype = attr.ib(type=str) 78 | 79 | 80 | class DictField(TextField): 81 | def python_value(self, value): # pragma: no cover 82 | return json.loads(value) 83 | 84 | def db_value(self, value): # pragma: no cover 85 | return json.dumps(value) 86 | 87 | 88 | class AccessTokens(Model): 89 | token = TextField() 90 | account = ForeignKeyField( 91 | model=Accounts, primary_key=True, backref="access_token", on_delete="CASCADE" 92 | ) 93 | 94 | 95 | class Servers(Model): 96 | name = TextField() 97 | 98 | class Meta: 99 | constraints = [SQL("UNIQUE(name)")] 100 | 101 | 102 | class ServerUsers(Model): 103 | user_id = TextField() 104 | server = ForeignKeyField( 105 | model=Servers, column_name="server_id", backref="users", on_delete="CASCADE" 106 | ) 107 | 108 | class Meta: 109 | constraints = [SQL("UNIQUE(user_id,server_id)")] 110 | 111 | 112 | class PanSyncTokens(Model): 113 | token = TextField() 114 | user = ForeignKeyField(model=ServerUsers, column_name="user_id") 115 | 116 | class Meta: 117 | constraints = [SQL("UNIQUE(user_id)")] 118 | 119 | 120 | class PanFetcherTasks(Model): 121 | user = ForeignKeyField( 122 | model=ServerUsers, column_name="user_id", backref="fetcher_tasks" 123 | ) 124 | room_id = TextField() 125 | token = TextField() 126 | 127 | class Meta: 128 | constraints = [SQL("UNIQUE(user_id, room_id, token)")] 129 | 130 | 131 | class PanMediaInfo(Model): 132 | server = ForeignKeyField( 133 | model=Servers, column_name="server_id", backref="media", on_delete="CASCADE" 134 | ) 135 | mxc_server = TextField() 136 | mxc_path 
= TextField() 137 | key = DictField() 138 | hashes = DictField() 139 | iv = TextField() 140 | 141 | class Meta: 142 | constraints = [SQL("UNIQUE(server_id, mxc_server, mxc_path)")] 143 | 144 | 145 | class PanUploadInfo(Model): 146 | server = ForeignKeyField( 147 | model=Servers, column_name="server_id", backref="upload", on_delete="CASCADE" 148 | ) 149 | content_uri = TextField() 150 | filename = TextField() 151 | mimetype = TextField() 152 | 153 | class Meta: 154 | constraints = [SQL("UNIQUE(server_id, content_uri)")] 155 | 156 | 157 | @attr.s 158 | class ClientInfo: 159 | user_id = attr.ib(type=str) 160 | access_token = attr.ib(type=str) 161 | 162 | 163 | @attr.s 164 | class PanStore: 165 | store_path = attr.ib(type=str) 166 | database_name = attr.ib(type=str, default="pan.db") 167 | database = attr.ib(type=SqliteDatabase, init=False) 168 | database_path = attr.ib(type=str, init=False) 169 | models = [ 170 | Accounts, 171 | AccessTokens, 172 | Servers, 173 | ServerUsers, 174 | DeviceKeys, 175 | DeviceTrustState, 176 | PanSyncTokens, 177 | PanFetcherTasks, 178 | PanMediaInfo, 179 | PanUploadInfo, 180 | ] 181 | 182 | def __attrs_post_init__(self): 183 | self.database_path = os.path.join( 184 | os.path.abspath(self.store_path), self.database_name 185 | ) 186 | 187 | self.database = self._create_database() 188 | self.database.connect() 189 | 190 | with self.database.bind_ctx(self.models): 191 | self.database.create_tables(self.models) 192 | 193 | def _create_database(self): 194 | return SqliteDatabase( 195 | self.database_path, pragmas={"foreign_keys": 1, "secure_delete": 1} 196 | ) 197 | 198 | @use_database 199 | def _get_account(self, user_id, device_id): 200 | try: 201 | return Accounts.get( 202 | Accounts.user_id == user_id, Accounts.device_id == device_id 203 | ) 204 | except DoesNotExist: 205 | return None 206 | 207 | @use_database 208 | def save_upload(self, server, content_uri, filename, mimetype): 209 | server = Servers.get(name=server) 210 | 211 | PanUploadInfo.insert( 212 | server=server, 213 | content_uri=content_uri, 214 | filename=filename, 215 | mimetype=mimetype, 216 | ).on_conflict_ignore().execute() 217 | 218 | @use_database 219 | def load_upload(self, server, content_uri=None): 220 | server, _ = Servers.get_or_create(name=server) 221 | 222 | if not content_uri: 223 | upload_cache = LRUCache(maxsize=MAX_LOADED_UPLOAD) 224 | 225 | for i, u in enumerate(server.upload): 226 | if i > MAX_LOADED_UPLOAD: 227 | break 228 | 229 | upload = UploadInfo(u.content_uri, u.filename, u.mimetype) 230 | upload_cache[u.content_uri] = upload 231 | 232 | return upload_cache 233 | else: 234 | u = PanUploadInfo.get_or_none( 235 | PanUploadInfo.server == server, 236 | PanUploadInfo.content_uri == content_uri, 237 | ) 238 | 239 | if not u: 240 | return None 241 | 242 | return UploadInfo(u.content_uri, u.filename, u.mimetype) 243 | 244 | @use_database 245 | def save_media(self, server, media): 246 | server = Servers.get(name=server) 247 | 248 | PanMediaInfo.insert( 249 | server=server, 250 | mxc_server=media.mxc_server, 251 | mxc_path=media.mxc_path, 252 | key=media.key, 253 | iv=media.iv, 254 | hashes=media.hashes, 255 | ).on_conflict_ignore().execute() 256 | 257 | @use_database 258 | def load_media_cache(self, server): 259 | server, _ = Servers.get_or_create(name=server) 260 | media_cache = LRUCache(maxsize=MAX_LOADED_MEDIA) 261 | 262 | for i, m in enumerate(server.media): 263 | if i > MAX_LOADED_MEDIA: 264 | break 265 | 266 | media = MediaInfo(m.mxc_server, m.mxc_path, m.key, m.iv, m.hashes) 267 
| media_cache[(m.mxc_server, m.mxc_path)] = media 268 | 269 | return media_cache 270 | 271 | @use_database 272 | def load_media(self, server, mxc_server=None, mxc_path=None): 273 | server, _ = Servers.get_or_create(name=server) 274 | 275 | m = PanMediaInfo.get_or_none( 276 | PanMediaInfo.server == server, 277 | PanMediaInfo.mxc_server == mxc_server, 278 | PanMediaInfo.mxc_path == mxc_path, 279 | ) 280 | 281 | if not m: 282 | return None 283 | 284 | return MediaInfo(m.mxc_server, m.mxc_path, m.key, m.iv, m.hashes) 285 | 286 | @use_database_atomic 287 | def replace_fetcher_task(self, server, pan_user, old_task, new_task): 288 | server = Servers.get(name=server) 289 | user = ServerUsers.get(server=server, user_id=pan_user) 290 | 291 | PanFetcherTasks.delete().where( 292 | PanFetcherTasks.user == user, 293 | PanFetcherTasks.room_id == old_task.room_id, 294 | PanFetcherTasks.token == old_task.token, 295 | ).execute() 296 | 297 | PanFetcherTasks.replace( 298 | user=user, room_id=new_task.room_id, token=new_task.token 299 | ).execute() 300 | 301 | @use_database 302 | def save_fetcher_task(self, server, pan_user, task): 303 | server = Servers.get(name=server) 304 | user = ServerUsers.get(server=server, user_id=pan_user) 305 | 306 | PanFetcherTasks.replace( 307 | user=user, room_id=task.room_id, token=task.token 308 | ).execute() 309 | 310 | @use_database 311 | def load_fetcher_tasks(self, server, pan_user): 312 | server = Servers.get(name=server) 313 | user = ServerUsers.get(server=server, user_id=pan_user) 314 | 315 | tasks = [] 316 | 317 | for t in user.fetcher_tasks: 318 | tasks.append(FetchTask(t.room_id, t.token)) 319 | 320 | return tasks 321 | 322 | @use_database 323 | def delete_fetcher_task(self, server, pan_user, task): 324 | server = Servers.get(name=server) 325 | user = ServerUsers.get(server=server, user_id=pan_user) 326 | 327 | PanFetcherTasks.delete().where( 328 | PanFetcherTasks.user == user, 329 | PanFetcherTasks.room_id == task.room_id, 330 | PanFetcherTasks.token == task.token, 331 | ).execute() 332 | 333 | @use_database 334 | def save_token(self, server, pan_user, token): 335 | # type: (str, str, str) -> None 336 | """Save a sync token for a pan user.""" 337 | server = Servers.get(name=server) 338 | user = ServerUsers.get(server=server, user_id=pan_user) 339 | 340 | PanSyncTokens.replace(user=user, token=token).execute() 341 | 342 | @use_database 343 | def load_token(self, server, pan_user): 344 | # type: (str, str) -> Optional[str] 345 | """Load a sync token for a pan user. 346 | 347 | Returns the sync token if one is found. 
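        Returns None if no sync token has been saved for the user.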
348 | """ 349 | server = Servers.get(name=server) 350 | user = ServerUsers.get(server=server, user_id=pan_user) 351 | 352 | token = PanSyncTokens.get_or_none(user=user) 353 | 354 | if token: 355 | return token.token 356 | 357 | return None 358 | 359 | @use_database 360 | def save_server_user(self, server_name, user_id): 361 | # type: (str, str) -> None 362 | server, _ = Servers.get_or_create(name=server_name) 363 | 364 | ServerUsers.insert( 365 | user_id=user_id, server=server 366 | ).on_conflict_ignore().execute() 367 | 368 | @use_database 369 | def load_all_users(self): 370 | users = [] 371 | 372 | query = Accounts.select(Accounts.user_id, Accounts.device_id) 373 | 374 | for account in query: 375 | users.append((account.user_id, account.device_id)) 376 | 377 | return users 378 | 379 | @use_database 380 | def load_users(self, server_name): 381 | # type: (str) -> List[Tuple[str, str]] 382 | users = [] 383 | 384 | server = Servers.get_or_none(Servers.name == server_name) 385 | 386 | if not server: 387 | return [] 388 | 389 | server_users = [] 390 | 391 | for u in server.users: 392 | server_users.append(u.user_id) 393 | 394 | query = Accounts.select(Accounts.user_id, Accounts.device_id).where( 395 | Accounts.user_id.in_(server_users) 396 | ) 397 | 398 | for account in query: 399 | users.append((account.user_id, account.device_id)) 400 | 401 | return users 402 | 403 | @use_database 404 | def save_access_token(self, user_id, device_id, access_token): 405 | account = self._get_account(user_id, device_id) 406 | assert account 407 | 408 | AccessTokens.replace(account=account, token=access_token).execute() 409 | 410 | @use_database 411 | def load_access_token(self, user_id, device_id): 412 | # type: (str, str) -> Optional[str] 413 | account = self._get_account(user_id, device_id) 414 | 415 | if not account: 416 | return None 417 | 418 | try: 419 | return account.access_token[0].token 420 | except IndexError: 421 | return None 422 | 423 | @use_database 424 | def load_all_devices(self): 425 | # type (str, str) -> Dict[str, Dict[str, DeviceStore]] 426 | store = dict() 427 | 428 | query = Accounts.select() 429 | 430 | for account in query: 431 | device_store = defaultdict(dict) 432 | 433 | for d in account.device_keys: 434 | if d.deleted: 435 | continue 436 | 437 | try: 438 | trust_state = d.trust_state[0].state 439 | except IndexError: 440 | trust_state = TrustState.unset 441 | 442 | keys = {k.key_type: k.key for k in d.keys} 443 | 444 | device_store[d.user_id][d.device_id] = { 445 | "user_id": d.user_id, 446 | "device_id": d.device_id, 447 | "ed25519": keys["ed25519"], 448 | "curve25519": keys["curve25519"], 449 | "trust_state": trust_state.name, 450 | "device_display_name": d.display_name, 451 | } 452 | 453 | store[account.user_id] = device_store 454 | 455 | return store 456 | 457 | 458 | class KeyDroppingSqliteStore(SqliteStore): 459 | @use_database 460 | def save_inbound_group_session(self, session): 461 | """Save the provided Megolm inbound group session to the database. 462 | 463 | Args: 464 | session (InboundGroupSession): The session to save. 
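        Unlike the stock SqliteStore behaviour, any session that was
        previously stored for the same account, sender key and room id is
        deleted first (see the delete query below), so only the most
        recently received inbound group session is kept.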
465 | """ 466 | account = self._get_account() 467 | assert account 468 | 469 | MegolmInboundSessions.delete().where( 470 | MegolmInboundSessions.sender_key == session.sender_key, 471 | MegolmInboundSessions.account == account, 472 | MegolmInboundSessions.room_id == session.room_id, 473 | ).execute() 474 | 475 | super().save_inbound_group_session(session) 476 | 477 | @use_database 478 | def load_inbound_group_sessions(self): 479 | store = super().load_inbound_group_sessions() 480 | 481 | return KeyDroppingGroupSessionStore.from_group_session_store(store) 482 | 483 | 484 | class KeyDroppingGroupSessionStore(GroupSessionStore): 485 | def from_group_session_store(store): 486 | new_store = KeyDroppingGroupSessionStore() 487 | new_store._entries = store._entries 488 | 489 | return new_store 490 | 491 | def add(self, session) -> bool: 492 | room_id = session.room_id 493 | sender_key = session.sender_key 494 | if session in self._entries[room_id][sender_key].values(): 495 | return False 496 | 497 | self._entries[room_id][sender_key].clear() 498 | self._entries[room_id][sender_key][session.id] = session 499 | return True 500 | -------------------------------------------------------------------------------- /pantalaimon/thread_messages.py: -------------------------------------------------------------------------------- 1 | # Copyright 2019 The Matrix.org Foundation CIC 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 
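# The attrs classes below are the typed messages that the proxy daemon and the
# D-Bus/GLib UI thread pass to each other over their queues: the Control and
# Devices D-Bus interfaces in ui.py put the request messages on the send queue,
# and GlibT.message_callback() dispatches on these types for the responses and
# signals coming back from the daemon.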
14 | 15 | import attr 16 | 17 | 18 | @attr.s 19 | class Message: 20 | pass 21 | 22 | 23 | @attr.s 24 | class UnverifiedDevicesSignal(Message): 25 | pan_user = attr.ib() 26 | room_id = attr.ib() 27 | room_display_name = attr.ib() 28 | 29 | 30 | @attr.s 31 | class UnverifiedResponse(Message): 32 | message_id = attr.ib() 33 | pan_user = attr.ib() 34 | room_id = attr.ib() 35 | 36 | 37 | @attr.s 38 | class SendAnywaysMessage(UnverifiedResponse): 39 | pass 40 | 41 | 42 | @attr.s 43 | class CancelSendingMessage(UnverifiedResponse): 44 | pass 45 | 46 | 47 | @attr.s 48 | class KeyRequestMessage(Message): 49 | pan_user = attr.ib(type=str) 50 | event = attr.ib() 51 | 52 | 53 | @attr.s 54 | class _KeyShare(Message): 55 | message_id = attr.ib() 56 | pan_user = attr.ib() 57 | user_id = attr.ib() 58 | device_id = attr.ib() 59 | 60 | 61 | @attr.s 62 | class ContinueKeyShare(_KeyShare): 63 | pass 64 | 65 | 66 | @attr.s 67 | class CancelKeyShare(_KeyShare): 68 | pass 69 | 70 | 71 | @attr.s 72 | class DaemonResponse(Message): 73 | message_id = attr.ib() 74 | pan_user = attr.ib() 75 | code = attr.ib() 76 | message = attr.ib() 77 | 78 | 79 | @attr.s 80 | class UpdateUsersMessage(Message): 81 | server = attr.ib() 82 | user_id = attr.ib() 83 | device_id = attr.ib() 84 | 85 | 86 | @attr.s 87 | class UpdateDevicesMessage(Message): 88 | pan_user = attr.ib(type=str) 89 | devices = attr.ib(type=dict) 90 | 91 | 92 | @attr.s 93 | class _KeysOperation(Message): 94 | message_id = attr.ib() 95 | pan_user = attr.ib() 96 | file_path = attr.ib() 97 | passphrase = attr.ib() 98 | 99 | 100 | @attr.s 101 | class ImportKeysMessage(_KeysOperation): 102 | pass 103 | 104 | 105 | @attr.s 106 | class ExportKeysMessage(_KeysOperation): 107 | pass 108 | 109 | 110 | @attr.s 111 | class _VerificationMessage(Message): 112 | message_id = attr.ib() 113 | pan_user = attr.ib() 114 | user_id = attr.ib() 115 | device_id = attr.ib() 116 | 117 | 118 | @attr.s 119 | class DeviceVerifyMessage(_VerificationMessage): 120 | pass 121 | 122 | 123 | @attr.s 124 | class DeviceUnverifyMessage(_VerificationMessage): 125 | pass 126 | 127 | 128 | @attr.s 129 | class DeviceBlacklistMessage(_VerificationMessage): 130 | pass 131 | 132 | 133 | @attr.s 134 | class DeviceUnblacklistMessage(_VerificationMessage): 135 | pass 136 | 137 | 138 | @attr.s 139 | class SasMessage(_VerificationMessage): 140 | pass 141 | 142 | 143 | @attr.s 144 | class StartSasMessage(SasMessage): 145 | pass 146 | 147 | 148 | @attr.s 149 | class CancelSasMessage(SasMessage): 150 | pass 151 | 152 | 153 | @attr.s 154 | class ConfirmSasMessage(SasMessage): 155 | pass 156 | 157 | 158 | @attr.s 159 | class AcceptSasMessage(SasMessage): 160 | pass 161 | 162 | 163 | @attr.s 164 | class _SasSignal: 165 | pan_user = attr.ib() 166 | user_id = attr.ib() 167 | device_id = attr.ib() 168 | transaction_id = attr.ib() 169 | 170 | 171 | @attr.s 172 | class InviteSasSignal(_SasSignal): 173 | pass 174 | 175 | 176 | @attr.s 177 | class ShowSasSignal(_SasSignal): 178 | emoji = attr.ib() 179 | 180 | 181 | @attr.s 182 | class SasDoneSignal(_SasSignal): 183 | pass 184 | -------------------------------------------------------------------------------- /pantalaimon/ui.py: -------------------------------------------------------------------------------- 1 | # Copyright 2019 The Matrix.org Foundation CIC 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 
5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | 15 | from importlib import util 16 | 17 | UI_ENABLED = ( 18 | util.find_spec("gi") is not None 19 | and util.find_spec("gi.repository") is not None 20 | and util.find_spec("pydbus") is not None 21 | ) 22 | 23 | if UI_ENABLED: 24 | from collections import defaultdict 25 | from queue import Empty 26 | 27 | import attr 28 | import dbus 29 | import notify2 30 | from gi.repository import GLib 31 | from pydbus import SessionBus 32 | from pydbus.generic import signal 33 | from dbus.mainloop.glib import DBusGMainLoop 34 | 35 | from nio import RoomKeyRequest, RoomKeyRequestCancellation 36 | 37 | from pantalaimon.log import logger 38 | from pantalaimon.thread_messages import ( 39 | AcceptSasMessage, 40 | CancelSasMessage, 41 | CancelSendingMessage, 42 | ConfirmSasMessage, 43 | DaemonResponse, 44 | DeviceBlacklistMessage, 45 | DeviceUnblacklistMessage, 46 | DeviceUnverifyMessage, 47 | DeviceVerifyMessage, 48 | ExportKeysMessage, 49 | ImportKeysMessage, 50 | InviteSasSignal, 51 | SasDoneSignal, 52 | SendAnywaysMessage, 53 | ShowSasSignal, 54 | StartSasMessage, 55 | UnverifiedDevicesSignal, 56 | UpdateDevicesMessage, 57 | UpdateUsersMessage, 58 | KeyRequestMessage, 59 | ContinueKeyShare, 60 | CancelKeyShare, 61 | ) 62 | 63 | UI_ENABLED = True 64 | 65 | class IdCounter: 66 | def __init__(self): 67 | self._message_id = 0 68 | 69 | @property 70 | def message_id(self): 71 | ret = self._message_id 72 | self._message_id += 1 73 | 74 | return ret 75 | 76 | class Control: 77 | """ 78 | 79 | 80 | 81 | 82 | 83 | 84 | 85 | 86 | 87 | 88 | 89 | 90 | 91 | 92 | 93 | 94 | 95 | 96 | 97 | 98 | 99 | 100 | 101 | 102 | 103 | 104 | 105 | 106 | 107 | 108 | 109 | 110 | 111 | 112 | 113 | 114 | 115 | 116 | 117 | 118 | 119 | 120 | 121 | 122 | 123 | 124 | """ 125 | 126 | Response = signal() 127 | UnverifiedDevices = signal() 128 | 129 | def __init__(self, queue, server_list, id_counter): 130 | self.queue = queue 131 | self.server_list = server_list 132 | self.id_counter = id_counter 133 | self.users = defaultdict(set) 134 | 135 | def update_users(self, message): 136 | self.users[message.server].add((message.user_id, message.device_id)) 137 | 138 | @property 139 | def message_id(self): 140 | return self.id_counter.message_id 141 | 142 | def ListServers(self): 143 | """Return the list of pan users.""" 144 | return self.users 145 | 146 | def ExportKeys(self, pan_user, filepath, passphrase): 147 | message = ExportKeysMessage(self.message_id, pan_user, filepath, passphrase) 148 | self.queue.put(message) 149 | return message.message_id 150 | 151 | def ImportKeys(self, pan_user, filepath, passphrase): 152 | message = ImportKeysMessage(self.message_id, pan_user, filepath, passphrase) 153 | self.queue.put(message) 154 | return message.message_id 155 | 156 | def SendAnyways(self, pan_user, room_id): 157 | message = SendAnywaysMessage(self.message_id, pan_user, room_id) 158 | self.queue.put(message) 159 | return message.message_id 160 | 161 | def CancelSending(self, pan_user, room_id): 162 | message = CancelSendingMessage(self.message_id, pan_user, room_id) 163 | self.queue.put(message) 164 | 
return message.message_id 165 | 166 | class Devices: 167 | """ 168 | 169 | 170 | 171 | 172 | 173 | 174 | 175 | 176 | 177 | 178 | 179 | 180 | 181 | 182 | 183 | 184 | 185 | 186 | 187 | 188 | 189 | 190 | 191 | 192 | 193 | 194 | 195 | 196 | 197 | 198 | 199 | 200 | 201 | 202 | 203 | 204 | 205 | 206 | 207 | 208 | 209 | 210 | 211 | 212 | 213 | 214 | 215 | 216 | 217 | 218 | 219 | 220 | 221 | 222 | 223 | 224 | 225 | 226 | 227 | 228 | 229 | 230 | 231 | 232 | 233 | 234 | 235 | 236 | 237 | 238 | 239 | 240 | 241 | 242 | 243 | 244 | 245 | 246 | 247 | 248 | 249 | 250 | 251 | 252 | 253 | 254 | 255 | 256 | 257 | 258 | 259 | 260 | 261 | 262 | 263 | 264 | 265 | 266 | 267 | 268 | 269 | 270 | 271 | 272 | 273 | 274 | 275 | 276 | 277 | 278 | 279 | 280 | 281 | 282 | 283 | 284 | 285 | 286 | 287 | 288 | 289 | 290 | 291 | 292 | 293 | 294 | 295 | 296 | 297 | 298 | """ 299 | 300 | VerificationInvite = signal() 301 | VerificationCancel = signal() 302 | VerificationString = signal() 303 | VerificationDone = signal() 304 | 305 | KeyRequest = signal() 306 | KeyRequestCancel = signal() 307 | 308 | def __init__(self, queue, id_counter): 309 | self.device_list = dict() 310 | self.queue = queue 311 | self.id_counter = id_counter 312 | 313 | self.key_requests = dict() 314 | 315 | @property 316 | def message_id(self): 317 | return self.id_counter.message_id 318 | 319 | def List(self, pan_user): 320 | device_store = self.device_list.get(pan_user, None) 321 | 322 | if not device_store: 323 | return [] 324 | 325 | device_list = [ 326 | device 327 | for device_list in device_store.values() 328 | for device in device_list.values() 329 | ] 330 | 331 | return device_list 332 | 333 | def ListUserDevices(self, pan_user, user_id): 334 | device_store = self.device_list.get(pan_user, None) 335 | 336 | if not device_store: 337 | return [] 338 | 339 | device_list = device_store.get(user_id, None) 340 | 341 | if not device_list: 342 | return [] 343 | 344 | return device_list.values() 345 | 346 | def Verify(self, pan_user, user_id, device_id): 347 | message = DeviceVerifyMessage(self.message_id, pan_user, user_id, device_id) 348 | self.queue.put(message) 349 | return message.message_id 350 | 351 | def Unverify(self, pan_user, user_id, device_id): 352 | message = DeviceUnverifyMessage( 353 | self.message_id, pan_user, user_id, device_id 354 | ) 355 | self.queue.put(message) 356 | return message.message_id 357 | 358 | def Blacklist(self, pan_user, user_id, device_id): 359 | message = DeviceBlacklistMessage( 360 | self.message_id, pan_user, user_id, device_id 361 | ) 362 | self.queue.put(message) 363 | return message.message_id 364 | 365 | def Unblacklist(self, pan_user, user_id, device_id): 366 | message = DeviceUnblacklistMessage( 367 | self.message_id, pan_user, user_id, device_id 368 | ) 369 | self.queue.put(message) 370 | return message.message_id 371 | 372 | def StartKeyVerification(self, pan_user, user_id, device_id): 373 | message = StartSasMessage(self.message_id, pan_user, user_id, device_id) 374 | self.queue.put(message) 375 | return message.message_id 376 | 377 | def CancelKeyVerification(self, pan_user, user_id, device_id): 378 | message = CancelSasMessage(self.message_id, pan_user, user_id, device_id) 379 | self.queue.put(message) 380 | return message.message_id 381 | 382 | def ConfirmKeyVerification(self, pan_user, user_id, device_id): 383 | message = ConfirmSasMessage(self.message_id, pan_user, user_id, device_id) 384 | self.queue.put(message) 385 | return message.message_id 386 | 387 | def AcceptKeyVerification(self, pan_user, 
user_id, device_id): 388 | message = AcceptSasMessage(self.message_id, pan_user, user_id, device_id) 389 | self.queue.put(message) 390 | return message.message_id 391 | 392 | def ContinueKeyShare(self, pan_user, user_id, device_id): 393 | message = ContinueKeyShare(self.message_id, pan_user, user_id, device_id) 394 | self.queue.put(message) 395 | return message.message_id 396 | 397 | def CancelKeyShare(self, pan_user, user_id, device_id): 398 | message = CancelKeyShare(self.message_id, pan_user, user_id, device_id) 399 | self.queue.put(message) 400 | return message.message_id 401 | 402 | def update_devices(self, message): 403 | if message.pan_user not in self.device_list: 404 | self.device_list[message.pan_user] = defaultdict(dict) 405 | 406 | device_list = self.device_list.get(message.pan_user) 407 | 408 | for user_devices in message.devices.values(): 409 | for device in user_devices.values(): 410 | if device["deleted"]: 411 | try: 412 | device_list[device["user_id"]].pop(device["device_id"]) 413 | except KeyError: 414 | pass 415 | continue 416 | 417 | device.pop("deleted") 418 | device_list[device["user_id"]][device["device_id"]] = device 419 | 420 | def update_key_requests(self, message): 421 | # type: (KeyRequestMessage) -> None 422 | event = message.event 423 | 424 | if isinstance(event, RoomKeyRequest): 425 | self.key_requests[event.request_id] = event 426 | self.KeyRequest( 427 | message.pan_user, 428 | event.sender, 429 | event.requesting_device_id, 430 | event.request_id, 431 | ) 432 | 433 | elif isinstance(event, RoomKeyRequestCancellation): 434 | self.key_requests.pop(event.request_id, None) 435 | self.KeyRequestCancel( 436 | message.pan_user, 437 | event.sender, 438 | event.requesting_device_id, 439 | event.request_id, 440 | ) 441 | 442 | @attr.s 443 | class GlibT: 444 | receive_queue = attr.ib() 445 | send_queue = attr.ib() 446 | data_dir = attr.ib() 447 | server_list = attr.ib() 448 | config = attr.ib() 449 | 450 | loop = attr.ib(init=False) 451 | dbus_loop = attr.ib(init=False) 452 | store = attr.ib(init=False) 453 | users = attr.ib(init=False) 454 | devices = attr.ib(init=False) 455 | bus = attr.ib(init=False) 456 | control_if = attr.ib(init=False) 457 | device_if = attr.ib(init=False) 458 | notifications = attr.ib(type=bool, default=False, init=False) 459 | 460 | def __attrs_post_init__(self): 461 | self.loop = None 462 | self.dbus_loop = None 463 | 464 | id_counter = IdCounter() 465 | 466 | self.control_if = Control(self.send_queue, self.server_list, id_counter) 467 | self.device_if = Devices(self.send_queue, id_counter) 468 | 469 | self.bus = SessionBus() 470 | self.bus.publish("org.pantalaimon1", self.control_if, self.device_if) 471 | 472 | def unverified_notification(self, message): 473 | notification = notify2.Notification( 474 | "Unverified devices.", 475 | message=( 476 | f"There are unverified devices in the room " 477 | f"{message.room_display_name}." 
478 | ), 479 | ) 480 | notification.set_category("im") 481 | 482 | def send_cb(notification, action_key, user_data): 483 | message = user_data 484 | self.control_if.SendAnyways(message.pan_user, message.room_id) 485 | 486 | def cancel_cb(notification, action_key, user_data): 487 | message = user_data 488 | self.control_if.CancelSending(message.pan_user, message.room_id) 489 | 490 | if "actions" in notify2.get_server_caps(): 491 | notification.add_action("send", "Send anyways", send_cb, message) 492 | notification.add_action("cancel", "Cancel sending", cancel_cb, message) 493 | 494 | notification.show() 495 | 496 | def sas_invite_notification(self, message): 497 | notification = notify2.Notification( 498 | "Key verification invite", 499 | message=( 500 | f"{message.user_id} via {message.device_id} has started " 501 | f"a key verification process." 502 | ), 503 | ) 504 | notification.set_category("im") 505 | 506 | def accept_cb(notification, action_key, user_data): 507 | message = user_data 508 | self.device_if.AcceptKeyVerification( 509 | message.pan_user, message.user_id, message.device_id 510 | ) 511 | 512 | def cancel_cb(notification, action_key, user_data): 513 | message = user_data 514 | self.device_if.CancelKeyVerification( 515 | message.pan_user, message.user_id, message.device_id 516 | ) 517 | 518 | if "actions" in notify2.get_server_caps(): 519 | notification.add_action("accept", "Accept", accept_cb, message) 520 | notification.add_action("cancel", "Cancel", cancel_cb, message) 521 | 522 | notification.show() 523 | 524 | def sas_show_notification(self, message): 525 | emojis = [x[0] for x in message.emoji] 526 | 527 | emoji_str = " ".join(emojis) 528 | 529 | notification = notify2.Notification( 530 | "Short authentication string", 531 | message=( 532 | f"Short authentication string for the key verification of" 533 | f" {message.user_id} via {message.device_id}:\n" 534 | f"{emoji_str}" 535 | ), 536 | ) 537 | notification.set_category("im") 538 | 539 | def confirm_cb(notification, action_key, user_data): 540 | message = user_data 541 | self.device_if.ConfirmKeyVerification( 542 | message.pan_user, message.user_id, message.device_id 543 | ) 544 | 545 | def cancel_cb(notification, action_key, user_data): 546 | message = user_data 547 | self.device_if.CancelKeyVerification( 548 | message.pan_user, message.user_id, message.device_id 549 | ) 550 | 551 | if "actions" in notify2.get_server_caps(): 552 | notification.add_action("confirm", "Confirm", confirm_cb, message) 553 | notification.add_action("cancel", "Cancel", cancel_cb, message) 554 | 555 | notification.show() 556 | 557 | def sas_done_notification(self, message): 558 | notification = notify2.Notification( 559 | "Device successfully verified.", 560 | message=( 561 | f"Device {message.device_id} of user {message.user_id} " 562 | f"successfully verified." 
563 | ), 564 | ) 565 | notification.set_category("im") 566 | notification.show() 567 | 568 | def message_callback(self): 569 | try: 570 | message = self.receive_queue.get_nowait() 571 | except Empty: 572 | return True 573 | 574 | logger.debug(f"UI loop received message {message}") 575 | 576 | if isinstance(message, UpdateDevicesMessage): 577 | self.device_if.update_devices(message) 578 | 579 | elif isinstance(message, UpdateUsersMessage): 580 | self.control_if.update_users(message) 581 | 582 | elif isinstance(message, UnverifiedDevicesSignal): 583 | self.control_if.UnverifiedDevices( 584 | message.pan_user, message.room_id, message.room_display_name 585 | ) 586 | 587 | if self.notifications: 588 | self.unverified_notification(message) 589 | 590 | elif isinstance(message, InviteSasSignal): 591 | self.device_if.VerificationInvite( 592 | message.pan_user, 593 | message.user_id, 594 | message.device_id, 595 | message.transaction_id, 596 | ) 597 | 598 | if self.notifications: 599 | self.sas_invite_notification(message) 600 | 601 | elif isinstance(message, ShowSasSignal): 602 | self.device_if.VerificationString( 603 | message.pan_user, 604 | message.user_id, 605 | message.device_id, 606 | message.transaction_id, 607 | message.emoji, 608 | ) 609 | 610 | if self.notifications: 611 | self.sas_show_notification(message) 612 | 613 | elif isinstance(message, SasDoneSignal): 614 | self.device_if.VerificationDone( 615 | message.pan_user, 616 | message.user_id, 617 | message.device_id, 618 | message.transaction_id, 619 | ) 620 | 621 | if self.notifications: 622 | self.sas_done_notification(message) 623 | 624 | elif isinstance(message, DaemonResponse): 625 | self.control_if.Response( 626 | message.message_id, 627 | message.pan_user, 628 | {"code": message.code, "message": message.message}, 629 | ) 630 | 631 | elif isinstance(message, KeyRequestMessage): 632 | self.device_if.update_key_requests(message) 633 | 634 | self.receive_queue.task_done() 635 | return True 636 | 637 | def run(self): 638 | self.dbus_loop = DBusGMainLoop() 639 | self.loop = GLib.MainLoop() 640 | 641 | if self.config.notifications: 642 | try: 643 | notify2.init("pantalaimon", mainloop=self.dbus_loop) 644 | self.notifications = True 645 | except dbus.DBusException: 646 | logger.error( 647 | "Notifications are enabled but no notification " 648 | "server could be found, disabling notifications." 
649 | ) 650 | self.notifications = False 651 | 652 | GLib.timeout_add(100, self.message_callback) 653 | 654 | if not self.loop: 655 | return 656 | 657 | self.loop.run() 658 | 659 | def stop(self): 660 | if self.loop: 661 | self.loop.quit() 662 | self.loop = None 663 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | from setuptools import find_packages, setup 4 | 5 | with open("README.md", encoding="utf-8") as f: 6 | long_description = f.read() 7 | 8 | setup( 9 | name="pantalaimon", 10 | version="0.10.5", 11 | url="https://github.com/matrix-org/pantalaimon", 12 | author="The Matrix.org Team", 13 | author_email="poljar@termina.org.uk", 14 | description=("A Matrix proxy daemon that adds E2E encryption " "capabilities."), 15 | long_description=long_description, 16 | long_description_content_type="text/markdown", 17 | license="Apache License, Version 2.0", 18 | packages=find_packages(), 19 | install_requires=[ 20 | "attrs >= 19.3.0", 21 | "aiohttp >= 3.6, < 4.0", 22 | "platformdirs >= 4.3.6", 23 | "click >= 7.1.2", 24 | "keyring >= 21.2.1", 25 | "logbook >= 1.5.3", 26 | "peewee >= 3.13.1", 27 | "janus >= 0.5", 28 | "cachetools >= 3.0.0", 29 | "prompt_toolkit > 2, < 4", 30 | "typing;python_version<'3.5'", 31 | "matrix-nio[e2e] >= 0.24, < 0.25.2", 32 | ], 33 | extras_require={ 34 | "ui": [ 35 | "dbus-python >= 1.2, < 1.3", 36 | "PyGObject >= 3.46, < 3.50", 37 | "pydbus >= 0.6, < 0.7", 38 | "notify2 >= 0.3, < 0.4", 39 | ] 40 | }, 41 | entry_points={ 42 | "console_scripts": [ 43 | "pantalaimon=pantalaimon.main:main", 44 | "panctl=pantalaimon.panctl:main", 45 | ], 46 | }, 47 | zip_safe=False, 48 | ) 49 | -------------------------------------------------------------------------------- /test-requirements.txt: -------------------------------------------------------------------------------- 1 | pytest 2 | pytest-flake8 3 | pytest-isort 4 | pytest-cov 5 | faker 6 | aiohttp 7 | pytest-aiohttp 8 | aioresponses 9 | -------------------------------------------------------------------------------- /tests/conftest.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import shutil 3 | import tempfile 4 | from random import choices 5 | from string import ascii_letters, ascii_uppercase, digits 6 | from urllib.parse import urlparse 7 | 8 | import janus 9 | import pytest 10 | from aiohttp import web 11 | from aioresponses import aioresponses 12 | from faker import Faker 13 | from faker.providers import BaseProvider 14 | from nio.crypto import OlmAccount, OlmDevice 15 | from nio.store import SqliteStore 16 | 17 | from pantalaimon.config import ServerConfig 18 | from pantalaimon.daemon import ProxyDaemon 19 | from pantalaimon.store import ClientInfo, PanStore 20 | 21 | faker = Faker() 22 | 23 | 24 | class Provider(BaseProvider): 25 | def mx_id(self): 26 | return "@{}:{}".format(faker.user_name(), faker.hostname()) 27 | 28 | def device_id(self): 29 | return "".join(choices(ascii_uppercase, k=10)) 30 | 31 | def access_token(self): 32 | return "MDA" + "".join(choices(digits + ascii_letters, k=272)) 33 | 34 | def client(self): 35 | return ClientInfo(faker.mx_id(), faker.access_token()) 36 | 37 | def avatar_url(self): 38 | return "mxc://{}/{}#auto".format( 39 | faker.hostname(), "".join(choices(ascii_letters) for i in range(24)) 40 | ) 41 | 42 | def olm_key_pair(self): 43 | return OlmAccount().identity_keys 44 
| 45 | def olm_device(self): 46 | user_id = faker.mx_id() 47 | device_id = faker.device_id() 48 | key_pair = faker.olm_key_pair() 49 | 50 | return OlmDevice( 51 | user_id, 52 | device_id, 53 | key_pair, 54 | ) 55 | 56 | 57 | faker.add_provider(Provider) 58 | 59 | 60 | @pytest.fixture 61 | def access_token(): 62 | return faker.access_token() 63 | 64 | 65 | @pytest.fixture 66 | def client(): 67 | return faker.client() 68 | 69 | 70 | @pytest.fixture 71 | def tempdir(): 72 | newpath = tempfile.mkdtemp() 73 | yield newpath 74 | shutil.rmtree(newpath) 75 | 76 | 77 | @pytest.fixture 78 | def panstore(tempdir): 79 | for _ in range(10): 80 | store = SqliteStore(faker.mx_id(), faker.device_id(), tempdir, "", "pan.db") 81 | account = OlmAccount() 82 | store.save_account(account) 83 | 84 | store = PanStore(tempdir, "pan.db") 85 | return store 86 | 87 | 88 | @pytest.fixture 89 | def panstore_with_users(panstore): 90 | accounts = panstore.load_all_users() 91 | user_id, device_id = accounts[0] 92 | server = "example" 93 | 94 | panstore.save_server_user(server, user_id) 95 | 96 | server2 = "localhost" 97 | user_id2, device_id2 = accounts[1] 98 | panstore.save_server_user(server2, user_id2) 99 | 100 | return panstore 101 | 102 | 103 | @pytest.fixture 104 | async def pan_proxy_server(tempdir, aiohttp_server): 105 | loop = asyncio.get_event_loop() 106 | app = web.Application() 107 | 108 | server_name = faker.hostname() 109 | 110 | config = ServerConfig(server_name, urlparse("https://example.org"), keyring=False) 111 | 112 | pan_queue = janus.Queue() 113 | ui_queue = janus.Queue() 114 | 115 | proxy = ProxyDaemon( 116 | config.name, 117 | config.homeserver, 118 | config, 119 | tempdir, 120 | send_queue=pan_queue.async_q, 121 | recv_queue=ui_queue.async_q, 122 | proxy=None, 123 | ssl=False, 124 | client_store_class=SqliteStore, 125 | ) 126 | 127 | app.add_routes( 128 | [ 129 | web.post("/_matrix/client/r0/login", proxy.login), 130 | web.get("/_matrix/client/r0/sync", proxy.sync), 131 | web.get("/_matrix/client/r0/rooms/{room_id}/messages", proxy.messages), 132 | web.put( 133 | r"/_matrix/client/r0/rooms/{room_id}/send/{event_type}/{txnid}", 134 | proxy.send_message, 135 | ), 136 | web.post("/_matrix/client/r0/user/{user_id}/filter", proxy.filter), 137 | web.post("/_matrix/client/r0/search", proxy.search), 138 | web.options("/_matrix/client/r0/search", proxy.search_opts), 139 | ] 140 | ) 141 | 142 | server = await aiohttp_server(app) 143 | 144 | yield server, proxy, (pan_queue, ui_queue) 145 | 146 | await proxy.shutdown(app) 147 | 148 | 149 | @pytest.fixture 150 | async def running_proxy(pan_proxy_server, aioresponse, aiohttp_client): 151 | server, proxy, queues = pan_proxy_server 152 | 153 | login_response = { 154 | "access_token": "abc123", 155 | "device_id": "GHTYAJCE", 156 | "home_server": "example.org", 157 | "user_id": "@example:example.org", 158 | } 159 | 160 | aioclient = await aiohttp_client(server) 161 | 162 | aioresponse.post( 163 | "https://example.org/_matrix/client/r0/login", 164 | status=200, 165 | payload=login_response, 166 | repeat=True, 167 | ) 168 | 169 | await aioclient.post( 170 | "/_matrix/client/r0/login", 171 | json={ 172 | "type": "m.login.password", 173 | "user": "example", 174 | "password": "wordpass", 175 | }, 176 | ) 177 | 178 | yield server, aioclient, proxy, queues 179 | 180 | 181 | @pytest.fixture 182 | def aioresponse(): 183 | with aioresponses(passthrough=["http://127.0.0.1"]) as m: 184 | yield m 185 | 
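The custom Faker provider defined at the top of this conftest is what gives the test suite its Matrix-flavoured identities. Below is a minimal, hypothetical usage sketch (not part of the repository) that only exercises the provider methods shown above, using the same plain "from conftest import faker" import style that proxy_test.py uses further down:

def test_identity_provider_shapes():
    # Illustrative only: checks the shapes produced by the custom provider.
    from conftest import faker

    user_id = faker.mx_id()
    device_id = faker.device_id()
    token = faker.access_token()

    # mx_id() builds "@{username}:{hostname}" strings.
    assert user_id.startswith("@") and ":" in user_id
    # device_id() is ten random uppercase letters.
    assert len(device_id) == 10 and device_id.isupper()
    # access_token() always starts with the fixed "MDA" prefix.
    assert token.startswith("MDA")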
-------------------------------------------------------------------------------- /tests/data/sync.json: -------------------------------------------------------------------------------- 1 | { 2 | "device_one_time_keys_count": {}, 3 | "next_batch": "s526_47314_0_7_1_1_1_11444_1", 4 | "device_lists": { 5 | "changed": [ 6 | "@example:example.org" 7 | ], 8 | "left": [] 9 | }, 10 | 11 | "rooms": { 12 | "invite": {}, 13 | "join": { 14 | "!SVkFJHzfwvuaIEawgC:localhost": { 15 | "account_data": { 16 | "events": [] 17 | }, 18 | "ephemeral": { 19 | "events": [ 20 | { 21 | "content": { 22 | "$151680659217152dPKjd:localhost": { 23 | "m.read": { 24 | "@example:localhost": { 25 | "ts": 1516809890615 26 | } 27 | } 28 | } 29 | }, 30 | "type": "m.receipt" 31 | } 32 | ] 33 | }, 34 | "state": { 35 | "events": [ 36 | { 37 | "content": { 38 | "join_rule": "public" 39 | }, 40 | "event_id": "$15139375514WsgmR:localhost", 41 | "origin_server_ts": 1513937551539, 42 | "sender": "@example:localhost", 43 | "state_key": "", 44 | "type": "m.room.join_rules", 45 | "unsigned": { 46 | "age": 7034220355 47 | } 48 | }, 49 | { 50 | "content": { 51 | "avatar_url": null, 52 | "displayname": "example", 53 | "membership": "join" 54 | }, 55 | "event_id": "$151800140517rfvjc:localhost", 56 | "membership": "join", 57 | "origin_server_ts": 1518001405556, 58 | "sender": "@example:localhost", 59 | "state_key": "@example:localhost", 60 | "type": "m.room.member", 61 | "unsigned": { 62 | "age": 2970366338, 63 | "replaces_state": "$151800111315tsynI:localhost" 64 | } 65 | }, 66 | { 67 | "content": { 68 | "history_visibility": "shared" 69 | }, 70 | "event_id": "$15139375515VaJEY:localhost", 71 | "origin_server_ts": 1513937551613, 72 | "sender": "@example:localhost", 73 | "state_key": "", 74 | "type": "m.room.history_visibility", 75 | "unsigned": { 76 | "age": 7034220281 77 | } 78 | }, 79 | { 80 | "content": { 81 | "creator": "@example:localhost" 82 | }, 83 | "event_id": "$15139375510KUZHi:localhost", 84 | "origin_server_ts": 1513937551203, 85 | "sender": "@example:localhost", 86 | "state_key": "", 87 | "type": "m.room.create", 88 | "unsigned": { 89 | "age": 7034220691 90 | } 91 | }, 92 | { 93 | "content": { 94 | "aliases": [ 95 | "#tutorial:localhost" 96 | ] 97 | }, 98 | "event_id": "$15139375516NUgtD:localhost", 99 | "origin_server_ts": 1513937551720, 100 | "sender": "@example:localhost", 101 | "state_key": "localhost", 102 | "type": "m.room.aliases", 103 | "unsigned": { 104 | "age": 7034220174 105 | } 106 | }, 107 | { 108 | "content": { 109 | "topic": "\ud83d\ude00" 110 | }, 111 | "event_id": "$151957878228ssqrJ:localhost", 112 | "origin_server_ts": 1519578782185, 113 | "sender": "@example:localhost", 114 | "state_key": "", 115 | "type": "m.room.topic", 116 | "unsigned": { 117 | "age": 1392989709, 118 | "prev_content": { 119 | "topic": "test" 120 | }, 121 | "prev_sender": "@example:localhost", 122 | "replaces_state": "$151957069225EVYKm:localhost" 123 | } 124 | }, 125 | { 126 | "content": { 127 | "ban": 50, 128 | "events": { 129 | "m.room.avatar": 50, 130 | "m.room.canonical_alias": 50, 131 | "m.room.history_visibility": 100, 132 | "m.room.name": 50, 133 | "m.room.power_levels": 100 134 | }, 135 | "events_default": 0, 136 | "invite": 0, 137 | "kick": 50, 138 | "redact": 50, 139 | "state_default": 50, 140 | "users": { 141 | "@example:localhost": 100 142 | }, 143 | "users_default": 0 144 | }, 145 | "event_id": "$15139375512JaHAW:localhost", 146 | "origin_server_ts": 1513937551359, 147 | "sender": "@example:localhost", 148 | "state_key": "", 
149 | "type": "m.room.power_levels", 150 | "unsigned": { 151 | "age": 7034220535 152 | } 153 | }, 154 | { 155 | "content": { 156 | "alias": "#tutorial:localhost" 157 | }, 158 | "event_id": "$15139375513VdeRF:localhost", 159 | "origin_server_ts": 1513937551461, 160 | "sender": "@example:localhost", 161 | "state_key": "", 162 | "type": "m.room.canonical_alias", 163 | "unsigned": { 164 | "age": 7034220433 165 | } 166 | }, 167 | { 168 | "content": { 169 | "avatar_url": null, 170 | "displayname": "example2", 171 | "membership": "join" 172 | }, 173 | "event_id": "$152034824468gOeNB:localhost", 174 | "membership": "join", 175 | "origin_server_ts": 1520348244605, 176 | "sender": "@example2:localhost", 177 | "state_key": "@example2:localhost", 178 | "type": "m.room.member", 179 | "unsigned": { 180 | "age": 623527289, 181 | "prev_content": { 182 | "membership": "leave" 183 | }, 184 | "prev_sender": "@example:localhost", 185 | "replaces_state": "$152034819067QWJxM:localhost" 186 | } 187 | } 188 | ] 189 | }, 190 | "timeline": { 191 | "events": [ 192 | { 193 | "content": { 194 | "body": "baba", 195 | "format": "org.matrix.custom.html", 196 | "formatted_body": "baba", 197 | "msgtype": "m.text" 198 | }, 199 | "event_id": "$152037280074GZeOm:localhost", 200 | "origin_server_ts": 1520372800469, 201 | "sender": "@example:localhost", 202 | "type": "m.room.message", 203 | "unsigned": { 204 | "age": 598971425 205 | } 206 | } 207 | ], 208 | "limited": true, 209 | "prev_batch": "t392-516_47314_0_7_1_1_1_11444_1" 210 | }, 211 | "unread_notifications": { 212 | "highlight_count": 0, 213 | "notification_count": 11 214 | } 215 | } 216 | }, 217 | "leave": {} 218 | }, 219 | "to_device": { 220 | "events": [] 221 | } 222 | } 223 | -------------------------------------------------------------------------------- /tests/pan_client_test.py: -------------------------------------------------------------------------------- 1 | import os 2 | import re 3 | 4 | import janus 5 | import pytest 6 | from nio import ( 7 | LoginResponse, 8 | KeysQueryResponse, 9 | KeysUploadResponse, 10 | SyncResponse, 11 | ) 12 | from nio.crypto import Olm, OlmDevice 13 | from nio.store import SqliteMemoryStore 14 | from nio.store import SqliteStore 15 | 16 | from pantalaimon.client import PanClient 17 | from pantalaimon.config import ServerConfig 18 | from pantalaimon.store import PanStore 19 | from pantalaimon.index import INDEXING_ENABLED 20 | 21 | TEST_ROOM_ID = "!SVkFJHzfwvuaIEawgC:localhost" 22 | TEST_ROOM2 = "!testroom:localhost" 23 | 24 | ALICE_ID = "@alice:example.org" 25 | 26 | 27 | @pytest.fixture 28 | async def client(tmpdir): 29 | store = PanStore(tmpdir) 30 | queue = janus.Queue() 31 | conf = ServerConfig("example", "https://example.org") 32 | conf.history_fetch_delay = 0.1 33 | 34 | store.save_server_user("example", "@example:example.org") 35 | 36 | pan_client = PanClient( 37 | "example", 38 | store, 39 | conf, 40 | "https://example.org", 41 | queue.async_q, 42 | "@example:example.org", 43 | "DEVICEID", 44 | tmpdir, 45 | store_class=SqliteStore, 46 | ) 47 | 48 | yield pan_client 49 | 50 | await pan_client.close() 51 | 52 | 53 | class TestClass(object): 54 | @property 55 | def login_response(self): 56 | return LoginResponse.from_dict( 57 | { 58 | "access_token": "abc123", 59 | "device_id": "DEVICEID", 60 | "home_server": "example.org", 61 | "user_id": "@example:example.org", 62 | } 63 | ) 64 | 65 | @property 66 | def initial_sync_response(self): 67 | return { 68 | "device_one_time_keys_count": {}, 69 | "next_batch": 
"s526_47314_0_7_1_1_1_11444_1", 70 | "device_lists": {"changed": ["@example:example.org"], "left": []}, 71 | "rooms": { 72 | "invite": {}, 73 | "join": { 74 | "!SVkFJHzfwvuaIEawgC:localhost": { 75 | "account_data": {"events": []}, 76 | "ephemeral": {"events": []}, 77 | "state": { 78 | "events": [ 79 | { 80 | "content": { 81 | "avatar_url": None, 82 | "displayname": "example", 83 | "membership": "join", 84 | }, 85 | "event_id": "$151800140517rfvjc:localhost", 86 | "membership": "join", 87 | "origin_server_ts": 1518001405556, 88 | "sender": "@example:localhost", 89 | "state_key": "@example:localhost", 90 | "type": "m.room.member", 91 | "unsigned": { 92 | "age": 2970366338, 93 | "replaces_state": "$151800111315tsynI:localhost", 94 | }, 95 | }, 96 | { 97 | "content": {"history_visibility": "shared"}, 98 | "event_id": "$15139375515VaJEY:localhost", 99 | "origin_server_ts": 1513937551613, 100 | "sender": "@example:localhost", 101 | "state_key": "", 102 | "type": "m.room.history_visibility", 103 | "unsigned": {"age": 7034220281}, 104 | }, 105 | { 106 | "content": {"creator": "@example:localhost"}, 107 | "event_id": "$15139375510KUZHi:localhost", 108 | "origin_server_ts": 1513937551203, 109 | "sender": "@example:localhost", 110 | "state_key": "", 111 | "type": "m.room.create", 112 | "unsigned": {"age": 7034220691}, 113 | }, 114 | { 115 | "content": {"aliases": ["#tutorial:localhost"]}, 116 | "event_id": "$15139375516NUgtD:localhost", 117 | "origin_server_ts": 1513937551720, 118 | "sender": "@example:localhost", 119 | "state_key": "localhost", 120 | "type": "m.room.aliases", 121 | "unsigned": {"age": 7034220174}, 122 | }, 123 | { 124 | "content": {"topic": "\ud83d\ude00"}, 125 | "event_id": "$151957878228ssqrJ:localhost", 126 | "origin_server_ts": 1519578782185, 127 | "sender": "@example:localhost", 128 | "state_key": "", 129 | "type": "m.room.topic", 130 | "unsigned": { 131 | "age": 1392989709, 132 | "prev_content": {"topic": "test"}, 133 | "prev_sender": "@example:localhost", 134 | "replaces_state": "$151957069225EVYKm:localhost", 135 | }, 136 | }, 137 | { 138 | "content": { 139 | "ban": 50, 140 | "events": { 141 | "m.room.avatar": 50, 142 | "m.room.canonical_alias": 50, 143 | "m.room.history_visibility": 100, 144 | "m.room.name": 50, 145 | "m.room.power_levels": 100, 146 | }, 147 | "events_default": 0, 148 | "invite": 0, 149 | "kick": 50, 150 | "redact": 50, 151 | "state_default": 50, 152 | "users": {"@example:localhost": 100}, 153 | "users_default": 0, 154 | }, 155 | "event_id": "$15139375512JaHAW:localhost", 156 | "origin_server_ts": 1513937551359, 157 | "sender": "@example:localhost", 158 | "state_key": "", 159 | "type": "m.room.power_levels", 160 | "unsigned": {"age": 7034220535}, 161 | }, 162 | { 163 | "content": {"alias": "#tutorial:localhost"}, 164 | "event_id": "$15139375513VdeRF:localhost", 165 | "origin_server_ts": 1513937551461, 166 | "sender": "@example:localhost", 167 | "state_key": "", 168 | "type": "m.room.canonical_alias", 169 | "unsigned": {"age": 7034220433}, 170 | }, 171 | { 172 | "content": { 173 | "avatar_url": None, 174 | "displayname": "example2", 175 | "membership": "join", 176 | }, 177 | "event_id": "$152034824468gOeNB:localhost", 178 | "membership": "join", 179 | "origin_server_ts": 1520348244605, 180 | "sender": "@example2:localhost", 181 | "state_key": "@example2:localhost", 182 | "type": "m.room.member", 183 | "unsigned": { 184 | "age": 623527289, 185 | "prev_content": {"membership": "leave"}, 186 | "prev_sender": "@example:localhost", 187 | "replaces_state": 
"$152034819067QWJxM:localhost", 188 | }, 189 | }, 190 | { 191 | "content": { 192 | "algorithm": "m.megolm.v1.aes-sha2", 193 | "rotation_period_ms": 604800000, 194 | "rotation_period_msgs": 100, 195 | }, 196 | "event_id": "$143273582443PhrSn:example.org", 197 | "origin_server_ts": 1432735824653, 198 | "room_id": "!jEsUZKDJdhlrceRyVU:example.org", 199 | "sender": "@example:example.org", 200 | "state_key": "", 201 | "type": "m.room.encryption", 202 | "unsigned": {"age": 1234}, 203 | }, 204 | ] 205 | }, 206 | "timeline": { 207 | "events": [ 208 | { 209 | "content": { 210 | "body": "baba", 211 | "format": "org.matrix.custom.html", 212 | "formatted_body": "baba", 213 | "msgtype": "m.text", 214 | }, 215 | "event_id": "$152037280074GZeOm:localhost", 216 | "origin_server_ts": 1520372800469, 217 | "sender": "@example:localhost", 218 | "type": "m.room.message", 219 | "unsigned": {"age": 598971425}, 220 | } 221 | ], 222 | "limited": True, 223 | "prev_batch": "t392-516_47314_0_7_1_1_1_11444_1", 224 | }, 225 | "unread_notifications": { 226 | "highlight_count": 0, 227 | "notification_count": 11, 228 | }, 229 | } 230 | }, 231 | "leave": {}, 232 | }, 233 | "to_device": {"events": []}, 234 | } 235 | 236 | @property 237 | def keys_upload_response(self): 238 | return {"one_time_key_counts": {"curve25519": 10, "signed_curve25519": 20}} 239 | 240 | @property 241 | def keys_query_response(self): 242 | return { 243 | "device_keys": { 244 | "@alice:example.org": { 245 | "JLAFKJWSCS": { 246 | "algorithms": [ 247 | "m.olm.v1.curve25519-aes-sha2", 248 | "m.megolm.v1.aes-sha2", 249 | ], 250 | "device_id": "JLAFKJWSCS", 251 | "user_id": "@alice:example.org", 252 | "keys": { 253 | "curve25519:JLAFKJWSCS": "wjLpTLRqbqBzLs63aYaEv2Boi6cFEbbM/sSRQ2oAKk4", 254 | "ed25519:JLAFKJWSCS": "nE6W2fCblxDcOFmeEtCHNl8/l8bXcu7GKyAswA4r3mM", 255 | }, 256 | "signatures": { 257 | "@alice:example.org": { 258 | "ed25519:JLAFKJWSCS": "m53Wkbh2HXkc3vFApZvCrfXcX3AI51GsDHustMhKwlv3TuOJMj4wistcOTM8q2+e/Ro7rWFUb9ZfnNbwptSUBA" 259 | } 260 | }, 261 | } 262 | } 263 | }, 264 | "failures": {}, 265 | } 266 | 267 | @property 268 | def empty_sync(self): 269 | return { 270 | "account_data": {"events": []}, 271 | "device_lists": {"changed": [], "left": []}, 272 | "device_one_time_keys_count": {"signed_curve25519": 50}, 273 | "groups": {"invite": {}, "join": {}, "leave": {}}, 274 | "next_batch": "s1059_133339_44_763_246_1_586_12411_1", 275 | "presence": {"events": []}, 276 | "rooms": {"invite": {}, "join": {}, "leave": {}}, 277 | "to_device": {"events": []}, 278 | } 279 | 280 | @property 281 | def messages_response(self): 282 | return { 283 | "chunk": [ 284 | { 285 | "age": 1042, 286 | "content": {"body": "hello world", "msgtype": "m.text"}, 287 | "event_id": "$1444812213350496Caaaa:example.com", 288 | "origin_server_ts": 1444812213737, 289 | "room_id": "!Xq3620DUiqCaoxq:example.com", 290 | "sender": "@alice:example.com", 291 | "type": "m.room.message", 292 | }, 293 | { 294 | "age": 20123, 295 | "content": {"body": "the world is big", "msgtype": "m.text"}, 296 | "event_id": "$1444812213350496Cbbbb:example.com", 297 | "origin_server_ts": 1444812194656, 298 | "room_id": "!Xq3620DUiqCaoxq:example.com", 299 | "sender": "@alice:example.com", 300 | "type": "m.room.message", 301 | }, 302 | { 303 | "age": 50789, 304 | "content": {"name": "New room name"}, 305 | "event_id": "$1444812213350496Ccccc:example.com", 306 | "origin_server_ts": 1444812163990, 307 | "prev_content": {"name": "Old room name"}, 308 | "room_id": "!Xq3620DUiqCaoxq:example.com", 309 | "sender": 
"@alice:example.com", 310 | "state_key": "", 311 | "type": "m.room.name", 312 | }, 313 | ], 314 | "end": "t47409-4357353_219380_26003_2265", 315 | "start": "t47429-4392820_219380_26003_2265", 316 | } 317 | 318 | @property 319 | def empty_messages(self): 320 | return { 321 | "chunk": [], 322 | "end": "t47429-4392820_219380_26003_2277", 323 | "start": "t47409-4357353_219380_26003_2265", 324 | } 325 | 326 | async def test_login(self, client): 327 | await client.receive_response(self.login_response) 328 | assert client.logged_in 329 | 330 | async def test_start_loop(self, client, aioresponse): 331 | sync_url = re.compile( 332 | r"^https://example\.org/_matrix/client/r0/sync\?access_token=.*" 333 | ) 334 | 335 | aioresponse.get( 336 | sync_url, status=200, payload=self.initial_sync_response, repeat=True 337 | ) 338 | 339 | aioresponse.post( 340 | "https://example.org/_matrix/client/r0/keys/upload?access_token=abc123", 341 | status=200, 342 | payload=self.keys_upload_response, 343 | repeat=True, 344 | ) 345 | 346 | aioresponse.post( 347 | "https://example.org/_matrix/client/r0/keys/query?access_token=abc123", 348 | status=200, 349 | payload=self.keys_query_response, 350 | repeat=True, 351 | ) 352 | 353 | await client.receive_response(self.login_response) 354 | 355 | # Set a big history fetch delay so it doesn't consume the fetch tasks. 356 | client.pan_conf.history_fetch_delay = 10 357 | client.start_loop(100) 358 | 359 | # Sync tasks are done after we get a sync event so wait for two of them 360 | await client.synced.wait() 361 | await client.synced.wait() 362 | 363 | # Make sure that we have only a single history fetch task for the 364 | # single room we have 365 | assert not client.history_fetch_queue.empty() 366 | assert client.history_fetch_queue.qsize() == 1 367 | 368 | # Do another round to be sure we don't get more tasks than necessary. 
369 | await client.synced.wait() 370 | assert client.history_fetch_queue.qsize() == 1 371 | 372 | await client.loop_stop() 373 | 374 | async def test_history_fetching_tasks(self, client, aioresponse): 375 | if not INDEXING_ENABLED: 376 | pytest.skip("Indexing needs to be enabled to test this") 377 | 378 | sync_url = re.compile( 379 | r"^https://example\.org/_matrix/client/r0/sync\?access_token=.*" 380 | ) 381 | 382 | aioresponse.get( 383 | sync_url, 384 | status=200, 385 | payload=self.initial_sync_response, 386 | ) 387 | 388 | aioresponse.get(sync_url, status=200, payload=self.empty_sync, repeat=True) 389 | 390 | aioresponse.post( 391 | "https://example.org/_matrix/client/r0/keys/upload?access_token=abc123", 392 | status=200, 393 | payload=self.keys_upload_response, 394 | repeat=True, 395 | ) 396 | 397 | aioresponse.post( 398 | "https://example.org/_matrix/client/r0/keys/query?access_token=abc123", 399 | status=200, 400 | payload=self.keys_query_response, 401 | repeat=True, 402 | ) 403 | 404 | messages_url = re.compile( 405 | r"^https://example\.org/_matrix/client/r0/rooms/{}/messages\?.*".format( 406 | TEST_ROOM_ID 407 | ) 408 | ) 409 | 410 | aioresponse.get(messages_url, status=200, payload=self.messages_response) 411 | 412 | aioresponse.get( 413 | messages_url, status=200, payload=self.empty_messages, repeat=True 414 | ) 415 | 416 | await client.receive_response(self.login_response) 417 | 418 | client.start_loop(100) 419 | 420 | await client.new_fetch_task.wait() 421 | 422 | # Load the currently waiting task 423 | tasks = client.pan_store.load_fetcher_tasks(client.server_name, client.user_id) 424 | assert len(tasks) == 1 425 | 426 | # Check that the task is our prev_batch from the sync response 427 | assert tasks[0].room_id == TEST_ROOM_ID 428 | assert tasks[0].token == "t392-516_47314_0_7_1_1_1_11444_1" 429 | 430 | # Let's wait for the next fetch task 431 | await client.new_fetch_task.wait() 432 | 433 | tasks = client.pan_store.load_fetcher_tasks(client.server_name, client.user_id) 434 | assert len(tasks) == 1 435 | 436 | # Check that the task is our end token from the messages response 437 | assert tasks[0].room_id == TEST_ROOM_ID 438 | assert tasks[0].token == "t47409-4357353_219380_26003_2265" 439 | 440 | # Wait for the next fetch loop iteration. 441 | await client.fetch_loop_event.wait() 442 | 443 | tasks = client.pan_store.load_fetcher_tasks(client.server_name, client.user_id) 444 | # Check that there are no more tasks since we reached the start of the 445 | # room timeline. 
446 | assert not tasks 447 | 448 | await client.loop_stop() 449 | 450 | async def test_history_fetching_resume(self, client, aioresponse): 451 | if not INDEXING_ENABLED: 452 | pytest.skip("Indexing needs to be enabled to test this") 453 | 454 | sync_url = re.compile( 455 | r"^https://example\.org/_matrix/client/r0/sync\?access_token=.*" 456 | ) 457 | 458 | aioresponse.get( 459 | sync_url, 460 | status=200, 461 | payload=self.initial_sync_response, 462 | ) 463 | 464 | aioresponse.get(sync_url, status=200, payload=self.empty_sync, repeat=True) 465 | 466 | aioresponse.post( 467 | "https://example.org/_matrix/client/r0/keys/upload?access_token=abc123", 468 | status=200, 469 | payload=self.keys_upload_response, 470 | repeat=True, 471 | ) 472 | 473 | aioresponse.post( 474 | "https://example.org/_matrix/client/r0/keys/query?access_token=abc123", 475 | status=200, 476 | payload=self.keys_query_response, 477 | repeat=True, 478 | ) 479 | 480 | messages_url = re.compile( 481 | r"^https://example\.org/_matrix/client/r0/rooms/{}/messages\?.*".format( 482 | TEST_ROOM_ID 483 | ) 484 | ) 485 | 486 | aioresponse.get(messages_url, status=200, payload=self.messages_response) 487 | 488 | aioresponse.get( 489 | messages_url, status=200, payload=self.empty_messages, repeat=True 490 | ) 491 | 492 | await client.receive_response(self.login_response) 493 | 494 | client.start_loop(100) 495 | 496 | await client.new_fetch_task.wait() 497 | await client.new_fetch_task.wait() 498 | 499 | await client.loop_stop() 500 | 501 | index_path = os.path.join(client.store_path, client.server_name, client.user_id) 502 | 503 | # Remove the lock file since the GC won't do it for us 504 | writer_lock = os.path.join(index_path, ".tantivy-writer.lock") 505 | os.remove(writer_lock) 506 | 507 | # Create a new client 508 | client2 = PanClient( 509 | client.server_name, 510 | client.pan_store, 511 | client.pan_conf, 512 | client.homeserver, 513 | client.queue, 514 | client.user_id, 515 | client.device_id, 516 | client.store_path, 517 | ) 518 | client2.user_id = client.user_id 519 | client2.access_token = client.access_token 520 | 521 | tasks = client2.pan_store.load_fetcher_tasks( 522 | client2.server_name, client2.user_id 523 | ) 524 | assert len(tasks) == 1 525 | 526 | # Check that the task is our end token from the messages response 527 | assert tasks[0].room_id == TEST_ROOM_ID 528 | assert tasks[0].token == "t47409-4357353_219380_26003_2265" 529 | 530 | client2.start_loop(100) 531 | 532 | # We wait for two events here because the event gets fired at the start 533 | # of the loop 534 | await client2.fetch_loop_event.wait() 535 | await client2.fetch_loop_event.wait() 536 | 537 | tasks = client2.pan_store.load_fetcher_tasks( 538 | client2.server_name, client2.user_id 539 | ) 540 | # Check that there are no more tasks since we reached the start of the 541 | # room timeline. 
542 | assert not tasks 543 | 544 | await client2.loop_stop() 545 | 546 | async def test_room_key_on_client_sync_stream(self, client): 547 | await client.receive_response(self.login_response) 548 | await client.receive_response( 549 | SyncResponse.from_dict(self.initial_sync_response) 550 | ) 551 | await client.receive_response( 552 | KeysUploadResponse.from_dict(self.keys_upload_response) 553 | ) 554 | await client.receive_response( 555 | KeysQueryResponse.from_dict(self.keys_query_response) 556 | ) 557 | 558 | BobId = "@bob:example.org" 559 | Bob_device = "BOBDEVICE" 560 | 561 | bob_olm = Olm(BobId, Bob_device, SqliteMemoryStore("ephemeral", "DEVICEID")) 562 | 563 | alice_device = OlmDevice( 564 | client.user_id, client.device_id, client.olm.account.identity_keys 565 | ) 566 | 567 | bob_device = OlmDevice( 568 | bob_olm.user_id, bob_olm.device_id, bob_olm.account.identity_keys 569 | ) 570 | 571 | client.olm.device_store.add(bob_device) 572 | bob_olm.device_store.add(alice_device) 573 | bob_olm.store.save_device_keys( 574 | {client.user_id: {client.device_id: alice_device}} 575 | ) 576 | 577 | client.olm.account.generate_one_time_keys(1) 578 | one_time = list(client.olm.account.one_time_keys["curve25519"].values())[0] 579 | client.olm.account.mark_keys_as_published() 580 | 581 | bob_olm.create_session(one_time, alice_device.curve25519) 582 | 583 | _, to_device = bob_olm.share_group_session( 584 | TEST_ROOM_ID, [client.user_id], ignore_unverified_devices=True 585 | ) 586 | outbound_session = bob_olm.outbound_group_sessions[TEST_ROOM_ID] 587 | olm_content = to_device["messages"][client.user_id][client.device_id] 588 | 589 | payload = { 590 | "sender": bob_olm.user_id, 591 | "type": "m.room.encrypted", 592 | "content": olm_content, 593 | } 594 | 595 | sync_response = self.empty_sync 596 | sync_response["to_device"]["events"].append(payload) 597 | 598 | session = client.olm.inbound_group_store.get( 599 | TEST_ROOM_ID, bob_device.curve25519, outbound_session.id 600 | ) 601 | assert not session 602 | 603 | client.handle_to_device_from_sync_body(sync_response) 604 | 605 | session = client.olm.inbound_group_store.get( 606 | TEST_ROOM_ID, bob_device.curve25519, outbound_session.id 607 | ) 608 | assert session 609 | -------------------------------------------------------------------------------- /tests/proxy_test.py: -------------------------------------------------------------------------------- 1 | import json 2 | import re 3 | from collections import defaultdict 4 | 5 | from nio.crypto import OlmDevice 6 | 7 | from conftest import faker 8 | from pantalaimon.thread_messages import UpdateDevicesMessage, UpdateUsersMessage 9 | 10 | BOB_ID = "@bob:example.org" 11 | BOB_DEVICE = "AGMTSWVYML" 12 | BOB_CURVE = "T9tOKF+TShsn6mk1zisW2IBsBbTtzDNvw99RBFMJOgI" 13 | BOB_ONETIME = "6QlQw3mGUveS735k/JDaviuoaih5eEi6S1J65iHjfgU" 14 | 15 | 16 | class TestClass(object): 17 | @staticmethod 18 | def _load_response(filename): 19 | with open(filename) as f: 20 | return json.loads(f.read()) 21 | 22 | @property 23 | def login_response(self): 24 | return { 25 | "access_token": "abc123", 26 | "device_id": "GHTYAJCE", 27 | "home_server": "example.org", 28 | "user_id": "@example:example.org", 29 | } 30 | 31 | @property 32 | def sync_response(self): 33 | return self._load_response("tests/data/sync.json") 34 | 35 | @property 36 | def keys_upload_response(self): 37 | return {"one_time_key_counts": {"curve25519": 10, "signed_curve25519": 20}} 38 | 39 | @property 40 | def example_devices(self): 41 | devices = defaultdict(dict) 42 
| 43 | for _ in range(10): 44 | device = faker.olm_device() 45 | devices[device.user_id][device.id] = device 46 | 47 | bob_device = OlmDevice( 48 | BOB_ID, BOB_DEVICE, {"ed25519": BOB_ONETIME, "curve25519": BOB_CURVE} 49 | ) 50 | 51 | devices[BOB_ID][BOB_DEVICE] = bob_device 52 | 53 | return devices 54 | 55 | async def test_daemon_start(self, pan_proxy_server, aiohttp_client, aioresponse): 56 | server, daemon, _ = pan_proxy_server 57 | 58 | client = await aiohttp_client(server) 59 | 60 | aioresponse.post( 61 | "https://example.org/_matrix/client/r0/login", 62 | status=200, 63 | payload=self.login_response, 64 | repeat=True, 65 | ) 66 | 67 | assert not daemon.pan_clients 68 | 69 | resp = await client.post( 70 | "/_matrix/client/r0/login", 71 | json={ 72 | "type": "m.login.password", 73 | "user": "example", 74 | "password": "wordpass", 75 | }, 76 | ) 77 | 78 | assert resp.status == 200 79 | 80 | assert len(daemon.pan_clients) == 1 81 | 82 | pan_client = list(daemon.pan_clients.values())[0] 83 | 84 | # Check if our pan client is logged in 85 | assert pan_client.logged_in 86 | # Check if our pan client has a sync loop started 87 | assert pan_client.task 88 | 89 | async def test_pan_client_sync(self, pan_proxy_server, aiohttp_client, aioresponse): 90 | server, daemon, _ = pan_proxy_server 91 | 92 | client = await aiohttp_client(server) 93 | 94 | aioresponse.post( 95 | "https://example.org/_matrix/client/r0/login", 96 | status=200, 97 | payload=self.login_response, 98 | repeat=True, 99 | ) 100 | 101 | sync_url = re.compile( 102 | r"^https://example\.org/_matrix/client/r0/sync\?access_token=.*" 103 | ) 104 | 105 | aioresponse.get( 106 | sync_url, 107 | status=200, 108 | payload=self.sync_response, 109 | ) 110 | 111 | await client.post( 112 | "/_matrix/client/r0/login", 113 | json={ 114 | "type": "m.login.password", 115 | "user": "example", 116 | "password": "wordpass", 117 | }, 118 | ) 119 | 120 | # Check that the pan client started to sync after logging in. 
121 | pan_client = list(daemon.pan_clients.values())[0] 122 | assert len(pan_client.rooms) == 1 123 | 124 | async def test_pan_client_keys_upload( 125 | self, pan_proxy_server, aiohttp_client, aioresponse 126 | ): 127 | server, daemon, _ = pan_proxy_server 128 | 129 | client = await aiohttp_client(server) 130 | 131 | aioresponse.post( 132 | "https://example.org/_matrix/client/r0/login", 133 | status=200, 134 | payload=self.login_response, 135 | repeat=True, 136 | ) 137 | 138 | sync_url = re.compile( 139 | r"^https://example\.org/_matrix/client/r0/sync\?access_token=.*" 140 | ) 141 | 142 | aioresponse.get( 143 | sync_url, 144 | status=200, 145 | payload=self.sync_response, 146 | ) 147 | 148 | keys_upload_url = re.compile( 149 | r"^https://example\.org/_matrix/client/r0/keys/upload\?.*" 150 | ) 151 | 152 | aioresponse.post( 153 | keys_upload_url, 154 | status=200, 155 | payload=self.keys_upload_response, 156 | ) 157 | 158 | await client.post( 159 | "/_matrix/client/r0/login", 160 | json={ 161 | "type": "m.login.password", 162 | "user": "example", 163 | "password": "wordpass", 164 | }, 165 | ) 166 | 167 | pan_client = list(daemon.pan_clients.values())[0] 168 | 169 | assert pan_client.olm.account.shared 170 | 171 | async def test_server_users_update(self, running_proxy): 172 | _, _, _, queues = running_proxy 173 | queue, _ = queues 174 | queue = queue.sync_q 175 | 176 | message = queue.get_nowait() 177 | 178 | assert isinstance(message, UpdateUsersMessage) 179 | 180 | assert message.user_id == "@example:example.org" 181 | assert message.device_id == "GHTYAJCE" 182 | 183 | async def tests_server_devices_update(self, running_proxy): 184 | _, _, proxy, queues = running_proxy 185 | queue, _ = queues 186 | queue = queue.sync_q 187 | 188 | devices = self.example_devices 189 | bob_device = devices[BOB_ID][BOB_DEVICE] 190 | 191 | message = queue.get_nowait() 192 | assert isinstance(message, UpdateUsersMessage) 193 | 194 | client = list(proxy.pan_clients.values())[0] 195 | client.store.save_device_keys(devices) 196 | 197 | await client.send_update_device(bob_device) 198 | 199 | message = queue.get_nowait() 200 | assert isinstance(message, UpdateDevicesMessage) 201 | 202 | assert BOB_DEVICE in message.devices[BOB_ID] 203 | -------------------------------------------------------------------------------- /tests/store_test.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import pprint 3 | import pytest 4 | 5 | from nio import RoomMessage, RoomEncryptedMedia 6 | 7 | from urllib.parse import urlparse 8 | from pantalaimon.index import INDEXING_ENABLED 9 | from pantalaimon.store import FetchTask, MediaInfo, UploadInfo 10 | 11 | TEST_ROOM = "!SVkFJHzfwvuaIEawgC:localhost" 12 | TEST_ROOM2 = "!testroom:localhost" 13 | 14 | 15 | class TestClass(object): 16 | @property 17 | def test_event(self): 18 | return RoomMessage.parse_event( 19 | { 20 | "content": {"body": "Test message", "msgtype": "m.text"}, 21 | "event_id": "$15163622445EBvZJ:localhost", 22 | "origin_server_ts": 1516362244026, 23 | "room_id": "!SVkFJHzfwvuaIEawgC:localhost", 24 | "sender": "@example2:localhost", 25 | "type": "m.room.message", 26 | "unsigned": {"age": 43289803095}, 27 | "user_id": "@example2:localhost", 28 | "age": 43289803095, 29 | } 30 | ) 31 | 32 | @property 33 | def another_event(self): 34 | return RoomMessage.parse_event( 35 | { 36 | "content": {"body": "Another message", "msgtype": "m.text"}, 37 | "event_id": "$15163622445EBvZK:localhost", 38 | "origin_server_ts": 
1516362244030, 39 | "room_id": "!SVkFJHzfwvuaIEawgC:localhost", 40 | "sender": "@example2:localhost", 41 | "type": "m.room.message", 42 | "unsigned": {"age": 43289803095}, 43 | "user_id": "@example2:localhost", 44 | "age": 43289803095, 45 | } 46 | ) 47 | 48 | @property 49 | def encrypted_media_event(self): 50 | return RoomEncryptedMedia.from_dict( 51 | { 52 | "room_id": "!testroom:localhost", 53 | "event_id": "$15163622445EBvZK:localhost", 54 | "origin_server_ts": 1516362244030, 55 | "sender": "@example2:localhost", 56 | "type": "m.room.message", 57 | "content": { 58 | "body": "orange_cat.jpg", 59 | "msgtype": "m.image", 60 | "file": { 61 | "v": "v2", 62 | "key": { 63 | "alg": "A256CTR", 64 | "ext": True, 65 | "k": "yx0QvkgYlasdWEsdalkejaHBzCkKEBAp3tB7dGtWgrs", 66 | "key_ops": ["encrypt", "decrypt"], 67 | "kty": "oct", 68 | }, 69 | "iv": "0pglXX7fspIBBBBAEERLFd", 70 | "hashes": { 71 | "sha256": "eXRDFvh+aXsQRj8a+5ZVVWUQ9Y6u9DYiz4tq1NvbLu8" 72 | }, 73 | "url": "mxc://localhost/maDtasSiPFjROFMnlwxIhhyW", 74 | "mimetype": "image/jpeg", 75 | }, 76 | }, 77 | } 78 | ) 79 | 80 | def test_account_loading(self, panstore): 81 | accounts = panstore.load_all_users() 82 | assert len(accounts) == 10 83 | 84 | def test_token_saving(self, panstore, access_token): 85 | accounts = panstore.load_all_users() 86 | user_id = accounts[0][0] 87 | device_id = accounts[0][1] 88 | 89 | panstore.save_access_token(user_id, device_id, access_token) 90 | 91 | token = panstore.load_access_token(user_id, device_id) 92 | assert access_token == token 93 | 94 | def test_token_storing(self, panstore_with_users): 95 | panstore = panstore_with_users 96 | accounts = panstore.load_all_users() 97 | user, _ = accounts[0] 98 | 99 | assert not panstore.load_token("example", user) 100 | panstore.save_token("example", user, "abc123") 101 | 102 | assert panstore.load_token("example", user) == "abc123" 103 | 104 | def test_fetcher_tasks(self, panstore_with_users): 105 | panstore = panstore_with_users 106 | accounts = panstore.load_all_users() 107 | user, _ = accounts[0] 108 | 109 | task = FetchTask(TEST_ROOM, "abc1234") 110 | task2 = FetchTask(TEST_ROOM2, "abc1234") 111 | 112 | assert not panstore.load_fetcher_tasks("example", user) 113 | 114 | panstore.save_fetcher_task("example", user, task) 115 | panstore.save_fetcher_task("example", user, task2) 116 | 117 | tasks = panstore.load_fetcher_tasks("example", user) 118 | 119 | assert task in tasks 120 | assert task2 in tasks 121 | 122 | panstore.delete_fetcher_task("example", user, task) 123 | tasks = panstore.load_fetcher_tasks("example", user) 124 | 125 | assert task not in tasks 126 | assert task2 in tasks 127 | 128 | async def test_new_indexstore(self, tempdir): 129 | if not INDEXING_ENABLED: 130 | pytest.skip("Indexing needs to be enabled to test this") 131 | 132 | from pantalaimon.index import IndexStore 133 | 134 | loop = asyncio.get_event_loop() 135 | 136 | store = IndexStore("example", tempdir) 137 | 138 | store.add_event(self.test_event, TEST_ROOM, None, None) 139 | store.add_event(self.another_event, TEST_ROOM, None, None) 140 | await store.commit_events() 141 | 142 | assert store.event_in_store(self.test_event.event_id, TEST_ROOM) 143 | assert not store.event_in_store("FAKE", TEST_ROOM) 144 | 145 | result = await store.search("test", TEST_ROOM, after_limit=10, before_limit=10) 146 | pprint.pprint(result) 147 | 148 | assert len(result["results"]) == 1 149 | assert result["count"] == 1 150 | assert result["results"][0]["result"] == self.test_event.source 151 | assert ( 152 |
result["results"][0]["context"]["events_after"][0] 153 | == self.another_event.source 154 | ) 155 | 156 | def test_media_storage(self, panstore): 157 | server_name = "test" 158 | media_cache = panstore.load_media_cache(server_name) 159 | assert not media_cache 160 | 161 | event = self.encrypted_media_event 162 | 163 | mxc = urlparse(event.url) 164 | 165 | assert mxc 166 | 167 | mxc_server = mxc.netloc 168 | mxc_path = mxc.path 169 | 170 | assert not panstore.load_media(server_name, mxc_server, mxc_path) 171 | 172 | media = MediaInfo(mxc_server, mxc_path, event.key, event.iv, event.hashes) 173 | 174 | panstore.save_media(server_name, media) 175 | 176 | media_cache = panstore.load_media_cache(server_name) 177 | 178 | assert (mxc_server, mxc_path) in media_cache 179 | media_info = media_cache[(mxc_server, mxc_path)] 180 | assert media_info == media 181 | assert media_info == panstore.load_media(server_name, mxc_server, mxc_path) 182 | 183 | def test_upload_storage(self, panstore): 184 | server_name = "test" 185 | upload_cache = panstore.load_upload(server_name) 186 | assert not upload_cache 187 | 188 | filename = "orange_cat.jpg" 189 | mimetype = "image/jpeg" 190 | event = self.encrypted_media_event 191 | 192 | assert not panstore.load_upload(server_name, event.url) 193 | 194 | upload = UploadInfo(event.url, filename, mimetype) 195 | 196 | panstore.save_upload(server_name, event.url, filename, mimetype) 197 | 198 | upload_cache = panstore.load_upload(server_name) 199 | 200 | assert (event.url) in upload_cache 201 | upload_info = upload_cache[event.url] 202 | assert upload_info == upload 203 | assert upload_info == panstore.load_upload(server_name, event.url) 204 | -------------------------------------------------------------------------------- /tox.ini: -------------------------------------------------------------------------------- 1 | [tox] 2 | envlist = coverage 3 | 4 | [testenv] 5 | deps = -rtest-requirements.txt 6 | install_command = pip install {opts} {packages} 7 | 8 | passenv = TOXENV,CI 9 | commands = pytest 10 | 11 | [testenv:coverage] 12 | commands = 13 | pytest --cov=pantalaimon --cov-report term-missing 14 | coverage xml 15 | coverage report --show-missing 16 | codecov -e TOXENV 17 | deps = 18 | -rtest-requirements.txt 19 | coverage 20 | codecov>=1.4.0 21 | setenv = 22 | COVERAGE_FILE=.coverage 23 | --------------------------------------------------------------------------------
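A minimal usage sketch of the fetcher-task store API exercised in tests/store_test.py above. This is illustrative only and not part of the repository: the panstore object is assumed to be an already-constructed pantalaimon.store.PanStore (the tests get it from fixtures in conftest.py), and the server name, user id, room id, and function names below are placeholders chosen for the sketch. Only calls demonstrated by the tests are used: FetchTask(room_id, token), save_fetcher_task, load_fetcher_tasks, and delete_fetcher_task.

from pantalaimon.store import FetchTask

def queue_history_fetch(panstore, server_name, user_id, room_id, prev_batch):
    # Persist a resumable task pointing at the pagination token that still
    # needs to be fetched for this room (mirrors test_fetcher_tasks above).
    task = FetchTask(room_id, prev_batch)
    panstore.save_fetcher_task(server_name, user_id, task)
    return task

def finish_history_fetch(panstore, server_name, user_id, task):
    # Drop the task once the history behind task.token has been fetched, so
    # it is not picked up again by load_fetcher_tasks() on the next start.
    panstore.delete_fetcher_task(server_name, user_id, task)
    return panstore.load_fetcher_tasks(server_name, user_id)

The persisted tasks are what test_history_fetching_resume relies on: a freshly created client loads the remaining tasks from the same store and continues paginating from the saved token after a restart.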