├── .github ├── CODEOWNERS └── workflows │ ├── changelog_check.yml │ ├── docker_check.yml │ ├── docker_push.yml │ ├── pipeline.yml │ ├── triage_incoming.yml │ └── triage_labelled.yml ├── .gitignore ├── CHANGELOG.md ├── CONTRIBUTING.md ├── LICENSE ├── MANIFEST.in ├── README.md ├── RELEASING.md ├── changelog.d ├── .gitignore ├── 397.misc ├── 398.misc ├── 400.docker ├── 402.misc ├── 405.misc ├── 406.misc ├── 408.bugfix ├── 409.docker ├── 410.doc └── 412.misc ├── contrib └── grafana │ ├── README.md │ └── Sygnal.json ├── docker └── Dockerfile ├── docs ├── applications.md └── troubleshooting.md ├── mypy.ini ├── poetry.lock ├── pyproject.toml ├── scripts-dev ├── check_newsfragment.sh ├── lint.sh └── proxy-test │ ├── curl.sh │ ├── docker-compose.yml │ ├── nginx.conf │ ├── notification-gcm.json │ ├── notification-ios.json │ ├── proxy.conf │ └── sygnal.yaml ├── stubs ├── google │ ├── __init__.pyi │ ├── auth │ │ ├── __init__.pyi │ │ ├── _default.pyi │ │ └── transport │ │ │ ├── __init__.pyi │ │ │ └── requests.pyi │ └── oauth2 │ │ ├── __init__.pyi │ │ └── service_account.pyi └── twisted │ ├── __init__.pyi │ └── web │ ├── __init__.pyi │ └── http.pyi ├── sygnal.yaml.sample ├── sygnal ├── __init__.py ├── apnspushkin.py ├── apnstruncate.py ├── exceptions.py ├── gcmpushkin.py ├── helper │ ├── __init__.py │ ├── context_factory.py │ └── proxy │ │ ├── __init__.py │ │ ├── connectproxyclient_twisted.py │ │ ├── proxy_asyncio.py │ │ └── proxyagent_twisted.py ├── http.py ├── notifications.py ├── sygnal.py ├── utils.py └── webpushpushkin.py ├── tests ├── __init__.py ├── asyncio_test_helpers.py ├── test_apns.py ├── test_apnstruncate.py ├── test_concurrency_limit.py ├── test_gcm.py ├── test_http.py ├── test_httpproxy_asyncio.py ├── test_httpproxy_twisted.py ├── test_proxy_url_parsing.py ├── test_pushgateway_api_v1.py ├── testutils.py ├── tls │ ├── ca.crt │ ├── ca.key │ └── server.key └── twisted_test_helpers.py └── tox.ini /.github/CODEOWNERS: 
-------------------------------------------------------------------------------- 1 | # Automatically request reviews from the synapse-core team when a pull request comes in. 2 | * @matrix-org/synapse-core 3 | -------------------------------------------------------------------------------- /.github/workflows/changelog_check.yml: -------------------------------------------------------------------------------- 1 | name: Changelog 2 | on: [pull_request] 3 | 4 | jobs: 5 | check-newsfile: 6 | if: ${{ github.base_ref == 'main' || contains(github.base_ref, 'release-') }} 7 | runs-on: ubuntu-latest 8 | steps: 9 | - uses: actions/checkout@v3 10 | with: 11 | fetch-depth: 0 12 | ref: ${{github.event.pull_request.head.sha}} 13 | - uses: actions/setup-python@v4 14 | with: 15 | python-version: "3.11" 16 | - run: python -m pip install towncrier 17 | - run: "scripts-dev/check_newsfragment.sh ${{ github.event.number }}" 18 | -------------------------------------------------------------------------------- /.github/workflows/docker_check.yml: -------------------------------------------------------------------------------- 1 | # GitHub actions workflow which builds the docker images. 2 | # This is useful as it will run in PRs that change the Dockerfile, and can be 3 | # an early source of warnings that the Dockerfile isn't right. 4 | # This check also triggers when this file itself is modified. 
5 | 6 | name: Check Docker image can be built successfully 7 | 8 | on: 9 | push: 10 | paths: 11 | - 'docker/Dockerfile' 12 | - '.github/workflows/docker_check.yml' 13 | 14 | permissions: 15 | contents: read 16 | 17 | jobs: 18 | build: 19 | runs-on: ubuntu-latest 20 | steps: 21 | - name: Set up QEMU 22 | id: qemu 23 | uses: docker/setup-qemu-action@v1 24 | with: 25 | platforms: arm64 26 | 27 | - name: Set up Docker Buildx 28 | id: buildx 29 | uses: docker/setup-buildx-action@v1 30 | 31 | - name: Inspect builder 32 | run: docker buildx inspect 33 | 34 | # we explicitly check out the repository (and use `context: .` in buildx) 35 | # because we need to preserve the git metadata so that setuptools_scm 36 | # (part of build system config in pyproject.toml) can deduce the package version. 37 | # See: https://github.com/marketplace/actions/build-and-push-docker-images#path-context 38 | - name: Checkout 39 | uses: actions/checkout@v3 40 | 41 | - name: Build all platforms 42 | uses: docker/build-push-action@v6 43 | with: 44 | context: . 45 | push: false 46 | labels: "gitsha1=${{ github.sha }}" 47 | file: "docker/Dockerfile" 48 | platforms: linux/amd64,linux/arm64 49 | -------------------------------------------------------------------------------- /.github/workflows/docker_push.yml: -------------------------------------------------------------------------------- 1 | # GitHub actions workflow which builds and publishes the docker images. 
2 | 3 | name: Build and push docker images 4 | 5 | on: 6 | push: 7 | tags: ["v*"] 8 | workflow_dispatch: 9 | 10 | permissions: 11 | contents: read 12 | 13 | jobs: 14 | build: 15 | runs-on: ubuntu-latest 16 | steps: 17 | - name: Set up QEMU 18 | id: qemu 19 | uses: docker/setup-qemu-action@v1 20 | with: 21 | platforms: arm64 22 | 23 | - name: Set up Docker Buildx 24 | id: buildx 25 | uses: docker/setup-buildx-action@v1 26 | 27 | - name: Inspect builder 28 | run: docker buildx inspect 29 | 30 | - name: Log in to DockerHub 31 | uses: docker/login-action@v1 32 | with: 33 | username: ${{ secrets.DOCKER_HUB_USERNAME }} 34 | password: ${{ secrets.DOCKER_HUB_TOKEN }} 35 | 36 | - name: Calculate docker image tags 37 | id: set-tag 38 | uses: docker/metadata-action@v5 39 | with: 40 | images: matrixdotorg/sygnal 41 | tags: | 42 | type=ref,event=tag 43 | 44 | # we explicitly check out the repository (and use `context: .` in buildx) 45 | # because we need to preserve the git metadata so that setuptools_scm 46 | # (part of build system config in pyproject.toml) can deduce the package version. 47 | # See: https://github.com/marketplace/actions/build-and-push-docker-images#path-context 48 | - name: Checkout 49 | uses: actions/checkout@v3 50 | 51 | - name: Build and push all platforms 52 | uses: docker/build-push-action@v6 53 | with: 54 | context: . 
55 | push: true 56 | labels: "gitsha1=${{ github.sha }}" 57 | tags: "${{ steps.set-tag.outputs.tags }}" 58 | file: "docker/Dockerfile" 59 | platforms: linux/amd64,linux/arm64 60 | -------------------------------------------------------------------------------- /.github/workflows/pipeline.yml: -------------------------------------------------------------------------------- 1 | name: Linting and Tests 2 | on: 3 | push: 4 | branches: ["main"] 5 | pull_request: 6 | 7 | jobs: 8 | check-code-style: 9 | runs-on: ubuntu-latest 10 | steps: 11 | - uses: actions/checkout@v4 12 | 13 | - name: Setup Poetry 14 | uses: matrix-org/setup-python-poetry@v1 15 | with: 16 | install-project: "false" 17 | python-version: "3.12" 18 | 19 | - run: poetry run tox -e check_codestyle 20 | 21 | check-types-mypy: 22 | runs-on: ubuntu-latest 23 | steps: 24 | - uses: actions/checkout@v4 25 | 26 | - name: Setup Poetry 27 | uses: matrix-org/setup-python-poetry@v1 28 | with: 29 | install-project: "false" 30 | python-version: "3.12" 31 | 32 | - run: poetry run tox -e check_types 33 | 34 | run-unit-tests: 35 | name: Unit tests 36 | needs: [check-code-style, check-types-mypy] 37 | runs-on: ubuntu-latest 38 | steps: 39 | - uses: actions/checkout@v3 40 | - uses: actions/setup-python@v4 41 | with: 42 | python-version: "3.12" 43 | - run: python -m pip install -e . 44 | - run: python -m twisted.trial tests 45 | 46 | run-unit-tests-olddeps: 47 | name: Unit tests (old dependencies) 48 | needs: [ check-code-style, check-types-mypy ] 49 | runs-on: ubuntu-latest 50 | steps: 51 | - uses: actions/checkout@v3 52 | - uses: actions/setup-python@v4 53 | with: 54 | python-version: "3.8" 55 | - name: Patch pyproject.toml to require oldest dependencies 56 | run: | 57 | # Ugly. Could use something like https://pyproject-parser.readthedocs.io/en/latest/cli.html#info in the future. 
58 | sed --in-place=.bak -e 's/>=/==/g' pyproject.toml 59 | diff pyproject.toml.bak pyproject.toml || true # diff returns 1 if there is a change 60 | - name: Install Sygnal 61 | run: python -m pip install -e . 62 | - run: python -m twisted.trial tests 63 | -------------------------------------------------------------------------------- /.github/workflows/triage_incoming.yml: -------------------------------------------------------------------------------- 1 | name: Move new issues into the issue triage board 2 | 3 | on: 4 | issues: 5 | types: [ opened ] 6 | 7 | jobs: 8 | add_new_issues: 9 | name: Add new issues to the triage board 10 | runs-on: ubuntu-latest 11 | steps: 12 | - uses: octokit/graphql-action@v2.x 13 | id: add_to_project 14 | with: 15 | headers: '{"GraphQL-Features": "projects_next_graphql"}' 16 | query: | 17 | mutation add_to_project($projectid:ID!,$contentid:ID!) { 18 | addProjectV2ItemById(input: {projectId: $projectid contentId: $contentid}) { 19 | item { 20 | id 21 | } 22 | } 23 | } 24 | projectid: ${{ env.PROJECT_ID }} 25 | contentid: ${{ github.event.issue.node_id }} 26 | env: 27 | PROJECT_ID: "PVT_kwDOAIB0Bs4AFDdZ" 28 | GITHUB_TOKEN: ${{ secrets.ELEMENT_BOT_TOKEN }} 29 | -------------------------------------------------------------------------------- /.github/workflows/triage_labelled.yml: -------------------------------------------------------------------------------- 1 | name: Move labelled issues to correct projects 2 | 3 | on: 4 | issues: 5 | types: [ labeled ] 6 | 7 | jobs: 8 | move_needs_info: 9 | name: Move X-Needs-Info on the triage board 10 | runs-on: ubuntu-latest 11 | if: > 12 | contains(github.event.issue.labels.*.name, 'X-Needs-Info') 13 | steps: 14 | - uses: actions/add-to-project@main 15 | id: add_project 16 | with: 17 | project-url: "https://github.com/orgs/matrix-org/projects/67" 18 | github-token: ${{ secrets.ELEMENT_BOT_TOKEN }} 19 | - name: Set status 20 | env: 21 | GITHUB_TOKEN: ${{ secrets.ELEMENT_BOT_TOKEN }} 22 | run: | 
23 | gh api graphql -f query=' 24 | mutation( 25 | $project: ID! 26 | $item: ID! 27 | $fieldid: ID! 28 | $columnid: String! 29 | ) { 30 | updateProjectV2ItemFieldValue( 31 | input: { 32 | projectId: $project 33 | itemId: $item 34 | fieldId: $fieldid 35 | value: { 36 | singleSelectOptionId: $columnid 37 | } 38 | } 39 | ) { 40 | projectV2Item { 41 | id 42 | } 43 | } 44 | }' -f project="PVT_kwDOAIB0Bs4AFDdZ" -f item=${{ steps.add_project.outputs.itemId }} -f fieldid="PVTSSF_lADOAIB0Bs4AFDdZzgC6ZA4" -f columnid=ba22e43c --silent 45 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | *.pyc 2 | sygnal.yaml 3 | var/ 4 | sygnal.pid 5 | sygnal.db 6 | _trial_temp* 7 | .coverage* 8 | 9 | /venv/ 10 | /.venv/ 11 | /oldvenv/ 12 | /.oldvenv/ 13 | /.idea 14 | /.eggs 15 | /*.egg-info 16 | /build 17 | /dist 18 | /.tox 19 | /.python-version 20 | /htmlcov 21 | 22 | .vscode/ 23 | .idea/ 24 | .DS_Store 25 | -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # Contributing code to Sygnal 2 | 3 | Everyone is welcome to contribute code to Sygnal, provided you are willing to 4 | license your contributions under the same license as the project itself. In 5 | this case, the [Apache Software License v2](LICENSE). 6 | 7 | ### Installing dependencies 8 | 9 | To contribute to Sygnal, ensure you have Python 3.8 or newer and then run: 10 | 11 | Sygnal uses the [poetry](https://python-poetry.org/) project to manage its dependencies 12 | and development environment. Once you have installed Python 3 and added the 13 | source, you should install `poetry`. 
14 | Of their installation methods, we recommend 15 | [installing `poetry` using `pipx`](https://python-poetry.org/docs/#installing-with-pipx), 16 | 17 | ```shell 18 | pip install --user pipx 19 | pipx install poetry 20 | ``` 21 | 22 | but see poetry's [installation instructions](https://python-poetry.org/docs/#installation) 23 | for other installation methods. 24 | 25 | Next, open a terminal and install dependencies as follows: 26 | 27 | ```sh 28 | cd path/where/you/have/cloned/the/repository 29 | poetry install 30 | ``` 31 | 32 | This will install the runtime and developer dependencies for the project. Be sure to check 33 | that the `poetry install` step completed cleanly. 34 | 35 | ### Run the tests 36 | 37 | To make sure everything is working as expected, run the unit tests: 38 | 39 | ```bash 40 | tox -e py 41 | ``` 42 | 43 | If you see a message like: 44 | 45 | ``` 46 | ------------------------------------------------------------------------------- 47 | Ran 46 tests in 0.209s 48 | 49 | PASSED (successes=46) 50 | ___________________________________ summary ___________________________________ 51 | py: commands succeeded 52 | congratulations :) 53 | ``` 54 | 55 | Then all is well and you're ready to work! 56 | 57 | You can also directly run the tests using poetry. 58 | 59 | ```sh 60 | poetry run trial tests 61 | ``` 62 | 63 | You can run unit tests in parallel by specifying `-jX` argument to `trial` where `X` is the number of parallel runners you want. 
To use 4 cpu cores, you would run them like: 64 | 65 | ```sh 66 | poetry run trial -j4 tests 67 | ``` 68 | 69 | If you wish to only run *some* unit tests, you may specify 70 | another module instead of `tests` - or a test class or a method: 71 | 72 | ```sh 73 | poetry run trial tests.test_apns.ApnsTestCase.test_expected 74 | ``` 75 | 76 | ## How to contribute 77 | 78 | The preferred and easiest way to contribute changes is to fork the relevant 79 | project on github, and then [create a pull request]( 80 | https://help.github.com/articles/using-pull-requests/) to ask us to pull your 81 | changes into our repo. 82 | 83 | Some other points to follow: 84 | 85 | * Please base your changes on the `main` branch. 86 | 87 | * Please follow the [code style requirements](#code-style). 88 | 89 | * Please include a [changelog entry](#changelog) with each PR. 90 | 91 | * Please [sign off](#sign-off) your contribution. 92 | 93 | * Please keep an eye on the pull request for feedback from the [continuous 94 | integration system](#continuous-integration-and-testing) and try to fix any 95 | errors that come up. 96 | 97 | * If you need to [update your PR](#updating-your-pull-request), just add new 98 | commits to your branch rather than rebasing. 99 | 100 | ## Code style 101 | 102 | Sygnal follows the [Synapse code style]. 103 | 104 | [Synapse code style]: https://github.com/matrix-org/synapse/blob/master/CONTRIBUTING.md 105 | 106 | Many of the conventions are enforced by scripts which are run as part of the 107 | [continuous integration system](#continuous-integration-and-testing). 108 | 109 | To help check and fix adherence to the code style, you can run `tox` 110 | locally. You'll need Python 3.8 or later: 111 | 112 | ```bash 113 | # Run the code style check 114 | tox -e check_codestyle 115 | 116 | # Run the types check 117 | tox -e check_types 118 | ``` 119 | 120 | These commands will consider the paths and files related to the project (i.e. 
121 | everything in `sygnal/` and in `tests/` as well as the `setup.py` file). 122 | 123 | Before pushing new changes, ensure they don't produce linting errors. Commit any 124 | files that were corrected. 125 | 126 | Please ensure your changes match the cosmetic style of the existing project, 127 | and **never** mix cosmetic and functional changes in the same commit, as it 128 | makes it horribly hard to review otherwise. 129 | 130 | ## Further information on poetry 131 | 132 | See the information provided in the [Synapse docs](https://github.com/element-hq/synapse/blob/master/docs/development/dependencies.md). 133 | 134 | ## Changelog 135 | 136 | All changes, even minor ones, need a corresponding changelog / newsfragment 137 | entry. These are managed by [Towncrier](https://github.com/hawkowl/towncrier). 138 | 139 | To create a changelog entry, make a new file in the `changelog.d` directory named 140 | in the format of `PRnumber.type`. The type can be one of the following: 141 | 142 | * `feature` 143 | * `bugfix` 144 | * `docker` (for updates to the Docker image) 145 | * `doc` (for updates to the documentation) 146 | * `removal` (also used for deprecations) 147 | * `misc` (for internal-only changes) 148 | 149 | This file will become part of our [changelog]( 150 | https://github.com/matrix-org/sygnal/blob/master/CHANGELOG.md) at the next 151 | release, so the content of the file should be a short description of your 152 | change in the same style as the rest of the changelog. The file can contain Markdown 153 | formatting, and should end with a full stop (.) or an exclamation mark (!) for 154 | consistency. 155 | 156 | Adding credits to the changelog is encouraged, we value your 157 | contributions and would like to have you shouted out in the release notes! 
158 | 159 | For example, a fix in PR #1234 would have its changelog entry in 160 | `changelog.d/1234.bugfix`, and contain content like: 161 | 162 | > The security levels of Florbs are now validated when received 163 | > via the `/federation/florb` endpoint. Contributed by Jane Matrix. 164 | 165 | If there are multiple pull requests involved in a single bugfix/feature/etc, 166 | then the content for each `changelog.d` file should be the same. Towncrier will 167 | merge the matching files together into a single changelog entry when we come to 168 | release. 169 | 170 | ### How do I know what to call the changelog file before I create the PR? 171 | 172 | Obviously, you don't know if you should call your newsfile 173 | `1234.bugfix` or `5678.bugfix` until you create the PR, which leads to a 174 | chicken-and-egg problem. 175 | 176 | There are two options for solving this: 177 | 178 | 1. Open the PR without a changelog file, see what number you got, and *then* 179 | add the changelog file to your branch (see [Updating your pull 180 | request](#updating-your-pull-request)), or: 181 | 182 | 1. Look at the [list of all 183 | issues/PRs](https://github.com/matrix-org/synapse/issues?q=), add one to the 184 | highest number you see, and quickly open the PR before somebody else claims 185 | your number. 186 | 187 | [This 188 | script](https://github.com/richvdh/scripts/blob/master/next_github_number.sh) 189 | might be helpful if you find yourself doing this a lot. 190 | 191 | Sorry, we know it's a bit fiddly, but it's *really* helpful for us when we come 192 | to put together a release! 
193 | 194 | ## Sign off 195 | 196 | In order to have a concrete record that your contribution is intentional 197 | and you agree to license it under the same terms as the project's license, we've adopted the 198 | same lightweight approach that the Linux Kernel 199 | [submitting patches process]( 200 | https://www.kernel.org/doc/html/latest/process/submitting-patches.html#sign-your-work-the-developer-s-certificate-of-origin), 201 | [Docker](https://github.com/docker/docker/blob/master/CONTRIBUTING.md), and many other 202 | projects use: the DCO (Developer Certificate of Origin: 203 | https://developercertificate.org/). This is a simple declaration that you wrote 204 | the contribution or otherwise have the right to contribute it to Matrix: 205 | 206 | ``` 207 | Developer Certificate of Origin 208 | Version 1.1 209 | 210 | Copyright (C) 2004, 2006 The Linux Foundation and its contributors. 211 | 660 York Street, Suite 102, 212 | San Francisco, CA 94110 USA 213 | 214 | Everyone is permitted to copy and distribute verbatim copies of this 215 | license document, but changing it is not allowed. 216 | 217 | Developer's Certificate of Origin 1.1 218 | 219 | By making a contribution to this project, I certify that: 220 | 221 | (a) The contribution was created in whole or in part by me and I 222 | have the right to submit it under the open source license 223 | indicated in the file; or 224 | 225 | (b) The contribution is based upon previous work that, to the best 226 | of my knowledge, is covered under an appropriate open source 227 | license and I have the right under that license to submit that 228 | work with modifications, whether created in whole or in part 229 | by me, under the same open source license (unless I am 230 | permitted to submit under a different license), as indicated 231 | in the file; or 232 | 233 | (c) The contribution was provided directly to me by some other 234 | person who certified (a), (b) or (c) and I have not modified 235 | it. 
236 | 237 | (d) I understand and agree that this project and the contribution 238 | are public and that a record of the contribution (including all 239 | personal information I submit with it, including my sign-off) is 240 | maintained indefinitely and may be redistributed consistent with 241 | this project or the open source license(s) involved. 242 | ``` 243 | 244 | If you agree to this for your contribution, then all that's needed is to 245 | include the line in your commit or pull request comment: 246 | 247 | ``` 248 | Signed-off-by: Your Name 249 | ``` 250 | 251 | Git allows you to add this signoff automatically when using the `-s` 252 | flag to `git commit`, which uses the name and email set in your 253 | `user.name` and `user.email` git configs. 254 | 255 | ## Continuous integration and testing 256 | 257 | [Buildkite](https://buildkite.com/matrix-dot-org/sygnal) will automatically 258 | run a series of checks and tests against any PR which is opened against the 259 | project; if your change breaks the build, this will be shown in GitHub, with 260 | links to the build results. If your build fails, please try to fix the errors 261 | and update your branch. 262 | 263 | After installing tox with `pip install tox`, you can use the following to run 264 | unit tests and lints in a local development environment: 265 | 266 | - `tox -e py38` to run unit tests on Python 3.8. 267 | - `tox -e check_codestyle` to check code style and formatting. 268 | - `tox -e check_types` to check types with MyPy. 269 | - `tox` **to do all of the above.** 270 | 271 | ### Testing proxy support 272 | 273 | To test whether proxy support is working or not, a docker compose file has been 274 | provided to make things easier. 
275 | 276 | For GCM Pushkin proxy testing follow these steps: 277 | - create a firebase project & service account 278 | - download the service account file from firebase & save to `./scripts-dev/proxy-test/service_account.json` 279 | - configure the PROJECT_ID in `./scripts-dev/proxy-test/sygnal.yaml` 280 | - build a docker image of sygnal named `sygnal` 281 | - cd to `./scripts-dev/proxy-test/` 282 | - run `docker compose up` 283 | - in another terminal, run `docker exec -it sygnal bash` 284 | - run `apt update && apt install curl -y` 285 | - run `chmod +x curl.sh` 286 | - run `./curl.sh` 287 | - you can tell if the proxy is **NOT** working by inspecting the sygnal logs & seeing something along the lines of "Network is unreachable" or DNS resolution/proxy errors 288 | - you can tell if the proxy is working by inspecting the sygnal logs & seeing the following error from firebase '"code": 400, "message": "The registration token is not a valid FCM registration token"' 289 | - this is due to the `pushkey` being set to PUSHKEY_HERE in `notification.json` 290 | - if you want to fully test an actual notification, you will have to update this value in `./scripts-dev/proxy-test/notification.json` before calling `docker compose up` 291 | 292 | ## Updating your pull request 293 | 294 | If you decide to make changes to your pull request - perhaps to address issues 295 | raised in a review, or to fix problems highlighted by [continuous 296 | integration](#continuous-integration-and-testing) - just add new commits to your 297 | branch, and push to GitHub. The pull request will automatically be updated. 298 | 299 | Please **avoid** rebasing your branch, especially once the PR has been 300 | reviewed: doing so makes it very difficult for a reviewer to see what has 301 | changed since a previous review. 302 | 303 | ## Conclusion 304 | 305 | That's it! Matrix is a very open and collaborative project as you might expect 306 | given our obsession with open communication. 
If we're going to successfully 307 | matrix together all the fragmented communication technologies out there we are 308 | reliant on contributions and collaboration from the community to do so. So 309 | please get involved - and we hope you have as much fun hacking on Matrix as we 310 | do! 311 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | 2 | Apache License 3 | Version 2.0, January 2004 4 | http://www.apache.org/licenses/ 5 | 6 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 7 | 8 | 1. Definitions. 9 | 10 | "License" shall mean the terms and conditions for use, reproduction, 11 | and distribution as defined by Sections 1 through 9 of this document. 12 | 13 | "Licensor" shall mean the copyright owner or entity authorized by 14 | the copyright owner that is granting the License. 15 | 16 | "Legal Entity" shall mean the union of the acting entity and all 17 | other entities that control, are controlled by, or are under common 18 | control with that entity. For the purposes of this definition, 19 | "control" means (i) the power, direct or indirect, to cause the 20 | direction or management of such entity, whether by contract or 21 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 22 | outstanding shares, or (iii) beneficial ownership of such entity. 23 | 24 | "You" (or "Your") shall mean an individual or Legal Entity 25 | exercising permissions granted by this License. 26 | 27 | "Source" form shall mean the preferred form for making modifications, 28 | including but not limited to software source code, documentation 29 | source, and configuration files. 30 | 31 | "Object" form shall mean any form resulting from mechanical 32 | transformation or translation of a Source form, including but 33 | not limited to compiled object code, generated documentation, 34 | and conversions to other media types. 
35 | 36 | "Work" shall mean the work of authorship, whether in Source or 37 | Object form, made available under the License, as indicated by a 38 | copyright notice that is included in or attached to the work 39 | (an example is provided in the Appendix below). 40 | 41 | "Derivative Works" shall mean any work, whether in Source or Object 42 | form, that is based on (or derived from) the Work and for which the 43 | editorial revisions, annotations, elaborations, or other modifications 44 | represent, as a whole, an original work of authorship. For the purposes 45 | of this License, Derivative Works shall not include works that remain 46 | separable from, or merely link (or bind by name) to the interfaces of, 47 | the Work and Derivative Works thereof. 48 | 49 | "Contribution" shall mean any work of authorship, including 50 | the original version of the Work and any modifications or additions 51 | to that Work or Derivative Works thereof, that is intentionally 52 | submitted to Licensor for inclusion in the Work by the copyright owner 53 | or by an individual or Legal Entity authorized to submit on behalf of 54 | the copyright owner. For the purposes of this definition, "submitted" 55 | means any form of electronic, verbal, or written communication sent 56 | to the Licensor or its representatives, including but not limited to 57 | communication on electronic mailing lists, source code control systems, 58 | and issue tracking systems that are managed by, or on behalf of, the 59 | Licensor for the purpose of discussing and improving the Work, but 60 | excluding communication that is conspicuously marked or otherwise 61 | designated in writing by the copyright owner as "Not a Contribution." 62 | 63 | "Contributor" shall mean Licensor and any individual or Legal Entity 64 | on behalf of whom a Contribution has been received by Licensor and 65 | subsequently incorporated within the Work. 66 | 67 | 2. Grant of Copyright License. 
Subject to the terms and conditions of 68 | this License, each Contributor hereby grants to You a perpetual, 69 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 70 | copyright license to reproduce, prepare Derivative Works of, 71 | publicly display, publicly perform, sublicense, and distribute the 72 | Work and such Derivative Works in Source or Object form. 73 | 74 | 3. Grant of Patent License. Subject to the terms and conditions of 75 | this License, each Contributor hereby grants to You a perpetual, 76 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 77 | (except as stated in this section) patent license to make, have made, 78 | use, offer to sell, sell, import, and otherwise transfer the Work, 79 | where such license applies only to those patent claims licensable 80 | by such Contributor that are necessarily infringed by their 81 | Contribution(s) alone or by combination of their Contribution(s) 82 | with the Work to which such Contribution(s) was submitted. If You 83 | institute patent litigation against any entity (including a 84 | cross-claim or counterclaim in a lawsuit) alleging that the Work 85 | or a Contribution incorporated within the Work constitutes direct 86 | or contributory patent infringement, then any patent licenses 87 | granted to You under this License for that Work shall terminate 88 | as of the date such litigation is filed. 89 | 90 | 4. Redistribution. 
You may reproduce and distribute copies of the 91 | Work or Derivative Works thereof in any medium, with or without 92 | modifications, and in Source or Object form, provided that You 93 | meet the following conditions: 94 | 95 | (a) You must give any other recipients of the Work or 96 | Derivative Works a copy of this License; and 97 | 98 | (b) You must cause any modified files to carry prominent notices 99 | stating that You changed the files; and 100 | 101 | (c) You must retain, in the Source form of any Derivative Works 102 | that You distribute, all copyright, patent, trademark, and 103 | attribution notices from the Source form of the Work, 104 | excluding those notices that do not pertain to any part of 105 | the Derivative Works; and 106 | 107 | (d) If the Work includes a "NOTICE" text file as part of its 108 | distribution, then any Derivative Works that You distribute must 109 | include a readable copy of the attribution notices contained 110 | within such NOTICE file, excluding those notices that do not 111 | pertain to any part of the Derivative Works, in at least one 112 | of the following places: within a NOTICE text file distributed 113 | as part of the Derivative Works; within the Source form or 114 | documentation, if provided along with the Derivative Works; or, 115 | within a display generated by the Derivative Works, if and 116 | wherever such third-party notices normally appear. The contents 117 | of the NOTICE file are for informational purposes only and 118 | do not modify the License. You may add Your own attribution 119 | notices within Derivative Works that You distribute, alongside 120 | or as an addendum to the NOTICE text from the Work, provided 121 | that such additional attribution notices cannot be construed 122 | as modifying the License. 
123 | 124 | You may add Your own copyright statement to Your modifications and 125 | may provide additional or different license terms and conditions 126 | for use, reproduction, or distribution of Your modifications, or 127 | for any such Derivative Works as a whole, provided Your use, 128 | reproduction, and distribution of the Work otherwise complies with 129 | the conditions stated in this License. 130 | 131 | 5. Submission of Contributions. Unless You explicitly state otherwise, 132 | any Contribution intentionally submitted for inclusion in the Work 133 | by You to the Licensor shall be under the terms and conditions of 134 | this License, without any additional terms or conditions. 135 | Notwithstanding the above, nothing herein shall supersede or modify 136 | the terms of any separate license agreement you may have executed 137 | with Licensor regarding such Contributions. 138 | 139 | 6. Trademarks. This License does not grant permission to use the trade 140 | names, trademarks, service marks, or product names of the Licensor, 141 | except as required for reasonable and customary use in describing the 142 | origin of the Work and reproducing the content of the NOTICE file. 143 | 144 | 7. Disclaimer of Warranty. Unless required by applicable law or 145 | agreed to in writing, Licensor provides the Work (and each 146 | Contributor provides its Contributions) on an "AS IS" BASIS, 147 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 148 | implied, including, without limitation, any warranties or conditions 149 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 150 | PARTICULAR PURPOSE. You are solely responsible for determining the 151 | appropriateness of using or redistributing the Work and assume any 152 | risks associated with Your exercise of permissions under this License. 153 | 154 | 8. Limitation of Liability. 
In no event and under no legal theory, 155 | whether in tort (including negligence), contract, or otherwise, 156 | unless required by applicable law (such as deliberate and grossly 157 | negligent acts) or agreed to in writing, shall any Contributor be 158 | liable to You for damages, including any direct, indirect, special, 159 | incidental, or consequential damages of any character arising as a 160 | result of this License or out of the use or inability to use the 161 | Work (including but not limited to damages for loss of goodwill, 162 | work stoppage, computer failure or malfunction, or any and all 163 | other commercial damages or losses), even if such Contributor 164 | has been advised of the possibility of such damages. 165 | 166 | 9. Accepting Warranty or Additional Liability. While redistributing 167 | the Work or Derivative Works thereof, You may choose to offer, 168 | and charge a fee for, acceptance of support, warranty, indemnity, 169 | or other liability obligations and/or rights consistent with this 170 | License. However, in accepting such obligations, You may act only 171 | on Your own behalf and on Your sole responsibility, not on behalf 172 | of any other Contributor, and only if You agree to indemnify, 173 | defend, and hold each Contributor harmless for any liability 174 | incurred by, or claims asserted against, such Contributor by reason 175 | of your accepting any such warranty or additional liability. 
176 | 177 | END OF TERMS AND CONDITIONS 178 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include *.md 2 | include sygnal.yaml.sample 3 | include LICENSE 4 | recursive-include tests *.py 5 | 6 | prune docker 7 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | Introduction 2 | ============ 3 | 4 | Sygnal is a reference Push Gateway for [Matrix](https://matrix.org/). 5 | 6 | See https://spec.matrix.org/latest/push-gateway-api/#overview for a high 7 | level overview of how notifications work in Matrix. 8 | 9 | The [Matrix Specification](https://spec.matrix.org/latest/push-gateway-api/) 10 | describes the protocol that Matrix Home Servers use to send notifications to Push 11 | Gateways such as Sygnal. 12 | 13 | 14 | Contributing 15 | ============ 16 | 17 | Looking to contribute to Sygnal? See [CONTRIBUTING.md](CONTRIBUTING.md) 18 | 19 | 20 | Setup 21 | ===== 22 | 23 | Sygnal is configured through a YAML configuration file. By default, this 24 | configuration file is assumed to be named `sygnal.yaml` and to be in the 25 | working directory. To change this, set the `SYGNAL_CONF` environment 26 | variable to the path to your configuration file. A sample configuration 27 | file is provided in this repository; see `sygnal.yaml.sample`. 28 | 29 | The `apps:` section is where you set up different apps that 30 | are to be handled. Each app should be given its own subsection, with the 31 | key of that subsection being the app's `app_id`. Keys in this section 32 | take the form of the `app_id`, as specified when setting up a Matrix 33 | pusher (see 34 | https://spec.matrix.org/latest/client-server-api/#post_matrixclientv3pushersset). 35 | 36 | See the sample configuration for examples. 
37 | 38 | You can find a docker image for sygnal [on DockerHub](https://hub.docker.com/r/matrixdotorg/sygnal). 39 | 40 | 41 | App Types 42 | --------- 43 | 44 | There are two supported App Types: 45 | 46 | 47 | ### apns 48 | 49 | This sends push notifications to iOS apps via the Apple Push 50 | Notification Service (APNS). 51 | 52 | The expected configuration depends on which kind of authentication you 53 | wish to use. 54 | 55 | For certificate-based authentication, it expects: 56 | 57 | - the `certfile` parameter to be a path relative to sygnal's 58 | working directory of a PEM file containing the APNS 59 | certificate and unencrypted private key. 60 | 61 | For token-based authentication, it expects: 62 | 63 | - the `keyfile` parameter to be a path relative to Sygnal's 64 | working directory of a p8 file 65 | - the `key_id` parameter 66 | - the `team_id` parameter 67 | - the `topic` parameter, which is most commonly the 'Bundle Identifier' for your 68 | iOS application 69 | 70 | For either type, it can accept: 71 | 72 | - the `platform` parameter which determines whether the production or sandbox 73 | APNS environment is used. 74 | Valid values are 'production' or 'sandbox'. If not provided, 'production' is used. 75 | - the `push_type` parameter which determines what value for the `apns-push-type` header is sent to 76 | APNs. If not provided, the header is not sent. 77 | - the `convert_device_token_to_hex` parameter which determines if the 78 | token provided from the client is b64 decoded and converted to 79 | hex. Some client libraries already provide the token in hex, and 80 | this should be set to `False` if so. 81 | 82 | ### gcm 83 | 84 | This sends messages via Google/Firebase Cloud Messaging (GCM/FCM) 85 | and hence can be used to deliver notifications to Android apps. 86 | 87 | The expected configuration depends on which version of the firebase api you 88 | wish to use. 
89 | 90 | For legacy API, it expects: 91 | 92 | - the `api_key` parameter to contain the `Server key`, 93 | which can be acquired from Firebase Console at: 94 | `https://console.firebase.google.com/project//settings/cloudmessaging/` 95 | 96 | For API v1, it expects: 97 | 98 | - the `api_version` parameter to contain `v1` 99 | - the `project_id` parameter to contain the `Project ID`, 100 | which can be acquired from Firebase Console at: 101 | `https://console.cloud.google.com/project//settings/general/` 102 | - the `service_account_file` parameter to contain the path to the service account file, 103 | which can be acquired from Firebase Console at: 104 | `https://console.firebase.google.com/project//settings/serviceaccounts/adminsdk` 105 | 106 | Using an HTTP Proxy for outbound traffic 107 | ---------------------------------------- 108 | 109 | Sygnal will, by default, automatically detect an `HTTPS_PROXY` 110 | environment variable on start-up. 111 | 112 | If one is present, it will be used for outbound traffic to APNs and 113 | GCM/FCM. 114 | 115 | Currently only HTTP proxies with the CONNECT method are supported. (Both 116 | APNs and FCM use HTTPS traffic which is tunnelled in a CONNECT tunnel.) 117 | 118 | If you wish, you can instead configure a HTTP CONNECT proxy in 119 | `sygnal.yaml`. 120 | 121 | 122 | Pusher `data` configuration 123 | =========================== 124 | 125 | The following parameters can be specified in the `data` 126 | dictionary which is given when configuring the pusher via 127 | [POST /_matrix/client/v3/pushers/set](https://spec.matrix.org/latest/client-server-api/#post_matrixclientv3pushersset): 128 | 129 | - `default_payload`: a dictionary which defines the basic payload to 130 | be sent to the notification service. Sygnal will merge information 131 | specific to the push event into this dictionary. If unset, the empty 132 | dictionary is used. 133 | 134 | This can be useful for clients to specify default push payload 135 | content. 
For instance, iOS clients will have freedom to use 136 | silent/mutable notifications and be able to set some default 137 | alert/sound/badge fields. 138 | 139 | 140 | Running 141 | ======= 142 | 143 | ### Python 144 | 145 | With default configuration file name of `sygnal.yaml`: 146 | 147 | ```sh 148 | python -m sygnal.sygnal 149 | ``` 150 | 151 | With custom configuration file name: 152 | 153 | ```sh 154 | SYGNAL_CONF=/path/to/custom_sygnal.conf python -m sygnal.sygnal 155 | ``` 156 | 157 | Python 3.8 or higher is required. 158 | 159 | 160 | ### Container 161 | 162 | The example below uses Podman but should work the same by substituting `podman` with `docker`. First create a volume to store your configuration and any necessary key files: 163 | 164 | ``` 165 | podman volume create sygnal 166 | cp /path/to/sygnal.conf /path/to/volumes/sygnal/_data 167 | cp /path/to/keyfile.p8 /path/to/volumes/sygnal/_data 168 | ``` 169 | 170 | We're going to mount the volume as `/sygnal` so make sure your configuration references any key files in this directory. Now you can pull the image and run the container: 171 | 172 | ``` 173 | podman image pull docker.io/matrixdotorg/sygnal 174 | podman run -d --name sygnal -p 5000:5000 -v sygnal:/sygnal -e SYGNAL_CONF=/sygnal/sygnal.yaml sygnal:latest 175 | ``` 176 | 177 | 178 | Log Rotation 179 | ============ 180 | 181 | Sygnal's logging appends to files but does not use a rotating logger. 182 | The recommended configuration is therefore to use `logrotate`. The log 183 | file will be automatically reopened if the log file changes, for example 184 | due to `logrotate`. 
185 | 186 | 187 | More Documentation 188 | ================== 189 | 190 | More documentation for Sygnal is available in the `docs` directory: 191 | 192 | - [Notes for Application Developers](docs/applications.md) 193 | - [Troubleshooting](docs/troubleshooting.md) 194 | -------------------------------------------------------------------------------- /RELEASING.md: -------------------------------------------------------------------------------- 1 | 0. Consider whether this release will affect any customers, including those on 2 | EMS, and warn them beforehand - in case they need to upgrade quickly. 3 | 4 | 1. Update the version number in pyproject.toml. 5 | 2. Set a variable to the version number for convenience: 6 | ```sh 7 | ver=x.y.z 8 | ``` 9 | 3. Update the changelog: 10 | ```sh 11 | towncrier --version=$ver 12 | ``` 13 | 4. Push your changes: 14 | ```sh 15 | git add -u && git commit -m $ver && git push 16 | ``` 17 | 5. Sanity-check the 18 | [changelog](https://github.com/matrix-org/sygnal/blob/master/CHANGELOG.md) 19 | and update if need be. 20 | 6. Create a signed tag for the relese: 21 | ```sh 22 | git tag -s v$ver 23 | ``` 24 | Base the tag message on the changelog. 25 | 7. Push the tag: 26 | ```sh 27 | git push origin tag v$ver 28 | ``` 29 | Pushing a tag on GitHub will automatically trigger a build in Docker Hub and 30 | the resulting image will be published using the same tag as git. 31 | 8. Create release on GH project page: 32 | ```sh 33 | xdg-open https://github.com/matrix-org/sygnal/releases/edit/v$ver 34 | ``` 35 | 9. Notify #sygnal:matrix.org, #synapse-dev:matrix.org and EMS that a new 36 | release has been published. 
37 | -------------------------------------------------------------------------------- /changelog.d/.gitignore: -------------------------------------------------------------------------------- 1 | !.gitignore 2 | -------------------------------------------------------------------------------- /changelog.d/397.misc: -------------------------------------------------------------------------------- 1 | Bump twisted from 24.3.0 to 24.7.0. -------------------------------------------------------------------------------- /changelog.d/398.misc: -------------------------------------------------------------------------------- 1 | Bump aiohttp from 3.9.5 to 3.10.2. -------------------------------------------------------------------------------- /changelog.d/400.docker: -------------------------------------------------------------------------------- 1 | Use Python 3.12 as a base for the Docker image instead of Python 3.11. 2 | -------------------------------------------------------------------------------- /changelog.d/402.misc: -------------------------------------------------------------------------------- 1 | Fix unit tests by adding `setTcpNoDelay` to `FakeTransport`. -------------------------------------------------------------------------------- /changelog.d/405.misc: -------------------------------------------------------------------------------- 1 | Bump `virtualenv` from 20.26.2 to 20.26.6. 2 | -------------------------------------------------------------------------------- /changelog.d/406.misc: -------------------------------------------------------------------------------- 1 | Bump `jinja2` from 3.1.4 to 3.1.5. 2 | -------------------------------------------------------------------------------- /changelog.d/408.bugfix: -------------------------------------------------------------------------------- 1 | Fix issue where GCM pushkins behind proxy fail to start. 
2 | -------------------------------------------------------------------------------- /changelog.d/409.docker: -------------------------------------------------------------------------------- 1 | Speed up the building of the Docker image. 2 | -------------------------------------------------------------------------------- /changelog.d/410.doc: -------------------------------------------------------------------------------- 1 | Add basic instructions for running within a container. -------------------------------------------------------------------------------- /changelog.d/412.misc: -------------------------------------------------------------------------------- 1 | Bump docker/build-push-action and docker/metadata-action to the latest versions. -------------------------------------------------------------------------------- /contrib/grafana/README.md: -------------------------------------------------------------------------------- 1 | # Sygnal Grafana dashboard 2 | 3 | This directory includes a Grafana dashboard for Sygnal. To use it, download 4 | `Sygnal.json`, then import it to Grafana and select the correct Prometheus 5 | datasource. See also http://docs.grafana.org/reference/export_import/. 6 | -------------------------------------------------------------------------------- /docker/Dockerfile: -------------------------------------------------------------------------------- 1 | # Dockerfile to build the matrixdotorg/sygnal docker images. 2 | # 3 | # To build the image, run `docker build` command from the root of the 4 | # sygnal repository: 5 | # 6 | # docker build -f docker/Dockerfile . 7 | # 8 | 9 | ARG DEBIAN_VERSION=bookworm 10 | ARG PYTHON_VERSION=3.12 11 | ARG POETRY_VERSION=1.8.3 12 | 13 | ### 14 | ### Stage 0: generate requirements.txt 15 | ### 16 | ### This stage is platform-agnostic, so we can use the build platform in case of cross-compilation. 
17 | ### 18 | FROM --platform=$BUILDPLATFORM ghcr.io/astral-sh/uv:python${PYTHON_VERSION}-${DEBIAN_VERSION} AS requirements 19 | 20 | WORKDIR /sygnal 21 | 22 | # Copy just what we need to run `poetry export`... 23 | COPY pyproject.toml poetry.lock /sygnal/ 24 | 25 | # If specified, we won't verify the hashes of dependencies. 26 | # This is only needed if the hashes of dependencies cannot be checked for some 27 | # reason, such as when a git repository is used directly as a dependency. 28 | ARG TEST_ONLY_SKIP_DEP_HASH_VERIFICATION 29 | 30 | # If specified, we won't use the Poetry lockfile. 31 | # Instead, we'll just install what a regular `pip install` would from PyPI. 32 | ARG TEST_ONLY_IGNORE_POETRY_LOCKFILE 33 | 34 | # This silences a warning as uv isn't able to do hardlinks between its cache 35 | # (mounted as --mount=type=cache) and the target directory. 36 | ENV UV_LINK_MODE=copy 37 | 38 | # Export the dependencies, but only if we're actually going to use the Poetry lockfile. 39 | # Otherwise, just create an empty requirements file so that the Dockerfile can 40 | # proceed. 41 | ARG POETRY_VERSION 42 | RUN --mount=type=cache,target=/root/.cache/uv \ 43 | if [ -z "$TEST_ONLY_IGNORE_POETRY_LOCKFILE" ]; then \ 44 | uvx --with poetry-plugin-export==1.8.0 \ 45 | poetry@${POETRY_VERSION} export -o /sygnal/requirements.txt ${TEST_ONLY_SKIP_DEP_HASH_VERIFICATION:+--without-hashes}; \ 46 | else \ 47 | touch /sygnal/requirements.txt; \ 48 | fi 49 | 50 | ### 51 | ### Stage 1: builder 52 | ### 53 | FROM ghcr.io/astral-sh/uv:python${PYTHON_VERSION}-${DEBIAN_VERSION} AS builder 54 | 55 | # This silences a warning as uv isn't able to do hardlinks between its cache 56 | # (mounted as --mount=type=cache) and the target directory. 57 | ENV UV_LINK_MODE=copy 58 | 59 | # To speed up rebuilds, install all of the dependencies before we copy over 60 | # the whole sygnal project, so that this layer in the Docker cache can be 61 | # used while you develop on the source. 
# This is aiming at installing the `[tool.poetry.dependencies]` from pyproject.toml.
8 | 9 | There are also appendices with information which may be useful for manual 10 | debugging. 11 | 12 | Your first steps are to ensure that you have logging so you can see what is going 13 | on; a level of `INFO` or even `DEBUG` will be useful. 14 | 15 | 16 | ## Narrowing in on the problem 17 | 18 | ### Check the pusher is registered in your homeserver 19 | 20 | Typically, applications will register a pusher on startup. 21 | 22 | If you have access to your homeserver, you can check that it is making it there. 23 | 24 | Start your application and then run a query against the database. 25 | 26 | #### On Synapse 27 | 28 | Use `sqlite3 /path/to/homeserver.db` or `psql synapse` as required for your 29 | deployment. 30 | 31 | ```sql 32 | SELECT app_id, data FROM pushers 33 | WHERE user_name = '@my.user:example.org' AND kind='http'; 34 | ``` 35 | 36 | You should see something like: 37 | 38 | ``` 39 | app_id | data 40 | -------------------+-------------------------------------------------------- 41 | org.example.chat | {"format":"event_id_only", 42 | | "url":"https://example.org/_matrix/push/v1/notify"} 43 | ``` 44 | 45 | 46 | #### On other homeserver implementations 47 | 48 | No details available, but contributions welcome. 49 | 50 | 51 | ### Check the push gateway (Sygnal) is reachable from the homeserver 52 | 53 | Following on from the example above, the homeserver's database contains the 54 | push gateway URL of `https://example.org/_matrix/push/v1/notify`. 55 | 56 | It may be worth manually checking that the push gateway is reachable from the 57 | homeserver; e.g. with curl: 58 | 59 | ``` 60 | $ curl https://example.org/_matrix/push/v1/notify 61 | 62 | 405 - Method Not Allowed 63 | 64 |

<h1>Method Not Allowed</h1> 65 | <p>Your browser approached me (at /_matrix/push/v1/notify) with the method "GET". I only allow the methods HEAD, POST here.</p>
66 | 67 | 68 | ``` 69 | 70 | If you get a response, such as an error like **405 Method Not Allowed**, as above, 71 | this would suggest that the push gateway is at least reachable. 72 | 73 | If you get a **404 No Such Resource** error on the `/_matrix/push/v1/notify` endpoint, 74 | then chances are that your reverse proxy is not configured to pass through the 75 | full URL. 76 | 77 | If you don't get an HTTP response, then it is probably worth investigation. 78 | Check that: 79 | 80 | * Sygnal is running 81 | * Sygnal's configuration makes it listen on the desired port 82 | * Any reverse proxies are correctly set up and running 83 | * The firewall permits inbound traffic on the port in question 84 | 85 | 86 | ## Troubleshooting Firebase notifications 87 | 88 | ### iOS-specific troubles with apps using Firebase 89 | 90 | #### App doesn't receive notifications when inactive 91 | 92 | Sygnal currently only sends 'data messages' (also called 'silent notifications', 93 | but this name could be misleading). 94 | 95 | Whereas data messages will wake up apps on Android with no additional changes, 96 | iOS needs to be told that a notification is meant to wake up an inactive app. 97 | This is done with FCM's `content_available` flag, which you can set in your 98 | `fcm_options` dictionary for the Firebase pushkin. 99 | (See [`sygnal.yaml.sample`](../sygnal.yaml.sample).) 100 | 101 | 102 | ## Troubleshooting APNs notifications 103 | 104 | ### Base64 decoding error in the logs 105 | 106 | #### Common cause 1: Hex rather than base64 encoding 107 | 108 | Sygnal's APNs support expects your pushkeys to be base64 encoded rather than 109 | hexadecimally encoded. 110 | 111 | *(Why? The previous APNs API which Sygnal supported was binary and didn't define 112 | a text-safe encoding, so it was chosen to use base64 in Sygnal. 
Now the new API 113 | exists and specifies hexadecimal encoding, but Sygnal retains backwards 114 | compatibility and will do the base64-to-hex conversion.)* 115 | 116 | 117 | #### Common cause 2: Firebase token given 118 | 119 | If you are using Firebase for your iOS app, you will get Firebase tokens 120 | (looking a bit like `blahblahblah:APA91blahblahblah`… note the presence of a 121 | colon which is not valid base64). 122 | 123 | In this case, you need to **configure Sygnal to use a FCM (gcm) pushkin rather 124 | than an APNs one, as Firebase talks to APNs on your behalf**. 125 | Instead of configuring Sygnal with your APNs secrets, you need to configure 126 | Firebase with your APNs secrets, and Sygnal with your Firebase secrets. 127 | 128 | 129 | ### App doesn't receive notifications when inactive 130 | 131 | If you want your application to be woken up to be able to process APNs messages 132 | received when your application is in the background, you need to set the 133 | `content-available` flag in your pusher's default payload — see 134 | [the notes for iOS applications](applications.md#ios-applications-beware). 135 | 136 | 137 | ### '400 BadDeviceToken' error 138 | 139 | If you get a bad device token error and you have doubled-checked the 140 | token is correct, it is possible that you have used a token from the wrong 'environment', 141 | such as a development token when Sygnal is configured to use the production 142 | environment. 143 | 144 | Sygnal connects to the production APNs instance by default. This will return 145 | `400 BadDeviceToken` if you send it a token intended for the sandbox APNs 146 | server. 147 | 148 | Either use production tokens, or switch to the sandbox APNs server by setting: 149 | 150 | ``` 151 | com.example.myapp.ios: 152 | type: apns 153 | ... 154 | platform: sandbox 155 | ``` 156 | 157 | in your Sygnal config file. 
158 | 159 | 160 | ### 'ValueError: Could not deserialize key data' 161 | 162 | This error suggests that your key file (`.p8` file) is not valid. 163 | If viewed with a text editor, the file should begin with `----- BEGIN PRIVATE KEY -----`. 164 | If yours doesn't, you probably have the wrong kind of file. 165 | 166 | 167 | # Appendices 168 | 169 | ## Sending a notification to Sygnal manually with `curl` 170 | 171 | Note: this depends on the heredoc syntax of the `bash` shell. 172 | 173 | ```bash 174 | curl -i -H "Content-Type: application/json" --request POST -d '@-' http://syg1:8008/_matrix/push/v1/notify <", 196 | "pushkey": "", 197 | "pushkey_ts": 12345678, 198 | "data": {}, 199 | "tweaks": { 200 | "sound": "bing" 201 | } 202 | } 203 | ] 204 | } 205 | } 206 | EOF 207 | ``` 208 | 209 | 210 | ## Example of an FCM request (Legacy API) 211 | 212 | HTTP data sent to `https://fcm.googleapis.com/fcm/send`: 213 | 214 | ``` 215 | POST /fcm/send HTTP/1.1 216 | User-Agent: sygnal 217 | Content-Type: application/json 218 | Authorization: key= 219 | Host: fcm.googleapis.com 220 | 221 | {"data": {"event_id": "$3957tyerfgewrf384", "type": "m.room.message", "sender": "@exampleuser:example.org", "room_name": "Mission Control", "room_alias": "#exampleroom:example.org", "membership": null, "sender_display_name": "Major Tom", "content": {"msgtype": "m.text", "body": "I'm floating in a most peculiar way."}, "room_id": "!slw48wfj34rtnrf:example.org", "prio": "high", "unread": 2, "missed_calls": 1}, "priority": "high", "to": ""} 222 | ``` 223 | 224 | You can send using curl using: 225 | 226 | ```bash 227 | curl -i -H "Content-Type: application/json" -H "Authorization: key=" --request POST -d '@-' https://fcm.googleapis.com/fcm/send <"} 229 | EOF 230 | ``` 231 | -------------------------------------------------------------------------------- /mypy.ini: -------------------------------------------------------------------------------- 1 | [mypy] 2 | plugins = mypy_zope:plugin 3 | 
check_untyped_defs = True 4 | disallow_untyped_defs = True 5 | show_error_codes = True 6 | show_traceback = True 7 | mypy_path = stubs 8 | 9 | [mypy-prometheus_client] 10 | ignore_missing_imports = True 11 | 12 | [mypy-py_vapid] 13 | ignore_missing_imports = True 14 | 15 | [mypy-pywebpush] 16 | ignore_missing_imports = True 17 | 18 | [mypy-sygnal.notifications] 19 | disallow_untyped_defs = False 20 | 21 | [mypy-sygnal.http] 22 | disallow_untyped_defs = False 23 | 24 | [mypy-sygnal.sygnal] 25 | disallow_untyped_defs = False 26 | 27 | [mypy-tests.asyncio_test_helpers] 28 | disallow_untyped_defs = False 29 | 30 | [mypy-tests.test_httpproxy_asyncio] 31 | disallow_untyped_defs = False 32 | 33 | [mypy-tests.test_httpproxy_twisted] 34 | disallow_untyped_defs = False 35 | 36 | [mypy-tests.testutils] 37 | disallow_untyped_defs = False 38 | 39 | [mypy-tests.twisted_test_helpers] 40 | disallow_untyped_defs = False 41 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [tool.towncrier] 2 | package = "sygnal" 3 | filename = "CHANGELOG.md" 4 | directory = "changelog.d" 5 | issue_format = "[\\#{issue}](https://github.com/matrix-org/sygnal/issues/{issue})" 6 | 7 | [[tool.towncrier.type]] 8 | directory = "feature" 9 | name = "Features" 10 | showcontent = true 11 | 12 | [[tool.towncrier.type]] 13 | directory = "bugfix" 14 | name = "Bugfixes" 15 | showcontent = true 16 | 17 | [[tool.towncrier.type]] 18 | directory = "docker" 19 | name = "Updates to the Docker image" 20 | showcontent = true 21 | 22 | [[tool.towncrier.type]] 23 | directory = "doc" 24 | name = "Improved Documentation" 25 | showcontent = true 26 | 27 | [[tool.towncrier.type]] 28 | directory = "removal" 29 | name = "Deprecations and Removals" 30 | showcontent = true 31 | 32 | [[tool.towncrier.type]] 33 | directory = "misc" 34 | name = "Internal Changes" 35 | showcontent = true 36 | 37 | 
[tool.isort] 38 | line_length = 88 39 | sections = "FUTURE,STDLIB,THIRDPARTY,FIRSTPARTY,TESTS,LOCALFOLDER" 40 | default_section = "THIRDPARTY" 41 | known_first_party = "sygnal" 42 | known_tests = "tests" 43 | multi_line_output = 3 44 | include_trailing_comma = true 45 | combine_as_imports = true 46 | 47 | [tool.ruff] 48 | line-length = 88 49 | ignore = [ 50 | "E501", # https://beta.ruff.rs/docs/rules/line-too-long/. Black enforces this for us. 51 | ] 52 | 53 | [build-system] 54 | requires = ["poetry-core"] 55 | build-backend = "poetry.core.masonry.api" 56 | 57 | [tool.poetry] 58 | name = "matrix-sygnal" 59 | version = "0.15.1" 60 | description = "Reference Push Gateway for Matrix Notifications" 61 | authors = ["Matrix.org Team and Contributors "] 62 | readme = "README.md" 63 | license = "Apache-2.0" 64 | packages = [ 65 | { include = "sygnal" }, 66 | ] 67 | include = [ 68 | { path = "tests", format = "sdist" }, 69 | ] 70 | 71 | [tool.poetry.dependencies] 72 | python = "^3.8.0" 73 | aioapns = ">=3.0" 74 | aiohttp = "^3.10.11" 75 | attrs = ">=19.2.0" 76 | cryptography = ">=2.6.1" 77 | idna = ">=2.8" 78 | google-auth = ">=2.27.0" 79 | jaeger-client = ">=4.0.0" 80 | matrix-common = "==1.3.0" 81 | opentracing = ">=2.2.0" 82 | prometheus_client = ">=0.7.0,<0.8" 83 | py-vapid = ">=1.7.0" 84 | pyOpenSSL = ">=17.5.0" 85 | pywebpush = ">=1.13.0" 86 | pyyaml = ">=5.1.1" 87 | sentry-sdk = ">=0.10.2" 88 | service_identity = ">=18.1.0" 89 | Twisted = ">=19.7" 90 | zope-interface = ">=5.0.0" 91 | 92 | [tool.poetry.group.dev.dependencies] 93 | black = "==24.3.0" 94 | coverage = "~=5.5" 95 | ruff = "==0.0.291" 96 | isort = "~=5.10" 97 | google-auth-stubs = "==0.2.0" 98 | mypy = "==1.5.1" 99 | mypy-zope = "==1.0.1" 100 | towncrier = ">=18.6.0" 101 | tox = ">=4.15.0" 102 | types-opentracing = ">=2.4.2" 103 | types-pyOpenSSL = ">=24.0.0" 104 | types-PyYAML = ">=6.0.0" 105 | typing-extensions = ">=3.7.4" 106 | 107 | [tool.poetry.urls] 108 | "homepage" = 
"https://github.com/matrix-org/sygnal" 109 | "documentation" = "https://github.com/matrix-org/sygnal/tree/main/docs" 110 | "repository" = "https://github.com/matrix-org/sygnal.git" 111 | "changelog" = "https://github.com/matrix-org/sygnal/blob/main/CHANGELOG.md" 112 | 113 | [tool.poetry.scripts] 114 | sygnal = "sygnal.sygnal:main" 115 | -------------------------------------------------------------------------------- /scripts-dev/check_newsfragment.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | # 3 | # A script which checks that an appropriate news file has been added on this 4 | # branch. 5 | 6 | echo -e "+++ \033[32mChecking newsfragment\033[m" 7 | 8 | set -e 9 | 10 | pr="$1" 11 | 12 | # Print a link to the contributing guide if the user makes a mistake 13 | CONTRIBUTING_GUIDE_TEXT="!! Please see the contributing guide for help writing your changelog entry: 14 | https://github.com/matrix-org/sygnal/blob/main/CONTRIBUTING.md#changelog" 15 | 16 | # If towncrier returns a non-zero exit code, print the contributing guide link and exit 17 | python -m towncrier.check --compare-with="origin/main" || (echo -e "$CONTRIBUTING_GUIDE_TEXT" >&2 && exit 1) 18 | 19 | echo 20 | echo "--------------------------" 21 | echo 22 | 23 | matched=0 24 | for f in `git diff --name-only origin/main... -- changelog.d`; do 25 | # check that any modified newsfiles on this branch end with a full stop. 26 | lastchar=`tr -d '\n' < $f | tail -c 1` 27 | if [ $lastchar != '.' -a $lastchar != '!' ]; then 28 | echo -e "\e[31mERROR: newsfragment $f does not end with a '.' 
or '!'\e[39m" >&2 29 | echo -e "$CONTRIBUTING_GUIDE_TEXT" >&2 30 | exit 1 31 | fi 32 | 33 | # see if this newsfile corresponds to the right PR 34 | [[ -n "$pr" && "$f" == changelog.d/"$pr".* ]] && matched=1 35 | done 36 | 37 | if [[ -n "$pr" && "$matched" -eq 0 ]]; then 38 | echo -e "\e[31mERROR: Did not find a news fragment with the right number: expected changelog.d/$pr.*.\e[39m" >&2 39 | echo -e "$CONTRIBUTING_GUIDE_TEXT" >&2 40 | exit 1 41 | fi 42 | -------------------------------------------------------------------------------- /scripts-dev/lint.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | # 3 | # Runs linting scripts over the local Sygnal checkout 4 | # isort - sorts import statements 5 | # black - opinionated code formatter 6 | # ruff - lints and finds mistakes 7 | # mypy - type checker 8 | 9 | set -e 10 | 11 | usage() { 12 | echo 13 | echo "Usage: $0 [-h] [-d] [paths...]" 14 | echo 15 | echo "-d" 16 | echo " Lint files that have changed since the last git commit." 17 | echo 18 | echo " If paths are provided and this option is set, both provided paths and those" 19 | echo " that have changed since the last commit will be linted." 20 | echo 21 | echo " If no paths are provided and this option is not set, all files will be linted." 22 | echo 23 | echo " Note that paths with a file extension that is not '.py' will be excluded." 24 | echo "-h" 25 | echo " Display this help text." 26 | } 27 | 28 | USING_DIFF=0 29 | files=() 30 | 31 | while getopts ":dh" opt; do 32 | case $opt in 33 | d) 34 | USING_DIFF=1 35 | ;; 36 | h) 37 | usage 38 | exit 39 | ;; 40 | \?) 
41 | echo "ERROR: Invalid option: -$OPTARG" >&2 42 | usage 43 | exit 44 | ;; 45 | esac 46 | done 47 | 48 | # Strip any options from the command line arguments now that 49 | # we've finished processing them 50 | shift "$((OPTIND-1))" 51 | 52 | if [ $USING_DIFF -eq 1 ]; then 53 | # Check both staged and non-staged changes 54 | for path in $(git diff HEAD --name-only); do 55 | filename=$(basename "$path") 56 | file_extension="${filename##*.}" 57 | 58 | # If an extension is present, and it's something other than 'py', 59 | # then ignore this file 60 | if [[ -n ${file_extension+x} && $file_extension != "py" ]]; then 61 | continue 62 | fi 63 | 64 | # Append this path to our list of files to lint 65 | files+=("$path") 66 | done 67 | fi 68 | 69 | # Append any remaining arguments as files to lint 70 | files+=("$@") 71 | 72 | if [[ $USING_DIFF -eq 1 ]]; then 73 | # If we were asked to lint changed files, and no paths were found as a result... 74 | if [ ${#files[@]} -eq 0 ]; then 75 | # Then print and exit 76 | echo "No files found to lint." 77 | exit 0 78 | fi 79 | else 80 | # If we were not asked to lint changed files, and no paths were found as a result, 81 | # then lint everything! 
82 | if [[ -z ${files+x} ]]; then 83 | # Lint all source code files and directories 84 | # Note: this list aims to mirror the one in tox.ini 85 | files=( 86 | "sygnal" 87 | "tests" 88 | "stubs" 89 | ) 90 | fi 91 | fi 92 | 93 | echo "Linting these paths: ${files[*]}" 94 | echo 95 | 96 | # Print out the commands being run 97 | set -x 98 | 99 | isort "${files[@]}" 100 | python3 -m black "${files[@]}" 101 | ruff --quiet --fix "${files[@]}" 102 | mypy "${files[@]}" 103 | -------------------------------------------------------------------------------- /scripts-dev/proxy-test/curl.sh: -------------------------------------------------------------------------------- 1 | if [ "$#" -ne 1 ]; then 2 | echo "Usage: $0 " 3 | exit 1 4 | fi 5 | 6 | curl -i -H "Content-Type: application/json" --request POST -d @$1 http://localhost:5000/_matrix/push/v1/notify 7 | -------------------------------------------------------------------------------- /scripts-dev/proxy-test/docker-compose.yml: -------------------------------------------------------------------------------- 1 | services: 2 | sygnal: 3 | image: sygnal 4 | networks: 5 | no-internet: 6 | ipv4_address: 172.28.0.2 7 | container_name: sygnal 8 | volumes: 9 | - ./sygnal.yaml:/sygnal.yaml 10 | - ./service_account.json:/service_account.json:ro 11 | - ./curl.sh:/curl.sh 12 | - ./notification-gcm.json:/notification-gcm.json 13 | - ./notification-ios.json:/notification-ios.json 14 | - ./proxy.conf:/etc/apt/apt.conf.d/proxy.conf 15 | ports: 16 | - 5000:5000 17 | 18 | proxy: 19 | image: dominikbechstein/nginx-forward-proxy 20 | networks: 21 | no-internet: 22 | ipv4_address: 172.28.0.3 23 | internet: 24 | container_name: nginx-forward-proxy 25 | volumes: 26 | - ./nginx.conf:/usr/local/nginx/conf/nginx.conf:ro 27 | ports: 28 | - 8080:8080 29 | 30 | networks: 31 | no-internet: 32 | driver: bridge 33 | internal: true 34 | ipam: 35 | config: 36 | - subnet: 172.28.0.0/16 37 | gateway: 172.28.0.1 38 | internet: 39 | driver: bridge 40 | 
-------------------------------------------------------------------------------- /scripts-dev/proxy-test/nginx.conf: -------------------------------------------------------------------------------- 1 | worker_processes auto; 2 | 3 | daemon off; 4 | 5 | events { 6 | worker_connections 1024; 7 | } 8 | 9 | http { 10 | include mime.types; 11 | 12 | access_log /dev/stdout; 13 | error_log /dev/stderr; 14 | 15 | server { 16 | listen 8080; 17 | 18 | resolver 1.1.1.1 ipv6=off; 19 | 20 | proxy_connect; 21 | proxy_connect_allow 443 563; 22 | proxy_connect_connect_timeout 10s; 23 | proxy_connect_read_timeout 10s; 24 | proxy_connect_send_timeout 10s; 25 | 26 | proxy_hide_header Upgrade; 27 | proxy_hide_header X-Powered-By; 28 | 29 | add_header Content-Security-Policy "upgrade-insecure-requests"; 30 | add_header X-Frame-Options "SAMEORIGIN"; 31 | add_header X-XSS-Protection "1; mode=block" always; 32 | add_header X-Content-Type-Options "nosniff" always; 33 | add_header Cache-Control "no-transform" always; 34 | add_header Referrer-Policy no-referrer always; 35 | add_header X-Robots-Tag none; 36 | 37 | location / { 38 | proxy_http_version 1.1; 39 | proxy_set_header Host $host; 40 | proxy_pass $scheme://$host; 41 | } 42 | } 43 | } 44 | 45 | -------------------------------------------------------------------------------- /scripts-dev/proxy-test/notification-gcm.json: -------------------------------------------------------------------------------- 1 | { 2 | "notification": { 3 | "event_id": "\\$3957tyerfgewrf384", 4 | "room_id": "!slw48wfj34rtnrf:example.org", 5 | "type": "m.room.message", 6 | "sender": "@exampleuser:example.org", 7 | "sender_display_name": "Major Tom", 8 | "room_name": "Mission Control", 9 | "room_alias": "#exampleroom:example.org", 10 | "prio": "high", 11 | "content": { 12 | "msgtype": "m.text", 13 | "body": "I'm floating in a most peculiar way." 
14 | }, 15 | "counts": { 16 | "unread": 2, 17 | "missed_calls": 1 18 | }, 19 | "devices": [ 20 | { 21 | "app_id": "im.vector.app", 22 | "pushkey": "aaaa", 23 | "pushkey_ts": 12345678, 24 | "data": {}, 25 | "tweaks": { 26 | "sound": "bing" 27 | } 28 | } 29 | ] 30 | } 31 | } 32 | -------------------------------------------------------------------------------- /scripts-dev/proxy-test/notification-ios.json: -------------------------------------------------------------------------------- 1 | { 2 | "notification": { 3 | "event_id": "\\$3957tyerfgewrf384", 4 | "room_id": "!slw48wfj34rtnrf:example.org", 5 | "type": "m.room.message", 6 | "sender": "@exampleuser:example.org", 7 | "sender_display_name": "Major Tom", 8 | "room_name": "Mission Control", 9 | "room_alias": "#exampleroom:example.org", 10 | "prio": "high", 11 | "content": { 12 | "msgtype": "m.text", 13 | "body": "I'm floating in a most peculiar way." 14 | }, 15 | "counts": { 16 | "unread": 2, 17 | "missed_calls": 1 18 | }, 19 | "devices": [ 20 | { 21 | "app_id": "im.vector.app.ios", 22 | "pushkey": "aaaa", 23 | "pushkey_ts": 12345678, 24 | "data": {}, 25 | "tweaks": { 26 | "sound": "bing" 27 | } 28 | } 29 | ] 30 | } 31 | } 32 | -------------------------------------------------------------------------------- /scripts-dev/proxy-test/proxy.conf: -------------------------------------------------------------------------------- 1 | Acquire::http::Proxy "http://172.28.0.3:8080/"; 2 | -------------------------------------------------------------------------------- /scripts-dev/proxy-test/sygnal.yaml: -------------------------------------------------------------------------------- 1 | ## 2 | # This is a configuration for Sygnal, the reference Push Gateway for Matrix 3 | ## 4 | 5 | log: 6 | setup: 7 | version: 1 8 | formatters: 9 | normal: 10 | format: "%(asctime)s [%(process)d] %(levelname)-5s %(name)s %(message)s" 11 | handlers: 12 | stderr: 13 | class: "logging.StreamHandler" 14 | formatter: "normal" 15 | stream: 
"ext://sys.stderr" 16 | 17 | stdout: 18 | class: "logging.StreamHandler" 19 | formatter: "normal" 20 | stream: "ext://sys.stdout" 21 | 22 | file: 23 | class: "logging.handlers.WatchedFileHandler" 24 | formatter: "normal" 25 | filename: "./sygnal.log" 26 | loggers: 27 | sygnal.access: 28 | propagate: false 29 | handlers: ["stdout"] 30 | level: "INFO" 31 | 32 | sygnal: 33 | propagate: false 34 | handlers: ["stderr", "file"] 35 | 36 | root: 37 | handlers: ["stderr"] 38 | level: "INFO" 39 | 40 | disable_existing_loggers: false 41 | 42 | access: 43 | x_forwarded_for: false 44 | 45 | http: 46 | bind_addresses: ['127.0.0.1'] 47 | port: 5000 48 | 49 | proxy: 'http://172.28.0.3:8080' 50 | 51 | metrics: 52 | prometheus: 53 | enabled: false 54 | 55 | opentracing: 56 | enabled: false 57 | 58 | sentry: 59 | enabled: false 60 | 61 | apps: 62 | im.vector.app: 63 | type: gcm 64 | api_version: v1 65 | project_id: 66 | service_account_file: /service_account.json 67 | im.vector.app.ios: 68 | type: apns 69 | keyfile: key.p8 70 | key_id: asdf 71 | team_id: team 72 | topic: topic 73 | -------------------------------------------------------------------------------- /stubs/google/__init__.pyi: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/matrix-org/sygnal/bf1a053fb553347751b845c6d24c4715577aa09b/stubs/google/__init__.pyi -------------------------------------------------------------------------------- /stubs/google/auth/__init__.pyi: -------------------------------------------------------------------------------- 1 | from google.auth._default import default 2 | 3 | __all__ = ["default"] 4 | -------------------------------------------------------------------------------- /stubs/google/auth/_default.pyi: -------------------------------------------------------------------------------- 1 | from typing import Optional 2 | 3 | from google.auth.transport.requests import Request 4 | 5 | class Credentials: 6 | token = "token" 7 | 
8 | def refresh(self, request: Request) -> None: ... 9 | 10 | def default( 11 | scopes: Optional[list[str]] = None, 12 | request: Optional[str] = None, 13 | quota_project_id: Optional[int] = None, 14 | default_scopes: Optional[list[str]] = None, 15 | ) -> tuple[Credentials, Optional[str]]: ... 16 | -------------------------------------------------------------------------------- /stubs/google/auth/transport/__init__.pyi: -------------------------------------------------------------------------------- 1 | from google.auth.transport.requests import Request 2 | 3 | __all__ = ["Request"] 4 | -------------------------------------------------------------------------------- /stubs/google/auth/transport/requests.pyi: -------------------------------------------------------------------------------- 1 | class Request: ... 2 | -------------------------------------------------------------------------------- /stubs/google/oauth2/__init__.pyi: -------------------------------------------------------------------------------- 1 | from google.oauth2.service_account import Credentials 2 | 3 | __all__ = ["Credentials"] 4 | -------------------------------------------------------------------------------- /stubs/google/oauth2/service_account.pyi: -------------------------------------------------------------------------------- 1 | from typing import Optional 2 | 3 | from google.auth.transport.requests import Request 4 | 5 | class Credentials: 6 | token = "token" 7 | 8 | def refresh(self, request: Request) -> None: ... 9 | @staticmethod 10 | def from_service_account_file( 11 | service_account_file: str, 12 | scopes: Optional[list[str]] = None, 13 | request: Optional[str] = None, 14 | quota_project_id: Optional[int] = None, 15 | default_scopes: Optional[list[str]] = None, 16 | ) -> Credentials: ... 
17 | -------------------------------------------------------------------------------- /stubs/twisted/__init__.pyi: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/matrix-org/sygnal/bf1a053fb553347751b845c6d24c4715577aa09b/stubs/twisted/__init__.pyi -------------------------------------------------------------------------------- /stubs/twisted/web/__init__.pyi: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/matrix-org/sygnal/bf1a053fb553347751b845c6d24c4715577aa09b/stubs/twisted/web/__init__.pyi -------------------------------------------------------------------------------- /stubs/twisted/web/http.pyi: -------------------------------------------------------------------------------- 1 | import typing 2 | from typing import AnyStr, Dict, List, Optional 3 | 4 | from twisted.internet.defer import Deferred 5 | from twisted.internet.interfaces import IAddress, ITCPTransport 6 | from twisted.logger import Logger 7 | from twisted.protocols.basic import LineReceiver 8 | from twisted.web.http_headers import Headers 9 | from twisted.web.iweb import IAccessLogFormatter, IRequest 10 | from zope.interface import implementer, provider 11 | 12 | class HTTPChannel: ... 13 | 14 | # Type ignore: I don't want to respecify the methods on the interface that we 15 | # don't use. 16 | @implementer(IRequest) # type: ignore[misc] 17 | class Request: 18 | code = 200 19 | # Instance attributes mentioned in the docstring 20 | method: bytes 21 | uri: bytes 22 | path: bytes 23 | args: Dict[bytes, List[bytes]] 24 | content: typing.BinaryIO 25 | cookies: List[bytes] 26 | requestHeaders: Headers 27 | responseHeaders: Headers 28 | notifications: List[Deferred[None]] 29 | _disconnected: bool 30 | _log: Logger 31 | 32 | # Other instance attributes set in __init__ 33 | channel: HTTPChannel 34 | client: IAddress 35 | # This was hard to derive. 
36 | # - `transport` is `self.channel.transport` 37 | # - `self.channel` is set in the constructor, and looks like it's always 38 | # an `HTTPChannel`. 39 | # - `HTTPChannel` is a `LineReceiver` is a `Protocol` is a `BaseProtocol`. 40 | # - `BaseProtocol` sets `self.transport` to initially `None`. 41 | # 42 | # Note that `transport` is set to an ITransport in makeConnection, 43 | # so is almost certainly not None by the time it reaches our code. 44 | # 45 | # I've narrowed this to ITCPTransport because 46 | # - we use `self.transport.abortConnection`, which belongs to that interface 47 | # - twisted does too! in its implementation of HTTPChannel.forceAbortClient 48 | transport: Optional[ITCPTransport] 49 | def __init__(self, channel: HTTPChannel): ... 50 | def getHeader(self, key: AnyStr) -> Optional[AnyStr]: ... 51 | def handleContentChunk(self, data: bytes) -> None: ... 52 | def setResponseCode(self, code: int, message: Optional[bytes] = ...) -> None: ... 53 | def setHeader(self, k: AnyStr, v: AnyStr) -> None: ... 54 | def write(self, data: bytes) -> None: ... 55 | def finish(self) -> None: ... 56 | def getClientAddress(self) -> IAddress: ... 57 | def requestReceived(self, command: bytes, path: bytes, version: bytes) -> None: ... 58 | 59 | class HTTPClient(LineReceiver): 60 | def sendCommand(self, command: bytes, path: bytes) -> None: ... 61 | def sendHeader(self, name: bytes, value: bytes) -> None: ... 62 | def endHeaders(self) -> None: ... 63 | 64 | @provider(IAccessLogFormatter) 65 | def proxiedLogFormatter(timestamp: str, request: Request) -> str: ... 66 | @provider(IAccessLogFormatter) 67 | def combinedLogFormatter(timestamp: str, request: Request) -> str: ... 68 | def datetimeToLogString(msSinceEpoch: Optional[int] = None) -> str: ... 
69 | -------------------------------------------------------------------------------- /sygnal.yaml.sample: -------------------------------------------------------------------------------- 1 | ## 2 | # This is a configuration for Sygnal, the reference Push Gateway for Matrix 3 | # See: matrix.org 4 | ## 5 | 6 | 7 | ## Logging # 8 | # 9 | log: 10 | # Specify a Python logging 'dictConfig', as described at: 11 | # https://docs.python.org/3/library/logging.config.html#logging.config.dictConfig 12 | # 13 | setup: 14 | version: 1 15 | formatters: 16 | normal: 17 | format: "%(asctime)s [%(process)d] %(levelname)-5s %(name)s %(message)s" 18 | handlers: 19 | # This handler prints to Standard Error 20 | # 21 | stderr: 22 | class: "logging.StreamHandler" 23 | formatter: "normal" 24 | stream: "ext://sys.stderr" 25 | 26 | # This handler prints to Standard Output. 27 | # 28 | stdout: 29 | class: "logging.StreamHandler" 30 | formatter: "normal" 31 | stream: "ext://sys.stdout" 32 | 33 | # This handler demonstrates logging to a text file on the filesystem. 34 | # You can use logrotate(8) to perform log rotation. 35 | # 36 | file: 37 | class: "logging.handlers.WatchedFileHandler" 38 | formatter: "normal" 39 | filename: "./sygnal.log" 40 | loggers: 41 | # sygnal.access contains the access logging lines. 42 | # Comment out this section if you don't want to give access logging 43 | # any special treatment. 44 | # 45 | sygnal.access: 46 | propagate: false 47 | handlers: ["stdout"] 48 | level: "INFO" 49 | 50 | # sygnal contains log lines from Sygnal itself. 51 | # You can comment out this section to fall back to the root logger. 52 | # 53 | sygnal: 54 | propagate: false 55 | handlers: ["stderr", "file"] 56 | 57 | root: 58 | # Specify the handler(s) to send log messages to. 59 | handlers: ["stderr"] 60 | level: "INFO" 61 | 62 | disable_existing_loggers: false 63 | 64 | 65 | access: 66 | # Specify whether or not to trust the IP address in the `X-Forwarded-For` 67 | # header. 
In general, you want to enable this if and only if you are using a 68 | # reverse proxy which is configured to emit it. 69 | # 70 | x_forwarded_for: false 71 | 72 | ## HTTP Server (Matrix Push Gateway API) # 73 | # 74 | http: 75 | # Specify a list of interface addresses to bind to. 76 | # 77 | # This example listens on the IPv4 loopback device: 78 | bind_addresses: ['127.0.0.1'] 79 | # This example listens on all IPv4 interfaces: 80 | #bind_addresses: ['0.0.0.0'] 81 | # This example listens on all IPv4 and IPv6 interfaces: 82 | #bind_addresses: ['0.0.0.0', '::'] 83 | 84 | # Specify the port number to listen on. 85 | # 86 | port: 5000 87 | 88 | ## Proxying for outgoing connections # 89 | # 90 | # Specify the URL of a proxy to use for outgoing traffic 91 | # (e.g. to Apple & Google) if desired. 92 | # Currently only HTTP proxies with CONNECT capability are supported. 93 | # 94 | # If you do not specify a value, the `HTTPS_PROXY` environment variable will 95 | # be used if present. Otherwise, no proxy will be used. 96 | # 97 | # Default is unspecified. 98 | # 99 | #proxy: 'http://user:secret@prox:8080' 100 | 101 | ## Metrics # 102 | # 103 | metrics: 104 | ## Prometheus # 105 | # 106 | prometheus: 107 | # Specify whether or not to enable Prometheus. 108 | # 109 | enabled: false 110 | 111 | # Specify an address for the Prometheus HTTP Server to listen on. 112 | # 113 | address: '127.0.0.1' 114 | 115 | # Specify a port for the Prometheus HTTP Server to listen on. 116 | # 117 | port: 8000 118 | 119 | ## OpenTracing # 120 | # 121 | opentracing: 122 | # Specify whether or not to enable OpenTracing. 123 | # 124 | enabled: false 125 | 126 | # Specify an implementation of OpenTracing to use. Currently only 'jaeger' 127 | # is supported. 128 | # 129 | implementation: jaeger 130 | 131 | # Specify the service name to be reported to the tracer. 132 | # 133 | service_name: sygnal 134 | 135 | # Specify configuration values to pass to jaeger_client. 
136 | # 137 | jaeger: 138 | sampler: 139 | type: 'const' 140 | param: 1 141 | # local_agent: 142 | # reporting_host: '127.0.0.1' 143 | # reporting_port: 144 | logging: true 145 | 146 | ## Sentry # 147 | # 148 | sentry: 149 | # Specify whether or not to enable Sentry. 150 | # 151 | enabled: false 152 | 153 | # Specify your Sentry DSN if you enable Sentry 154 | # 155 | #dsn: "https://@sentry.example.org/" 156 | 157 | ## Pushkins/Apps # 158 | # 159 | # Add a section for every push application here. 160 | # Specify the pushkey for the application and also the type. 161 | # For the type, you may specify a fully-qualified Python classname if desired. 162 | # 163 | apps: 164 | # This is an example APNs push configuration 165 | # 166 | #com.example.myapp.ios: 167 | # type: apns 168 | # 169 | # # Authentication 170 | # # 171 | # # Two methods of authentication to APNs are currently supported. 172 | # # 173 | # # You can authenticate using a key: 174 | # keyfile: my_key.p8 175 | # key_id: MY_KEY_ID 176 | # team_id: MY_TEAM_ID 177 | # topic: MY_TOPIC 178 | # 179 | # # Or, a certificate can be used instead: 180 | # certfile: com.example.myApp_prod_APNS.pem 181 | # 182 | # # This is the maximum number of in-flight requests *for this pushkin* 183 | # # before additional notifications will be failed. 184 | # # (This is a robustness measure to prevent one pushkin stacking up with 185 | # # queued requests and saturating the inbound connection queue of a load 186 | # # balancer or reverse proxy). 187 | # # Defaults to 512 if unset. 188 | # # 189 | # #inflight_request_limit: 512 190 | # 191 | # # Specifies whether to use the production or sandbox APNs server. Note that 192 | # # sandbox tokens should only be used with the sandbox server and vice versa. 193 | # # 194 | # # Valid options are: 195 | # # * production 196 | # # * sandbox 197 | # # 198 | # # The default is 'production'. Uncomment to use the sandbox instance. 
199 | # #platform: sandbox 200 | # # 201 | # # Specifies whether to convert the device push token from base 64 to hex. 202 | # # Defaults to True, set this to False if your client library provides a 203 | # # push token in hex format. 204 | # #convert_device_token_to_hex: false 205 | 206 | # This is an example GCM/FCM push configuration. 207 | # 208 | #im.vector.app: 209 | # type: gcm 210 | # #api_key: 211 | # api_version: v1 212 | # project_id: project-id 213 | # service_account_file: /path/to/service_account.json 214 | # 215 | # # This is the maximum number of connections to GCM servers at any one time 216 | # # the default is 20. 217 | # #max_connections: 20 218 | # 219 | # # This is the maximum number of in-flight requests *for this pushkin* 220 | # # before additional notifications will be failed. 221 | # # (This is a robustness measure to prevent one pushkin stacking up with 222 | # # queued requests and saturating the inbound connection queue of a load 223 | # # balancer or reverse proxy). 224 | # # Defaults to 512 if unset. 225 | # # 226 | # #inflight_request_limit: 512 227 | # 228 | # # This allows you to specify additional options to send to Firebase. 229 | # # 230 | # # Of particular interest, admins who wish to support iOS apps using Firebase 231 | # # probably wish to set content-available, and may need to set mutable-content. 232 | # # (content-available allows your iOS app to be woken up by data messages, 233 | # # and mutable-content allows your notification to be modified by a 234 | # # Notification Service app extension). 235 | # # 236 | # # See https://firebase.google.com/docs/cloud-messaging/http-server-ref 237 | # # for the exhaustive list of valid options. 238 | # # 239 | # # Do not specify `data`, `priority`, `to` or `registration_ids` as they may 240 | # # be overwritten or lead to an invalid request. 
241 | # # 242 | # #fcm_options: 243 | # # apns: 244 | # # payload: 245 | # # aps: 246 | # # content-available: 1 247 | # # mutable-content: 1 248 | # # alert: "" 249 | -------------------------------------------------------------------------------- /sygnal/__init__.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # Copyright 2020 The Matrix.org Foundation C.I.C. 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 15 | 16 | from importlib.metadata import PackageNotFoundError, version 17 | 18 | try: 19 | __version__ = version("matrix-sygnal") 20 | except PackageNotFoundError: 21 | # package is not installed 22 | pass 23 | -------------------------------------------------------------------------------- /sygnal/apnstruncate.py: -------------------------------------------------------------------------------- 1 | # Copyright 2015 OpenMarket Ltd 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | 15 | # Copied and adapted from 16 | # https://raw.githubusercontent.com/matrix-org/pushbaby/master/pushbaby/truncate.py 17 | import json 18 | import sys 19 | from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Union 20 | 21 | if TYPE_CHECKING or sys.version_info < (3, 8, 0): 22 | from typing_extensions import Literal 23 | else: 24 | from typing import Literal 25 | 26 | Choppable = Union[ 27 | Tuple[Literal["alert", "alert.body"]], Tuple[Literal["alert.loc-args"], int] 28 | ] 29 | 30 | 31 | def json_encode(payload: Dict[str, Any]) -> bytes: 32 | return json.dumps(payload, ensure_ascii=False).encode() 33 | 34 | 35 | class BodyTooLongException(Exception): 36 | pass 37 | 38 | 39 | def is_too_long(payload: Dict[Any, Any], max_length: int = 2048) -> bool: 40 | """ 41 | Returns True if the given payload dictionary is too long for a push. 42 | Note that the maximum is now 2kB "In iOS 8 and later" although in 43 | practice, payloads over 256 bytes (the old limit) are still 44 | delivered to iOS 7 or earlier devices. 45 | 46 | Maximum is 4 kiB in the new APNs with the HTTP/2 interface. 47 | """ 48 | return len(json_encode(payload)) > max_length 49 | 50 | 51 | def truncate(payload: Dict[str, Any], max_length: int = 2048) -> Dict[str, Any]: 52 | """ 53 | Truncate APNs fields to make the payload fit within the max length 54 | specified. 55 | Only truncates fields that are safe to do so. 56 | 57 | Args: 58 | payload: nested dict that will be passed to APNs 59 | max_length: Maximum length, in bytes, that the payload should occupy 60 | when JSON-encoded. 61 | 62 | Returns: 63 | Nested dict which should comply with the maximum length restriction. 
64 | 65 | """ 66 | payload = payload.copy() 67 | if "aps" not in payload: 68 | if is_too_long(payload, max_length): 69 | raise BodyTooLongException() 70 | else: 71 | return payload 72 | aps = payload["aps"] 73 | 74 | # first ensure all our choppables are str objects. 75 | # We need them to be for truncating to work and this 76 | # makes more sense than checking every time. 77 | for c in _choppables_for_aps(aps): 78 | val = _choppable_get(aps, c) 79 | if isinstance(val, bytes): 80 | _choppable_put(aps, c, val.decode()) 81 | 82 | # chop off whole unicode characters until it fits (or we run out of chars) 83 | while is_too_long(payload, max_length): 84 | longest = _longest_choppable(aps) 85 | if longest is None: 86 | raise BodyTooLongException() 87 | 88 | txt = _choppable_get(aps, longest) 89 | # Note that python's support for this is actually broken on some OSes 90 | # (see test_apnstruncate.py) 91 | txt = txt[:-1] 92 | _choppable_put(aps, longest, txt) 93 | payload["aps"] = aps 94 | 95 | return payload 96 | 97 | 98 | def _choppables_for_aps(aps: Dict[str, Any]) -> List[Choppable]: 99 | ret: List[Choppable] = [] 100 | if "alert" not in aps: 101 | return ret 102 | 103 | alert = aps["alert"] 104 | if isinstance(alert, str): 105 | ret.append(("alert",)) 106 | elif isinstance(alert, dict): 107 | if "body" in alert: 108 | ret.append(("alert.body",)) 109 | if "loc-args" in alert: 110 | ret.extend([("alert.loc-args", i) for i in range(len(alert["loc-args"]))]) 111 | 112 | return ret 113 | 114 | 115 | def _choppable_get( 116 | aps: Dict[str, Any], 117 | choppable: Choppable, 118 | ) -> str: 119 | if choppable[0] == "alert": 120 | return aps["alert"] 121 | elif choppable[0] == "alert.body": 122 | return aps["alert"]["body"] 123 | elif choppable[0] == "alert.loc-args": 124 | return aps["alert"]["loc-args"][choppable[1]] 125 | 126 | 127 | def _choppable_put( 128 | aps: Dict[str, Any], 129 | choppable: Choppable, 130 | val: str, 131 | ) -> None: 132 | if choppable[0] == "alert": 
133 | aps["alert"] = val 134 | elif choppable[0] == "alert.body": 135 | aps["alert"]["body"] = val 136 | elif choppable[0] == "alert.loc-args": 137 | aps["alert"]["loc-args"][choppable[1]] = val 138 | 139 | 140 | def _longest_choppable(aps: Dict[str, Any]) -> Optional[Choppable]: 141 | longest = None 142 | length_of_longest = 0 143 | for c in _choppables_for_aps(aps): 144 | val = _choppable_get(aps, c) 145 | val_len = len(val.encode()) 146 | if val_len > length_of_longest: 147 | longest = c 148 | length_of_longest = val_len 149 | return longest 150 | -------------------------------------------------------------------------------- /sygnal/exceptions.py: -------------------------------------------------------------------------------- 1 | # Copyright 2015 OpenMarket Ltd 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | from typing import Optional 15 | 16 | from twisted.internet.error import ConnectError 17 | 18 | 19 | class InvalidNotificationException(Exception): 20 | pass 21 | 22 | 23 | class PushkinSetupException(Exception): 24 | pass 25 | 26 | 27 | class NotificationDispatchException(Exception): 28 | pass 29 | 30 | 31 | class TemporaryNotificationDispatchException(Exception): 32 | """ 33 | To be used by pushkins for errors that are not our fault and are 34 | hopefully temporary, so the request should possibly be retried soon. 
35 | """ 36 | 37 | def __init__(self, *args: object, custom_retry_delay: Optional[int] = None) -> None: 38 | super().__init__(*args) 39 | self.custom_retry_delay = custom_retry_delay 40 | 41 | 42 | class NotificationQuotaDispatchException(Exception): 43 | """ 44 | To be used by pushkins for errors that are do to exceeding the quota 45 | limits and are hopefully temporary, so the request should possibly be 46 | retried soon. 47 | """ 48 | 49 | def __init__(self, *args: object, custom_retry_delay: Optional[int] = None) -> None: 50 | super().__init__(*args) 51 | self.custom_retry_delay = custom_retry_delay 52 | 53 | 54 | class ProxyConnectError(ConnectError): 55 | """ 56 | Exception raised when we are unable to start a connection using a HTTP proxy 57 | This indicates an issue with the HTTP Proxy in use rather than the final 58 | endpoint we wanted to contact. 59 | """ 60 | 61 | pass 62 | -------------------------------------------------------------------------------- /sygnal/helper/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/matrix-org/sygnal/bf1a053fb553347751b845c6d24c4715577aa09b/sygnal/helper/__init__.py -------------------------------------------------------------------------------- /sygnal/helper/context_factory.py: -------------------------------------------------------------------------------- 1 | # Copyright 2014-2016 OpenMarket Ltd 2 | # Copyright 2019 New Vector Ltd 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 15 | 16 | # Adapted from Synapse: 17 | # https://github.com/matrix-org/synapse/blob/1016f303e58b1305ed5b3572fde002e1273e0fc0/synapse/crypto/context_factory.py#L77 18 | 19 | 20 | import logging 21 | 22 | import idna 23 | from OpenSSL import SSL 24 | from service_identity import VerificationError 25 | from service_identity.pyopenssl import verify_hostname, verify_ip_address 26 | from twisted.internet.abstract import isIPAddress, isIPv6Address 27 | from twisted.internet.interfaces import IOpenSSLClientConnectionCreator 28 | from twisted.internet.ssl import CertificateOptions, TLSVersion, platformTrust 29 | from twisted.protocols.tls import TLSMemoryBIOProtocol 30 | from twisted.python.failure import Failure 31 | from twisted.web.iweb import IPolicyForHTTPS 32 | from zope.interface import implementer 33 | 34 | logger = logging.getLogger(__name__) 35 | 36 | 37 | @implementer(IPolicyForHTTPS) 38 | class ClientTLSOptionsFactory: 39 | """Factory for Twisted SSLClientConnectionCreators that are used to make connections 40 | to remote servers for federation. 41 | Uses one of two OpenSSL context objects for all connections, depending on whether 42 | we should do SSL certificate verification. 43 | get_options decides whether we should do SSL certificate verification and 44 | constructs an SSLClientConnectionCreator factory accordingly. 45 | """ 46 | 47 | def __init__(self) -> None: 48 | # Use CA root certs provided by OpenSSL 49 | trust_root = platformTrust() 50 | 51 | # "insecurelyLowerMinimumTo" is the argument that will go lower than 52 | # Twisted's default, which is why it is marked as "insecure" (since 53 | # Twisted's defaults are reasonably secure). But, since Twisted is 54 | # moving to TLS 1.2 by default, we want to respect the config option if 55 | # it is set to 1.0 (which the alternate option, raiseMinimumTo, will not 56 | # let us do). 
57 | minTLS = TLSVersion.TLSv1_2 
58 | 
59 | self._verify_ssl = CertificateOptions( 
60 | trustRoot=trust_root, insecurelyLowerMinimumTo=minTLS 
61 | ) 
62 | self._verify_ssl_context = self._verify_ssl.getContext() 
63 | self._verify_ssl_context.set_info_callback(self._context_info_cb) 
64 | 
65 | def get_options(self, host: bytes) -> IOpenSSLClientConnectionCreator: 
66 | ssl_context = self._verify_ssl_context 
67 | 
68 | return SSLClientConnectionCreator(host, ssl_context) 
69 | 
70 | @staticmethod 
71 | def _context_info_cb(ssl_connection: SSL.Connection, where: int, ret: int) -> None: 
72 | """The 'information callback' for our openssl context object.""" 
73 | # we assume that the app_data on the connection object has been set to 
74 | # a TLSMemoryBIOProtocol object. (This is done by SSLClientConnectionCreator) 
75 | tls_protocol = ssl_connection.get_app_data() 
76 | try: 
77 | # ... we further assume that SSLClientConnectionCreator has set the 
78 | # '_synapse_tls_verifier' attribute to a ConnectionVerifier object. 
79 | tls_protocol._synapse_tls_verifier.verify_context_info_cb( 
80 | ssl_connection, where 
81 | ) 
82 | except: # noqa: E722, taken from the twisted implementation 
83 | logger.exception("Error during info_callback") 
84 | f = Failure() 
85 | tls_protocol.failVerification(f) 
86 | 
87 | def creatorForNetloc( 
88 | self, hostname: bytes, port: int 
89 | ) -> IOpenSSLClientConnectionCreator: 
90 | """Implements the IPolicyForHTTPS interface so that this can be passed 
91 | directly to agents. 
92 | """ 
93 | return self.get_options(hostname) 
94 | 
95 | 
96 | @implementer(IOpenSSLClientConnectionCreator) 
97 | class SSLClientConnectionCreator: 
98 | """Creates openssl connection objects for client connections. 
99 | 100 | Replaces twisted.internet.ssl.ClientTLSOptions 101 | """ 102 | 103 | def __init__(self, hostname: bytes, ctx: SSL.Context): 104 | self._ctx = ctx 105 | self._verifier = ConnectionVerifier(hostname) 106 | 107 | def clientConnectionForTLS( 108 | self, tls_protocol: TLSMemoryBIOProtocol 109 | ) -> SSL.Connection: 110 | context = self._ctx 111 | connection = SSL.Connection(context, None) 112 | 113 | # as per twisted.internet.ssl.ClientTLSOptions, we set the application 114 | # data to our TLSMemoryBIOProtocol... 115 | connection.set_app_data(tls_protocol) 116 | 117 | # ... and we also gut-wrench a '_synapse_tls_verifier' attribute into the 118 | # tls_protocol so that the SSL context's info callback has something to 119 | # call to do the cert verification. 120 | setattr(tls_protocol, "_synapse_tls_verifier", self._verifier) 121 | return connection 122 | 123 | 124 | class ConnectionVerifier: 125 | """Set the SNI, and do cert verification 126 | 127 | This is a thing which is attached to the TLSMemoryBIOProtocol, and is called by 128 | the ssl context's info callback. 129 | """ 130 | 131 | # This code is based on twisted.internet.ssl.ClientTLSOptions. 132 | 133 | def __init__(self, hostname: bytes): 134 | _decoded = hostname.decode("ascii") 135 | if isIPAddress(_decoded) or isIPv6Address(_decoded): 136 | self._hostnameBytes = hostname 137 | self._is_ip_address = True 138 | else: 139 | # twisted's ClientTLSOptions falls back to the stdlib impl here if 140 | # idna is not installed, but points out that lacks support for 141 | # IDNA2008 (http://bugs.python.org/issue17305). 142 | # 143 | # We can rely on having idna. 
144 | self._hostnameBytes = idna.encode(hostname) 145 | self._is_ip_address = False 146 | 147 | self._hostnameASCII = self._hostnameBytes.decode("ascii") 148 | 149 | def verify_context_info_cb( 150 | self, ssl_connection: SSL.Connection, where: int 151 | ) -> None: 152 | if where & SSL.SSL_CB_HANDSHAKE_START and not self._is_ip_address: 153 | ssl_connection.set_tlsext_host_name(self._hostnameBytes) 154 | 155 | if where & SSL.SSL_CB_HANDSHAKE_DONE: 156 | try: 157 | if self._is_ip_address: 158 | verify_ip_address(ssl_connection, self._hostnameASCII) 159 | else: 160 | verify_hostname(ssl_connection, self._hostnameASCII) 161 | except VerificationError: 162 | f = Failure() 163 | tls_protocol = ssl_connection.get_app_data() 164 | tls_protocol.failVerification(f) 165 | -------------------------------------------------------------------------------- /sygnal/helper/proxy/__init__.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # Copyright 2020 The Matrix.org Foundation C.I.C. 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 15 | from typing import NamedTuple, Optional, Tuple 16 | from urllib.parse import urlparse 17 | 18 | """ 19 | HttpProxyUrl represents a HTTP proxy URL and no more. 20 | 21 | hostname is a string with the pure hostname (or IP address). 22 | port is always an integer; a default port number used if necessary. 
23 | credentials is None or a tuple of (username, password) strings. 
24 | """ 
25 | HttpProxyUrl = NamedTuple( 
26 | "HttpProxyUrl", 
27 | [("hostname", str), ("port", int), ("credentials", Optional[Tuple[str, str]])], 
28 | ) 
29 | 
30 | 
31 | def decompose_http_proxy_url(proxy_url: str) -> HttpProxyUrl: 
32 | """ 
33 | Given a HTTP proxy URL, breaks it down into components and checks that it 
34 | has a hostname (otherwise it is not useful to us when trying to find a 
35 | proxy) and asserts that the URL has the 'http' scheme as that is all we 
36 | support. 
37 | 
38 | Args: 
39 | proxy_url: 
40 | The proxy URL, as a string. 
41 | e.g. 'http://user:password@prox:8080' or just 'http://prox' or 
42 | anything in between. 
43 | 
44 | Returns: 
45 | A `HttpProxyUrl` namedtuple with the separate information relevant for 
46 | connecting to a proxy. 
47 | """ 
48 | url = urlparse(proxy_url, scheme="http") 
49 | 
50 | if not url.hostname: 
51 | raise RuntimeError("Proxy URL did not contain a hostname! Please specify one.") 
52 | 
53 | if url.scheme != "http": 
54 | raise RuntimeError( 
55 | f"Unknown proxy scheme {url.scheme}; only 'http' is supported." 
56 | ) 
57 | 
58 | credentials = None 
59 | # NOTE(review): credentials are only captured when *both* username and 
60 | # password are present; a username-only proxy URL is silently ignored — 
61 | # confirm that is intended. 
62 | if url.username and url.password: 
63 | credentials = (url.username, url.password) 
64 | 
65 | return HttpProxyUrl( 
66 | hostname=url.hostname, port=url.port or 80, credentials=credentials 
67 | ) 
68 | 
-------------------------------------------------------------------------------- /sygnal/helper/proxy/connectproxyclient_twisted.py: -------------------------------------------------------------------------------- 1 | # Copyright 2019-2020 The Matrix.org Foundation C.I.C. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 
5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | 15 | # Adapted from Synapse: 16 | # https://github.com/matrix-org/synapse/blob/6920e58136671f086536332bdd6844dff0d4b429/synapse/http/connectproxyclient.py 17 | 18 | import logging 19 | from base64 import urlsafe_b64encode 20 | from typing import Optional, Tuple 21 | 22 | from twisted.internet import defer, protocol 23 | from twisted.internet.base import ReactorBase 24 | from twisted.internet.defer import Deferred 25 | from twisted.internet.interfaces import ( 26 | IAddress, 27 | IConnector, 28 | IProtocol, 29 | IProtocolFactory, 30 | IStreamClientEndpoint, 31 | ) 32 | from twisted.internet.protocol import Protocol, connectionDone 33 | from twisted.python.failure import Failure 34 | from twisted.web import http 35 | from zope.interface import implementer 36 | 37 | from sygnal.exceptions import ProxyConnectError 38 | 39 | logger = logging.getLogger(__name__) 40 | 41 | 42 | @implementer(IStreamClientEndpoint) 43 | class HTTPConnectProxyEndpoint: 44 | """An Endpoint implementation which will send a CONNECT request to an http proxy 45 | 46 | Wraps an existing HostnameEndpoint for the proxy. 47 | 48 | When we get the connect() request from the connection pool (via the TLS wrapper), 49 | we'll first connect to the proxy endpoint with a ProtocolFactory which will make the 50 | CONNECT request. Once that completes, we invoke the protocolFactory which was passed 51 | in. 
52 | 53 | Args: 54 | reactor: the Twisted reactor to use for the connection 55 | proxy_endpoint: the endpoint to use to connect to the proxy 56 | host: hostname that we want to CONNECT to 57 | port: port that we want to connect to 58 | proxy_auth: None or tuple of (username, pasword) for HTTP basic proxy 59 | authentication 60 | """ 61 | 62 | def __init__( 63 | self, 64 | reactor: ReactorBase, 65 | proxy_endpoint: IStreamClientEndpoint, 66 | host: bytes, 67 | port: int, 68 | proxy_auth: Optional[Tuple[str, str]], 69 | ): 70 | self._reactor = reactor 71 | self._proxy_endpoint = proxy_endpoint 72 | self._host = host 73 | self._port = port 74 | self._proxy_auth = proxy_auth 75 | 76 | def __repr__(self) -> str: 77 | return "" % (self._proxy_endpoint,) 78 | 79 | def connect(self, protocolFactory: IProtocolFactory) -> "defer.Deferred[IProtocol]": 80 | assert isinstance(protocolFactory, protocol.ClientFactory) 81 | f = HTTPProxiedClientFactory( 82 | self._host, self._port, self._proxy_auth, protocolFactory 83 | ) 84 | d = self._proxy_endpoint.connect(f) 85 | # once the tcp socket connects successfully, we need to wait for the 86 | # CONNECT to complete. 87 | d.addCallback(lambda conn: f.on_connection) 88 | return d 89 | 90 | 91 | class HTTPProxiedClientFactory(protocol.ClientFactory): 92 | """ClientFactory wrapper that triggers an HTTP proxy CONNECT on connect. 93 | 94 | It invokes the original ClientFactory to build the HTTP Protocol object, 95 | and then, once CONNECT is completed, uses it to run the rest of the 96 | connection. 
97 | 
98 | Args: 
99 | dst_host: hostname that we want to CONNECT to 
100 | dst_port: port that we want to connect to 
101 | proxy_auth: None or tuple of (username, password) for HTTP basic proxy 
102 | authentication 
103 | wrapped_factory: The original Factory 
104 | """ 
105 | 
106 | def __init__( 
107 | self, 
108 | dst_host: bytes, 
109 | dst_port: int, 
110 | proxy_auth: Optional[Tuple[str, str]], 
111 | wrapped_factory: protocol.ClientFactory, 
112 | ): 
113 | self.dst_host = dst_host 
114 | self.dst_port = dst_port 
115 | self._proxy_auth = proxy_auth 
116 | self.wrapped_factory = wrapped_factory 
117 | self.on_connection: defer.Deferred = defer.Deferred() 
118 | 
119 | def startedConnecting(self, connector: IConnector) -> None: 
120 | return self.wrapped_factory.startedConnecting(connector) 
121 | 
122 | def buildProtocol(self, addr: IAddress) -> "HTTPConnectProtocol": 
123 | wrapped_protocol = self.wrapped_factory.buildProtocol(addr) 
124 | assert wrapped_protocol is not None 
125 | 
126 | return HTTPConnectProtocol( 
127 | self.dst_host, 
128 | self.dst_port, 
129 | self._proxy_auth, 
130 | wrapped_protocol, 
131 | self.on_connection, 
132 | ) 
133 | 
134 | def clientConnectionFailed(self, connector: IConnector, reason: Failure) -> None: 
135 | logger.debug("Connection to proxy failed: %s", reason) 
136 | if not self.on_connection.called: 
137 | self.on_connection.errback(reason) 
138 | return self.wrapped_factory.clientConnectionFailed(connector, reason) 
139 | 
140 | def clientConnectionLost(self, connector: IConnector, reason: Failure) -> None: 
141 | logger.debug("Connection to proxy lost: %s", reason) 
142 | if not self.on_connection.called: 
143 | self.on_connection.errback(reason) 
144 | return self.wrapped_factory.clientConnectionLost(connector, reason) 
145 | 
146 | 
147 | class HTTPConnectProtocol(protocol.Protocol): 
148 | """Protocol that wraps an existing Protocol to do a CONNECT handshake at connect 
149 | 
150 | Args: 
151 | host: The original HTTP(s) hostname or IPv4 or IPv6 address literal 
152 | to put in the CONNECT request 
153 | 
154 | port: The original HTTP(s) port to put in the CONNECT request 
155 | 
156 | proxy_auth: None or tuple of (username, password) for HTTP basic proxy 
157 | authentication 
158 | 
159 | wrapped_protocol: the original protocol (probably 
160 | HTTPChannel or TLSMemoryBIOProtocol, but could be anything really) 
161 | 
162 | connected_deferred: a Deferred which will be callbacked with 
163 | wrapped_protocol when the CONNECT completes 
164 | """ 
165 | 
166 | def __init__( 
167 | self, 
168 | host: bytes, 
169 | port: int, 
170 | proxy_auth: Optional[Tuple[str, str]], 
171 | wrapped_protocol: Protocol, 
172 | connected_deferred: Deferred, 
173 | ): 
174 | self.host = host 
175 | self.port = port 
176 | self.wrapped_protocol = wrapped_protocol 
177 | self.connected_deferred = connected_deferred 
178 | self.http_setup_client = HTTPConnectSetupClient( 
179 | self.host, self.port, proxy_auth 
180 | ) 
181 | self.http_setup_client.on_connected.addCallback(self.proxyConnected) 
182 | 
183 | def connectionMade(self) -> None: 
184 | self.http_setup_client.makeConnection(self.transport) 
185 | 
186 | def connectionLost(self, reason: Failure = connectionDone) -> None: 
187 | if self.wrapped_protocol.connected: 
188 | self.wrapped_protocol.connectionLost(reason) 
189 | 
190 | self.http_setup_client.connectionLost(reason) 
191 | 
192 | if not self.connected_deferred.called: 
193 | self.connected_deferred.errback(reason) 
194 | 
195 | def proxyConnected(self, _: Optional["defer.Deferred[None]"]) -> None: 
196 | self.wrapped_protocol.makeConnection(self.transport) 
197 | 
198 | self.connected_deferred.callback(self.wrapped_protocol) 
199 | 
200 | # Get any pending data from the http buf and forward it to the original protocol 
201 | buf = self.http_setup_client.clearLineBuffer() 
202 | if buf: 
203 | self.wrapped_protocol.dataReceived(buf) 
204 | 
205 | def dataReceived(self, data: bytes) -> None: 
206 | # if we've set up the HTTP protocol, we can send the data there 
207 | if self.wrapped_protocol.connected: 
208 | return self.wrapped_protocol.dataReceived(data) 
209 | 
210 | # otherwise, we must still be setting up the connection: send the data to the 
211 | # setup client 
212 | return self.http_setup_client.dataReceived(data) 
213 | 
214 | 
215 | class HTTPConnectSetupClient(http.HTTPClient): 
216 | """HTTPClient protocol to send a CONNECT message for proxies and read the response. 
217 | 
218 | Args: 
219 | host: The hostname to send in the CONNECT message 
220 | port: The port to send in the CONNECT message 
221 | proxy_auth: None or tuple of (username, password) for HTTP basic proxy 
222 | authentication 
223 | """ 
224 | 
225 | def __init__(self, host: bytes, port: int, proxy_auth: Optional[Tuple[str, str]]): 
226 | self.host = host 
227 | self.port = port 
228 | self._proxy_auth = proxy_auth 
229 | self.on_connected: defer.Deferred = defer.Deferred() 
230 | 
231 | def connectionMade(self) -> None: 
232 | logger.debug("Connected to proxy, sending CONNECT") 
233 | self.sendCommand(b"CONNECT", b"%s:%d" % (self.host, self.port)) 
234 | if self._proxy_auth is not None: 
235 | username, password = self._proxy_auth 
236 | # a credential pair is a urlsafe-base64-encoded pair separated by colon 
# NOTE(review): RFC 7617 specifies *standard* base64 for Basic credentials;
# urlsafe_b64encode emits '-'/'_' instead of '+'/'/', so credentials whose
# encoding contains those characters may be rejected by conforming proxies —
# confirm this choice against the proxies actually in use.
237 | encoded_credentials = urlsafe_b64encode(f"{username}:{password}".encode()) 
238 | self.sendHeader(b"Proxy-Authorization", b"basic " + encoded_credentials) 
239 | self.endHeaders() 
240 | 
241 | def handleStatus(self, version: bytes, status: bytes, message: bytes) -> None: 
242 | logger.debug("Got Status: %s %s %s", status, message, version) 
243 | if status != b"200": 
244 | raise ProxyConnectError(f"Unexpected status on CONNECT: {status!s}") 
245 | 
246 | def handleEndHeaders(self) -> None: 
247 | logger.debug("End Headers") 
248 | self.on_connected.callback(None) 
249 | 
250 | def handleResponse(self, body: bytes) -> None: 
251 | pass 
252 | 
-------------------------------------------------------------------------------- /sygnal/helper/proxy/proxyagent_twisted.py: 
-------------------------------------------------------------------------------- 1 | # Copyright 2019-2020 The Matrix.org Foundation C.I.C. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | 15 | # Adapted from Synapse: 16 | # https://github.com/matrix-org/synapse/blob/6920e58136671f086536332bdd6844dff0d4b429/synapse/http/proxyagent.py 17 | 18 | import logging 19 | import re 20 | from typing import Any, Dict, Optional 21 | 22 | from twisted.internet import defer 23 | from twisted.internet.endpoints import HostnameEndpoint, wrapClientTLS 24 | from twisted.internet.interfaces import IReactorCore, IStreamClientEndpoint 25 | from twisted.python.failure import Failure 26 | from twisted.web.client import ( 27 | URI, 28 | BrowserLikePolicyForHTTPS, 29 | HTTPConnectionPool, 30 | _AgentBase, 31 | ) 32 | from twisted.web.error import SchemeNotSupported 33 | from twisted.web.http_headers import Headers 34 | from twisted.web.iweb import IAgent, IBodyProducer, IPolicyForHTTPS, IResponse 35 | from zope.interface import implementer 36 | 37 | from sygnal.helper.proxy import decompose_http_proxy_url 38 | from sygnal.helper.proxy.connectproxyclient_twisted import HTTPConnectProxyEndpoint 39 | 40 | logger = logging.getLogger(__name__) 41 | 42 | _VALID_URI = re.compile(rb"\A[\x21-\x7e]+\Z") 43 | 44 | 45 | @implementer(IAgent) 46 | class ProxyAgent(_AgentBase): 47 | """An Agent implementation which will use an HTTP proxy if one was requested 48 | 
49 | Args: 
50 | reactor: twisted reactor to place outgoing connections. 
51 | 
52 | contextFactory: A factory for TLS contexts, to control the 
53 | verification parameters of OpenSSL. The default is to use a 
54 | `BrowserLikePolicyForHTTPS`, so unless you have special 
55 | requirements you can leave this as-is. 
56 | 
57 | connectTimeout: The amount of time that this Agent will wait 
58 | for the peer to accept a connection. 
59 | 
60 | bindAddress: The local address for client sockets to bind to. 
61 | 
62 | pool: connection pool to be used. If None, a 
63 | non-persistent pool instance will be created. 
64 | 
65 | proxy_url_str: Optional HTTP proxy URL (e.g. 
66 | 'http://user:password@host:port'); None means connect directly. 
67 | """ 
68 | 
69 | def __init__( 
70 | self, 
71 | reactor: IReactorCore, 
# NOTE(review): this default is instantiated once at def time, so every
# ProxyAgent constructed without an explicit contextFactory shares the same
# BrowserLikePolicyForHTTPS instance — confirm that sharing is intended.
72 | contextFactory: IPolicyForHTTPS = BrowserLikePolicyForHTTPS(), 
73 | connectTimeout: Optional[float] = None, 
74 | bindAddress: Optional[bytes] = None, 
75 | pool: Optional[HTTPConnectionPool] = None, 
76 | proxy_url_str: Optional[str] = None, 
77 | ): 
78 | _AgentBase.__init__(self, reactor, pool) 
79 | 
80 | self._endpoint_kwargs: Dict[str, Any] = {} 
81 | if connectTimeout is not None: 
82 | self._endpoint_kwargs["timeout"] = connectTimeout 
83 | if bindAddress is not None: 
84 | self._endpoint_kwargs["bindAddress"] = bindAddress 
85 | 
86 | if proxy_url_str is not None: 
87 | parsed_url = decompose_http_proxy_url(proxy_url_str) 
# _proxy_auth is only assigned on this branch; request() reads it solely
# behind a `self.proxy_endpoint` truthiness check, which keeps that safe.
88 | self._proxy_auth = parsed_url.credentials 
89 | 
90 | self.proxy_endpoint: Optional[HostnameEndpoint] = HostnameEndpoint( 
91 | reactor, parsed_url.hostname, parsed_url.port, **self._endpoint_kwargs 
92 | ) 
93 | else: 
94 | self.proxy_endpoint = None 
95 | 
96 | self._policy_for_https = contextFactory 
97 | self._reactor = reactor 
98 | 
99 | def request( 
100 | self, 
101 | method: bytes, 
102 | uri: bytes, 
103 | headers: Optional[Headers] = None, 
104 | bodyProducer: Optional[IBodyProducer] = None, 
105 | ) -> "defer.Deferred[IResponse]": 
106 | """ 
107 | Issue a request to the server indicated by the given uri. 
108 | 
109 | Supports `http` and `https` schemes. 
107 | 108 | An existing connection from the connection pool may be used or a new one may be 109 | created. 110 | 111 | See also: twisted.web.iweb.IAgent.request 112 | 113 | Args: 114 | method: The request method to use, such as `GET`, `POST`, etc 115 | 116 | uri: The location of the resource to request. 117 | 118 | headers: Extra headers to send with the request 119 | 120 | bodyProducer: An object which can generate bytes to 121 | make up the body of this request (for example, the properly encoded 122 | contents of a file for a file upload). Or, None if the request is to 123 | have no body. 124 | 125 | Returns: 126 | completes when the header of the response has been received 127 | (regardless of the response status code). 128 | """ 129 | uri = uri.strip() 130 | if not _VALID_URI.match(uri): 131 | raise ValueError("Invalid URI {!r}".format(uri)) 132 | 133 | parsed_uri = URI.fromBytes(uri) 134 | pool_key: tuple = (parsed_uri.scheme, parsed_uri.host, parsed_uri.port) 135 | request_path = parsed_uri.originForm 136 | 137 | endpoint: IStreamClientEndpoint 138 | if parsed_uri.scheme == b"http" and self.proxy_endpoint: 139 | # Cache *all* connections under the same key, since we are only 140 | # connecting to a single destination, the proxy: 141 | pool_key = ("http-proxy", self.proxy_endpoint) 142 | endpoint = self.proxy_endpoint 143 | request_path = uri 144 | elif parsed_uri.scheme == b"https" and self.proxy_endpoint: 145 | endpoint = HTTPConnectProxyEndpoint( 146 | self._reactor, 147 | self.proxy_endpoint, 148 | parsed_uri.host, 149 | parsed_uri.port, 150 | self._proxy_auth, 151 | ) 152 | else: 153 | # not using a proxy 154 | endpoint = HostnameEndpoint( 155 | self._reactor, parsed_uri.host, parsed_uri.port, **self._endpoint_kwargs 156 | ) 157 | 158 | logger.debug("Requesting %s via %s", uri, endpoint) 159 | 160 | if parsed_uri.scheme == b"https": 161 | tls_connection_creator = self._policy_for_https.creatorForNetloc( 162 | parsed_uri.host, parsed_uri.port 163 | ) 164 | 
endpoint = wrapClientTLS(tls_connection_creator, endpoint) 165 | elif parsed_uri.scheme == b"http": 166 | pass 167 | else: 168 | return defer.fail( 169 | Failure( 170 | SchemeNotSupported("Unsupported scheme: %r" % (parsed_uri.scheme,)) 171 | ) 172 | ) 173 | 174 | return self._requestWithEndpoint( 175 | pool_key, endpoint, method, parsed_uri, headers, bodyProducer, request_path 176 | ) 177 | -------------------------------------------------------------------------------- /sygnal/notifications.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # Copyright 2014 OpenMarket Ltd 3 | # Copyright 2019 New Vector Ltd 4 | # Copyright 2019 The Matrix.org Foundation C.I.C. 5 | # 6 | # Licensed under the Apache License, Version 2.0 (the "License"); 7 | # you may not use this file except in compliance with the License. 8 | # You may obtain a copy of the License at 9 | # 10 | # http://www.apache.org/licenses/LICENSE-2.0 11 | # 12 | # Unless required by applicable law or agreed to in writing, software 13 | # distributed under the License is distributed on an "AS IS" BASIS, 14 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | # See the License for the specific language governing permissions and 16 | # limitations under the License. 17 | import abc 18 | from typing import TYPE_CHECKING, Any, Dict, List, Optional, Type, TypeVar, overload 19 | 20 | from matrix_common.regex import glob_to_regex 21 | from opentracing import Span 22 | from prometheus_client import Counter 23 | 24 | from sygnal.exceptions import ( 25 | InvalidNotificationException, 26 | NotificationDispatchException, 27 | PushkinSetupException, 28 | ) 29 | 30 | if TYPE_CHECKING: 31 | from sygnal.sygnal import Sygnal 32 | 33 | T = TypeVar("T") 34 | 35 | 36 | @overload 37 | def get_key(raw: Dict[str, Any], key: str, type_: Type[T], default: T) -> T: ... 
38 | 
39 | 
40 | @overload 
41 | def get_key( 
42 | raw: Dict[str, Any], key: str, type_: Type[T], default: None = None 
43 | ) -> Optional[T]: ... 
44 | 
45 | 
46 | def get_key( 
47 | raw: Dict[str, Any], key: str, type_: Type[T], default: Optional[T] = None 
48 | ) -> Optional[T]: 
"""Return `raw[key]` after a type check.

Returns `default` when the key is absent; raises
InvalidNotificationException when the value is present but is not an
instance of `type_`.
"""
# NOTE(review): isinstance(True, int) is True in Python, so booleans pass
# `int`-typed checks (e.g. pushkey_ts below) — confirm that is acceptable.
49 | if key not in raw: 
50 | return default 
51 | if not isinstance(raw[key], type_): 
52 | raise InvalidNotificationException(f"{key} is of invalid type") 
53 | return raw[key] 
54 | 
55 | 
56 | class Tweaks: 
57 | def __init__(self, raw: Dict[str, Any]): 
58 | self.sound: Optional[str] = get_key(raw, "sound", str) 
59 | 
60 | 
61 | class Device: 
62 | def __init__(self, raw: Dict[str, Any]): 
63 | if "app_id" not in raw or not isinstance(raw["app_id"], str): 
64 | raise InvalidNotificationException( 
65 | "Device with missing or non-string app_id" 
66 | ) 
67 | self.app_id: str = raw["app_id"] 
68 | if "pushkey" not in raw or not isinstance(raw["pushkey"], str): 
69 | raise InvalidNotificationException( 
70 | "Device with missing or non-string pushkey" 
71 | ) 
72 | self.pushkey: str = raw["pushkey"] 
73 | 
# pushkey_ts defaults to 0 when the homeserver omits it.
74 | self.pushkey_ts: int = get_key(raw, "pushkey_ts", int, 0) 
75 | self.data: Optional[Dict[str, Any]] = get_key(raw, "data", dict) 
76 | self.tweaks = Tweaks(get_key(raw, "tweaks", dict, {})) 
77 | 
78 | 
79 | class Counts: 
80 | def __init__(self, raw: Dict[str, Any]): 
81 | self.unread: Optional[int] = get_key(raw, "unread", int) 
82 | self.missed_calls: Optional[int] = get_key(raw, "missed_calls", int) 
83 | 
84 | 
85 | class Notification: 
86 | def __init__(self, notif: dict): 
87 | # optional attributes 
88 | self.room_name: Optional[str] = notif.get("room_name") 
89 | self.room_alias: Optional[str] = notif.get("room_alias") 
90 | self.prio: Optional[str] = notif.get("prio") 
91 | self.membership: Optional[str] = notif.get("membership") 
92 | self.sender_display_name: Optional[str] = notif.get("sender_display_name") 
93 | self.content: Optional[Dict[str, Any]] = notif.get("content") 
94 | self.event_id: Optional[str] = 
notif.get("event_id") 95 | self.room_id: Optional[str] = notif.get("room_id") 96 | self.user_is_target: Optional[bool] = notif.get("user_is_target") 97 | self.type: Optional[str] = notif.get("type") 98 | self.sender: Optional[str] = notif.get("sender") 99 | 100 | if "devices" not in notif or not isinstance(notif["devices"], list): 101 | raise InvalidNotificationException("Expected list in 'devices' key") 102 | 103 | if "counts" in notif: 104 | self.counts = Counts(notif["counts"]) 105 | else: 106 | self.counts = Counts({}) 107 | 108 | self.devices = [Device(d) for d in notif["devices"]] 109 | 110 | 111 | class Pushkin(abc.ABC): 112 | def __init__(self, name: str, sygnal: "Sygnal", config: Dict[str, Any]): 113 | self.name = name 114 | self.appid_pattern = glob_to_regex(name, ignore_case=False) 115 | self.cfg = config 116 | self.sygnal = sygnal 117 | 118 | @overload 119 | def get_config(self, key: str, type_: Type[T], default: T) -> T: ... 120 | 121 | @overload 122 | def get_config( 123 | self, key: str, type_: Type[T], default: None = None 124 | ) -> Optional[T]: ... 125 | 126 | def get_config( 127 | self, key: str, type_: Type[T], default: Optional[T] = None 128 | ) -> Optional[T]: 129 | if key not in self.cfg: 130 | return default 131 | if not isinstance(self.cfg[key], type_): 132 | raise PushkinSetupException( 133 | f"{key} is of incorrect type, please check that the entry for {key} is " 134 | f"formatted correctly in the config file. 
" 135 | ) 136 | return self.cfg[key] 137 | 138 | def handles_appid(self, appid: str) -> bool: 139 | """Checks whether the pushkin is responsible for the given app ID""" 140 | return self.name == appid or self.appid_pattern.match(appid) is not None 141 | 142 | @abc.abstractmethod 143 | async def dispatch_notification( 144 | self, n: Notification, device: Device, context: "NotificationContext" 145 | ) -> List[str]: 146 | """ 147 | Args: 148 | n: The notification to dispatch via this pushkin 149 | device: The device to dispatch the notification for. 150 | context: the request context 151 | 152 | Returns: 153 | A list of rejected pushkeys, to be reported back to the homeserver 154 | """ 155 | ... 156 | 157 | @classmethod 158 | async def create(cls, name: str, sygnal: "Sygnal", config: Dict[str, Any]): 159 | """ 160 | Override this if your pushkin needs to call async code in order to 161 | be constructed. Otherwise, it defaults to just invoking the Python-standard 162 | __init__ constructor. 163 | 164 | Returns: 165 | an instance of this Pushkin 166 | """ 167 | return cls(name, sygnal, config) 168 | 169 | 170 | class ConcurrencyLimitedPushkin(Pushkin): 171 | """ 172 | A subclass of Pushkin that limits the number of in-flight requests at any 173 | one time, so as to prevent one Pushkin pulling the whole show down. 174 | """ 175 | 176 | # Maximum in-flight, concurrent notification dispatches that we apply by default 177 | # We start turning away requests after this limit is reached. 
178 | DEFAULT_CONCURRENCY_LIMIT = 512 
179 | 
180 | UNDERSTOOD_CONFIG_FIELDS = {"inflight_request_limit"} 
181 | 
182 | RATELIMITING_DROPPED_REQUESTS = Counter( 
183 | "sygnal_inflight_request_limit_drop", 
184 | "Number of notifications dropped because the number of inflight requests" 
185 | " exceeded the configured inflight_request_limit.", 
186 | labelnames=["pushkin"], 
187 | ) 
188 | 
189 | def __init__(self, name: str, sygnal: "Sygnal", config: Dict[str, Any]): 
190 | super().__init__(name, sygnal, config) 
191 | self._concurrent_limit = config.get( 
192 | "inflight_request_limit", 
193 | ConcurrencyLimitedPushkin.DEFAULT_CONCURRENCY_LIMIT, 
194 | ) 
195 | self._concurrent_now = 0 
196 | 
197 | # Grab an instance of the dropped request counter given our pushkin name. 
198 | # Note this ensures the counter appears in metrics even if it hasn't yet 
199 | # been incremented. 
200 | dropped_requests = ConcurrencyLimitedPushkin.RATELIMITING_DROPPED_REQUESTS 
201 | self.dropped_requests_counter = dropped_requests.labels(pushkin=name) 
202 | 
203 | async def dispatch_notification( 
204 | self, n: Notification, device: Device, context: "NotificationContext" 
205 | ) -> List[str]: 
"""Dispatch via `_dispatch_notification_unlimited`, but reject with
NotificationDispatchException (and bump the dropped-requests counter)
once `_concurrent_now` has reached the configured limit."""
# NOTE(review): the check-then-increment below is not atomic; it appears
# to rely on single-threaded (asyncio/reactor) execution with no await
# between the check and the increment — confirm.
206 | if self._concurrent_now >= self._concurrent_limit: 
207 | self.dropped_requests_counter.inc() 
208 | raise NotificationDispatchException( 
209 | "Too many in-flight requests for this pushkin. " 
210 | "(Something is wrong and Sygnal is struggling to keep up!)" 
211 | ) 
212 | 
213 | self._concurrent_now += 1 
214 | try: 
215 | return await self._dispatch_notification_unlimited(n, device, context) 
216 | finally: 
# try/finally guarantees the in-flight count is released even on failure.
217 | self._concurrent_now -= 1 
218 | 
219 | async def _dispatch_notification_unlimited( 
220 | self, n: Notification, device: Device, context: "NotificationContext" 
221 | ) -> List[str]: 
222 | # to be overridden by Pushkins! 
223 | raise NotImplementedError 
224 | 
225 | 
226 | class NotificationContext(object): 
227 | def __init__(self, request_id: str, opentracing_span: Span, start_time: float): 
228 | """ 
229 | Args: 
230 | request_id: An ID for the request. (The parameter is annotated `str` 
231 | and no automatic generation happens here; the previous claim that 
232 | None would be auto-generated did not match the code.) 
233 | opentracing_span: The span for the API request triggering 
234 | the notification. 
235 | start_time: Start timer value, `time.perf_counter()` 
236 | """ 
237 | self.request_id = request_id 
238 | self.opentracing_span = opentracing_span 
239 | self.start_time = start_time 
240 | 
-------------------------------------------------------------------------------- /sygnal/sygnal.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # Copyright 2014 OpenMarket Ltd 3 | # Copyright 2018, 2019 New Vector Ltd 4 | # Copyright 2019-2020 The Matrix.org Foundation C.I.C. 5 | # 6 | # Licensed under the Apache License, Version 2.0 (the "License"); 7 | # you may not use this file except in compliance with the License. 8 | # You may obtain a copy of the License at 9 | # 10 | # http://www.apache.org/licenses/LICENSE-2.0 11 | # 12 | # Unless required by applicable law or agreed to in writing, software 13 | # distributed under the License is distributed on an "AS IS" BASIS, 14 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | # See the License for the specific language governing permissions and 16 | # limitations under the License. 
17 | import copy 18 | import importlib 19 | import logging 20 | import logging.config 21 | import os 22 | import sys 23 | from typing import Any, Dict, Set, cast 24 | 25 | import opentracing 26 | import prometheus_client 27 | import yaml 28 | from opentracing import Tracer 29 | from opentracing.scope_managers.asyncio import AsyncioScopeManager 30 | from twisted.internet import asyncioreactor 31 | from twisted.internet.defer import ensureDeferred 32 | from twisted.internet.interfaces import ( 33 | IReactorCore, 34 | IReactorFDSet, 35 | IReactorPluggableNameResolver, 36 | IReactorTCP, 37 | IReactorTime, 38 | ) 39 | from twisted.python import log as twisted_log 40 | from twisted.python.failure import Failure 41 | from zope.interface import Interface 42 | 43 | from sygnal.http import PushGatewayApiServer 44 | from sygnal.notifications import Pushkin 45 | from sygnal.utils import twisted_sleep 46 | 47 | logger = logging.getLogger(__name__) 48 | 49 | CONFIG_DEFAULTS: Dict[str, Any] = { 50 | "http": {"port": 5000, "bind_addresses": ["127.0.0.1"]}, 51 | "log": {"setup": {}, "access": {"x_forwarded_for": False}}, 52 | "metrics": { 53 | "prometheus": {"enabled": False, "address": "127.0.0.1", "port": 8000}, 54 | "opentracing": { 55 | "enabled": False, 56 | "implementation": None, 57 | "jaeger": {}, 58 | "service_name": "sygnal", 59 | }, 60 | "sentry": {"enabled": False}, 61 | }, 62 | "proxy": None, 63 | "apps": {}, 64 | } 65 | 66 | 67 | class SygnalReactor( 68 | IReactorFDSet, 69 | IReactorPluggableNameResolver, 70 | IReactorTCP, 71 | IReactorCore, 72 | IReactorTime, 73 | Interface, 74 | ): 75 | pass 76 | 77 | 78 | class Sygnal: 79 | def __init__( 80 | self, 81 | config: Dict[str, Any], 82 | custom_reactor: SygnalReactor, 83 | tracer: Tracer = opentracing.tracer, 84 | ): 85 | """ 86 | Object that holds state for the entirety of a Sygnal instance. 87 | Args: 88 | config: Configuration for this Sygnal 89 | custom_reactor: a Twisted Reactor to use. 
90 | tracer (optional): an OpenTracing tracer. The default is the no-op tracer. 91 | """ 92 | self.config = config 93 | self.reactor = custom_reactor 94 | self.pushkins: Dict[str, Pushkin] = {} 95 | self.tracer = tracer 96 | 97 | logging_dict_config = config["log"]["setup"] 98 | logging.config.dictConfig(logging_dict_config) 99 | 100 | logger.debug("Started logging") 101 | 102 | observer = twisted_log.PythonLoggingObserver() 103 | observer.start() 104 | 105 | proxy_url = config.get("proxy") 106 | if proxy_url is not None: 107 | logger.info("Using proxy configuration from Sygnal configuration file") 108 | else: 109 | proxy_url = os.getenv("HTTPS_PROXY") 110 | if proxy_url: 111 | logger.info( 112 | "Using proxy configuration from HTTPS_PROXY environment variable." 113 | ) 114 | config["proxy"] = proxy_url 115 | 116 | sentrycfg = config["metrics"]["sentry"] 117 | if sentrycfg["enabled"] is True: 118 | import sentry_sdk 119 | 120 | logger.info("Initialising Sentry") 121 | sentry_sdk.init(sentrycfg["dsn"]) 122 | 123 | if config.get("db") is not None: 124 | logger.warning( 125 | "Config includes the legacy 'db' option and will be ignored" 126 | " as Sygnal no longer uses a database, this field can be removed" 127 | ) 128 | 129 | if config.get("database") is not None: 130 | logger.warning( 131 | "Config includes the legacy 'database' option and will be ignored" 132 | " as Sygnal no longer uses a database, this field can be removed" 133 | ) 134 | 135 | promcfg = config["metrics"]["prometheus"] 136 | if promcfg["enabled"] is True: 137 | prom_addr = promcfg["address"] 138 | prom_port = int(promcfg["port"]) 139 | logger.info( 140 | "Starting Prometheus Server on %s port %d", prom_addr, prom_port 141 | ) 142 | 143 | prometheus_client.start_http_server(port=prom_port, addr=prom_addr or "") 144 | 145 | tracecfg = config["metrics"]["opentracing"] 146 | if tracecfg["enabled"] is True: 147 | if tracecfg["implementation"] == "jaeger": 148 | try: 149 | import jaeger_client 150 | 151 
| jaeger_cfg = jaeger_client.Config( 152 | config=tracecfg["jaeger"], 153 | service_name=tracecfg["service_name"], 154 | scope_manager=AsyncioScopeManager(), 155 | ) 156 | 157 | jaeger_tracer = jaeger_cfg.initialize_tracer() 158 | assert jaeger_tracer is not None 159 | self.tracer = jaeger_tracer 160 | 161 | logger.info("Enabled OpenTracing support with Jaeger") 162 | except ModuleNotFoundError: 163 | logger.critical( 164 | "You have asked for OpenTracing with Jaeger but do not have" 165 | " the Python package 'jaeger_client' installed." 166 | ) 167 | raise 168 | else: 169 | raise RuntimeError( 170 | "Unknown OpenTracing implementation: %s.", tracecfg["impl"] 171 | ) 172 | 173 | async def _make_pushkin(self, app_name: str, app_config: Dict[str, Any]) -> Pushkin: 174 | """ 175 | Load and instantiate a pushkin. 176 | Args: 177 | app_name: The pushkin's app_id 178 | app_config: The pushkin's configuration 179 | 180 | Returns: 181 | A pushkin of the desired type. 182 | """ 183 | app_type = app_config["type"] 184 | if "." 
in app_type: 185 | kind_split = app_type.rsplit(".", 1) 186 | to_import = kind_split[0] 187 | to_construct = kind_split[1] 188 | else: 189 | to_import = f"sygnal.{app_type}pushkin" 190 | to_construct = f"{app_type.capitalize()}Pushkin" 191 | 192 | logger.info("Importing pushkin module: %s", to_import) 193 | pushkin_module = importlib.import_module(to_import) 194 | logger.info("Creating pushkin: %s", to_construct) 195 | clarse = getattr(pushkin_module, to_construct) 196 | return await clarse.create(app_name, self, app_config) 197 | 198 | async def make_pushkins_then_start(self) -> None: 199 | for app_id, app_cfg in self.config["apps"].items(): 200 | try: 201 | self.pushkins[app_id] = await self._make_pushkin(app_id, app_cfg) 202 | except Exception: 203 | logger.error( 204 | "Failed to load and create pushkin for kind '%s'" % app_cfg["type"] 205 | ) 206 | raise 207 | 208 | if len(self.pushkins) == 0: 209 | raise RuntimeError( 210 | "No app IDs are configured. Edit sygnal.yaml to define some." 211 | ) 212 | 213 | logger.info("Configured with app IDs: %r", self.pushkins.keys()) 214 | 215 | pushgateway_api = PushGatewayApiServer(self) 216 | port = int(self.config["http"]["port"]) 217 | for interface in self.config["http"]["bind_addresses"]: 218 | logger.info("Starting listening on %s port %d", interface, port) 219 | self.reactor.listenTCP(port, pushgateway_api.site, 50, interface=interface) 220 | 221 | def run(self) -> None: 222 | """ 223 | Attempt to run Sygnal and then exit the application. 224 | """ 225 | 226 | async def start(): 227 | # NOTE: This sleep may seem odd to you, but it is in fact necessary. 228 | # Without this sleep, the code following it will run before Twisted has had 229 | # a chance to fully setup the asyncio event loop. 230 | # Specifically, `callWhenRunning` runs the functions 231 | # before the asyncio event loop has started running. 232 | # ie. asyncio.get_running_loop() will throw because of no running loop. 
233 | # Calling twisted_sleep is enough to kickstart Twisted into setting up the 234 | # asyncio event loop for future usage. 235 | await twisted_sleep(0, self.reactor) 236 | try: 237 | await self.make_pushkins_then_start() 238 | except Exception: 239 | # Print the exception and bail out. 240 | print("Error during startup:", file=sys.stderr) 241 | 242 | # this gives better tracebacks than traceback.print_exc() 243 | Failure().printTraceback(file=sys.stderr) 244 | 245 | if self.reactor.running: 246 | self.reactor.stop() 247 | 248 | self.reactor.callWhenRunning(lambda: ensureDeferred(start())) 249 | self.reactor.run() 250 | 251 | 252 | def parse_config() -> Dict[str, Any]: 253 | """ 254 | Find and load Sygnal's configuration file. 255 | Returns: 256 | A loaded configuration. 257 | """ 258 | config_path = os.getenv("SYGNAL_CONF", "sygnal.yaml") 259 | print("Using configuration file: %s" % config_path, file=sys.stderr) 260 | try: 261 | with open(config_path) as file_handle: 262 | return yaml.safe_load(file_handle) 263 | except FileNotFoundError: 264 | logger.critical( 265 | "Could not find configuration file!\n" "Path: %s\n" "Absolute Path: %s", 266 | config_path, 267 | os.path.realpath(config_path), 268 | ) 269 | raise 270 | 271 | 272 | def check_config(config: Dict[str, Any]) -> None: 273 | """ 274 | Lightly check the configuration and issue warnings as appropriate. 275 | Args: 276 | config: The loaded configuration. 
277 | """ 278 | UNDERSTOOD_CONFIG_FIELDS = CONFIG_DEFAULTS.keys() 279 | 280 | def check_section( 281 | section_name: str, known_keys: Set[str], cfgpart: Dict[str, Any] = config 282 | ) -> None: 283 | nonunderstood = set(cfgpart[section_name].keys()).difference(known_keys) 284 | if len(nonunderstood) > 0: 285 | logger.warning( 286 | f"The following configuration fields in '{section_name}' " 287 | f"are not understood: %s", 288 | nonunderstood, 289 | ) 290 | 291 | nonunderstood = set(config.keys()).difference(UNDERSTOOD_CONFIG_FIELDS) 292 | if len(nonunderstood) > 0: 293 | logger.warning( 294 | "The following configuration sections are not understood: %s", nonunderstood 295 | ) 296 | 297 | check_section("http", {"port", "bind_addresses"}) 298 | check_section("log", {"setup", "access"}) 299 | check_section( 300 | "access", {"file", "enabled", "x_forwarded_for"}, cfgpart=config["log"] 301 | ) 302 | check_section("metrics", {"opentracing", "sentry", "prometheus"}) 303 | check_section( 304 | "opentracing", 305 | {"enabled", "implementation", "jaeger", "service_name"}, 306 | cfgpart=config["metrics"], 307 | ) 308 | check_section( 309 | "prometheus", {"enabled", "address", "port"}, cfgpart=config["metrics"] 310 | ) 311 | check_section("sentry", {"enabled", "dsn"}, cfgpart=config["metrics"]) 312 | 313 | 314 | def merge_left_with_defaults( 315 | defaults: Dict[str, Any], loaded_config: Dict[str, Any] 316 | ) -> Dict[str, Any]: 317 | """ 318 | Merge two configurations, with one of them overriding the other. 319 | Args: 320 | defaults: A configuration of defaults 321 | loaded_config: A configuration, as loaded from disk. 322 | 323 | Returns: 324 | A merged configuration, with loaded_config preferred over defaults. 
325 | """ 326 | result = defaults.copy() 327 | 328 | if loaded_config is None: 329 | return result 330 | 331 | # copy defaults or override them 332 | for k, v in result.items(): 333 | if isinstance(v, dict): 334 | if k in loaded_config: 335 | result[k] = merge_left_with_defaults(v, loaded_config[k]) 336 | else: 337 | result[k] = copy.deepcopy(v) 338 | elif k in loaded_config: 339 | result[k] = loaded_config[k] 340 | 341 | # copy things with no defaults 342 | for k, v in loaded_config.items(): 343 | if k not in result: 344 | result[k] = v 345 | 346 | return result 347 | 348 | 349 | def main() -> None: 350 | # TODO we don't want to have to install the reactor, when we can get away with 351 | # it 352 | asyncioreactor.install() 353 | 354 | # we remove the global reactor to make it evident when it has accidentally 355 | # been used: 356 | # ! twisted.internet.reactor = None 357 | # TODO can't do this ^ yet, since twisted.internet.task.{coiterate,cooperate} 358 | # (indirectly) depend on the globally-installed reactor and there's no way 359 | # to pass in a custom one. 360 | # and twisted.web.client uses twisted.internet.task.cooperate 361 | 362 | config = parse_config() 363 | config = merge_left_with_defaults(CONFIG_DEFAULTS, config) 364 | check_config(config) 365 | custom_reactor = cast(SygnalReactor, asyncioreactor.AsyncioSelectorReactor()) 366 | sygnal = Sygnal(config, custom_reactor) 367 | sygnal.run() 368 | 369 | 370 | if __name__ == "__main__": 371 | main() 372 | -------------------------------------------------------------------------------- /sygnal/utils.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # Copyright 2019 The Matrix.org Foundation C.I.C. 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
from logging import LoggerAdapter
from typing import TYPE_CHECKING, Any, MutableMapping, Tuple

from twisted.internet.defer import Deferred

if TYPE_CHECKING:
    from sygnal.sygnal import SygnalReactor


async def twisted_sleep(delay: float, twisted_reactor: "SygnalReactor") -> None:
    """
    Sleep for `delay` seconds without blocking the reactor.

    An async analogue to L{time.sleep}: creates a Deferred, schedules the
    reactor to fire it after `delay` seconds, and awaits it. (Note: despite
    using a Deferred internally, this coroutine returns None, not the
    Deferred itself.)

    Args:
        delay: Delay in seconds
        twisted_reactor: Reactor to use for sleeping.
    """
    deferred: Deferred[None] = Deferred()
    twisted_reactor.callLater(delay, deferred.callback, None)
    await deferred


class NotificationLoggerAdapter(LoggerAdapter):
    """LoggerAdapter that prefixes every message with the request ID."""

    def process(
        self, msg: str, kwargs: MutableMapping[str, Any]
    ) -> Tuple[str, MutableMapping[str, Any]]:
        # `extra` is supplied at adapter construction time and must contain
        # a 'request_id' key.
        assert self.extra
        return f"[{self.extra['request_id']}] {msg}", kwargs


def _reject_invalid_json(val: Any) -> None:
    """Do not allow Infinity, -Infinity, or NaN values in JSON."""
    raise ValueError(f"Invalid JSON value: {val!r}")


# a custom JSON decoder which will reject Python extensions to JSON.
json_decoder = json.JSONDecoder(parse_constant=_reject_invalid_json)
--------------------------------------------------------------------------------
/tests/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/matrix-org/sygnal/bf1a053fb553347751b845c6d24c4715577aa09b/tests/__init__.py
--------------------------------------------------------------------------------
/tests/asyncio_test_helpers.py:
--------------------------------------------------------------------------------
import logging
import types
from asyncio import AbstractEventLoop, transports
from asyncio.protocols import BaseProtocol, BufferedProtocol, Protocol
from asyncio.transports import Transport
from contextvars import Context
from typing import Any, Callable, List, Optional, Tuple

logger = logging.getLogger(__name__)


class TimelessEventLoopWrapper:
    """
    Wraps a real event loop so that scheduled callbacks only run when the
    test calls `advance()`; "time" is a counter under the test's control.
    """

    @property  # type: ignore
    def __class__(self):
        """
        Fakes isinstance(this, AbstractEventLoop) so we can set_event_loop
        without fail.
        """
        return self._wrapped_loop.__class__

    def __init__(self, wrapped_loop: AbstractEventLoop):
        self._wrapped_loop = wrapped_loop
        # Current fake time, in seconds.
        self._time = 0.0
        # Pending callbacks as (when, callback, args, context), kept sorted
        # by `when` ascending (see call_at).
        self._to_be_called: List[Tuple[float, Any, Any, Any]] = []

    def advance(self, time_delta: float):
        """Advance fake time by `time_delta`, running any callbacks due."""
        target_time = self._time + time_delta
        logger.debug(
            "advancing from %f by %f (%d in queue)",
            self._time,
            time_delta,
            len(self._to_be_called),
        )
        while self._time < target_time and self._to_be_called:
            # pop off the next callback from the queue
            next_time, next_callback, args, _context = self._to_be_called[0]
            if next_time > target_time:
                # this isn't allowed to run yet
                break
            logger.debug("callback at %f on %r", next_time, next_callback)
            self._to_be_called = self._to_be_called[1:]
            self._time = next_time
            next_callback(*args)

        # no more tasks can run now but advance to the time anyway
        self._time = target_time

    def __getattr__(self, item: str):
        """
        We use this to delegate other method calls to the real EventLoop.
        """
        value = getattr(self._wrapped_loop, item)
        if isinstance(value, types.MethodType):
            # rebind this method to be called on us
            # this makes the wrapped class use our overridden methods when
            # available.
            # we have to do this because methods are bound to the underlying
            # event loop, which will call `self.call_later` or something
            # which won't normally hit us because we are not an actual subtype.
            return types.MethodType(value.__func__, self)
        else:
            return value

    def call_later(
        self,
        delay: float,
        callback: Callable,
        *args: Any,
        context: Optional[Context] = None,
    ):
        self.call_at(self._time + delay, callback, *args, context=context)

        # We're meant to return a canceller, but can cheat and return a no-op one
        # instead.
        class _Canceller:
            def cancel(self):
                pass

        return _Canceller()

    def call_at(
        self,
        when: float,
        callback: Callable,
        *args: Any,
        context: Optional[Context] = None,
    ):
        logger.debug(f"Calling {callback} at %f...", when)
        self._to_be_called.append((when, callback, args, context))

        # re-sort list in ascending time order
        self._to_be_called.sort(key=lambda x: x[0])

    def call_soon(
        self, callback: Callable, *args: Any, context: Optional[Context] = None
    ):
        # A zero delay queues the callback for the next `advance()`.
        return self.call_later(0, callback, *args, context=context)

    def time(self) -> float:
        # Report the fake clock, not real time.
        return self._time


class MockTransport(Transport):
    """
    A transport intended to be driven by tests.
    Stores received data into a buffer.
    """

    def __init__(self):
        # Holds bytes received
        self.buffer = b""

        # Whether we reached the end of file/stream
        self.eofed = False

        # Whether the connection was aborted
        self.aborted = False

        # The protocol attached to this transport
        self.protocol = None

        # Whether this transport was closed
        self.closed = False

        # We need to explicitly mark that this connection allows start tls,
        # otherwise `loop.start_tls` will raise an exception.
        self._start_tls_compatible = True

    def reset_mock(self) -> None:
        # Clear all recorded state so the transport can be reused in a test.
        self.buffer = b""
        self.eofed = False
        self.aborted = False
        self.closed = False

    def is_reading(self) -> bool:
        return True

    def pause_reading(self) -> None:
        pass  # NOP

    def resume_reading(self) -> None:
        pass  # NOP

    def set_write_buffer_limits(
        self, high: Optional[int] = None, low: Optional[int] = None
    ) -> None:
        pass  # NOP

    def get_write_buffer_size(self) -> int:
        """Return the current size of the write buffer."""
        # Deliberately unimplemented; tests exercising this should fail loudly.
        raise NotImplementedError

    def write(self, data: bytes) -> None:
        self.buffer += data

    def write_eof(self) -> None:
        self.eofed = True

    def can_write_eof(self) -> bool:
        return True

    def abort(self) -> None:
        self.aborted = True

    def pretend_to_receive(self, data: bytes) -> None:
        # Feed `data` into the attached protocol as if it arrived off the wire,
        # using whichever receive API the protocol implements.
        proto = self.get_protocol()
        if isinstance(proto, Protocol):
            proto.data_received(data)
        elif isinstance(proto, BufferedProtocol):
            data_len = len(data)
            b = proto.get_buffer(data_len)
            b[0:data_len] = data  # type: ignore[index]
            proto.buffer_updated(data_len)

    def set_protocol(self, protocol: BaseProtocol) -> None:
        self.protocol = protocol

    def get_protocol(self) -> BaseProtocol:
        assert isinstance(self.protocol, BaseProtocol)
        return self.protocol

    def close(self) -> None:
        self.closed = True


class MockProtocol(Protocol):
    """
    A protocol intended to be driven by tests.
    Stores received data into a buffer.
    """

    def __init__(self):
        # Bytes queued by `write` before a transport is attached.
        self._to_transmit = b""
        self.received_bytes = b""
        self.transport = None

    def data_received(self, data: bytes) -> None:
        self.received_bytes += data

    def connection_made(self, transport: transports.BaseTransport) -> None:
        assert isinstance(transport, Transport)
        self.transport = transport
        # Flush anything written before the connection existed.
        if self._to_transmit:
            transport.write(self._to_transmit)

    def write(self, data: bytes) -> None:
        if self.transport:
            self.transport.write(data)
        else:
            # No transport yet: buffer until connection_made.
            self._to_transmit += data


class EchoProtocol(Protocol):
    """A protocol that immediately echoes all data it receives"""

    def __init__(self):
        self._to_transmit = b""
        self.received_bytes = b""
        self.transport = None

    def data_received(self, data: bytes) -> None:
        self.received_bytes += data
        assert self.transport
        self.transport.write(data)

    def connection_made(self, transport: transports.BaseTransport) -> None:
        assert isinstance(transport, Transport)
        self.transport = transport
        if self._to_transmit:
            transport.write(self._to_transmit)

    def write(self, data: bytes) -> None:
        if self.transport:
            self.transport.write(data)
        else:
            self._to_transmit += data
--------------------------------------------------------------------------------
/tests/test_apnstruncate.py:
--------------------------------------------------------------------------------
# -*- coding: utf-8 -*-
# Copyright 2015 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Copied and adapted from
# https://raw.githubusercontent.com/matrix-org/pushbaby/master/tests/test_truncate.py


import string
import unittest
from typing import Any, Dict

from sygnal.apnstruncate import json_encode, truncate


def simplestring(length: int, offset: int = 0) -> str:
    """
    Deterministically generates a string.
    Args:
        length: Length of the string
        offset: Offset of the string
    Returns:
        A string formed of lowercase ASCII characters.
    """
    return "".join(
        [
            string.ascii_lowercase[(i + offset) % len(string.ascii_lowercase)]
            for i in range(length)
        ]
    )


def sillystring(length: int, offset: int = 0) -> str:
    """
    Deterministically generates a string
    Args:
        length: Length of the string
        offset: Offset of the string
    Returns:
        A string formed of weird and wonderful UTF-8 emoji characters.
    """
    # Each of these is a 4-byte UTF-8 character (outside the BMP).
    chars = ["\U0001F430", "\U0001F431", "\U0001F432", "\U0001F433"]
    return "".join([chars[(i + offset) % len(chars)] for i in range(length)])


def payload_for_aps(aps: Dict[str, Any]) -> Dict[str, Any]:
    """
    Returns the APNS payload for an 'aps' dictionary.
    """
    return {"aps": aps}


class TruncateTestCase(unittest.TestCase):
    def test_dont_truncate(self) -> None:
        """
        Tests that truncation is not performed if unnecessary.
        """
        # This shouldn't need to be truncated
        txt = simplestring(20)
        aps = {"alert": txt}
        self.assertEqual(txt, truncate(payload_for_aps(aps), 256)["aps"]["alert"])

    def test_truncate_alert(self) -> None:
        """
        Tests that the 'alert' string field will be truncated when needed.
        """
        # `overhead` is the JSON size of the payload with an empty alert, so
        # `overhead + 5` leaves room for exactly 5 characters of alert text.
        overhead = len(json_encode(payload_for_aps({"alert": ""})))
        txt = simplestring(10)
        aps = {"alert": txt}
        self.assertEqual(
            txt[:5], truncate(payload_for_aps(aps), overhead + 5)["aps"]["alert"]
        )

    def test_truncate_alert_body(self) -> None:
        """
        Tests that the 'alert' 'body' field will be truncated when needed.
        """
        overhead = len(json_encode(payload_for_aps({"alert": {"body": ""}})))
        txt = simplestring(10)
        aps = {"alert": {"body": txt}}
        self.assertEqual(
            txt[:5],
            truncate(payload_for_aps(aps), overhead + 5)["aps"]["alert"]["body"],
        )

    def test_truncate_loc_arg(self) -> None:
        """
        Tests that the 'alert' 'loc-args' field will be truncated when needed.
        (Tests with one loc arg)
        """
        overhead = len(json_encode(payload_for_aps({"alert": {"loc-args": [""]}})))
        txt = simplestring(10)
        aps = {"alert": {"loc-args": [txt]}}
        self.assertEqual(
            txt[:5],
            truncate(payload_for_aps(aps), overhead + 5)["aps"]["alert"]["loc-args"][0],
        )

    def test_truncate_loc_args(self) -> None:
        """
        Tests that the 'alert' 'loc-args' field will be truncated when needed.
        (Tests with two loc args)
        """
        overhead = len(json_encode(payload_for_aps({"alert": {"loc-args": ["", ""]}})))
        txt = simplestring(10)
        txt2 = simplestring(10, 3)
        aps = {"alert": {"loc-args": [txt, txt2]}}
        # The budget of 10 extra characters is split evenly, 5 per loc-arg.
        self.assertEqual(
            txt[:5],
            truncate(payload_for_aps(aps), overhead + 10)["aps"]["alert"]["loc-args"][
                0
            ],
        )
        self.assertEqual(
            txt2[:5],
            truncate(payload_for_aps(aps), overhead + 10)["aps"]["alert"]["loc-args"][
                1
            ],
        )

    def test_python_unicode_support(self) -> None:
        """
        Tests Python's unicode support :-
        a one character unicode string should have a length of one, even if it's one
        multibyte character.
        OS X, for example, is broken, and counts the number of surrogate pairs.
        I have no great desire to manually parse UTF-8 to work around this since
        it works fine on Linux.
        """
        if len("\U0001F430") != 1:
            msg = (
                "Unicode support is broken in your Python binary. "
                + "Truncating messages with multibyte unicode characters will fail."
            )
            self.fail(msg)

    def test_truncate_string_with_multibyte(self) -> None:
        """
        Tests that truncation works as expected on strings containing one
        multibyte character.
        """
        overhead = len(json_encode(payload_for_aps({"alert": ""})))
        txt = "\U0001F430" + simplestring(30)
        aps = {"alert": txt}
        # NB. The number of characters of the string we get is dependent
        # on the json encoding used.
        self.assertEqual(
            txt[:17], truncate(payload_for_aps(aps), overhead + 20)["aps"]["alert"]
        )

    def test_truncate_multibyte(self) -> None:
        """
        Tests that truncation works as expected on strings containing only
        multibyte characters.
        """
        overhead = len(json_encode(payload_for_aps({"alert": ""})))
        txt = sillystring(30)
        aps = {"alert": txt}
        trunc = truncate(payload_for_aps(aps), overhead + 30)
        # The string is all 4 byte characters so the truncated UTF-8 string
        # should be a multiple of 4 bytes long
        self.assertEqual(len(trunc["aps"]["alert"].encode()) % 4, 0)
        # NB. The number of characters of the string we get is dependent
        # on the json encoding used.
        self.assertEqual(txt[:7], trunc["aps"]["alert"])
--------------------------------------------------------------------------------
/tests/test_concurrency_limit.py:
--------------------------------------------------------------------------------
# Copyright 2019–2020 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from typing import Any, Dict, List

from sygnal.notifications import (
    ConcurrencyLimitedPushkin,
    Device,
    Notification,
    NotificationContext,
)
from sygnal.utils import twisted_sleep

from tests.testutils import TestCase

# Two devices sharing the com.example.gcm app_id (so they hit the same
# pushkin's limit) and one on com.example.apns (so it does not).
DEVICE_GCM1_EXAMPLE = {
    "app_id": "com.example.gcm",
    "pushkey": "spqrg",
    "pushkey_ts": 42,
}
DEVICE_GCM2_EXAMPLE = {
    "app_id": "com.example.gcm",
    "pushkey": "spqrh",
    "pushkey_ts": 42,
}
DEVICE_APNS_EXAMPLE = {
    "app_id": "com.example.apns",
    "pushkey": "spqra",
    "pushkey_ts": 42,
}


class SlowConcurrencyLimitedDummyPushkin(ConcurrencyLimitedPushkin):
    """A pushkin that takes a second per dispatch, to hold the limit open."""

    async def _dispatch_notification_unlimited(
        self, n: Notification, device: Device, context: NotificationContext
    ) -> List[str]:
        """
        We will deliver the notification to the mighty nobody
        and we will take one second to do it, because we are slow!
        """
        await twisted_sleep(1.0, self.sygnal.reactor)
        return []


class ConcurrencyLimitTestCase(TestCase):
    def config_setup(self, config: Dict[str, Any]) -> None:
        # Both apps are limited to a single in-flight request each.
        super().config_setup(config)
        config["apps"]["com.example.gcm"] = {
            "type": "tests.test_concurrency_limit.SlowConcurrencyLimitedDummyPushkin",
            "inflight_request_limit": 1,
        }
        config["apps"]["com.example.apns"] = {
            "type": "tests.test_concurrency_limit.SlowConcurrencyLimitedDummyPushkin",
            "inflight_request_limit": 1,
        }

    def test_passes_under_limit_one(self) -> None:
        """
        Tests that a push notification succeeds if it is under the limit.
        """
        resp = self._request(self._make_dummy_notification([DEVICE_GCM1_EXAMPLE]))

        self.assertEqual(resp, {"rejected": []})

    def test_passes_under_limit_multiple_no_interfere(self) -> None:
        """
        Tests that 2 push notifications succeed if they are to different
        pushkins (so do not hit a per-pushkin limit).
        """
        resp = self._request(
            self._make_dummy_notification([DEVICE_GCM1_EXAMPLE, DEVICE_APNS_EXAMPLE])
        )

        self.assertEqual(resp, {"rejected": []})

    def test_fails_when_limit_hit(self) -> None:
        """
        Tests that 1 of 2 push notifications fail if they are to the same pushkins
        (so do hit the per-pushkin limit of 1).
        """
        resp = self._multi_requests(
            [
                self._make_dummy_notification([DEVICE_GCM1_EXAMPLE]),
                self._make_dummy_notification([DEVICE_GCM2_EXAMPLE]),
            ]
        )

        # request 0 will succeed
        self.assertEqual(resp[0], {"rejected": []})

        # request 1 will fail because request 0 has filled the limit
        self.assertEqual(resp[1], 502)
--------------------------------------------------------------------------------
/tests/test_http.py:
--------------------------------------------------------------------------------
# Copyright 2019 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import Any, Dict
from unittest.mock import MagicMock, patch

from aioapns.common import NotificationResult

from sygnal.apnspushkin import ApnsPushkin

from tests import testutils

# Three APNS pushkin ids: one literal, two glob patterns, used to test
# app-id -> pushkin resolution (exact, single wildcard match, ambiguous).
PUSHKIN_ID_1 = "com.example.apns"
PUSHKIN_ID_2 = "*.example.*"
PUSHKIN_ID_3 = "com.example.a*"

TEST_CERTFILE_PATH = "/path/to/my/certfile.pem"

# Specific app id (matches PUSHKIN_ID_1 exactly)
DEVICE_EXAMPLE_SPECIFIC = {
    "app_id": "com.example.apns",
    "pushkey": "spqr",
    "pushkey_ts": 42,
}

# Only one time matching app id (with PUSHKIN_ID_2)
DEVICE_EXAMPLE_MATCHING = {
    "app_id": "com.example.bpns",
    "pushkey": "spqr",
    "pushkey_ts": 42,
}

# More than one times matching app id (with PUSHKIN_ID_2 and PUSHKIN_ID_3)
DEVICE_EXAMPLE_AMBIGIOUS = {
    "app_id": "com.example.apns2",
    "pushkey": "spqr",
    "pushkey_ts": 42,
}


class HttpTestCase(testutils.TestCase):
    """Tests app-id matching against configured (possibly glob) pushkin ids."""

    def setUp(self) -> None:
        # Stub out the APNs client class so no real APNS connection is made.
        self.apns_mock_class = patch("sygnal.apnspushkin.APNs").start()
        self.apns_mock = MagicMock()
        self.apns_mock_class.return_value = self.apns_mock

        # pretend our certificate exists
        patch("os.path.exists", lambda x: x == TEST_CERTFILE_PATH).start()
        # The certificate file doesn't actually exist on disk, so stub out the
        # expiry reporting that would otherwise try to read its contents.
        patch("sygnal.apnspushkin.ApnsPushkin._report_certificate_expiration").start()
        self.addCleanup(patch.stopall)

        super().setUp()

        # Share one mock across every configured pushkin so each test can
        # assert on the total number of dispatch attempts.
        self.apns_pushkin_snotif = MagicMock()
        for key, value in self.sygnal.pushkins.items():
            assert isinstance(value, ApnsPushkin)
            # type safety: ignore is used here due to mypy not handling monkeypatching,
            # see https://github.com/python/mypy/issues/2427
            value._send_notification = self.apns_pushkin_snotif  # type: ignore[assignment] # noqa: E501

    def config_setup(self, config: Dict[str, Any]) -> None:
        # Register the same APNS config under all three pushkin ids.
        super().config_setup(config)
        config["apps"][PUSHKIN_ID_1] = {"type": "apns", "certfile": TEST_CERTFILE_PATH}
        config["apps"][PUSHKIN_ID_2] = {"type": "apns", "certfile": TEST_CERTFILE_PATH}
        config["apps"][PUSHKIN_ID_3] = {"type": "apns", "certfile": TEST_CERTFILE_PATH}

    def test_with_specific_appid(self) -> None:
        """
        Tests the expected case: A specific app id must be processed.
        """
        # Arrange
        method = self.apns_pushkin_snotif
        method.side_effect = testutils.make_async_magic_mock(
            NotificationResult("notID", "200")
        )

        # Act
        resp = self._request(self._make_dummy_notification([DEVICE_EXAMPLE_SPECIFIC]))

        # Assert
        # method should be called one time
        self.assertEqual(1, method.call_count)

        self.assertEqual({"rejected": []}, resp)

    def test_with_matching_appid(self) -> None:
        """
        Tests the matching case: A matching app id (only one time) must be processed.
        """
        # Arrange
        method = self.apns_pushkin_snotif
        method.side_effect = testutils.make_async_magic_mock(
            NotificationResult("notID", "200")
        )

        # Act
        resp = self._request(self._make_dummy_notification([DEVICE_EXAMPLE_MATCHING]))

        # Assert
        # method should be called one time
        self.assertEqual(1, method.call_count)

        self.assertEqual({"rejected": []}, resp)

    def test_with_ambigious_appid(self) -> None:
        """
        Tests the rejection case: An ambiguous app id (matching more than one
        configured pushkin) should be rejected without processing.
        """
        # Arrange
        method = self.apns_pushkin_snotif

        # Act
        resp = self._request(self._make_dummy_notification([DEVICE_EXAMPLE_AMBIGIOUS]))

        # Assert
        # must be rejected without calling the method
        self.assertEqual(0, method.call_count)
        self.assertEqual({"rejected": ["spqr"]}, resp)
import asyncio
import ssl
from asyncio import AbstractEventLoop, BaseTransport, Protocol, Task
from typing import Optional, Tuple, cast

from sygnal.exceptions import ProxyConnectError
from sygnal.helper.proxy.proxy_asyncio import HttpConnectProtocol

from tests import testutils
from tests.asyncio_test_helpers import (
    EchoProtocol,
    MockProtocol,
    MockTransport,
    TimelessEventLoopWrapper,
)
from tests.twisted_test_helpers import (
    create_test_cert_file,
    get_test_ca_cert_file,
    get_test_key_file,
)


class AsyncioHttpProxyTest(testutils.TestCase):
    """Tests the asyncio HTTP CONNECT proxy client (no TLS to the proxy)."""

    def config_setup(self, config):
        super().config_setup(config)
        config["apps"]["com.example.spqr"] = {
            "type": "tests.test_pushgateway_api_v1.TestPushkin"
        }
        # Install a wrapper loop whose time we can advance manually, so the
        # tests never actually sleep.
        base_loop = asyncio.new_event_loop()
        augmented_loop = TimelessEventLoopWrapper(base_loop)  # type: ignore
        asyncio.set_event_loop(cast(AbstractEventLoop, augmented_loop))

        self.loop = augmented_loop

    def make_fake_proxy(
        self, host: str, port: int, proxy_credentials: Optional[Tuple[str, str]]
    ) -> Tuple[MockProtocol, MockTransport, "Task[Tuple[BaseTransport, Protocol]]"]:
        """
        Wires an HttpConnectProtocol up to a mock transport standing in for
        the proxy, and returns (target protocol, proxy transport, the task
        that completes once the CONNECT handshake switches over).
        """
        # Task[Tuple[MockTransport, MockProtocol]]
        # make a fake proxy
        fake_proxy = MockTransport()
        # make a fake protocol that we fancy using through the proxy
        fake_protocol = MockProtocol()
        # create a HTTP CONNECT proxy client protocol
        http_connect_protocol = HttpConnectProtocol(
            target_hostport=(host, port),
            proxy_credentials=proxy_credentials,
            protocol_factory=lambda: fake_protocol,
            sslcontext=None,
            loop=None,
        )
        switch_over_task = asyncio.get_event_loop().create_task(
            http_connect_protocol.switch_over_when_ready()
        )
        # check the task is not somehow already marked as done before we even
        # receive anything.
        self.assertFalse(switch_over_task.done())
        # connect the proxy client to the proxy
        fake_proxy.set_protocol(http_connect_protocol)
        http_connect_protocol.connection_made(fake_proxy)
        return fake_protocol, fake_proxy, switch_over_task

    def test_connect_no_credentials(self):
        """
        Tests the proxy connection procedure when there is no basic auth.
        """
        host = "example.org"
        port = 443
        proxy_credentials = None
        fake_protocol, fake_proxy, switch_over_task = self.make_fake_proxy(
            host, port, proxy_credentials
        )

        # Check that the proxy got the proper CONNECT request.
        self.assertEqual(fake_proxy.buffer, b"CONNECT example.org:443 HTTP/1.0\r\n\r\n")
        # Reset the proxy mock
        fake_proxy.reset_mock()

        # pretend we got a happy response with some dangling bytes from the
        # target protocol
        fake_proxy.pretend_to_receive(
            b"HTTP/1.0 200 Connection Established\r\n\r\n"
            b"begin beep boop\r\n\r\n~~ :) ~~"
        )

        # advance event loop because we have to let coroutines be executed
        cast(TimelessEventLoopWrapper, self.loop).advance(1.0)

        # *now* we should have switched over from the HTTP CONNECT protocol
        # to the user protocol (in our case, a MockProtocol).
        self.assertTrue(switch_over_task.done())

        transport, protocol = switch_over_task.result()

        # check it was our protocol that was returned
        self.assertIs(protocol, fake_protocol)

        # check our protocol received exactly the bytes meant for it
        self.assertEqual(
            fake_protocol.received_bytes, b"begin beep boop\r\n\r\n~~ :) ~~"
        )

    def test_connect_correct_credentials(self):
        """
        Tests the proxy connection procedure when there is basic auth.
        """
        host = "example.org"
        port = 443
        proxy_credentials = ("user", "secret")
        fake_protocol, fake_proxy, switch_over_task = self.make_fake_proxy(
            host, port, proxy_credentials
        )

        # Check that the proxy got the proper CONNECT request with the
        # correctly-encoded credentials
        self.assertEqual(
            fake_proxy.buffer,
            b"CONNECT example.org:443 HTTP/1.0\r\n"
            b"Proxy-Authorization: basic dXNlcjpzZWNyZXQ=\r\n\r\n",
        )
        # Reset the proxy mock
        fake_proxy.reset_mock()

        # pretend we got a happy response with some dangling bytes from the
        # target protocol
        fake_proxy.pretend_to_receive(
            b"HTTP/1.0 200 Connection Established\r\n\r\n"
            b"begin beep boop\r\n\r\n~~ :) ~~"
        )

        # advance event loop because we have to let coroutines be executed
        cast(TimelessEventLoopWrapper, self.loop).advance(1.0)

        # *now* we should have switched over from the HTTP CONNECT protocol
        # to the user protocol (in our case, a MockProtocol).
        self.assertTrue(switch_over_task.done())

        transport, protocol = switch_over_task.result()

        # check it was our protocol that was returned
        self.assertIs(protocol, fake_protocol)

        # check our protocol received exactly the bytes meant for it
        self.assertEqual(
            fake_protocol.received_bytes, b"begin beep boop\r\n\r\n~~ :) ~~"
        )

    def test_connect_failure(self):
        """
        Test that our task fails properly when we cannot make a connection through
        the proxy.
        """
        host = "example.org"
        port = 443
        proxy_credentials = ("user", "secret")
        fake_protocol, fake_proxy, switch_over_task = self.make_fake_proxy(
            host, port, proxy_credentials
        )

        # Check that the proxy got the proper CONNECT request with the
        # correctly-encoded credentials.
        self.assertEqual(
            fake_proxy.buffer,
            b"CONNECT example.org:443 HTTP/1.0\r\n"
            b"Proxy-Authorization: basic dXNlcjpzZWNyZXQ=\r\n\r\n",
        )
        # Reset the proxy mock
        fake_proxy.reset_mock()

        # For the sake of this test, pretend the credentials are incorrect so
        # send a sad response with a HTML error page
        fake_proxy.pretend_to_receive(
            b"HTTP/1.0 401 Unauthorised\r\n\r\n... some error here ..."
        )

        # advance event loop because we have to let coroutines be executed
        cast(TimelessEventLoopWrapper, self.loop).advance(1.0)

        # *now* this future should have completed
        self.assertTrue(switch_over_task.done())

        # but we should have failed
        self.assertIsInstance(switch_over_task.exception(), ProxyConnectError)

        # check our protocol did not receive anything, because it was an HTTP-
        # level error, not actually a connection to our target.
        self.assertEqual(fake_protocol.received_bytes, b"")


class AsyncioHttpProxyTLSTest(testutils.TestCase):
    """Test that using a HTTPS proxy works.

    This is a bit convoluted to try and test that we don't hit a race where the
    new client protocol can receive data before `connection_made` is called,
    which can cause problems if it tries to write to the connection that it
    hasn't been given yet.
    """

    def config_setup(self, config):
        super().config_setup(config)
        config["apps"]["com.example.spqr"] = {
            "type": "tests.test_pushgateway_api_v1.TestPushkin"
        }
        # Keep a handle on the base loop: the test wraps it a second time with
        # an independent wrapper for the server side.
        self.base_loop = asyncio.new_event_loop()
        augmented_loop = TimelessEventLoopWrapper(self.base_loop)  # type: ignore
        asyncio.set_event_loop(cast(AbstractEventLoop, augmented_loop))

        self.loop = augmented_loop

        # Client-side TLS context which trusts our test CA.
        self.proxy_context = ssl.create_default_context()
        self.proxy_context.load_verify_locations(get_test_ca_cert_file())
        self.proxy_context.set_ciphers("DEFAULT")

    def make_fake_proxy(
        self,
        host: str,
        port: int,
        proxy_credentials: Optional[Tuple[str, str]],
    ) -> Tuple[EchoProtocol, MockTransport, "Task[Tuple[BaseTransport, Protocol]]"]:
        """
        As AsyncioHttpProxyTest.make_fake_proxy, but speaks TLS to the proxy
        and uses an EchoProtocol as the target protocol.
        """
        # Task[Tuple[MockTransport, MockProtocol]]

        # make a fake proxy
        fake_proxy = MockTransport()

        # We connect with an echo protocol to test that we can always write when
        # we receive data.
        fake_protocol = EchoProtocol()

        # create a HTTP CONNECT proxy client protocol
        http_connect_protocol = HttpConnectProtocol(
            target_hostport=(host, port),
            proxy_credentials=proxy_credentials,
            protocol_factory=lambda: fake_protocol,
            sslcontext=self.proxy_context,
            loop=None,
        )
        switch_over_task = self.loop.create_task(
            http_connect_protocol.switch_over_when_ready()
        )
        # check the task is not somehow already marked as done before we even
        # receive anything.
        self.assertFalse(switch_over_task.done())
        # connect the proxy client to the proxy
        fake_proxy.set_protocol(http_connect_protocol)
        http_connect_protocol.connection_made(fake_proxy)
        return fake_protocol, fake_proxy, switch_over_task

    def test_connect_no_credentials(self):
        """
        Tests the proxy connection procedure when there is no basic auth.
        """
        host = "example.org"
        port = 443
        proxy_credentials = None
        fake_protocol, fake_proxy, switch_over_task = self.make_fake_proxy(
            host, port, proxy_credentials
        )

        # Check that the proxy got the proper CONNECT request.
        self.assertEqual(fake_proxy.buffer, b"CONNECT example.org:443 HTTP/1.0\r\n\r\n")
        # Reset the proxy mock
        fake_proxy.reset_mock()

        # pretend we got a happy response
        fake_proxy.pretend_to_receive(b"HTTP/1.0 200 Connection Established\r\n\r\n")

        # Since we're talking TLS we need to create a server TLS connection that
        # we can use to talk to each other.
        context = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH)
        context.load_cert_chain(
            create_test_cert_file([b"DNS:example.org"]), keyfile=get_test_key_file()
        )
        context.set_ciphers("DEFAULT")

        # Note that we have to use a different event loop wrapper here as we
        # want that server side setup to finish before the client side setup, so
        # that we can trigger any races.
        server_loop = TimelessEventLoopWrapper(self.base_loop)  # type: ignore
        server_transport = MockTransport()
        proxy_ft = server_loop.create_task(
            server_loop.start_tls(
                server_transport,
                MockProtocol(),
                context,
                server_hostname=host,
                server_side=True,
            )
        )

        # Advance event loop because we have to let coroutines be executed
        cast(TimelessEventLoopWrapper, self.loop).advance(1.0)
        server_loop.advance(1.0)

        # We manually copy the bytes between the fake_proxy transport and our
        # created TLS transport. We do this for each step in the TLS handshake.

        # Client -> Server (ClientHello)
        server_transport.pretend_to_receive(fake_proxy.buffer)
        fake_proxy.buffer = b""

        # Server -> Client
        fake_proxy.pretend_to_receive(server_transport.buffer)
        server_transport.buffer = b""

        # Client -> Server
        server_transport.pretend_to_receive(fake_proxy.buffer)
        fake_proxy.buffer = b""

        # We *only* advance the server side loop so that we can send data before
        # the client has called `connection_made` on the new protocol.
        server_loop.advance(0.1)

        # Server -> Client application data.
        server_plain_transport = proxy_ft.result()
        server_plain_transport.write(b"begin beep boop\r\n\r\n~~ :) ~~")
        fake_proxy.pretend_to_receive(server_transport.buffer)
        server_transport.buffer = b""

        cast(TimelessEventLoopWrapper, self.loop).advance(1.0)

        # *now* we should have switched over from the HTTP CONNECT protocol
        # to the user protocol (in our case, a MockProtocol).
        self.assertTrue(switch_over_task.done())

        transport, protocol = switch_over_task.result()

        # check it was our protocol that was returned
        self.assertIs(protocol, fake_protocol)

        # check our protocol received exactly the bytes meant for it
        self.assertEqual(
            fake_protocol.received_bytes, b"begin beep boop\r\n\r\n~~ :) ~~"
        )
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging

import twisted
from incremental import Version
from twisted.internet import interfaces  # noqa: F401
from twisted.internet.protocol import Factory
from twisted.protocols.tls import TLSMemoryBIOFactory
from twisted.web.client import readBody
from twisted.web.http import HTTPChannel

from sygnal.helper.proxy.proxyagent_twisted import ProxyAgent

from tests.testutils import TestCase
from tests.twisted_test_helpers import (
    FakeTransport,
    TestServerTLSConnectionFactory,
    get_test_https_policy,
)

logger = logging.getLogger(__name__)

HTTPFactory = Factory.forProtocol(HTTPChannel)


class SygnalTwistedProxyTests(TestCase):
    """Tests the Twisted ProxyAgent against an in-memory proxy + TLS server."""

    def config_setup(self, config):
        super().config_setup(config)
        config["apps"]["com.example.gcm"] = {
            "type": "tests.test_gcm.TestGcmPushkin",
            "api_key": "kii",
        }

    def _make_connection(
        self, client_factory, server_factory, ssl=False, expected_sni=None
    ):
        """Builds a test server, and completes the outgoing client connection

        Args:
            client_factory (interfaces.IProtocolFactory): the factory that the
                application is trying to use to make the outbound connection.
                We will invoke it to build the client Protocol

            server_factory (interfaces.IProtocolFactory): a factory to build the
                server-side protocol

            ssl (bool): If true, we will expect an ssl connection and wrap
                server_factory with a TLSMemoryBIOFactory

            expected_sni (bytes|None): the expected SNI value

        Returns:
            IProtocol: the server Protocol returned by server_factory
        """
        if ssl:
            server_factory = _wrap_server_factory_for_tls(server_factory, self.reactor)

        server_protocol = server_factory.buildProtocol(None)

        # now, tell the client protocol factory to build the client protocol,
        # and wire the output of said protocol up to the server via
        # a FakeTransport.
        #
        # Normally this would be done by the TCP socket code in Twisted, but we are
        # stubbing that out here.
        client_protocol = client_factory.buildProtocol(None)
        client_protocol.makeConnection(
            FakeTransport(server_protocol, self.reactor, client_protocol)
        )

        # tell the server protocol to send its stuff back to the client, too
        server_protocol.makeConnection(
            FakeTransport(client_protocol, self.reactor, server_protocol)
        )

        if ssl:
            # When TLS-wrapped, the HTTP protocol and the TLS connection live
            # one layer down.
            http_protocol = server_protocol.wrappedProtocol
            tls_connection = server_protocol._tlsConnection
        else:
            http_protocol = server_protocol
            tls_connection = None

        # give the reactor a pump to get the TLS juices flowing (if needed)
        self.reactor.advance(0)

        if expected_sni is not None:
            server_name = tls_connection.get_servername()
            self.assertEqual(
                server_name,
                expected_sni,
                "Expected SNI %s but got %s" % (expected_sni, server_name),
            )

        return http_protocol

    def test_https_request_via_proxy(self):
        """An HTTPS request is tunnelled via CONNECT through the proxy."""
        agent = ProxyAgent(
            self.reactor,
            contextFactory=get_test_https_policy(),
            proxy_url_str="http://proxy.com:1080",
        )

        self.reactor.lookups["proxy.com"] = "1.2.3.5"
        d = agent.request(b"GET", b"https://test.com/abc")

        # there should be a pending TCP connection (to the proxy, not test.com)
        clients = self.reactor.tcpClients
        self.assertEqual(len(clients), 1)
        (host, port, client_factory, _timeout, _bindAddress) = clients[0]
        self.assertEqual(host, "1.2.3.5")
        self.assertEqual(port, 1080)

        # make a test HTTP server, and wire up the client
        proxy_server = self._make_connection(
            client_factory, _get_test_protocol_factory()
        )

        # fish the transports back out so that we can do the old switcheroo
        s2c_transport = proxy_server.transport
        client_protocol = s2c_transport.other
        c2s_transport = client_protocol.transport

        # the FakeTransport is async, so we need to pump the reactor
        self.reactor.advance(0)

        # now there should be a pending CONNECT request
        self.assertEqual(len(proxy_server.requests), 1)

        request = proxy_server.requests[0]
        self.assertEqual(request.method, b"CONNECT")
        self.assertEqual(request.path, b"test.com:443")

        # tell the proxy server not to close the connection
        proxy_server.persistent = True

        # this just stops the http Request trying to do a chunked response
        # request.setHeader(b"Content-Length", b"0")
        request.finish()

        # now we can replace the proxy channel with a new, SSL-wrapped HTTP channel
        ssl_factory = _wrap_server_factory_for_tls(
            _get_test_protocol_factory(), self.reactor
        )
        ssl_protocol = ssl_factory.buildProtocol(None)
        http_server = ssl_protocol.wrappedProtocol

        ssl_protocol.makeConnection(
            FakeTransport(client_protocol, self.reactor, ssl_protocol)
        )
        c2s_transport.other = ssl_protocol

        self.reactor.advance(0)

        # The client should have negotiated TLS for test.com through the tunnel.
        server_name = ssl_protocol._tlsConnection.get_servername()
        expected_sni = b"test.com"
        self.assertEqual(
            server_name,
            expected_sni,
            "Expected SNI %r but got %r" % (expected_sni, server_name),
        )

        # now there should be a pending request
        self.assertEqual(len(http_server.requests), 1)

        request = http_server.requests[0]
        self.assertEqual(request.method, b"GET")
        self.assertEqual(request.path, b"/abc")
        self.assertEqual(request.requestHeaders.getRawHeaders(b"host"), [b"test.com"])
        request.write(b"result")

        self.reactor.advance(0)

        request.finish()

        self.reactor.advance(0)

        resp = self.successResultOf(d)
        body = self.successResultOf(readBody(resp))
        self.assertEqual(body, b"result")


def _wrap_server_factory_for_tls(factory, clock, sanlist=None):
    """Wrap an existing Protocol Factory with a test TLSMemoryBIOFactory

    The resultant factory will create a TLS server which presents a certificate
    signed by our test CA, valid for the domains in `sanlist`

    Args:
        factory (interfaces.IProtocolFactory): protocol factory to wrap
        clock: reactor/clock passed through to TLSMemoryBIOFactory on newer
            Twisted versions
        sanlist (iterable[bytes]): list of domains the cert should be valid for

    Returns:
        interfaces.IProtocolFactory
    """
    if sanlist is None:
        sanlist = [b"DNS:test.com"]

    connection_creator = TestServerTLSConnectionFactory(sanlist=sanlist)
    # Twisted > 23.8.0 has a different API that accepts a clock.
    if twisted.version <= Version("Twisted", 23, 8, 0):  # type: ignore[attr-defined]
        return TLSMemoryBIOFactory(
            connection_creator, isClient=False, wrappedFactory=factory
        )
    else:
        return TLSMemoryBIOFactory(
            connection_creator, isClient=False, wrappedFactory=factory, clock=clock
        )


def _get_test_protocol_factory():
    """Get a protocol Factory which will build an HTTPChannel

    Returns:
        interfaces.IProtocolFactory
    """
    server_factory = Factory.forProtocol(HTTPChannel)

    # Request.finish expects the factory to have a 'log' method.
    server_factory.log = _log_request

    return server_factory


def _log_request(request):
    """Implements Factory.log, which is expected by Request.finish"""
    logger.info("Completed request %s", request)
import unittest

from sygnal.helper.proxy import HttpProxyUrl, decompose_http_proxy_url


class ProxyUrlTestCase(unittest.TestCase):
    """Checks decomposition of HTTP proxy URLs into host/port/credentials."""

    def test_decompose_http_proxy_url(self) -> None:
        """Well-formed proxy URLs decompose correctly, with and without an
        explicit port and with and without credentials."""
        cases = [
            ("http://example.org", HttpProxyUrl("example.org", 80, None)),
            ("http://example.org:8080", HttpProxyUrl("example.org", 8080, None)),
            (
                "http://bob:secretsquirrel@example.org",
                HttpProxyUrl("example.org", 80, ("bob", "secretsquirrel")),
            ),
            (
                "http://bob:secretsquirrel@example.org:8080",
                HttpProxyUrl("example.org", 8080, ("bob", "secretsquirrel")),
            ),
        ]
        for url, expected in cases:
            self.assertEqual(decompose_http_proxy_url(url), expected)

    def test_decompose_username_only(self) -> None:
        """
        We do not support usernames without passwords for now — this tests the
        current behaviour, though (it ignores the username).
        """
        parts = decompose_http_proxy_url("http://bob@example.org:8080")
        self.assertEqual(parts, HttpProxyUrl("example.org", 8080, None))

    def test_decompose_http_proxy_url_failure(self) -> None:
        """Non-HTTP schemes and URLs lacking a hostname are rejected."""
        with self.assertRaises(RuntimeError):
            decompose_http_proxy_url("ftp://example.org")

        with self.assertRaises(RuntimeError):
            decompose_http_proxy_url("http://")
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from typing import Any, Dict, List

from twisted.internet.address import IPv6Address
from twisted.internet.testing import StringTransport

from sygnal.exceptions import (
    NotificationDispatchException,
    TemporaryNotificationDispatchException,
)
from sygnal.notifications import Device, Notification, NotificationContext, Pushkin

from tests import testutils

# Each device's pushkey selects a behaviour in TestPushkin.dispatch_notification.
DEVICE_RAISE_EXCEPTION = {
    "app_id": "com.example.spqr",
    "pushkey": "raise_exception",
    "pushkey_ts": 1234,
}

DEVICE_REMOTE_ERROR = {
    "app_id": "com.example.spqr",
    "pushkey": "remote_error",
    "pushkey_ts": 1234,
}

DEVICE_TEMPORARY_REMOTE_ERROR = {
    "app_id": "com.example.spqr",
    "pushkey": "temporary_remote_error",
    "pushkey_ts": 1234,
}

DEVICE_REJECTED = {
    "app_id": "com.example.spqr",
    "pushkey": "reject",
    "pushkey_ts": 1234,
}

DEVICE_ACCEPTED = {
    "app_id": "com.example.spqr",
    "pushkey": "accept",
    "pushkey_ts": 1234,
}


class TestPushkin(Pushkin):
    """
    A synthetic Pushkin with simple rules: the pushkey of the device
    determines the outcome of dispatching the notification.
    """

    async def dispatch_notification(
        self, n: Notification, device: Device, context: NotificationContext
    ) -> List[str]:
        if device.pushkey == "raise_exception":
            raise Exception("Bad things have occurred!")
        elif device.pushkey == "remote_error":
            raise NotificationDispatchException("Synthetic failure")
        elif device.pushkey == "temporary_remote_error":
            raise TemporaryNotificationDispatchException("Synthetic failure")
        elif device.pushkey == "reject":
            return [device.pushkey]
        elif device.pushkey == "accept":
            return []
        raise Exception(f"Unexpected fall-through. {device.pushkey}")


class PushGatewayApiV1TestCase(testutils.TestCase):
    """Tests the HTTP status codes returned by the /notify API."""

    def config_setup(self, config: Dict[str, Any]) -> None:
        """
        Set up a TestPushkin for the test.
        """
        super().config_setup(config)
        config["apps"]["com.example.spqr"] = {
            "type": "tests.test_pushgateway_api_v1.TestPushkin"
        }

    def test_good_requests_give_200(self) -> None:
        """
        Test that good requests give a 200 response code.
        """
        # 200 codes cause the result to be parsed instead of returning the code
        self.assertNot(
            isinstance(
                self._request(
                    self._make_dummy_notification([DEVICE_ACCEPTED, DEVICE_REJECTED])
                ),
                int,
            )
        )

    def test_accepted_devices_are_not_rejected(self) -> None:
        """
        Test that devices which are accepted by the Pushkin
        do not lead to a rejection being returned to the homeserver.
        """
        self.assertEqual(
            self._request(self._make_dummy_notification([DEVICE_ACCEPTED])),
            {"rejected": []},
        )

    def test_rejected_devices_are_rejected(self) -> None:
        """
        Test that devices which are rejected by the Pushkin
        DO lead to a rejection being returned to the homeserver.
        """
        self.assertEqual(
            self._request(self._make_dummy_notification([DEVICE_REJECTED])),
            {"rejected": [DEVICE_REJECTED["pushkey"]]},
        )

    def test_only_rejected_devices_are_rejected(self) -> None:
        """
        Test that devices which are rejected by the Pushkin
        are the only ones to have a rejection returned to the homeserver,
        even if other devices feature in the request.
        """
        self.assertEqual(
            self._request(
                self._make_dummy_notification([DEVICE_REJECTED, DEVICE_ACCEPTED])
            ),
            {"rejected": [DEVICE_REJECTED["pushkey"]]},
        )

    def test_bad_requests_give_400(self) -> None:
        """
        Test that bad requests lead to a 400 Bad Request response.
        """
        self.assertEqual(self._request({}), 400)

    def test_exceptions_give_500(self) -> None:
        """
        Test that internal exceptions/errors lead to a 500 Internal Server Error
        response.
        """

        self.assertEqual(
            self._request(self._make_dummy_notification([DEVICE_RAISE_EXCEPTION])), 500
        )

        # we also check that a successful device doesn't hide the exception,
        # regardless of whether it comes before or after the failing device
        self.assertEqual(
            self._request(
                self._make_dummy_notification([DEVICE_ACCEPTED, DEVICE_RAISE_EXCEPTION])
            ),
            500,
        )

        self.assertEqual(
            self._request(
                self._make_dummy_notification([DEVICE_RAISE_EXCEPTION, DEVICE_ACCEPTED])
            ),
            500,
        )

    def test_remote_errors_give_502(self) -> None:
        """
        Test that errors caused by remote services such as GCM or APNS
        lead to a 502 Bad Gateway response.
        """

        self.assertEqual(
            self._request(self._make_dummy_notification([DEVICE_REMOTE_ERROR])), 502
        )

        # we also check that a successful device doesn't hide the exception,
        # regardless of whether it comes before or after the failing device
        self.assertEqual(
            self._request(
                self._make_dummy_notification([DEVICE_ACCEPTED, DEVICE_REMOTE_ERROR])
            ),
            502,
        )

        self.assertEqual(
            self._request(
                self._make_dummy_notification([DEVICE_REMOTE_ERROR, DEVICE_ACCEPTED])
            ),
            502,
        )

    def test_overlong_requests_are_rejected(self) -> None:
        """
        Test that the connection is dropped when the request body exceeds the
        maximum upload size.
        """
        # as a control case, first send a regular request.

        # connect the site to a fake transport.
        transport = StringTransport()
        protocol = self.site.buildProtocol(IPv6Address("TCP", "::1", 2345))
        protocol.makeConnection(transport)

        protocol.dataReceived(
            b"POST / HTTP/1.1\r\n"
            b"Connection: close\r\n"
            b"Transfer-Encoding: chunked\r\n"
            b"\r\n"
            b"0\r\n"
            b"\r\n"
        )

        # we should get a 404
        self.assertRegex(transport.value().decode(), r"^HTTP/1\.1 404 ")

        # now send an oversized request
        transport = StringTransport()
        protocol = self.site.buildProtocol(IPv6Address("TCP", "::1", 2345))
        protocol.makeConnection(transport)

        protocol.dataReceived(
            b"POST / HTTP/1.1\r\n"
            b"Connection: close\r\n"
            b"Transfer-Encoding: chunked\r\n"
            b"\r\n"
        )

        # we deliberately send all the data in one big chunk, to ensure that
        # twisted isn't buffering the data in the chunked transfer decoder.
        # we start with the chunk size, in hex. (We won't actually send this much)
        protocol.dataReceived(b"10000000\r\n")
        sent = 0
        while not transport.disconnected:
            self.assertLess(sent, 0x10000000, "connection did not drop")
            protocol.dataReceived(b"\0" * 1024)
            sent += 1024

        # default max upload size is 512K, so it should drop on the next buffer after
        # that.
        self.assertEqual(sent, 513 * 1024)
RSA PRIVATE KEY----- 2 | MIIEpgIBAAKCAQEA5k5cIq7KV/2MfMlelTadL5cQqucY+CzZICFAqvMHGDf93wpP 3 | Zmx+yPWLcbuga7sHRko7qobkDE48KZIx/39qJIcWKV9KH6vysKknJYx6/Zz/D/Ga 4 | GgUeCf4f3Q2eqVe9ZvnLVhkITeBZqK25BJpr3cY8ppxGpcKAivOM9S6F95O7AVC3 5 | iLJSEw0FgTLdrDgi4gd3IGqLiehSFHZl65+Usno+zB0nyYSIC/EJsEY96uqJ2FZx 6 | nCWjR8q7i7W67kHxOdOwyDZ8mAvpzmiUEb3RSu3baADpqd8/lb9TVl1WGYFPdG/l 7 | IkyQbIwYTqwRR60sXJvYYmmAYPt4+RQJmv4gwwIDAQABAoIBAQCFuFG+wYYy+MCt 8 | Y65LLN6vVyMSWAQjdMbM5QHLQDiKU1hQPIhFjBFBVXCVpL9MTde3dDqYlKGsk3BT 9 | ItNs6eoTM2wmsXE0Wn4bHNvh7WMsBhACjeFP4lDCtI6DpvjMkmkidT8eyoIL1Yu5 10 | aMTYa2Dd79AfXPWYIQrJowfhBBY83KuW5fmYnKKDVLqkT9nf2dgmmQz85RgtNiZC 11 | zFkIsNmPqH1zRbcw0wORfOBrLFvsMc4Tt8EY5Wz3NnH8Zfgf8Q3MgARH1yspz3Vp 12 | B+EYHbsK17xZ+P59KPiX3yefvyYWEUjFF7ymVsVnDxLugYl4pXwWUpm19GxeDvFk 13 | cgBUD5OBAoGBAP7lBdCp6lx6fYtxdxUm3n4MMQmYcac4qZdeBIrvpFMnvOBBuixl 14 | eavcfFmFdwgAr8HyVYiu9ynac504IYvmtYlcpUmiRBbmMHbvLQEYHl7FYFKNz9ej 15 | 2ue4oJE3RsPdLsD3xIlc+xN8oT1j0knyorwsHdj0Sv77eZzZS9XZZfJzAoGBAOdO 16 | CibYmoNqK/mqDHkp6PgsnbQGD5/CvPF/BLUWV1QpHxLzUQQeoBOQW5FatHe1H5zi 17 | mbq3emBefVmsCLrRIJ4GQu4vsTMfjcpGLwviWmaK6pHbGPt8IYeEQ2MNyv59EtA2 18 | pQy4dX7/Oe6NLAR1UEQjXmCuXf+rxnxF3VJd1nRxAoGBANb9eusl9fusgSnVOTjJ 19 | AQ7V36KVRv9hZoG6liBNwo80zDVmms4JhRd1MBkd3mkMkzIF4SkZUnWlwLBSANGM 20 | dX/3eZ5i1AVwgF5Am/f5TNxopDbdT/o1RVT/P8dcFT7s1xuBn+6wU0F7dFBgWqVu 21 | lt4aY85zNrJcj5XBHhqwdDGLAoGBAIksPNUAy9F3m5C6ih8o/aKAQx5KIeXrBUZq 22 | v43tK+kbYfRJHBjHWMOBbuxq0G/VmGPf9q9GtGqGXuxZG+w+rYtJx1OeMQZShjIZ 23 | ITl5CYeahrXtK4mo+fF2PMh3m5UE861LWuKKWhPwpJiWXC5grDNcjlHj1pcTdeip 24 | PjHkuJPhAoGBAIh35DptqqdicOd3dr/+/m2YQywY8aSpMrR0bC06aAkscD7oq4tt 25 | s/jwl0UlHIrEm/aMN7OnGIbpfkVdExfGKYaa5NRlgOwQpShwLufIo/c8fErd2zb8 26 | K3ptlwBxMrayMXpS3DP78r83Z0B8/FSK2guelzdRJ3ftipZ9io1Gss1C 27 | -----END RSA PRIVATE KEY----- 28 | -------------------------------------------------------------------------------- /tests/tls/server.key: -------------------------------------------------------------------------------- 1 | -----BEGIN RSA 
PRIVATE KEY----- 2 | MIIEpAIBAAKCAQEAvUAWLOE6TEp3FYSfEnJMwYtJg3KIW5BjiAOOvFVOVQfJ5eEa 3 | vzyJ1Z+8DUgLznFnUkAeD9GjPvP7awl3NPJKLQSMkV5Tp+ea4YyV+Aa4R7flROEa 4 | zCGvmleydZw0VqN1atVZ0ikEoglM/APJQd70ec7KSR3QoxaV2/VNCHmyAPdP+0WI 5 | llV54VXX1CZrWSHaCSn1gzo3WjnGbxTOCQE5Z4k5hqJAwLWWhxDv+FX/jD38Sq3H 6 | gMFNpXJv6FYwwaKU8awghHdSY/qlBPE/1rU83vIBFJ3jW6I1WnQDfCQ69of5vshK 7 | N4v4hok56ScwdUnk8lw6xvJx1Uav/XQB9qGh4QIDAQABAoIBAQCHLO5p8hotAgdb 8 | JFZm26N9nxrMPBOvq0ucjEX4ucnwrFaGzynGrNwa7TRqHCrqs0/EjS2ryOacgbL0 9 | eldeRy26SASLlN+WD7UuI7e+6DXabDzj3RHB+tGuIbPDk+ZCeBDXVTsKBOhdQN1v 10 | KNkpJrJjCtSsMxKiWvCBow353srJKqCDZcF5NIBYBeDBPMoMbfYn5dJ9JhEf+2h4 11 | 0iwpnWDX1Vqf46pCRa0hwEyMXycGeV2CnfJSyV7z52ZHQrvkz8QspSnPpnlCnbOE 12 | UAvc8kZ5e8oZE7W+JfkK38vHbEGM1FCrBmrC/46uUGMRpZfDferGs91RwQVq/F0n 13 | JN9hLzsBAoGBAPh2pm9Xt7a4fWSkX0cDgjI7PT2BvLUjbRwKLV+459uDa7+qRoGE 14 | sSwb2QBqmQ1kbr9JyTS+Ld8dyUTsGHZK+YbTieAxI3FBdKsuFtcYJO/REN0vik+6 15 | fMaBHPvDHSU2ioq7spZ4JBFskzqs38FvZ0lX7aa3fguMk8GMLnofQ8QxAoGBAML9 16 | o5sJLN9Tk9bv2aFgnERgfRfNjjV4Wd99TsktnCD04D1GrP2eDSLfpwFlCnguck6b 17 | jxikqcolsNhZH4dgYHqRNj+IljSdl+sYZiygO6Ld0XU+dEFO86N3E9NzZhKcQ1at 18 | 85VdwNPCS7JM2fIxEvS9xfbVnsmK6/37ZZ5iI7yxAoGBALw2vRtJGmy60pojfd1A 19 | hibhAyINnlKlFGkSOI7zdgeuRTf6l9BTIRclvTt4hJpFgzM6hMWEbyE94hJoupsZ 20 | bm443o/LCWsox2VI05p6urhD6f9znNWKkiyY78izY+elqksvpjgfqEresaTYAeP5 21 | LQe9KNSK2VuMUP1j4G04M9BxAoGAWe8ITZJuytZOgrz/YIohqPvj1l2tcIYA1a6C 22 | 7xEFSMIIxtpZIWSLZIFJEsCakpHBkPX4iwIveZfmt/JrM1JFTWK6ZZVGyh/BmOIZ 23 | Bg4lU1oBqJTUo+aZQtTCJS29b2n5OPpkNYkXTdP4e9UsVKNDvfPlYZJneUeEzxDr 24 | bqCPIRECgYA544KMwrWxDQZg1dsKWgdVVKx80wEFZAiQr9+0KF6ch6Iu7lwGJHFY 25 | iI6O85paX41qeC/Fo+feIWJVJU2GvG6eBsbO4bmq+KSg4NkABJSYxodgBp9ftNeD 26 | jo1tfw+gudlNe5jXHu7oSX93tqGjR4Cnlgan/KtfkB96yHOumGmOhQ== 27 | -----END RSA PRIVATE KEY----- 28 | -------------------------------------------------------------------------------- /tests/twisted_test_helpers.py: -------------------------------------------------------------------------------- 1 | import 
logging
import os
import subprocess

import attr
from OpenSSL import SSL
from OpenSSL.SSL import Connection
from twisted.internet.interfaces import IOpenSSLServerConnectionCreator
from twisted.internet.ssl import Certificate, trustRootFromCertificates
from twisted.web.client import BrowserLikePolicyForHTTPS  # noqa: F401
from twisted.web.iweb import IPolicyForHTTPS  # noqa: F401
from zope.interface.declarations import implementer

logger = logging.getLogger(__name__)


@attr.s(cmp=False)
class FakeTransport:
    """
    A twisted.internet.interfaces.ITransport implementation which sends all its data
    straight into an IProtocol object: it exists to connect two IProtocols together.

    To use it, instantiate it with the receiving IProtocol, and then pass it to the
    sending IProtocol's makeConnection method:

        server = HTTPChannel()
        client.makeConnection(FakeTransport(server, self.reactor))

    If you want bidirectional communication, you'll need two instances.
    """

    other = attr.ib()
    """The Protocol object which will receive any data written to this transport.

    :type: twisted.internet.interfaces.IProtocol
    """

    _reactor = attr.ib()
    """Test reactor

    :type: twisted.internet.interfaces.IReactorTime
    """

    _protocol = attr.ib(default=None)
    """The Protocol which is producing data for this transport. Optional, but if set
    will get called back for connectionLost() notifications etc.
    """

    # Connection-state flags mirroring those of a real ITransport.
    disconnecting = False
    disconnected = False
    connected = True
    # Data written but not yet delivered to `other`.
    buffer = attr.ib(default=b"")
    producer = attr.ib(default=None)
    # When True, every write() schedules a flush() on the reactor.
    autoflush = attr.ib(default=True)

    def getPeer(self):
        # No real network endpoint exists for this in-memory transport.
        return None

    def getHost(self):
        return None

    def loseConnection(self, reason=None):
        # Graceful close: wait for the outgoing buffer to drain before
        # marking the transport as disconnected (flush() completes it).
        if not self.disconnecting:
            logger.info("FakeTransport: loseConnection(%s)", reason)
            self.disconnecting = True
            if self._protocol:
                self._protocol.connectionLost(reason)

            # if we still have data to write, delay until that is done
            if self.buffer:
                logger.info(
                    "FakeTransport: Delaying disconnect until buffer is flushed"
                )
            else:
                self.connected = False
                self.disconnected = True

    def abortConnection(self):
        # Hard close: drop immediately, discarding any buffered data.
        logger.info("FakeTransport: abortConnection()")

        if not self.disconnecting:
            self.disconnecting = True
            if self._protocol:
                self._protocol.connectionLost(None)

        self.disconnected = True

    def pauseProducing(self):
        if not self.producer:
            return

        self.producer.pauseProducing()

    def resumeProducing(self):
        if not self.producer:
            return
        self.producer.resumeProducing()

    def unregisterProducer(self):
        if not self.producer:
            return

        self.producer = None

    def registerProducer(self, producer, streaming):
        self.producer = producer
        self.producerStreaming = streaming

        def _produce():
            # NOTE(review): this assumes resumeProducing() returns a Deferred
            # for non-streaming (pull) producers -- confirm against callers.
            d = self.producer.resumeProducing()
            d.addCallback(lambda x: self._reactor.callLater(0.1, _produce))

        # Pull producers must be polled; push (streaming) producers write
        # on their own.
        if not streaming:
            self._reactor.callLater(0.0, _produce)

    def write(self, byt):
        if self.disconnecting:
            raise Exception("Writing to disconnecting FakeTransport")

        self.buffer = self.buffer + byt

        # always actually do the write asynchronously. Some protocols (notably the
        # TLSMemoryBIOProtocol) get very confused if a read comes back while they are
        # still doing a write. Doing a callLater here breaks the cycle.
        if self.autoflush:
            self._reactor.callLater(0.0, self.flush)

    def writeSequence(self, seq):
        for x in seq:
            self.write(x)

    def flush(self, maxbytes=None):
        """Deliver up to `maxbytes` of buffered data to the peer protocol."""
        if not self.buffer:
            # nothing to do. Don't write empty buffers: it upsets the
            # TLSMemoryBIOProtocol
            return

        if self.disconnected:
            return

        if getattr(self.other, "transport") is None:
            # the other has no transport yet; reschedule
            if self.autoflush:
                self._reactor.callLater(0.0, self.flush)
            return

        if maxbytes is not None:
            to_write = self.buffer[:maxbytes]
        else:
            to_write = self.buffer

        logger.info("%s->%s: %s", self._protocol, self.other, to_write)

        try:
            self.other.dataReceived(to_write)
        except Exception as e:
            # Swallow the error so a broken peer doesn't propagate into the
            # writer; the data stays buffered.
            logger.exception("Exception writing to protocol: %s", e)
            return

        self.buffer = self.buffer[len(to_write) :]
        if self.buffer and self.autoflush:
            self._reactor.callLater(0.0, self.flush)

        # Complete a pending graceful close once the buffer has drained.
        if not self.buffer and self.disconnecting:
            logger.info("FakeTransport: Buffer now empty, completing disconnect")
            self.disconnected = True

    def setTcpNoDelay(self, new: bool) -> None:
        # No-op: there is no real TCP socket to configure.
        pass


def get_test_https_policy():
    """Get a test IPolicyForHTTPS which trusts the test CA cert

    Returns:
        IPolicyForHTTPS
    """
    ca_file = get_test_ca_cert_file()
    with open(ca_file) as stream:
        content = stream.read()
    cert = Certificate.loadPEM(content)
    trust_root = trustRootFromCertificates([cert])
    return BrowserLikePolicyForHTTPS(trustRoot=trust_root)


def get_test_ca_cert_file():
    """Get the path to the test CA cert

    The keypair is generated with:

        openssl genrsa -out ca.key 2048
        openssl req -new -x509 -key ca.key -days 3650 -out ca.crt \
            -subj '/CN=synapse test CA'
    """
    return os.path.join(os.path.dirname(__file__), "tls/ca.crt")


def get_test_key_file():
    """get the path to the test key

    The key file is made with:

        openssl genrsa -out server.key 2048
    """
    return os.path.join(os.path.dirname(__file__), "tls/server.key")


# Counter used to give each generated certificate/config file a unique name.
cert_file_count = 0

# openssl extension-config template used to inject SAN entries into the
# generated server certificate.
CONFIG_TEMPLATE = b"""\
[default]
basicConstraints = CA:FALSE
keyUsage=nonRepudiation, digitalSignature, keyEncipherment
subjectAltName = %(sanentries)s
"""


def create_test_cert_file(sanlist):
    """build an x509 certificate file

    Signs a cert for the shared test server key with the test CA, shelling
    out to the ``openssl`` command-line tool. Files are written to the
    current working directory.

    Args:
        sanlist: list[bytes]: a list of subjectAltName values for the cert

    Returns:
        str: the path to the file
    """
    global cert_file_count
    csr_filename = "server.csr"
    cnf_filename = "server.%i.cnf" % (cert_file_count,)
    cert_filename = "server.%i.crt" % (cert_file_count,)
    cert_file_count += 1

    # first build a CSR
    subprocess.check_call(
        [
            "openssl",
            "req",
            "-new",
            "-key",
            get_test_key_file(),
            "-subj",
            "/",
            "-out",
            csr_filename,
        ]
    )

    # now a config file describing the right SAN entries
    sanentries = b",".join(sanlist)
    with open(cnf_filename, "wb") as f:
        f.write(CONFIG_TEMPLATE % {b"sanentries": sanentries})

    # finally the cert
    ca_key_filename = os.path.join(os.path.dirname(__file__), "tls/ca.key")
    ca_cert_filename = get_test_ca_cert_file()
    subprocess.check_call(
        [
            "openssl",
            "x509",
            "-req",
            "-in",
            csr_filename,
            "-CA",
            ca_cert_filename,
            "-CAkey",
            ca_key_filename,
            "-set_serial",
            "1",
            "-extfile",
            cnf_filename,
            "-out",
            cert_filename,
        ]
    )

    return cert_filename


@implementer(IOpenSSLServerConnectionCreator)
class TestServerTLSConnectionFactory:
    """An SSL connection creator which returns connections which present a certificate
    signed by our test CA."""

    def __init__(self, sanlist):
        """
        Args:
            sanlist: list[bytes]: a list of subjectAltName values for the cert
        """
        self._cert_file = create_test_cert_file(sanlist)

    def serverConnectionForTLS(self, tlsProtocol):
        # SSLv23_METHOD negotiates the highest protocol version supported by
        # both peers; it does not force SSLv2/v3.
        ctx = SSL.Context(SSL.SSLv23_METHOD)
        ctx.use_certificate_file(self._cert_file)
        ctx.use_privatekey_file(get_test_key_file())
        return Connection(ctx, None)
--------------------------------------------------------------------------------
/tox.ini:
--------------------------------------------------------------------------------
[tox]
envlist = py, check_codestyle, check_types

[testenv]

# As of twisted 16.4, trial tries to import the tests as a package (previously
# it loaded the files explicitly), which means they need to be on the
# pythonpath. Our sdist doesn't include the 'tests' package, so normally it
# doesn't work within the tox virtualenv.
#
# As a workaround, we tell tox to do install with 'pip -e', which just
# creates a symlink to the project directory instead of unpacking the sdist.
13 | usedevelop=true 14 | 15 | extras = 16 | dev 17 | 18 | allowlist_externals = poetry 19 | 20 | commands = 21 | poetry run coverage run --source=sygnal -m twisted.trial tests 22 | poetry run coverage report --sort=cover 23 | poetry run coverage html 24 | 25 | [testenv:check_codestyle] 26 | 27 | allowlist_externals = poetry 28 | 29 | commands = 30 | poetry run ruff sygnal/ tests/ stubs 31 | poetry run black --check --diff sygnal/ tests/ stubs 32 | poetry run isort --check-only --diff sygnal/ tests/ stubs 33 | 34 | [testenv:check_types] 35 | 36 | allowlist_externals = poetry 37 | 38 | commands = 39 | poetry run mypy sygnal/ tests/ stubs 40 | --------------------------------------------------------------------------------