├── .circleci ├── config.yml ├── finish-coverage-report ├── lib.sh └── report-coverage.sh ├── .coveragerc ├── .coveralls.yml ├── .git-blame-ignore-revs ├── .gitattributes ├── .gitignore ├── CONTRIBUTING.rst ├── LICENSE-2.0.txt ├── README.rst ├── code_of_conduct.rst ├── default.nix ├── docs ├── build │ ├── .empty │ └── .gitignore ├── requirements.txt └── source │ ├── CONTRIBUTING.rst │ ├── backup.rst │ ├── code_of_conduct.rst │ ├── conf.py │ ├── configuration.rst │ ├── designs │ ├── backup-recovery.rst │ └── costs.rst │ ├── index.rst │ ├── interface.rst │ └── leases.rst ├── flake.lock ├── flake.nix ├── nix ├── compose.nix ├── flake8-black.nix ├── flake8-isort.nix ├── hypothesis.nix ├── klein.nix ├── lib.nix ├── mypy-zope.nix ├── pycddl.nix ├── pyopenssl.nix ├── sh.nix ├── tahoe-capabilities.nix ├── tahoe-lafs.nix ├── tahoe-versions.nix └── types-pyyaml.nix ├── pyproject.toml ├── requirements └── test.in ├── setup.cfg ├── setup.py ├── slipcover2coveralls.py └── src ├── _zkapauthorizer ├── __init__.py ├── _attrs_zope.py ├── _base64.py ├── _json.py ├── _plugin.py ├── _stack.py ├── _storage_client.py ├── _storage_server.py ├── _types.py ├── api.py ├── backup-recovery.yaml ├── config.py ├── configutil.py ├── controller.py ├── eliot.py ├── foolscap.py ├── lease_maintenance.py ├── model.py ├── newsfragments │ ├── .gitignore │ └── 451.misc ├── pricecalculator.py ├── private.py ├── recover.py ├── replicate.py ├── resource.py ├── schema.py ├── server │ ├── __init__.py │ └── spending.py ├── spending.py ├── sql.py ├── storage_common.py ├── tahoe.py ├── tests │ ├── __init__.py │ ├── _exception.py │ ├── _float_matchers.py │ ├── _sql_matchers.py │ ├── common.py │ ├── fixtures.py │ ├── foolscap.py │ ├── issuer.py │ ├── matchers.py │ ├── privacypass.py │ ├── resources.py │ ├── storage_common.py │ ├── strategies.py │ ├── test_base64.py │ ├── test_client_resource.py │ ├── test_controller.py │ ├── test_foolscap.py │ ├── test_lease_maintenance.py │ ├── test_matchers.py │ ├── 
test_model.py │ ├── test_plugin.py │ ├── test_pricecalculator.py │ ├── test_private.py │ ├── test_recover.py │ ├── test_replicate.py │ ├── test_schema.py │ ├── test_spending.py │ ├── test_sql.py │ ├── test_storage_client.py │ ├── test_storage_protocol.py │ ├── test_storage_server.py │ ├── test_strategies.py │ ├── test_tahoe.py │ ├── test_tahoe_lafs_integration.py │ ├── test_tokens.py │ └── testing-signing.key └── validators.py └── twisted └── plugins └── zkapauthorizer.py /.circleci/finish-coverage-report: -------------------------------------------------------------------------------- 1 | #! /usr/bin/env nix-shell 2 | #! nix-shell -I nixpkgs=https://github.com/NixOS/nixpkgs/archive/ba34f8c09f10624b009b2999c4390ac2e88dafb9.tar.gz -i bash -p "lcov" "coreutils" "python3.withPackages (ps: [ ps.coverage ps.codecov ])" 3 | 4 | set -xueo pipefail 5 | 6 | # The workspace contains some number of coverage.py data files and some number 7 | # of slipcover json-format files. Our job will be to merge all of that 8 | # information and then present the results. 9 | workspace_path="$1" 10 | shift 11 | 12 | # We can very easily merge all of the coverage.py data using the coverage.py 13 | # tool. 14 | python -m coverage --version 15 | python -m coverage combine --debug=config "$workspace_path" 16 | python -m coverage report --debug=config 17 | 18 | # It is tempting to use lcov as a common format which we can merge other data 19 | # into (eg slipcover) but codecov fails with an uninformative error ("There is 20 | # an error processing the coverage reports.", "unknown error") if we submit 21 | # lcov to it. At least we don't have other coverage data to merge in right 22 | # now. 23 | python -m codecov --required 24 | -------------------------------------------------------------------------------- /.circleci/lib.sh: -------------------------------------------------------------------------------- 1 | # Run a command, enabling cache writes to cachix if possible. 
The command is 2 | # accepted as a variable number of positional arguments (like argv). 3 | function cache_if_able() { 4 | # Dump some info about our build environment. 5 | describe_build 6 | 7 | if is_cache_writeable; then 8 | # If the cache is available we'll use it. This lets fork owners set 9 | # up their own caching if they want. 10 | echo "Cachix credentials present; will attempt to write to cache." 11 | 12 | # The `cachix watch-exec ...` does our cache population. When it sees 13 | # something added to the store (I guess) it pushes it to the named 14 | # cache. 15 | cachix watch-exec "${CACHIX_NAME}" -- "$@" 16 | else 17 | if is_cache_required; then 18 | echo "Required credentials (CACHIX_AUTH_TOKEN) are missing." 19 | return 1 20 | else 21 | echo "Cachix credentials missing; will not attempt cache writes." 22 | "$@" 23 | fi 24 | fi 25 | } 26 | 27 | function is_cache_writeable() { 28 | # We can only *push* to the cache if we have a CACHIX_AUTH_TOKEN. in-repo 29 | # jobs will get this from CircleCI configuration but jobs from forks may 30 | # not. 31 | [ -v CACHIX_AUTH_TOKEN ] 32 | } 33 | 34 | function is_cache_required() { 35 | # If we're building in the upstream repository then we must use the cache. 36 | # If we're building anything from a fork then we're allowed to not have 37 | # the credentials. 38 | is_upstream 39 | } 40 | 41 | # Return success if the origin of this build is the upstream repository itself 42 | # (and so we expect to have cache credentials available), failure otherwise. 43 | # 44 | # See circleci.txt for notes about how this determination is made. 45 | function is_upstream() { 46 | # CIRCLE_PROJECT_USERNAME is set to the org the build is happening for. 47 | # If a PR targets a fork of the repo then this is set to something other 48 | # than the upstream org (PrivateStorageio in our case). 
49 | [ "$CIRCLE_PROJECT_USERNAME" == "PrivateStorageio" ] && 50 | 51 | # CIRCLE_BRANCH is set to the real branch name for in-repo PRs and 52 | # "pull/NNNN" for pull requests from forks. 53 | # 54 | # CIRCLE_PULL_REQUESTS is set to a comma-separated list of the full 55 | # URLs of the PR pages which share an underlying branch, with one of 56 | # them ended with that same "pull/NNNN" for PRs from forks. 57 | ! any_element_endswith "/$CIRCLE_BRANCH" "," "$CIRCLE_PULL_REQUESTS" 58 | } 59 | 60 | # Return success if splitting $3 on $2 results in an array with any element 61 | # that ends with $1, failure otherwise. 62 | function any_element_endswith() { 63 | suffix=$1 64 | shift 65 | 66 | sep=$1 67 | shift 68 | 69 | haystack=$1 70 | shift 71 | 72 | IFS="${sep}" read -r -a elements <<< "$haystack" 73 | for elem in "${elements[@]}"; do 74 | if endswith "$suffix" "$elem"; then 75 | return 0 76 | fi 77 | done 78 | return 1 79 | } 80 | 81 | # Return success if $2 ends with $1, failure otherwise. 82 | function endswith() { 83 | suffix=$1 84 | shift 85 | 86 | haystack=$1 87 | shift 88 | 89 | case "$haystack" in 90 | *${suffix}) 91 | return 0 92 | ;; 93 | 94 | *) 95 | return 1 96 | ;; 97 | esac 98 | } 99 | 100 | function describe_build() { 101 | echo "Building PR for user/org: ${CIRCLE_PROJECT_USERNAME}" 102 | echo "Building branch: ${CIRCLE_BRANCH}" 103 | if is_upstream; then 104 | echo "Upstream build." 105 | else 106 | echo "Non-upstream build." 107 | fi 108 | if is_cache_required; then 109 | echo "Cache is required." 110 | else 111 | echo "Cache not required." 112 | fi 113 | if is_cache_writeable; then 114 | echo "Cache is writeable." 115 | else 116 | echo "Cache not writeable." 117 | fi 118 | } 119 | -------------------------------------------------------------------------------- /.circleci/report-coverage.sh: -------------------------------------------------------------------------------- 1 | #! /usr/bin/env nix-shell 2 | #! 
nix-shell -i bash -p "curl" -p "python3.withPackages (ps: [ ps.coveralls ps.pyyaml ])" 3 | set -x 4 | find ./result*/ 5 | cp ./result*/coverage/.coverage ./.coverage 6 | coveralls 7 | -------------------------------------------------------------------------------- /.coveragerc: -------------------------------------------------------------------------------- 1 | [run] 2 | source = 3 | _zkapauthorizer 4 | twisted.plugins.zkapauthorizer 5 | 6 | # Measuring branch coverage is slower (so the conventional wisdom goes) but 7 | # too bad: it's an important part of the coverage information. 8 | branch = True 9 | 10 | # Whether or not we actually collect coverage information in parallel, we need 11 | # to have the coverage data files written according to the "parallel" naming 12 | # scheme so that we can use "coverage combine" later to rewrite paths in the 13 | # coverage report. 14 | parallel = True 15 | 16 | omit = 17 | # The Versioneer version file in the repository is generated by 18 | # Versioneer. Let's call it Versioneer's responsibility to ensure it 19 | # works and not pay attention to our test suite's coverage of it. Also, 20 | # the way Versioneer works is that the source file in the repository is 21 | # different from the source file in an installation - which is where we 22 | # measure coverage. When the source files differ like this, it's very 23 | # difficult to produce a coherent coverage report (measurements against 24 | # one source file are meaningless when looking at a different source 25 | # file). 26 | */_zkapauthorizer/_version.py 27 | 28 | # Record absolute paths. This makes `coverage combine` successfully rewrite 29 | # paths from different systems as long as our `[paths]source` configuration is 30 | # correct. 31 | relative_files = False 32 | 33 | [paths] 34 | source = 35 | # This is the nice path we would like to end up with. It must also be 36 | # where the source is actually available when `coverage combine` is 37 | # run. 
38 | src/ 39 | 40 | # A Nix build embeds source paths like this one. 41 | /nix/store/*-python*-zero-knowledge-access-pass-authorizer-*/lib/python*/site-packages/ 42 | 43 | # Then the Nix test step combines the data files and rewrites the paths to look like this. 44 | /tmp/nix-build-zkapauthorizer-tests.drv-*/src/ 45 | 46 | # A Windows build embeds source paths like one of these probably. 47 | ?:\*\site-packages\ 48 | 49 | # A macOS build embeds source paths like this one. 50 | /Users/distiller/project/venv/lib/python*/site-packages/ 51 | -------------------------------------------------------------------------------- /.coveralls.yml: -------------------------------------------------------------------------------- 1 | repo_token: "Tv6EVnfxYVcUH8RpzuaAuDADioeETqc05" 2 | -------------------------------------------------------------------------------- /.git-blame-ignore-revs: -------------------------------------------------------------------------------- 1 | # Reformat with black targeting Python 3.9 instead of Python 2.7 2 | 5a9f4e2e704bbc85f58f9134027903c69681bdd1 3 | 4 | # Initial autoformat with isort 5 | cb7d6947160f5d717edadc487de3c9fa59ff303d 6 | 7 | # Initial autoformat with black 8 | 61ea3e06f5da6c92020b1e7bb69fa638bc9ce87e -------------------------------------------------------------------------------- /.gitattributes: -------------------------------------------------------------------------------- 1 | src/_zkapauthorizer/_version.py export-subst 2 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | .hypothesis 2 | _trial_temp 3 | *~ 4 | result 5 | __pycache__ 6 | dropin.cache 7 | pip-wheel-metadata 8 | *.egg-info 9 | -------------------------------------------------------------------------------- /CONTRIBUTING.rst: -------------------------------------------------------------------------------- 1 | Contributing to ZKAPAuthorizer 2 
| ============================== 3 | 4 | Contributions are accepted in many forms. 5 | 6 | Examples of contributions include: 7 | 8 | * Bug reports and patch reviews 9 | * Documentation improvements 10 | * Code patches 11 | 12 | File a ticket at: 13 | 14 | https://github.com/PrivateStorageio/ZKAPAuthorizer/issues/new 15 | 16 | ZKAPAuthorizer uses GitHub keep track of bugs, feature requests, and associated patches. 17 | 18 | Contributions are managed using GitHub's Pull Requests. 19 | For a PR to be accepted it needs to have: 20 | 21 | * an associated issue 22 | * all CI tests passing 23 | * patch coverage of 100% as reported by codecov.io 24 | 25 | Updating Dependencies 26 | --------------------- 27 | 28 | Python Dependencies 29 | ................... 30 | 31 | We use Nix to build python packages. 32 | We take a snapshot of our Python dependencies from nixpkgs, 33 | thus our python depedencies (on nix) are automatically pinned. 34 | To do a minor nixpkgs update (and thus a python dependency update), 35 | run 36 | 37 | .. code:: shell 38 | 39 | nix flake lock --update-input nixpkgs 40 | 41 | To do a major nixpkgs update (and thus a python dependency update), 42 | edit ``flake.nix`` and change this line:: 43 | 44 | .. code:: nix 45 | 46 | nixpkgs.url = "github:NixOS/nixpkgs?ref=nixos-22.11"; 47 | 48 | Change the ``ref`` query argument to a suitable value. 49 | Then run the minor update command above. 50 | 51 | For dependencies or versions of dependencies that are required but not available from nixpkgs, 52 | we maintain our own packages in the ``nix/`` directory. 53 | 54 | tahoe-lafs 55 | .......... 56 | 57 | ZKAPAuthorizer declares a dependency on Tahoe-LAFS with a narrow version range. 58 | This means that Tahoe-LAFS will be installed when ZKAPAuthorizer is installed. 59 | It also means that ZKAPAuthorizer exerts a great deal of control over the version of Tahoe-LAFS chosen. 
60 | 61 | When installing using native Python packaging mechanisms 62 | (for example, pip) 63 | the relevant Tahoe-LAFS dependency declaration is in ``setup.cfg``. 64 | See the comments there about the narrow version constraint used. 65 | 66 | Several Nix packages are available which use different versions of Tahoe-LAFS. 67 | The version is reflected in the package name. 68 | For example, 69 | ``zkapauthorizer-python39-tahoe_1_17_1`` has a dependency on Tahoe-LAFS 1.17.1. 70 | 71 | There is also a ``tahoe_dev`` variation that depends on a recent version of Tahoe-LAFS ``master``. 72 | 73 | To update to the current master@HEAD revision, run: 74 | 75 | .. code:: shell 76 | 77 | nix flake lock --update-input tahoe-lafs-dev 78 | 79 | We intend for these updates to be performed periodically. 80 | At the moment, they must be performed manually. 81 | It might be worthwhile to `automate this process ` in the future. 82 | 83 | .. note:: 84 | 85 | Since tahoe-lafs doesn't have correct version information when installed from a github archive, 86 | the packaging in ``nix/tahoe-versions.nix`` includes a fake version number. 87 | This will need to be update manually at least when the minor version of tahoe-lafs changes. 88 | 89 | If you want to test different versions, 90 | you can override the ``tahoe-lafs-dev`` input on the command line. 91 | 92 | .. code:: shell 93 | 94 | nix build --override-input tahoe-lafs-dev /path/to/tahoe-lafs-version .#zkapauthorizer-python39-tahoe_dev 95 | 96 | The input can also be overridden for the test packages. 97 | -------------------------------------------------------------------------------- /README.rst: -------------------------------------------------------------------------------- 1 | Zero-Knowledge Access Pass Authorizer 2 | ===================================== 3 | 4 | |coverage|_ 5 | |circleci|_ 6 | 7 | What is this? 
8 | ------------- 9 | 10 | Zero-Knowledge Access Pass (ZKAP) Authorizer is a `Tahoe-LAFS`_ storage-system plugin which authorizes storage operations based on privacy-respecting passes. 11 | 12 | Such passes derive from `PrivacyPass`_. 13 | They allow a Tahoe-LAFS client to prove it has a right to access without revealing additional information. 14 | 15 | Copyright 16 | --------- 17 | 18 | Copyright 2019 PrivateStorage.io, LLC 19 | 20 | Licensed under the Apache License, Version 2.0 (the "License"); 21 | you may not use this file except in compliance with the License. 22 | You may obtain a copy of the License at 23 | 24 | http://www.apache.org/licenses/LICENSE-2.0 25 | 26 | Unless required by applicable law or agreed to in writing, software 27 | distributed under the License is distributed on an "AS IS" BASIS, 28 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 29 | See the License for the specific language governing permissions and 30 | limitations under the License. 31 | 32 | .. _Tahoe-LAFS: https://tahoe-lafs.org/ 33 | 34 | .. _PrivacyPass: https://privacypass.github.io/ 35 | 36 | .. |coverage| image:: https://codecov.io/gh/PrivateStorageio/ZKAPAuthorizer/branch/master/graph/badge.svg 37 | .. _coverage: https://codecov.io/gh/PrivateStorageio/ZKAPAuthorizer 38 | 39 | .. |circleci| image:: https://circleci.com/gh/PrivateStorageio/ZKAPAuthorizer.svg?style=svg 40 | .. 
_circleci: https://circleci.com/gh/PrivateStorageio/ZKAPAuthorizer 41 | -------------------------------------------------------------------------------- /code_of_conduct.rst: -------------------------------------------------------------------------------- 1 | Contributor Code of Conduct 2 | =========================== 3 | 4 | As contributors and maintainers of this project, and in the interest of 5 | fostering an open and welcoming community, we pledge to respect all people who 6 | contribute through reporting issues, posting feature requests, updating 7 | documentation, submitting pull requests or patches, and other activities. 8 | 9 | We are committed to making participation in this project a harassment-free 10 | experience for everyone, regardless of level of experience, gender, gender 11 | identity and expression, sexual orientation, disability, personal appearance, 12 | body size, race, ethnicity, age, religion, or nationality. 13 | 14 | Examples of unacceptable behavior by participants include: 15 | 16 | * The use of sexualized language or imagery 17 | * Personal attacks 18 | * Trolling or insulting/derogatory comments 19 | * Public or private harassment 20 | * Publishing other's private information, such as physical or electronic 21 | addresses, without explicit permission 22 | * Other unethical or unprofessional conduct 23 | 24 | Project maintainers have the right and responsibility to remove, edit, or 25 | reject comments, commits, code, wiki edits, issues, and other contributions 26 | that are not aligned to this Code of Conduct, or to ban temporarily or 27 | permanently any contributor for other behaviors that they deem inappropriate, 28 | threatening, offensive, or harmful. 29 | 30 | By adopting this Code of Conduct, project maintainers commit themselves to 31 | fairly and consistently applying these principles to every aspect of managing 32 | this project. 
Project maintainers who do not follow or enforce the Code of 33 | Conduct may be permanently removed from the project team. 34 | 35 | This Code of Conduct applies both within project spaces and in public spaces 36 | when an individual is representing the project or its community. 37 | 38 | Instances of abusive, harassing, or otherwise unacceptable behavior may be 39 | reported by contacting a project maintainer (see below). All 40 | complaints will be reviewed and investigated and will result in a response that 41 | is deemed necessary and appropriate to the circumstances. Maintainers are 42 | obligated to maintain confidentiality with regard to the reporter of an 43 | incident. 44 | 45 | You may send reports to `our Conduct email `_. 46 | 47 | This Code of Conduct is adapted from the 48 | `Contributor Covenant homepage `_, 49 | `version 1.3.0 `_, 50 | and the Twisted code of conduct. 51 | -------------------------------------------------------------------------------- /default.nix: -------------------------------------------------------------------------------- 1 | (import 2 | ( 3 | let lock = builtins.fromJSON (builtins.readFile ./flake.lock); in 4 | fetchTarball { 5 | url = "https://github.com/edolstra/flake-compat/archive/${lock.nodes.flake-compat.locked.rev}.tar.gz"; 6 | sha256 = lock.nodes.flake-compat.locked.narHash; 7 | } 8 | ) 9 | { src = ./.; } 10 | ).defaultNix 11 | -------------------------------------------------------------------------------- /docs/build/.empty: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PrivateStorageio/ZKAPAuthorizer/05c1e11786a3a054b098bb1b00811d92ff3ae38d/docs/build/.empty -------------------------------------------------------------------------------- /docs/build/.gitignore: -------------------------------------------------------------------------------- 1 | * 2 | !.gitignore 3 | -------------------------------------------------------------------------------- 
/docs/requirements.txt: -------------------------------------------------------------------------------- 1 | sphinx 2 | sphinxcontrib-redoc 3 | -------------------------------------------------------------------------------- /docs/source/CONTRIBUTING.rst: -------------------------------------------------------------------------------- 1 | .. include:: ../../CONTRIBUTING.rst 2 | -------------------------------------------------------------------------------- /docs/source/backup.rst: -------------------------------------------------------------------------------- 1 | ZKAP Backup/Restore 2 | =================== 3 | 4 | A large part of the intended purpose of ZKAPs is to allow a value exchange between storage provider and storage consumer. 5 | As such the ZKAPs themselves represent some value. 6 | Thus it is to be expected that users will want that value safe-guarded. 7 | One way to do this is for the internal state of ZKAPAuthorizer to be backed up periodically. 8 | 9 | Overview 10 | -------- 11 | 12 | ZKAPAuthorizer's internal state can be backed up and restored by backing up and restoring a SQLite3 database it maintains. 13 | After a backup has been taken it is possible to update a small "checkpoint" that keeps track of spent ZKAPs. 14 | This makes it relatively efficient to keep a backup up-to-date with respect to spending operations. 15 | Whenever a new voucher is purchased a new complete backup must be made to capture the associated new state. 16 | 17 | Backup 18 | ------ 19 | 20 | The Database 21 | ~~~~~~~~~~~~ 22 | 23 | ZKAPAuthorizer keeps all of its internal state in a SQLite3 database. 24 | This database is kept in the private directory of the Tahoe-LAFS node into which the plugin is installed. 25 | The database filename is ``privatestorageio-zkapauthz-v1.sqlite3``. 26 | For example, 27 | for a Tahoe-LAFS node that keeps it state at ``~/.tahoe``, 28 | the ZKAPAuthorizer database can be found at ``~/.tahoe/private/privatestorageio-zkapauthz-v1.sqlite3``. 
The existence of the database file is considered part of ZKAPAuthorizer's public interface.
67 | 68 | The Checkpoint 69 | ~~~~~~~~~~~~~~ 70 | 71 | After the Tahoe-LAFS node is started the checkpoint can be used to discard the "already spent" ZKAPs from the database:: 72 | 73 | PATCH /storage-plugins/privatestorageio-zkapauthz-v1/unblinded-token 74 | Content-Type: application/json 75 | 76 | { "first-unspent": } 77 | 78 | This shortens the time it takes for the node to complete the recovery process proportionally to the number of "already spent" ZKAPs are being discarded. 79 | -------------------------------------------------------------------------------- /docs/source/code_of_conduct.rst: -------------------------------------------------------------------------------- 1 | .. include:: ../../code_of_conduct.rst 2 | -------------------------------------------------------------------------------- /docs/source/conf.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # 3 | # Configuration file for the Sphinx documentation builder. 4 | # 5 | # This file does only contain a selection of the most common options. For a 6 | # full list see the documentation: 7 | # http://www.sphinx-doc.org/en/master/config 8 | 9 | # -- Path setup -------------------------------------------------------------- 10 | 11 | # If extensions (or modules to document with autodoc) are in another directory, 12 | # add these directories to sys.path here. If the directory is relative to the 13 | # documentation root, use os.path.abspath to make it absolute, like shown here. 
14 | # 15 | # import os 16 | # import sys 17 | # sys.path.insert(0, os.path.abspath('.')) 18 | 19 | 20 | # -- Project information ----------------------------------------------------- 21 | 22 | project = 'Zero-Knowledge Access Pass Authorizer' 23 | copyright = '2019, PrivateStorage.io, LLC' 24 | author = 'PrivateStorage.io, LLC' 25 | 26 | # The short X.Y version 27 | version = '' 28 | # The full version, including alpha/beta/rc tags 29 | release = '0.0' 30 | 31 | 32 | # -- General configuration --------------------------------------------------- 33 | 34 | # If your documentation needs a minimal Sphinx version, state it here. 35 | # 36 | # needs_sphinx = '1.0' 37 | 38 | # Add any Sphinx extension module names here, as strings. They can be 39 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom 40 | # ones. 41 | extensions = [ 42 | "sphinxcontrib.redoc", 43 | ] 44 | 45 | # Configure redoc 46 | redoc = [ 47 | { 48 | 'name': 'ZKAPAuthorizer Backup/Recovery API', 49 | 'page': 'designs/backup-recovery-openapi', 50 | 'spec': '../../src/_zkapauthorizer/backup-recovery.yaml', 51 | 'embed': True, 52 | }, 53 | ] 54 | 55 | # Add any paths that contain templates here, relative to this directory. 56 | templates_path = ['_templates'] 57 | 58 | # The suffix(es) of source filenames. 59 | # You can specify multiple suffix as a list of string: 60 | # 61 | # source_suffix = ['.rst', '.md'] 62 | source_suffix = '.rst' 63 | 64 | # The master toctree document. 65 | master_doc = 'index' 66 | 67 | # The language for content autogenerated by Sphinx. Refer to documentation 68 | # for a list of supported languages. 69 | # 70 | # This is also used if you do content translation via gettext catalogs. 71 | # Usually you set "language" from the command line for these cases. 72 | language = None 73 | 74 | # List of patterns, relative to source directory, that match files and 75 | # directories to ignore when looking for source files. 
76 | # This pattern also affects html_static_path and html_extra_path. 77 | exclude_patterns = [] 78 | 79 | # The name of the Pygments (syntax highlighting) style to use. 80 | pygments_style = None 81 | 82 | 83 | # -- Options for HTML output ------------------------------------------------- 84 | 85 | # The theme to use for HTML and HTML Help pages. See the documentation for 86 | # a list of builtin themes. 87 | # 88 | html_theme = 'alabaster' 89 | 90 | # Theme options are theme-specific and customize the look and feel of a theme 91 | # further. For a list of options available for each theme, see the 92 | # documentation. 93 | # 94 | # html_theme_options = {} 95 | 96 | # Add any paths that contain custom static files (such as style sheets) here, 97 | # relative to this directory. They are copied after the builtin static files, 98 | # so a file named "default.css" will overwrite the builtin "default.css". 99 | html_static_path = [] 100 | 101 | # Custom sidebar templates, must be a dictionary that maps document names 102 | # to template names. 103 | # 104 | # The default sidebars (for documents that don't match any pattern) are 105 | # defined by theme itself. Builtin themes are using these templates by 106 | # default: ``['localtoc.html', 'relations.html', 'sourcelink.html', 107 | # 'searchbox.html']``. 108 | # 109 | # html_sidebars = {} 110 | 111 | 112 | # -- Options for HTMLHelp output --------------------------------------------- 113 | 114 | # Output file base name for HTML help builder. 115 | htmlhelp_basename = 'ZKAPAuthorizer-doc' 116 | 117 | 118 | # -- Options for LaTeX output ------------------------------------------------ 119 | 120 | latex_elements = { 121 | # The paper size ('letterpaper' or 'a4paper'). 122 | # 123 | # 'papersize': 'letterpaper', 124 | 125 | # The font size ('10pt', '11pt' or '12pt'). 126 | # 127 | # 'pointsize': '10pt', 128 | 129 | # Additional stuff for the LaTeX preamble. 
130 | # 131 | # 'preamble': '', 132 | 133 | # Latex figure (float) alignment 134 | # 135 | # 'figure_align': 'htbp', 136 | } 137 | 138 | # Grouping the document tree into LaTeX files. List of tuples 139 | # (source start file, target name, title, 140 | # author, documentclass [howto, manual, or own class]). 141 | latex_documents = [ 142 | (master_doc, 'ZKAPAuthorizer.tex', 'ZKAP Authorizer Documentation', 143 | 'PrivateStorage.io, LLC', 'manual'), 144 | ] 145 | 146 | 147 | # -- Options for manual page output ------------------------------------------ 148 | 149 | # One entry per manual page. List of tuples 150 | # (source start file, name, description, authors, manual section). 151 | man_pages = [ 152 | (master_doc, 'zkapauthorizer', 'ZKAP Authorizer Documentation', 153 | [author], 1) 154 | ] 155 | 156 | 157 | # -- Options for Texinfo output ---------------------------------------------- 158 | 159 | # Grouping the document tree into Texinfo files. List of tuples 160 | # (source start file, target name, title, author, 161 | # dir menu entry, description, category) 162 | texinfo_documents = [ 163 | (master_doc, 'ZKAPAuthorizer', 'ZKAP Authorizer Documentation', 164 | author, 'ZKAPAuthorizer', 'One line description of project.', 165 | 'Miscellaneous'), 166 | ] 167 | 168 | 169 | # -- Options for Epub output ------------------------------------------------- 170 | 171 | # Bibliographic Dublin Core info. 172 | epub_title = project 173 | 174 | # The unique identifier of the text. This can be a ISBN number 175 | # or the project homepage. 176 | # 177 | # epub_identifier = '' 178 | 179 | # A unique identification for the text. 180 | # 181 | # epub_uid = '' 182 | 183 | # A list of files that should not be packed into the epub file. 
184 | epub_exclude_files = ['search.html'] 185 | 186 | 187 | # -- Extension configuration ------------------------------------------------- 188 | -------------------------------------------------------------------------------- /docs/source/configuration.rst: -------------------------------------------------------------------------------- 1 | Configuration 2 | ============= 3 | 4 | Client 5 | ------ 6 | 7 | To enable the plugin at all, add its name to the list of storage plugins in the Tahoe-LAFS configuration 8 | (``tahoe.cfg`` in the relevant node directory):: 9 | 10 | [client] 11 | storage.plugins = privatestorageio-zkapauthz-v2 12 | 13 | Then configure the plugin as desired in the ``storageclient.plugins.privatestorageio-zkapauthz-v2`` section. 14 | 15 | redeemer 16 | ~~~~~~~~ 17 | 18 | This item configures the voucher redeemer the client will use to redeem vouchers submitted to it. 19 | The ``dummy`` value is useful for testing purposes only. 20 | 21 | For example:: 22 | 23 | [storageclient.plugins.privatestorageio-zkapauthz-v2] 24 | redeemer = dummy 25 | issuer-public-key = YXNkYXNkYXNkYXNkYXNkCg== 26 | 27 | The value of the ``issuer-public-key`` item is included as-is as the public key in the successful redemption response. 28 | 29 | A ``redeemer`` value of ``ristretto`` causes the client to speak Ristretto-flavored PrivacyPass to an issuer server. 30 | In this case the ``ristretto-issuer-root-url`` item is also required. 31 | The client uses this URL to determine the server to which to send redemption requests. 32 | Additionally, 33 | the client will only interact with storage servers which announce the same issuer URL. 
34 | 35 | For example:: 36 | 37 | [storageclient.plugins.privatestorageio-zkapauthz-v2] 38 | redeemer = ristretto 39 | ristretto-issuer-root-url = https://issuer.example.invalid/ 40 | 41 | 42 | The client can also be configured with the value of a single pass:: 43 | 44 | [storageclient.plugins.privatestorageio-zkapauthz-v2] 45 | pass-value = 1048576 46 | 47 | The value given here must agree with the value servers use in their configuration or the storage service will be unusable. 48 | 49 | The client can also be configured with the number of passes to expect in exchange for one voucher:: 50 | 51 | [storageclient.plugins.privatestorageio-zkapauthz-v2] 52 | default-token-count = 32768 53 | 54 | The value given here must agree with the value the issuer uses in its configuration or redemption may fail. 55 | 56 | allowed-public-keys 57 | ~~~~~~~~~~~~~~~~~~~ 58 | 59 | Regardless of which redeemer is selected, 60 | the client must also be configured with the public part of the issuer key pair which it will allow to sign tokens:: 61 | 62 | [storageclient.plugins.privatestorageio-zkapauthz-v2] 63 | allowed-public-keys = AAAA...,BBBB...,CCCC... 64 | 65 | The ``allowed-public-keys`` value is a comma-separated list of encoded public keys. 66 | When tokens are received from an issuer during redemption, 67 | these are the only public keys which will satisfy the redeemer and cause the tokens to be made available to the client to be spent. 68 | Tokens received with any other public key will be sequestered and will *not* be spent until some further action is taken. 69 | 70 | lease.crawl-interval.mean 71 | ~~~~~~~~~~~~~~~~~~~~~~~~~ 72 | 73 | This item controls the frequency at which the lease maintenance crawler runs. 74 | The lease maintenance crawler visits all shares and renews their leases if necessary. 75 | The crawler will run at random intervals. 76 | The client will try to make the average (mean) interval between runs equal to this setting. 
77 | The value is an integer number of seconds. 78 | For example to run on average every 26 days:: 79 | 80 | [storageclient.plugins.privatestorageio-zkapauthz-v2] 81 | lease.crawl-interval.mean = 2246400 82 | 83 | 84 | lease.crawl-interval.range 85 | ~~~~~~~~~~~~~~~~~~~~~~~~~~ 86 | 87 | This item also controls the frequency of lease maintenance crawler runs. 88 | The random intervals between runs have a uniform distribution with this item's value as its range. 89 | The value is an integer number of seconds. 90 | For example to make all intervals fall within a 7 day period:: 91 | 92 | [storageclient.plugins.privatestorageio-zkapauthz-v2] 93 | lease.crawl-interval.range = 302400 94 | 95 | 96 | lease.min-time-remaining 97 | ~~~~~~~~~~~~~~~~~~~~~~~~ 98 | 99 | This item controls the lease renewal behavior of the lease maintenance crawler. 100 | It specifies an amount of time left on a lease. 101 | If the crawler encounters a lease with less time left than this then it will renew the lease. 102 | The value is an integer number of seconds. 
103 | For example to renew leases on all shares which will expire in less than one week:: 104 | 105 | [storageclient.plugins.privatestorageio-zkapauthz-v2] 106 | lease.min-time-remaining = 604800 107 | 108 | Server 109 | ------ 110 | 111 | To enable the plugin at all, add its name to the list of storage plugins in the Tahoe-LAFS configuration 112 | (``tahoe.cfg`` in the relevant node directory):: 113 | 114 | [storage] 115 | plugins = privatestorageio-zkapauthz-v2 116 | 117 | Then also configure the Ristretto-flavored PrivacyPass issuer the server will announce to clients:: 118 | 119 | [storageserver.plugins.privatestorageio-zkapauthz-v2] 120 | ristretto-issuer-root-url = https://issuer.example.invalid/ 121 | 122 | The value of a single pass in the system can be configured here as well:: 123 | 124 | [storageserver.plugins.privatestorageio-zkapauthz-v2] 125 | pass-value = 1048576 126 | 127 | If no ``pass-value`` is given then a default will be used. 128 | The value given here must agree with the value clients use in their configuration or the storage service will be unusable. 129 | 130 | The storage server must also be configured with the path to the Ristretto-flavored PrivacyPass signing key. 131 | To avoid placing secret material in tahoe.cfg, 132 | this configuration is done using a path:: 133 | 134 | [storageserver.plugins.privatestorageio-zkapauthz-v2] 135 | ristretto-signing-key-path = /path/to/signing.key 136 | 137 | The signing key is the keystone secret to the entire system and must be managed with extreme care to prevent unintended disclosure. 138 | If things go well a future version of ZKAPAuthorizer will remove the requirement that the signing key be distributed to storage servers. 
139 | -------------------------------------------------------------------------------- /docs/source/designs/costs.rst: -------------------------------------------------------------------------------- 1 | Costs 2 | ===== 3 | 4 | ZKAPAuthorizer defines costs for certain Tahoe-LAFS storage operations. 5 | It overlays its own protocol on the Tahoe-LAFS storage protocol which accepts ZKAPs as payments along with these operations. 6 | The underlying storage operations are only allowed when the supplied payment covers the cost. 7 | 8 | Storage-Time 9 | ```````````` 10 | 11 | Storage servers incur a storage cost over time to provide service to storage clients. 12 | A storage server must hold ciphertext from the time it is uploaded until the last time a client needs to download it. 13 | 14 | The unit of cost ZKAPAuthorizer imposes is storage × time. 15 | The currency used by ZKAPAuthorizer is a (Z)ero (K)nowledge (A)ccess (P)ass -- a ZKAP. 16 | If a ZKAP is worth 1 MB × 1 month (configurable per-grid) then a client must spend 1 ZKAP to store up to 1 MB for up to 1 month. 17 | To store up to 1 MB for up to 2 months a client spends 2 ZKAPs. 18 | To store up to 2 MB for up to 1 month a client spends 2 ZKAPs. 19 | 20 | A ZKAP is the smallest unit of the currency. 21 | When sizes or times do not fall on integer multiples of 1 MB or 1 month the cost is rounded up. 22 | 23 | Leases 24 | ------ 25 | 26 | The period of time a Tahoe-LAFS storage server promises to retain a share is controlled by "leases". 27 | A lease has an expiration time after which it is no longer effective. 28 | A lease is associated with a single share. 29 | As long as at least one lease has not expired a storage server will keep that share. 30 | Clients are required to periodically "renew" leases for shares they wish the server to keep. 31 | 32 | The length of a lease (1 month) provides the "time" component of storage-time. 
33 | 34 | Here are some examples: 35 | 36 | * renewing the lease on a 100 KB share costs 1 ZKAP 37 | * renewing the lease on a 1 MB share costs 1 ZKAP 38 | * renewing the lease on a 1.5 MB share costs 2 ZKAPs 39 | * renewing the lease on a 10 MB share costs 10 ZKAPs 40 | 41 | Renewing a lease sets the expiration time to be 1 month after the time of the operation. 42 | 43 | Shares 44 | ------ 45 | 46 | Tahoe-LAFS storage servers accept "shares" for storage. 47 | Immutable data is represented as shares in "buckets". 48 | Mutable data is represented as shares in "slots". 49 | All shares in the same bucket (or slot) relate to the same "file". 50 | 51 | The size of a share provides the "storage" component of storage-time. 52 | 53 | Immutable Data 54 | ~~~~~~~~~~~~~~ 55 | 56 | The original Tahoe-LAFS storage protocol automatically adds a lease to all immutable shares it receives at the time the upload completes. 57 | It also automatically renews leases on all shares in the same bucket as the newly uploaded share. 58 | 59 | When ZKAPAuthorizer is used newly uploaded immutable shares still have a lease added to them. 60 | The behavior of renewing leases on all other shares in the same bucket is disabled. 61 | 62 | The cost of uploading an immutable share is the size of the share times the duration of a lease. 63 | Here are some examples: 64 | 65 | * a 100 KB share costs 1 ZKAP to upload 66 | * a 1 MB share costs 1 ZKAP to upload 67 | * a 1.5 MB share costs 2 ZKAPs to upload 68 | * a 10 MB share costs 10 ZKAPs to upload 69 | 70 | Mutable Data 71 | ~~~~~~~~~~~~ 72 | 73 | The original Tahoe-LAFS storage protocol automatically renews leases on mutable shares when they are first created and whenever they are changed. 74 | 75 | When ZKAPAuthorizer is used newly uploaded mutable shares still have a lease added to them. 76 | The behavior of renewing leases on all changed shares is disabled. 
77 | 78 | The cost of creating a mutable share is the size of the share times the duration of a lease. 79 | This is exactly the same method as is used to compute the cost of uploading an immutable share. 80 | 81 | The cost of modifying a mutable share is based on the change in size that results: 82 | the cost of the share before the change is subtracted from the cost of the share after the change. 83 | If the cost is negative it is considered to be zero. 84 | 85 | Here are some examples: 86 | 87 | * creating a 100 KB share costs 1 ZKAP 88 | * extending a 100 KB share to 200 KB is free 89 | * extending a 1 MB share to 1.5 MB costs 1 ZKAP 90 | * extending a 1.5 MB share to 2 MB is free 91 | * extending a 2 MB share to 10 MB costs 8 ZKAPs 92 | * truncating a 10 MB share to 2 MB is free 93 | * rewriting the contents of a 5 MB share without changing its length is free 94 | 95 | Note that leases are *not* renewed when a mutable share is modified. 96 | When the modification has a positive cost this results in the client being overcharged. 97 | The amount of the overcharge is a function of three variables: 98 | 99 | * The **lease period** currently fixed at 31 days. 100 | * The **remaining lease time** which is the difference between the time when the current lease expires and the time of the operation. 101 | * The **price increase** which is the number of ZKAPs the modification costs. 102 | 103 | The amount of the overcharge is **remaining lease time** / **lease period** × **price increase**. 104 | See the ZKAPAuthorizer issue tracker for efforts to remedy this. 105 | -------------------------------------------------------------------------------- /docs/source/index.rst: -------------------------------------------------------------------------------- 1 | .. ZKAP Authorizer documentation master file, created by 2 | sphinx-quickstart on Fri Jun 7 13:47:30 2019. 3 | You can adapt this file completely to your liking, but it should at least 4 | contain the root `toctree` directive. 
5 | 6 | Welcome to ZKAP Authorizer's documentation! 7 | =========================================== 8 | 9 | .. toctree:: 10 | :maxdepth: 1 11 | :caption: Contents: 12 | 13 | code_of_conduct 14 | CONTRIBUTING 15 | interface 16 | configuration 17 | leases 18 | 19 | .. toctree:: 20 | :maxdepth: 1 21 | :caption: Designs: 22 | 23 | designs/costs 24 | designs/backup-recovery 25 | 26 | Indices and tables 27 | ================== 28 | 29 | * :ref:`genindex` 30 | * :ref:`modindex` 31 | * :ref:`search` 32 | -------------------------------------------------------------------------------- /docs/source/interface.rst: -------------------------------------------------------------------------------- 1 | Interface 2 | ========= 3 | 4 | Client 5 | ------ 6 | 7 | When enabled in a Tahoe-LAFS client node, 8 | ZKAPAuthorizer publishes an HTTP-based interface inside the main Tahoe-LAFS web interface. 9 | 10 | All endpoints in the interface require an authorization token. 11 | Without the token, 12 | requests will receive an HTTP UNAUTHORIZED (401) response. 13 | 14 | To be authorized to access the resources at the endpoints, 15 | requests must include the correct secret token in the value for **Authorization** in the request header. 16 | For example, if the secret token is ``ABCDEF``:: 17 | 18 | Authorization: tahoe-lafs ABCDEF 19 | 20 | The correct value for the token can be read from the Tahoe-LAFS node's ``private/api_auth_token`` file. 21 | 22 | ``GET /storage-plugins/privatestorageio-zkapauthz-v2/version`` 23 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 24 | 25 | This endpoint returns the version of the ZKAPAuthorizer Python package in use by the Tahoe-LAFS client node. 
26 | 27 | The response is **OK** with an ``application/json`` **Content-Type**:: 28 | 29 | { "version": 30 | } 31 | 32 | ``PUT /storage-plugins/privatestorageio-zkapauthz-v2/voucher`` 33 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 34 | 35 | This endpoint allows an external agent which has submitted a payment to cause the plugin to redeem the voucher for tokens. 36 | The request body for this endpoint must have the ``application/json`` content-type. 37 | The request body contains a simple json object containing the voucher:: 38 | 39 | {"voucher": ""} 40 | 41 | The endpoint responds to such a request with an **OK** HTTP response code if the voucher is accepted for processing. 42 | If the voucher cannot be accepted at the time of the request then the response code will be anything other than **OK**. 43 | 44 | If the response is **OK** then a repeated request with the same body will have no effect. 45 | If the response is not **OK** then a repeated request with the same body will try to accept the voucher again. 46 | 47 | ``GET /storage-plugins/privatestorageio-zkapauthz-v2/voucher/`` 48 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 49 | 50 | This endpoint allows an external agent to monitor the status of the redemption of a voucher. 51 | This endpoint accepts no request body. 52 | 53 | If the voucher is not known then the response is **NOT FOUND**. 54 | For any voucher which has previously been submitted, 55 | the response is **OK** with an ``application/json`` content-type response body like:: 56 | 57 | { "version": 1 58 | , "number": 59 | , "expected-tokens": 60 | , "created": 61 | , "state": 62 | } 63 | 64 | The ``version`` property indicates the semantic version of the data being returned. 65 | When properties are removed or the meaning of a property is changed, 66 | the value of the ``version`` property will be incremented. 67 | The addition of new properties is **not** accompanied by a bumped version number. 
68 | 69 | The ``number`` property merely indicates the voucher which was requested. 70 | The ``expected-tokens`` property indicates the total number of ZKAPs for which the client intends to redeem the voucher. 71 | Vouchers created using old versions of ZKAPAuthorizer will have a best-guess value here because the real value was not recorded. 72 | The ``created`` property indicates when the voucher was first added to the node. 73 | The ``state`` property is an object that gives more details about the current state of the voucher. 74 | The following values are possible:: 75 | 76 | { "name": "pending" 77 | , "counter": 78 | } 79 | 80 | The integer *counter* value indicates how many successful sub-redemptions have completed for this voucher. 81 | 82 | :: 83 | 84 | { "name": "redeeming" 85 | , "started": 86 | , "counter": 87 | } 88 | 89 | The *started* timestamp gives the time when the most recent redemption attempt began. 90 | The integer *counter* value has the same meaning as it does for the *pending* state. 91 | 92 | :: 93 | 94 | { "name": "redeemed" 95 | , "finished": 96 | , "token-count": 97 | } 98 | 99 | The *finished* timestamp gives the time when redemption completed successfully. 100 | The integer *token-count* gives the number of tokens for which the voucher was redeemed. 101 | 102 | :: 103 | 104 | { "name": "double-spend" 105 | , "finished": 106 | } 107 | 108 | The *finished* timestamp gives the time when the double-spend error was encountered. 109 | 110 | :: 111 | 112 | { "name": "unpaid" 113 | , "finished": 114 | } 115 | 116 | The *finished* timestamp gives the time when the unpaid error was encountered. 117 | 118 | :: 119 | 120 | { "name": "error" 121 | , "finished": 122 | , "details": 123 | } 124 | 125 | The *finished* timestamp gives the time when this other error condition was encountered. 126 | The *details* string may give additional details about what the error was. 
127 | 128 | ``GET /storage-plugins/privatestorageio-zkapauthz-v2/voucher`` 129 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 130 | 131 | This endpoint allows an external agent to retrieve the status of all vouchers. 132 | This endpoint accepts no request body. 133 | 134 | The response is **OK** with ``application/json`` content-type response body like:: 135 | 136 | {"vouchers": [, ...]} 137 | 138 | The elements of the list are objects like the one returned by issuing a **GET** to a child of this collection resource. 139 | 140 | ``GET /storage-plugins/privatestorageio-zkapauthz-v2/lease-maintenance`` 141 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 142 | 143 | This endpoint allows an external agent to retrieve information about automatic spending for lease maintenance. 144 | 145 | This endpoint accepts no request body. 146 | 147 | The response is **OK** with ``application/json`` content-type response body like:: 148 | 149 | { "spendable": 150 | , "lease-maintenance-spending": 151 | } 152 | 153 | The value associated with ``spendable`` gives the number of unblinded tokens in the node's database which can actually be spent. 154 | 155 | The ```` may be ``null`` if the lease maintenance process has never run. 156 | If it has run, 157 | ```` has two properties: 158 | 159 | * ``when``: associated with an ISO8601 datetime string giving the approximate time the process ran 160 | * ``count``: associated with a number giving the number of passes which would need to be spent to renew leases on all stored objects seen during the lease maintenance activity 161 | 162 | ``POST /storage-plugins/privatestorageio-zkapauthz-v2/calculate-price`` 163 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 164 | 165 | This endpoint allows an agent to calculate the number of ZKAPs it will cost to store a collection of files of specified sizes. 
166 | This is intended as the basis for tools which aid in user understanding of the cost of their actions. 167 | 168 | The request body must be ``application/json`` encoded and contain an object like:: 169 | 170 | { "version": 1 171 | , "sizes": [ , ... ] 172 | } 173 | 174 | The ``version`` property must currently be **1**. 175 | The ``sizes`` property is a list of integers giving file sizes in bytes. 176 | 177 | The response is **OK** with ``application/json`` content-type response body like:: 178 | 179 | { "price": , "period": } 180 | 181 | The ``price`` property gives the number of ZKAPs which would have to be spent to store files of the given sizes. 182 | The ``period`` property gives the number of seconds those files would be stored by spending that number of ZKAPs. 183 | 184 | The price obtained this way is valid in two scenarios. 185 | First, 186 | the case where none of the files have been uploaded yet. 187 | In this case uploading the files and storing them for **period** seconds will cost **price** ZKAPs. 188 | Second, 189 | the case where the files have already been uploaded but their leases need to be renewed. 190 | In this case, renewing the leases so they last until **period** seconds after the current time will cost **price** ZKAPs. 191 | Note that in this case any lease time currently remaining on any files has no bearing on the calculated price. 192 | -------------------------------------------------------------------------------- /docs/source/leases.rst: -------------------------------------------------------------------------------- 1 | Leases 2 | ====== 3 | 4 | Leases held on shares are treated as a guarantee that a storage server will hold those shares for the duration of the lease. 5 | Leases have an expiration date which can be changed with a renewal operation to a date at a fixed distance in the future of the renewal. 
6 | Lease renewal requires the expenditure of ZKAPs in proportion to the size of the shares and the distance to the new expiration date. 7 | Because the lease expiration date is advanced from the time of the renewal and not the time of the original expiration, 8 | care is taken to only renew leases for which the expiration time will soon arrive. 9 | 10 | Design 11 | ------ 12 | 13 | The process of checking lease age and renewing them is automated in the client storage plugin. 14 | The storage plugin interface is not ideally shaped to support this functionality. 15 | The following designs have been considered. 16 | 17 | Option A 18 | ~~~~~~~~ 19 | 20 | Each ZKAPAuthorizerStorageClient is a service which is a child of the client node. 21 | Each creates its own service child using lease_maintenance_service(). 22 | This results in linear factor of redundant lease maintenance work (equal to number of storage servers). 23 | Requires change to Tahoe-LAFS to add clients as service children. 24 | 25 | Option B 26 | ~~~~~~~~ 27 | 28 | Each ZKAPAuthorizerStorageClient is a service which is a child of the client node. 29 | Each creates its own service child using lease_maintenance_service(). 30 | Lease maintenance function is augmented with a check against all other lease maintenance services. 31 | Only the arbitrary-sort-key-smallest service ever actually runs. 32 | This results in small-k linear factor overhead (on number of storage servers) to choose a winner but no lease maintenance overhead. 33 | Requires change to Tahoe-LAFS to add clients as service children. 34 | 35 | Option C 36 | ~~~~~~~~ 37 | 38 | The plugin interface has a method to create a service which is a child of the client node. 39 | The service is the lease maintenance service as created by lease_maintenance_service(). 40 | There is only one so there is no winner-selection overhead or redundant lease maintenance work. 
41 | Requires change to Tahoe-LAFS to call new method to get service and add result as service child. 42 | 43 | Option D 44 | ~~~~~~~~ 45 | 46 | The plugin creates and starts a single lease maintenance service itself. 47 | The plugin reaches deep into the guts of something to find a client node so it can initialize the lease maintenance service 48 | (an expression liked ``get_rref.im_self._on_status_changed.watchers[0].__closure__[0].cell_contents`` was considered to reach the ``StorageFarmBroker`` which is a child of ``_Client``). 49 | The plugin glues it into the reactor itself for shutdown notification. 50 | There is only one service so no winner-selection or redundant lease maintenance work is required. 51 | This can be improved to Option C at some point. 52 | 53 | On closer inspection, even the complex expression above is not sufficient to reach the correct object. 54 | Even if a similar expression is found which works, 55 | this option is likely more complex and fragile than *Option E*. 56 | 57 | Option E 58 | ~~~~~~~~ 59 | The plugin creates and starts a single lease maintenance service itself. 60 | The plugin monkey-patches ``allmydata.client._Client`` to perform initialization of the service at an appropriate time. 61 | There is only one service so no winner-selection or redundant lease maintenance work is required. 62 | This can be improved to Option C at some point. 63 | 64 | Implementation 65 | -------------- 66 | 67 | *Option E* is currently implemented. 68 | Monkey-patching is performed at import time by ``_zkapauthorizer._plugin``. 
69 | -------------------------------------------------------------------------------- /flake.lock: -------------------------------------------------------------------------------- 1 | { 2 | "nodes": { 3 | "challenge-bypass-ristretto": { 4 | "inputs": { 5 | "fenix": "fenix", 6 | "libchallenge_bypass_ristretto_ffi-src": "libchallenge_bypass_ristretto_ffi-src", 7 | "naersk": "naersk", 8 | "nixpkgs": [ 9 | "nixpkgs" 10 | ] 11 | }, 12 | "locked": { 13 | "lastModified": 1730816636, 14 | "narHash": "sha256-N9wCJx3mjJJdejdJrB/KJcglQB+IgCUlORsGCuxe5H4=", 15 | "owner": "LeastAuthority", 16 | "repo": "python-challenge-bypass-ristretto", 17 | "rev": "78d0652021c4e10f77d9e85287f4a914a9461ccf", 18 | "type": "github" 19 | }, 20 | "original": { 21 | "owner": "LeastAuthority", 22 | "repo": "python-challenge-bypass-ristretto", 23 | "type": "github" 24 | } 25 | }, 26 | "fenix": { 27 | "inputs": { 28 | "nixpkgs": [ 29 | "challenge-bypass-ristretto", 30 | "nixpkgs" 31 | ], 32 | "rust-analyzer-src": "rust-analyzer-src" 33 | }, 34 | "locked": { 35 | "lastModified": 1730788380, 36 | "narHash": "sha256-jzniRMqEjqYC7yyx0nIiiEnQ4xlZQMnA7R6mvyYZER8=", 37 | "owner": "nix-community", 38 | "repo": "fenix", 39 | "rev": "280efe0e9b7b824518091a5aff76065785f81649", 40 | "type": "github" 41 | }, 42 | "original": { 43 | "owner": "nix-community", 44 | "repo": "fenix", 45 | "type": "github" 46 | } 47 | }, 48 | "flake-compat": { 49 | "flake": false, 50 | "locked": { 51 | "lastModified": 1673956053, 52 | "narHash": "sha256-4gtG9iQuiKITOjNQQeQIpoIB6b16fm+504Ch3sNKLd8=", 53 | "owner": "edolstra", 54 | "repo": "flake-compat", 55 | "rev": "35bb57c0c8d8b62bbfd284272c928ceb64ddbde9", 56 | "type": "github" 57 | }, 58 | "original": { 59 | "owner": "edolstra", 60 | "repo": "flake-compat", 61 | "type": "github" 62 | } 63 | }, 64 | "flake-utils": { 65 | "locked": { 66 | "lastModified": 1659877975, 67 | "narHash": "sha256-zllb8aq3YO3h8B/U0/J1WBgAL8EX5yWf5pMj3G0NAmc=", 68 | "owner": "numtide", 69 | "repo": 
"flake-utils", 70 | "rev": "c0e246b9b83f637f4681389ecabcb2681b4f3af0", 71 | "type": "github" 72 | }, 73 | "original": { 74 | "owner": "numtide", 75 | "repo": "flake-utils", 76 | "type": "github" 77 | } 78 | }, 79 | "libchallenge_bypass_ristretto_ffi-src": { 80 | "flake": false, 81 | "locked": { 82 | "lastModified": 1709936990, 83 | "narHash": "sha256-69xR3hsM3o7KDQ8scyMg/pgGlKgTYa5WnDV8wvBNsTE=", 84 | "owner": "brave-intl", 85 | "repo": "challenge-bypass-ristretto-ffi", 86 | "rev": "3e22c067e9e4135e5b8bbec7a611ae0e2545648b", 87 | "type": "github" 88 | }, 89 | "original": { 90 | "owner": "brave-intl", 91 | "repo": "challenge-bypass-ristretto-ffi", 92 | "type": "github" 93 | } 94 | }, 95 | "naersk": { 96 | "inputs": { 97 | "nixpkgs": [ 98 | "challenge-bypass-ristretto", 99 | "nixpkgs" 100 | ] 101 | }, 102 | "locked": { 103 | "lastModified": 1721727458, 104 | "narHash": "sha256-r/xppY958gmZ4oTfLiHN0ZGuQ+RSTijDblVgVLFi1mw=", 105 | "owner": "nix-community", 106 | "repo": "naersk", 107 | "rev": "3fb418eaf352498f6b6c30592e3beb63df42ef11", 108 | "type": "github" 109 | }, 110 | "original": { 111 | "owner": "nix-community", 112 | "repo": "naersk", 113 | "type": "github" 114 | } 115 | }, 116 | "nixpkgs": { 117 | "locked": { 118 | "lastModified": 1730327045, 119 | "narHash": "sha256-xKel5kd1AbExymxoIfQ7pgcX6hjw9jCgbiBjiUfSVJ8=", 120 | "owner": "NixOS", 121 | "repo": "nixpkgs", 122 | "rev": "080166c15633801df010977d9d7474b4a6c549d7", 123 | "type": "github" 124 | }, 125 | "original": { 126 | "owner": "NixOS", 127 | "ref": "nixos-24.05", 128 | "repo": "nixpkgs", 129 | "type": "github" 130 | } 131 | }, 132 | "root": { 133 | "inputs": { 134 | "challenge-bypass-ristretto": "challenge-bypass-ristretto", 135 | "flake-compat": "flake-compat", 136 | "flake-utils": "flake-utils", 137 | "nixpkgs": "nixpkgs", 138 | "tahoe-lafs-dev": "tahoe-lafs-dev" 139 | } 140 | }, 141 | "rust-analyzer-src": { 142 | "flake": false, 143 | "locked": { 144 | "lastModified": 1730749868, 145 | "narHash": 
"sha256-b8UMpLh61z6T4J1NObustAuFGoJiAKBTaPTrEf3Amts=", 146 | "owner": "rust-lang", 147 | "repo": "rust-analyzer", 148 | "rev": "b51f9bc736dc0472481a47d7c05de2901323e543", 149 | "type": "github" 150 | }, 151 | "original": { 152 | "owner": "rust-lang", 153 | "ref": "nightly", 154 | "repo": "rust-analyzer", 155 | "type": "github" 156 | } 157 | }, 158 | "tahoe-lafs-dev": { 159 | "flake": false, 160 | "locked": { 161 | "lastModified": 1729722347, 162 | "narHash": "sha256-ZEuRf9Its7mphz3cfFU4VsUKTjvDEEkQImS8I6WkSJ8=", 163 | "owner": "tahoe-lafs", 164 | "repo": "tahoe-lafs", 165 | "rev": "17b22368573a4d84f0df9d05205b24aa7119bc4f", 166 | "type": "github" 167 | }, 168 | "original": { 169 | "owner": "tahoe-lafs", 170 | "repo": "tahoe-lafs", 171 | "type": "github" 172 | } 173 | } 174 | }, 175 | "root": "root", 176 | "version": 7 177 | } 178 | -------------------------------------------------------------------------------- /flake.nix: -------------------------------------------------------------------------------- 1 | { 2 | description = "A Tahoe-LAFS storage-system plugin which authorizes storage operations based on privacy-respecting tokens."; 3 | inputs = { 4 | nixpkgs.url = "github:NixOS/nixpkgs?ref=nixos-24.05"; 5 | flake-utils.url = "github:numtide/flake-utils"; 6 | challenge-bypass-ristretto.url = github:LeastAuthority/python-challenge-bypass-ristretto; 7 | challenge-bypass-ristretto.inputs.nixpkgs.follows = "nixpkgs"; 8 | flake-compat = { 9 | url = "github:edolstra/flake-compat"; 10 | flake = false; 11 | }; 12 | 13 | # Sometimes it is nice to be able to test against weird versions of some 14 | # of our dependencies, like arbitrary git revisions or source in local 15 | # paths. If we make those dependencies inputs we can override them easily 16 | # from the command line. 17 | tahoe-lafs-dev = { 18 | # More recent versions of Tahoe-LAFS probably provide a flake but we 19 | # also want to consume older versions which don't, so just treat them 20 | # all as non-flakes. 
21 | flake = false; 22 | url = "github:tahoe-lafs/tahoe-lafs"; 23 | }; 24 | }; 25 | 26 | outputs = { self, nixpkgs, flake-utils, tahoe-lafs-dev, challenge-bypass-ristretto, ... }: 27 | flake-utils.lib.eachSystem [ "x86_64-linux" ] (system: let 28 | 29 | pkgs = nixpkgs.legacyPackages.${system}; 30 | lib = pkgs.lib; 31 | 32 | # The names of the nixpkgs Python derivations for which we will expose 33 | # packages. 34 | pyVersions = [ "python310" "python39" ]; 35 | 36 | # All of the versions our Tahoe-LAFS dependency for which we will expose 37 | # packages. 38 | tahoeVersions = pkgs.python3Packages.callPackage ./nix/tahoe-versions.nix { 39 | inherit tahoe-lafs-dev; 40 | }; 41 | 42 | # The matrix of package configurations. 43 | packageCoordinates = lib.attrsets.cartesianProductOfSets { 44 | pyVersion = pyVersions; 45 | tahoe-lafs = tahoeVersions; 46 | challenge-bypass-ristretto = [ (pyVersion: challenge-bypass-ristretto.packages.${system}."${pyVersion}-challenge-bypass-ristretto") ]; 47 | }; 48 | 49 | # To avoid being completely overwhelming, for some inputs we only 50 | # support a single configuration. Pick that configuration here. 51 | defaultConfig = builtins.head packageCoordinates; 52 | 53 | # A formatter to construct the appropriate package name for a certain 54 | # configuration. 55 | packageName = { pyVersion, tahoe-lafs, challenge-bypass-ristretto }: 56 | # We only support one version of challenge so we don't bother burning 57 | # its version into the name. 58 | "zkapauthorizer-${pyVersion}-tahoe_${tahoe-lafs.version}"; 59 | 60 | # Construct a matrix of package-building derivations. 61 | # 62 | # data Version = Version { version :: string, buildArgs :: attrset } 63 | # data Coordinate = Coordinate { pyVersion :: string, tahoe-lafs :: Version } 64 | # 65 | # [ Coordinate ] -> { name = derivation; } 66 | packageMatrix = derivationMatrix packageName packageForVersion; 67 | 68 | # The Hypothesis profiles of the test packages which we will expose. 
69 | hypothesisProfiles = [ "fast" "ci" "big" "default" ]; 70 | 71 | # The coverage collection options for the test packages which we will expose. 72 | coverageOptions = [ false true ]; 73 | 74 | # The matrix of test configurations. 75 | testCoordinates = lib.attrsets.cartesianProductOfSets { 76 | pyVersion = pyVersions; 77 | tahoe-lafs = tahoeVersions; 78 | hypothesisProfile = hypothesisProfiles; 79 | collectCoverage = coverageOptions; 80 | challenge-bypass-ristretto = [ (pyVersion: challenge-bypass-ristretto.packages.${system}."${pyVersion}-challenge-bypass-ristretto") ]; 81 | }; 82 | 83 | # A formatter to construct the appropriate derivation name for a test 84 | # configuration. 85 | testName = { pyVersion, tahoe-lafs, hypothesisProfile, collectCoverage, challenge-bypass-ristretto }: 86 | builtins.concatStringsSep "-" [ 87 | "tests" 88 | "${pyVersion}" 89 | "tahoe_${tahoe-lafs.version}" 90 | (if hypothesisProfile == null then "default" else hypothesisProfile) 91 | (if collectCoverage then "cov" else "nocov") 92 | ]; 93 | 94 | # Construct a matrix of test-running derivations. 95 | # 96 | # data Coordinate = Coordinate 97 | # { pyVersion :: string 98 | # , tahoe-lafs :: Version 99 | # , hypothesisProfile :: string 100 | # , collectCoverage :: bool 101 | # } 102 | # 103 | # [ Coordinate ] -> { name = derivation; } 104 | testMatrix = derivationMatrix testName testsForVersion; 105 | 106 | defaultPackageName = packageName defaultConfig; 107 | 108 | inherit (import ./nix/lib.nix { 109 | inherit pkgs lib; 110 | src = ./.; 111 | }) packageForVersion testsForVersion derivationMatrix toWheel; 112 | 113 | in rec { 114 | devShells = { 115 | default = pkgs.mkShell { 116 | # Avoid leaving .pyc all over the source tree when manually 117 | # triggering tests runs. 118 | PYTHONDONTWRITEBYTECODE = "1"; 119 | 120 | # Make the source for two significant C-language dependencies easily 121 | # available. Unfortunately, these are the source archives. 
Unpack 122 | # them and use `directory ...` in gdb to help it find them. 123 | # 124 | # TODO: Automatically unpack them and provide them as source 125 | # directories instead. 126 | SQLITE_SRC = "${pkgs.sqlite.src}"; 127 | PYTHON_SRC = "${pkgs.${defaultConfig.pyVersion}.src}"; 128 | 129 | # Make pudb the default. We make sure it is installed below. 130 | PYTHONBREAKPOINT = "pudb.set_trace"; 131 | 132 | buildInputs = [ 133 | # Put a Python environment that has all of the development, test, 134 | # and runtime dependencies in it - but not the package itself. 135 | (pkgs.${defaultConfig.pyVersion}.withPackages ( 136 | ps: with ps; 137 | [ pudb ] 138 | ++ self.packages.${system}.default.passthru.lintInputs 139 | ++ self.packages.${system}.default.passthru.checkInputs 140 | ++ self.packages.${system}.default.propagatedBuildInputs 141 | )) 142 | 143 | # Give us gdb in case we need to debug CPython or an extension. 144 | pkgs.gdb 145 | 146 | # Since we use CircleCI it is handy to have the CircleCI CLI tool 147 | # available - for example, for validating config changes. 148 | pkgs.circleci-cli 149 | ]; 150 | 151 | # Add the working copy's package source to the Python environment so 152 | # we get a convenient way to test against local changes. Observe 153 | # that the use of $PWD means this only works if you run `nix 154 | # develop` from the top of a source checkout. 
155 | shellHook = 156 | '' 157 | export PYTHONPATH=$PWD/src 158 | ''; 159 | }; 160 | }; 161 | 162 | packages = 163 | testMatrix testCoordinates // 164 | packageMatrix packageCoordinates // 165 | { default = self.packages.${system}.${defaultPackageName}; 166 | wheel = toWheel self.packages.${system}.default; 167 | }; 168 | 169 | apps = let 170 | tahoe-env = 171 | let pkg = self.packages.${system}.default; 172 | in pkg.passthru.python.withPackages (ps: [ pkg ]); 173 | 174 | checks-env = 175 | let pkg = self.packages.${system}.default; 176 | in pkg.passthru.python.withPackages (ps: 177 | # Put some dependencies useful for different kinds of static 178 | # checks into the environment. We ignore `ps` here and take 179 | # packages from `pkg` instead. We got `python` from `pkg` too so 180 | # we know these packages are compatible with the package set we're 181 | # constructing. 182 | 183 | # Start with the various linting tools, including mypy. 184 | pkg.passthru.lintInputs 185 | 186 | # mypy requires all of the runtime dependencies in the environment 187 | # as well 188 | ++ pkg.propagatedBuildInputs 189 | 190 | # and the test-time dependencies if you want the test suite to 191 | # type check, too. 
192 | ++ pkg.passthru.checkInputs 193 | ); 194 | twine-env = pkgs.python3.withPackages (ps: [ ps.twine ]); 195 | in { 196 | default = { type = "app"; program = "${tahoe-env}/bin/tahoe"; }; 197 | twine = { type = "app"; program = "${twine-env}/bin/twine"; }; 198 | black = { type = "app"; program = "${checks-env}/bin/black"; }; 199 | isort = { type = "app"; program = "${checks-env}/bin/isort"; }; 200 | flake8 = { type = "app"; program = "${checks-env}/bin/flake8"; }; 201 | mypy = { type = "app"; program = "${checks-env}/bin/mypy"; }; 202 | }; 203 | }); 204 | } 205 | -------------------------------------------------------------------------------- /nix/compose.nix: -------------------------------------------------------------------------------- 1 | { lib, buildPythonPackage, fetchPypi }: 2 | 3 | buildPythonPackage rec { 4 | pname = "compose"; 5 | version = "1.4.8"; 6 | 7 | src = fetchPypi { 8 | inherit pname version; 9 | sha256 = "sha256-mpRabfC4LE6xYlHmQbHb1yXxLDtH5idwN4GbUnCPGTo="; 10 | }; 11 | 12 | meta = with lib; { 13 | homepage = "https://github.com/mentalisttraceur/python-compose"; 14 | description = "The classic compose, with all the Pythonic features."; 15 | license = licenses.bsd0; 16 | }; 17 | } 18 | -------------------------------------------------------------------------------- /nix/flake8-black.nix: -------------------------------------------------------------------------------- 1 | { fetchPypi 2 | , buildPythonPackage 3 | , flake8 4 | , black 5 | , tomli 6 | , setuptools 7 | }: 8 | buildPythonPackage rec { 9 | pname = "flake8-black"; 10 | version = "0.3.6"; 11 | 12 | src = fetchPypi { 13 | inherit pname version; 14 | hash = "sha256-DfvKMnR3d5KlvLKviHpMrXLHLQ6GyU4I46PeFRu0HDQ="; 15 | }; 16 | 17 | format = "pyproject"; 18 | # doCheck = false; 19 | buildInputs = [ setuptools ]; 20 | propagatedBuildInputs = [ flake8 black tomli ]; 21 | pythonImportsCheck = [ "flake8_black" ]; 22 | } 23 | 
-------------------------------------------------------------------------------- /nix/flake8-isort.nix: -------------------------------------------------------------------------------- 1 | { fetchPypi 2 | , buildPythonPackage 3 | , flake8 4 | , isort 5 | }: 6 | buildPythonPackage rec { 7 | pname = "flake8-isort"; 8 | version = "6.0.0"; 9 | 10 | src = fetchPypi { 11 | inherit pname version; 12 | hash = "sha256-U39FOmYNfpA/YC7Po2E2sUDeJ531jQLrG2oMhOg8Uow="; 13 | }; 14 | 15 | doCheck = false; 16 | propagatedBuildInputs = [ flake8 isort ]; 17 | pythonImportsCheck = [ "flake8_isort" ]; 18 | } 19 | -------------------------------------------------------------------------------- /nix/hypothesis.nix: -------------------------------------------------------------------------------- 1 | { hypothesis, fetchFromGitHub }: 2 | hypothesis.overrideAttrs (old: rec { 3 | version = "6.74.1"; 4 | name = "hypothesis-${version}"; 5 | src = fetchFromGitHub { 6 | owner = "HypothesisWorks"; 7 | repo = "hypothesis"; 8 | rev = "hypothesis-python-${version}"; 9 | hash = "sha256-bzbC9TmqqvrgTkJ3aZjp3Dd9MgeGxOkj1bz03Ng2sCo="; 10 | }; 11 | }) 12 | -------------------------------------------------------------------------------- /nix/klein.nix: -------------------------------------------------------------------------------- 1 | { klein, fetchPypi }: 2 | klein.overrideAttrs (old: rec { 3 | pname = "klein"; 4 | version = "23.5.0"; 5 | src = fetchPypi { 6 | inherit pname version; 7 | sha256 = "sha256-kGkSt6tBDZp/NRICg5w81zoqwHe9AHHIYcMfDu92Aoc="; 8 | }; 9 | }) 10 | -------------------------------------------------------------------------------- /nix/mypy-zope.nix: -------------------------------------------------------------------------------- 1 | { fetchPypi 2 | , buildPythonPackage 3 | , pythonPackages 4 | , zope_interface 5 | , zope_schema 6 | }: 7 | buildPythonPackage rec { 8 | pname = "mypy-zope"; 9 | version = "0.3.11"; 10 | 11 | src = fetchPypi { 12 | inherit pname version; 13 | hash = 
"sha256-1CVfnwTUjHkIO71OL+oGUTpqx7jeBvjEzlY/2FFCygU="; 14 | }; 15 | 16 | # doCheck = false; 17 | propagatedBuildInputs = [ pythonPackages.mypy zope_interface zope_schema ]; 18 | pythonImportsCheck = [ "mypy_zope" ]; 19 | } 20 | -------------------------------------------------------------------------------- /nix/pycddl.nix: -------------------------------------------------------------------------------- 1 | { lib, fetchPypi, buildPythonPackage, rustPlatform }: 2 | buildPythonPackage rec { 3 | pname = "pycddl"; 4 | version = "0.4.0"; 5 | format = "pyproject"; 6 | 7 | src = fetchPypi { 8 | inherit pname version; 9 | sha256 = "sha256-w0CGbPeiXyS74HqZXyiXhvaAMUaIj5onwjl9gWKAjqY="; 10 | }; 11 | 12 | nativeBuildInputs = with rustPlatform; [ 13 | maturinBuildHook 14 | cargoSetupHook 15 | ]; 16 | 17 | cargoDeps = rustPlatform.fetchCargoTarball { 18 | inherit src; 19 | name = "${pname}-${version}"; 20 | hash = "sha256-g96eeaqN9taPED4u+UKUcoitf5aTGFrW2/TOHoHEVHs="; 21 | }; 22 | } 23 | -------------------------------------------------------------------------------- /nix/pyopenssl.nix: -------------------------------------------------------------------------------- 1 | { pyopenssl, fetchPypi, isPyPy }: 2 | pyopenssl.overrideAttrs (old: rec { 3 | pname = "pyOpenSSL"; 4 | version = "23.2.0"; 5 | name = "${pname}-${version}"; 6 | src = fetchPypi { 7 | inherit pname version; 8 | sha256 = "J2+TH1WkUufeppxxc+mE6ypEB85BPJGKo0tV+C+bi6w="; 9 | }; 10 | }) 11 | -------------------------------------------------------------------------------- /nix/sh.nix: -------------------------------------------------------------------------------- 1 | { lib }: 2 | rec { 3 | # render a list of environment variable values as a single string in shell 4 | # syntax for setting environment variables to values. 
5 | # 6 | # [{string, string}] -> string 7 | envToShell = env: builtins.concatStringsSep " " (lib.attrsets.mapAttrsToList (k: v: "${k}=${v}") env); 8 | 9 | # render a list of argument strings to a single string in shell syntax for 10 | # passing the strings as arguments to a program. 11 | argvToShell = builtins.concatStringsSep " "; 12 | 13 | # render a Python argument list, a trial environment, and a trial argument 14 | # list as a single string in shell-syntax for running trial with that 15 | # environment and those arguments. 16 | # 17 | # [{string: string}] -> [string] -> [string] -> string 18 | trial = pythonEnv: envVars: pythonArgs: trialArgs: '' 19 | ${envToShell envVars} ${pythonEnv}/bin/python -m ${argvToShell pythonArgs} twisted.trial ${argvToShell trialArgs} 20 | ''; 21 | } 22 | -------------------------------------------------------------------------------- /nix/tahoe-capabilities.nix: -------------------------------------------------------------------------------- 1 | { lib, buildPythonPackage, fetchPypi, attrs }: 2 | 3 | buildPythonPackage rec { 4 | pname = "tahoe-capabilities"; 5 | version = "2023.1.5"; 6 | 7 | src = fetchPypi { 8 | inherit pname version; 9 | sha256 = "sha256-PdHCrznvsiOmdySrJOXB9GcDXfxqJPOUG0rL/8S/3D8="; 10 | }; 11 | 12 | propagatedBuildInputs = [ attrs ]; 13 | 14 | meta = with lib; { 15 | homepage = "https://github.com/tahoe-lafs/tahoe-capabilities"; 16 | description = "Simple, re-usable types for interacting with Tahoe-LAFS capabilities"; 17 | license = licenses.gpl2; 18 | }; 19 | } 20 | -------------------------------------------------------------------------------- /nix/tahoe-lafs.nix: -------------------------------------------------------------------------------- 1 | { lib, pythonPackages, buildPythonPackage, tahoe-lafs-version, tahoe-lafs-src, postPatch }: 2 | buildPythonPackage { 3 | pname = "tahoe-lafs"; 4 | version = tahoe-lafs-version; 5 | src = tahoe-lafs-src; 6 | 7 | postPatch = 8 | (if postPatch == null then "" else 
postPatch) + 9 | # This < is really trying to be a !=. We provide a new-enough Autobahn 10 | # that it actually works, so remove the constraint from the Python metadata. 11 | '' 12 | sed -i -e "s/autobahn < 22.4.1/autobahn/" setup.py 13 | ''; 14 | 15 | dontUseSetuptoolsCheck = true; 16 | propagatedBuildInputs = with pythonPackages; [ 17 | zfec 18 | zope_interface 19 | foolscap 20 | cryptography 21 | twisted 22 | pyyaml 23 | six 24 | magic-wormhole 25 | eliot 26 | pyrsistent 27 | attrs 28 | autobahn 29 | future 30 | netifaces 31 | pyutil 32 | collections-extended 33 | klein 34 | werkzeug 35 | treq 36 | cbor2 37 | pycddl 38 | click 39 | psutil 40 | filelock 41 | distro 42 | appdirs 43 | bcrypt 44 | aniso8601 45 | ]; 46 | } 47 | -------------------------------------------------------------------------------- /nix/tahoe-versions.nix: -------------------------------------------------------------------------------- 1 | # Return a list of { version = string; buildArgs = attrset; } attrsets, with 2 | # each element describing one version of Tahoe-LAFS we can build against. 3 | { fetchPypi, tahoe-lafs-dev }: 4 | let 5 | v1_18_0 = fetchPypi { 6 | pname = "tahoe-lafs"; 7 | version = "1.18.0"; 8 | sha256 = "sha256-cXpHDfNO3TGta5RGfauqHK7dfy9SM7BLidjP6TbjF/4="; 9 | }; 10 | in 11 | [ 12 | { 13 | # The version ends up in the output name and dots conflict with their use 14 | # by Nix to select set attributes and end up require annoying quoting in 15 | # command line usage. Avoid that by using a different component separator 16 | # (`_`). 17 | version = "1_18_0"; 18 | buildArgs = { 19 | version = "1.18.0"; 20 | src = v1_18_0; 21 | }; 22 | } 23 | 24 | # Some other version. Often probably a recent master revision, but who 25 | # knows. 26 | { 27 | version = "dev"; 28 | buildArgs = rec { 29 | src = tahoe-lafs-dev.outPath; 30 | # Make up a version to call it. We don't really know what it is so 31 | # we'll call it something close to another version we know about. 
If we 32 | # really need to know what version it was then the Nix derivation has 33 | # this information and we can dig it out. 34 | version = "1.18.0.post1"; 35 | postPatch = 36 | let 37 | versionFileContents = version: '' 38 | # This _version.py is generated by ZKAPAuthorizer's tahoe-lafs.nix. 39 | # TODO: We can have more metadata after we switch to flakes. 40 | # Then the `self` input will have a `sourceInfo` attribute telling 41 | __pkgname__ = "tahoe-lafs" 42 | real_version = "${version}" 43 | full_version = "${version}" 44 | branch = "" 45 | verstr = "${version}" 46 | __version__ = verstr 47 | ''; 48 | in 49 | '' 50 | cp ${builtins.toFile "_version.py" (versionFileContents version)} src/allmydata/_version.py 51 | ''; 52 | }; 53 | } 54 | ] 55 | -------------------------------------------------------------------------------- /nix/types-pyyaml.nix: -------------------------------------------------------------------------------- 1 | { fetchPypi 2 | , buildPythonPackage 3 | }: 4 | buildPythonPackage rec { 5 | pname = "types-PyYAML"; 6 | version = "6.0.12.9"; 7 | 8 | src = fetchPypi { 9 | inherit pname version; 10 | hash = "sha256-xRsb1tmd3wqiiEp6MogQ6/cKQmLCkhldP0+aAAX57rY="; 11 | }; 12 | } 13 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [build-system] 2 | requires = ["setuptools", "wheel", "six"] 3 | build-backend = "setuptools.build_meta" 4 | 5 | [tool.towncrier] 6 | package = "_zkapauthorizer" 7 | package_dir = "src" 8 | filename = "ChangeLog.rst" 9 | 10 | [[tool.towncrier.type]] 11 | directory = "feature" 12 | name = "Features" 13 | showcontent = true 14 | 15 | [[tool.towncrier.type]] 16 | directory = "bugfix" 17 | name = "Bugfixes" 18 | showcontent = true 19 | 20 | [[tool.towncrier.type]] 21 | directory = "doc" 22 | name = "Improved Documentation" 23 | showcontent = true 24 | 25 | [[tool.towncrier.type]] 26 | directory = 
"removal" 27 | name = "Deprecations and Removals" 28 | showcontent = true 29 | 30 | [[tool.towncrier.type]] 31 | directory = "misc" 32 | name = "Misc" 33 | showcontent = false 34 | 35 | [tool.black] 36 | target-version = ['py39'] 37 | extend-exclude = ''' 38 | # A regex preceded with ^/ will apply only to files and directories 39 | # in the root of the project. 40 | ^/src/_zkapauthorizer/_version.py 41 | ''' 42 | 43 | [tool.isort] 44 | profile = "black" 45 | skip = ["src/_zkapauthorizer/_version.py"] 46 | -------------------------------------------------------------------------------- /requirements/test.in: -------------------------------------------------------------------------------- 1 | eliot-tree == 21.0.0 2 | coverage == 6.3.2 3 | fixtures == 4.0.0 4 | testtools == 2.5.0 5 | testresources == 2.0.1 6 | hypothesis == 6.45.1 7 | openapi_spec_validator == 0.4.0 8 | -------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | # Generally describe the project 2 | [metadata] 3 | # See https://packaging.python.org/guides/distributing-packages-using-setuptools/#name 4 | # for requiremnts of a valid project name. 5 | name = zero-knowledge-access-pass-authorizer 6 | version = attr: _zkapauthorizer.__version__ 7 | description = A `Tahoe-LAFS`_ storage-system plugin which authorizes storage operations based on privacy-respecting tokens. 8 | long_description = file: README.rst 9 | keywords = tahoe-lafs, storage, privacy, cryptography 10 | license = Apache 2.0 11 | classifiers = 12 | Framework :: Twisted 13 | Programming Language :: Python :: 3 14 | Programming Language :: Python :: 3.9 15 | author = PrivateStorage.io Inc. 16 | maintainer = PrivateStorage.io Inc. 17 | home_page = https://private.storage/ 18 | author_email = support@private.storage 19 | platform = POSIX 20 | 21 | [options] 22 | # All of the source is in the src directory. 
The weird syntax is defining a 23 | # dict with a key "" and a value "src". 24 | package_dir = 25 | =src 26 | # Explicitly list our packages because the automatic finder can't deal with 27 | # the plugins package we want to ship. 28 | packages = 29 | _zkapauthorizer 30 | _zkapauthorizer.server 31 | _zkapauthorizer.tests 32 | twisted.plugins 33 | 34 | install_requires = 35 | # Pin attrs with `provides()` until we have a tahoe-lafs release 36 | # that doesn't need it anymore. TODO: Remove that pin when tahoe-lafs 37 | # 1.20 is out. 38 | attrs <= 23.2.0 39 | cattrs 40 | zope.interface 41 | eliot >= 1.11 42 | aniso8601 43 | # compose explicitly documents that it uses SemVer 2.0.0 for its version 44 | # scheme. 45 | compose ~= 1.0 46 | python-challenge-bypass-ristretto 47 | # The pip resolver sometimes finds treq's dependencies first and these are 48 | # incompatible with Tahoe-LAFS'. So duplicate them here (the ones that 49 | # have been observed to cause problems). 50 | Twisted[tls,conch] >= 19.10.0 51 | 52 | tahoe-capabilities >= 2022.9.1 53 | 54 | # Tahoe has no stable Python API but we use its Python API so there's 55 | # basically no wiggle room here. We use a tiny range that just covers 56 | # what we test plus a little because our Nix packaging provides a 57 | # Tahoe-LAFS with a .postNNN version. 58 | tahoe-lafs >=1.18.0,<1.18.1 59 | treq 60 | pyutil 61 | prometheus-client 62 | # Include colorama as a dependency to help pip-compile deal with multiple 63 | # platforms. In particular, tqdm depends on colorama only on Windows. By 64 | # including it here, pip-compile will generate hashes (and install it) on 65 | # all platforms. colorama and pywin32 are our only dependencies that are only 66 | # required on some platforms; we can't include pywin32 here as it does not 67 | # install cross-platform. 
68 | colorama 69 | 70 | sqlparse 71 | cbor2 72 | 73 | # twisted-supporting websocket library (Tahoe, among others, already 74 | # depend on this) 75 | autobahn >= 21.11.1, != 22.5.1, != 22.4.2, != 22.4.1 76 | 77 | [flake8] 78 | # Enforce all pyflakes constraints, and also prohibit tabs for indentation. 79 | # Reference: 80 | # https://flake8.pycqa.org/en/latest/user/error-codes.html 81 | # https://pycodestyle.pycqa.org/en/latest/intro.html#error-codes 82 | # https://pypi.org/project/flake8-isort/#error-codes 83 | # https://pypi.org/project/flake8-black/#flake8-validation-codes 84 | select = F, W191, I, BLK 85 | 86 | [mypy] 87 | plugins = mypy_zope:plugin 88 | strict = True 89 | show_column_numbers = True 90 | pretty = True 91 | show_error_codes = True 92 | 93 | [mypy-_zkapauthorizer.tests.test_tahoe] 94 | disallow_subclassing_any = False 95 | 96 | [mypy-_zkapauthorizer.tests.*] 97 | disallow_untyped_calls = False 98 | disallow_subclassing_any = False 99 | 100 | [mypy-eliottree.*] 101 | ignore_missing_imports = True 102 | [mypy-testtools.*] 103 | ignore_missing_imports = True 104 | [mypy-treq.*] 105 | ignore_missing_imports = True 106 | [mypy-allmydata.*] 107 | ignore_missing_imports = True 108 | [mypy-fixtures.*] 109 | ignore_missing_imports = True 110 | [mypy-testresources.*] 111 | ignore_missing_imports = True 112 | [mypy-openapi_spec_validator.*] 113 | ignore_missing_imports = True 114 | [mypy-eliot.*] 115 | ignore_missing_imports = True 116 | [mypy-cbor2.*] 117 | ignore_missing_imports = True 118 | [mypy-prometheus_client.*] 119 | ignore_missing_imports = True 120 | [mypy-autobahn.*] 121 | ignore_missing_imports = True 122 | [mypy-challenge_bypass_ristretto.*] 123 | ignore_missing_imports = True 124 | [mypy-aniso8601.*] 125 | ignore_missing_imports = True 126 | [mypy-pyutil.*] 127 | ignore_missing_imports = True 128 | [mypy-sqlparse.*] 129 | ignore_missing_imports = True 130 | [mypy-compose.*] 131 | ignore_missing_imports = True 132 | 
-------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | from setuptools import setup 2 | 3 | setup( 4 | package_data={ 5 | "": ["backup-recovery.yaml", "testing-signing.key"], 6 | }, 7 | ) 8 | -------------------------------------------------------------------------------- /slipcover2coveralls.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | from hashlib import md5 4 | from json import dump, load 5 | from sys import argv, stdin, stdout 6 | from typing import Iterator, Union 7 | 8 | 9 | def main(service_job_id, service_name) -> int: 10 | slipcover_data = load(stdin) 11 | digests = dict(digest_source_files(slipcover_data)) 12 | dump( 13 | slipcover_to_coveralls(service_job_id, service_name, slipcover_data, digests), 14 | stdout, 15 | ) 16 | return 0 17 | 18 | 19 | def slipcover_to_coveralls( 20 | service_job_id: str, service_name: str, slipcover_data: dict, digests: dict 21 | ) -> dict: 22 | """ 23 | Convert slipcover's coverage data format to coveralls' coverage data 24 | format. 25 | """ 26 | # slipcover data looks like 27 | # {"files": {"filename": {"executed_lines": [ints], "missing_lines": [ints]}}} 28 | # 29 | # slipcover only measured covered/not-covered so there is no execution count information. 
30 | # line numbers are 1-based 31 | 32 | # coveralls data looks like 33 | # { 34 | # "service_job_id": "1234567890", 35 | # "service_name": "travis-ci", 36 | # "source_files": [ 37 | # { 38 | # "name": "example.rb", 39 | # "source_digest": "asdfasdf1234asfasdf2345", 40 | # "coverage": [null, 1, null] 41 | # }, 42 | # { 43 | # "name": "lib/two.rb", 44 | # "source_digest": "asdf1234asdfsdfggsfgd9423", 45 | # "coverage": [null, 1, 0, null] 46 | # } 47 | # ] 48 | # } 49 | # 50 | # where each "coverage" list element corresponds to a source line number 51 | # of that element's index in the list + 1 and the values mean: 52 | # 53 | # - null: not relevant 54 | # - int: number of times executed 55 | return { 56 | "service_job_id": service_job_id, 57 | "service_name": service_name, 58 | "source_files": [ 59 | _one_coveralls_entry(filename, digests[filename], coverage) 60 | for (filename, coverage) in slipcover_data["files"].items() 61 | ], 62 | } 63 | 64 | 65 | def _one_coveralls_entry(filename: str, digest: str, slipcover_entry: dict) -> dict: 66 | return { 67 | "name": filename, 68 | "source_digest": digest, 69 | "coverage": _to_coveralls_coverage( 70 | set(slipcover_entry["executed_lines"]), 71 | set(slipcover_entry["missing_lines"]), 72 | ), 73 | } 74 | 75 | 76 | def _to_coveralls_coverage( 77 | executed: set[int], missing: set[int] 78 | ) -> list[Union[int, None]]: 79 | max_line = max(max(executed, default=0), max(missing, default=0)) 80 | # Start at line number 1 to match slipcover's 1-based numbering. The 81 | # first result will land at index 0 in the result to match coveralls' 82 | # 0-based numbering. End at maxline + 1 so we don't miss the last line. 
83 | line_numbers = range(1, max_line + 2) 84 | return [ 85 | 0 if lineno in missing else 1 if lineno in executed else None 86 | for lineno in line_numbers 87 | ] 88 | 89 | 90 | def digest_source_files(slipcover_data) -> Iterator[tuple[str, str]]: 91 | for filename in slipcover_data["files"]: 92 | digest = md5() 93 | with open(filename, "rb") as src: 94 | digest.update(src.read()) 95 | yield (filename, digest.hexdigest()) 96 | 97 | 98 | if __name__ == "__main__": 99 | raise SystemExit(main(*argv[1:])) 100 | -------------------------------------------------------------------------------- /src/_zkapauthorizer/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright 2019 PrivateStorage.io, LLC 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | 15 | __all__ = [ 16 | "__version__", 17 | "NAME", 18 | ] 19 | 20 | 21 | # Hotfix Tahoe-LAFS #3883 22 | from allmydata import stats 23 | 24 | stats.eventually = lambda f: f() 25 | 26 | # The identifier for this plugin. This appears in URLs for resources the 27 | # client plugin exposes, configuration files, etc. 28 | NAME = "privatestorageio-zkapauthz-v2" 29 | 30 | # Don't forget to bump the version number in nix/lib.nix too. 
31 | __version__ = "2022.8.21" 32 | -------------------------------------------------------------------------------- /src/_zkapauthorizer/_attrs_zope.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import attrs 4 | from zope.interface import Interface 5 | 6 | 7 | @attrs.define(repr=False) 8 | class _ProvidesValidator: 9 | interface: type[Interface] = attrs.field() 10 | 11 | def __call__(self, inst: object, attr: attrs.Attribute, value: object) -> None: # type: ignore[type-arg] 12 | """ 13 | We use a callable class to be able to change the ``__repr__``. 14 | """ 15 | if not self.interface.providedBy(value): 16 | msg = ( 17 | f"'{attr.name}' must provide {self.interface!r} " 18 | f"which {value!r} doesn't." 19 | ) 20 | raise TypeError( 21 | msg, 22 | attr, 23 | self.interface, 24 | value, 25 | ) 26 | 27 | def __repr__(self) -> str: 28 | return f"<provides validator for interface {self.interface!r}>" 29 | 30 | 31 | def provides(interface: type[Interface]) -> _ProvidesValidator: 32 | """ 33 | A validator that raises a `TypeError` if the initializer is called 34 | with an object that does not provide the requested *interface* (checks are 35 | performed using ``interface.providedBy(value)`` (see `zope.interface 36 | <https://zopeinterface.readthedocs.io/en/latest/>`_). 37 | 38 | :param interface: The interface to check for. 39 | :type interface: ``zope.interface.Interface`` 40 | 41 | :raises TypeError: With a human readable error message, the attribute 42 | (of type `attrs.Attribute`), the expected interface, and the 43 | value it got. 44 | """ 45 | return _ProvidesValidator(interface) 46 | -------------------------------------------------------------------------------- /src/_zkapauthorizer/_base64.py: -------------------------------------------------------------------------------- 1 | # Copyright 2019 PrivateStorage.io, LLC 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 
5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | 15 | """ 16 | This module implements base64 encoding-related functionality. 17 | """ 18 | 19 | from base64 import b64decode as _b64decode 20 | from binascii import Error 21 | from re import compile as _compile 22 | 23 | _b64decode_validator = _compile(b"^[A-Za-z0-9-_]*={0,2}$") 24 | 25 | 26 | def urlsafe_b64decode(s: bytes) -> bytes: 27 | """ 28 | Like ``base64.b64decode`` but with validation. 29 | """ 30 | if not _b64decode_validator.match(s): 31 | raise Error("Non-base64 digit found") 32 | return _b64decode(s, altchars=b"-_") 33 | -------------------------------------------------------------------------------- /src/_zkapauthorizer/_json.py: -------------------------------------------------------------------------------- 1 | # Copyright 2022 PrivateStorage.io, LLC 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 
14 | 15 | from json import dumps as _dumps 16 | from json import loads as _loads 17 | from typing import Any, cast 18 | 19 | from ._types import JSON 20 | 21 | 22 | def dumps_utf8(o: Any) -> bytes: 23 | """ 24 | Serialize an object to a UTF-8-encoded JSON byte string. 25 | """ 26 | return _dumps(o).encode("utf-8") 27 | 28 | 29 | def loads(data: bytes) -> JSON: 30 | """ 31 | Load a JSON object from a byte string. 32 | 33 | Raise an exception including ``data`` if the parse fails. 34 | """ 35 | try: 36 | return cast(JSON, _loads(data)) 37 | except ValueError as e: 38 | raise ValueError("{!r}: {!r}".format(e, data)) 39 | -------------------------------------------------------------------------------- /src/_zkapauthorizer/_stack.py: -------------------------------------------------------------------------------- 1 | # Copyright 2019 PrivateStorage.io, LLC 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | 15 | from contextlib import contextmanager 16 | from typing import Generator 17 | 18 | try: 19 | from resource import RLIMIT_STACK, getrlimit, setrlimit 20 | except ImportError: 21 | # Not available on Windows, unfortunately. 
22 | RLIMIT_STACK = -1 23 | 24 | def getrlimit(resource: int, /) -> tuple[int, int]: 25 | return (-1, -1) 26 | 27 | def setrlimit(resource: int, limits: tuple[int, int], /) -> None: 28 | pass 29 | 30 | 31 | @contextmanager 32 | def less_limited_stack() -> Generator[None, None, None]: 33 | """ 34 | A context manager which removes the resource limit on stack size, to the 35 | extent possible, for execution of the context. 36 | 37 | More precisely, the soft stack limit is raised to the hard limit. 38 | """ 39 | soft, hard = getrlimit(RLIMIT_STACK) 40 | try: 41 | # We can raise the soft limit to the hard limit and no higher. 42 | setrlimit(RLIMIT_STACK, (hard, hard)) 43 | except ValueError: 44 | # Well, not on macOS: https://bugs.python.org/issue34602 45 | yield 46 | else: 47 | yield 48 | setrlimit(RLIMIT_STACK, (soft, hard)) 49 | -------------------------------------------------------------------------------- /src/_zkapauthorizer/_types.py: -------------------------------------------------------------------------------- 1 | # Copyright 2022 PrivateStorage.io, LLC 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | 15 | """ 16 | Re-usable type definitions for ZKAPAuthorizer. 
17 | """ 18 | 19 | from datetime import datetime 20 | from typing import ( 21 | TYPE_CHECKING, 22 | Callable, 23 | Generic, 24 | Mapping, 25 | Sequence, 26 | TypedDict, 27 | TypeVar, 28 | Union, 29 | ) 30 | 31 | from attrs import Attribute as _Attribute 32 | from typing_extensions import Literal 33 | 34 | GetTime = Callable[[], datetime] 35 | 36 | _T = TypeVar("_T") 37 | 38 | if TYPE_CHECKING: 39 | Attribute = _Attribute 40 | else: 41 | 42 | class Attribute(_Attribute, Generic[_T]): 43 | pass 44 | 45 | 46 | # mypy does not support recursive types so we can't say much about what's in 47 | # the containers here. 48 | JSON = Union[None, int, float, str, Sequence, Mapping] 49 | 50 | # The contents of the [storageserver.plugins.privatestorageio-zkapauthz-v2] 51 | # section of a storage server's tahoe.cfg. 52 | ServerConfig = TypedDict( 53 | "ServerConfig", 54 | { 55 | "pass-value": str, 56 | "ristretto-issuer-root-url": str, 57 | "ristretto-signing-key-path": str, 58 | "prometheus-metrics-path": str, 59 | "prometheus-metrics-interval": str, 60 | }, 61 | total=False, 62 | ) 63 | 64 | 65 | class NonRedeemerConfig(TypedDict): 66 | """ 67 | ``[storageserver.plugins.privatestorageio-zkapauthz-v2]`` contents in 68 | the non-redeeming configuration. 69 | 70 | In this configuration vouchers are accepted for redemption but no 71 | redemption attempt will ever complete. 72 | """ 73 | 74 | redeemer: Literal["non"] 75 | 76 | 77 | # [storageserver.plugins.privatestorageio-zkapauthz-v2]`` contents in the 78 | # dummy redeemer configuration. 79 | # 80 | # In this configuration vouchers are redeemed for values which are 81 | # structurally valid but otherwise nonsense. 82 | DummyRedeemerConfig = TypedDict( 83 | "DummyRedeemerConfig", 84 | { 85 | "redeemer": Literal["dummy"], 86 | "issuer-public-key": str, 87 | "allowed-public-keys": str, 88 | # XXX All the other types should have these too but it's so tedious... 
89 | "lease.crawl-interval.mean": str, 90 | "lease.crawl-interval.range": str, 91 | "lease.min-time-remaining": str, 92 | }, 93 | ) 94 | 95 | 96 | class DoubleSpendRedeemerConfig(TypedDict): 97 | """ 98 | ``[storageserver.plugins.privatestorageio-zkapauthz-v2]`` contents in 99 | the double-spend error configuration. 100 | 101 | In this configuration vouchers are accepted for redemption but all 102 | redemption attempts fail with an "already redeemed" error. 103 | """ 104 | 105 | redeemer: Literal["double-spend"] 106 | 107 | 108 | class UnpaidRedeemerConfig(TypedDict): 109 | """ 110 | ``[storageserver.plugins.privatestorageio-zkapauthz-v2]`` contents in 111 | the unpaid configuration. 112 | 113 | In this configuration vouchers are accepted for redemption but all 114 | redemption attempts fail with an "unpaid voucher" error. 115 | """ 116 | 117 | redeemer: Literal["unpaid"] 118 | 119 | 120 | class ErrorRedeemerConfig(TypedDict): 121 | """ 122 | ``[storageserver.plugins.privatestorageio-zkapauthz-v2]`` contents in 123 | the generic error configuration. 124 | 125 | In this configuration vouchers are accepted for redemption but all 126 | redemption attempts fail with an unstructured error with the associated 127 | details. 128 | """ 129 | 130 | redeemer: Literal["error"] 131 | details: str 132 | 133 | 134 | # [storageserver.plugins.privatestorageio-zkapauthz-v2]`` contents in the 135 | # production configuration. 136 | # 137 | # In this configuration vouchers are submitted to a remote "redemption server" 138 | # (aka "payment server"). 
139 | RistrettoRedeemerConfig = TypedDict( 140 | "RistrettoRedeemerConfig", 141 | { 142 | "redeemer": Literal["ristretto"], 143 | "ristretto-issuer-root-url": str, 144 | "pass-value": str, 145 | "default-token-count": str, 146 | "allowed-public-keys": str, 147 | "lease.crawl-interval.mean": str, 148 | "lease.crawl-interval.range": str, 149 | "lease.min-time-remaining": str, 150 | }, 151 | total=False, 152 | ) 153 | 154 | # The contents of the [storageclient.plugins.privatestorageio-zkapauthz-v2] 155 | # section of a client node's tahoe.cfg. 156 | ClientConfig = Union[ 157 | NonRedeemerConfig, 158 | DummyRedeemerConfig, 159 | DoubleSpendRedeemerConfig, 160 | UnpaidRedeemerConfig, 161 | ErrorRedeemerConfig, 162 | RistrettoRedeemerConfig, 163 | ] 164 | -------------------------------------------------------------------------------- /src/_zkapauthorizer/api.py: -------------------------------------------------------------------------------- 1 | # Copyright 2019 PrivateStorage.io, LLC 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 
# The stable, public names of the ZKAPAuthorizer API.  Anything not listed
# here should be considered private to the implementation.
__all__ = [
    "MorePassesRequired",
    "LeaseRenewalRequired",
    "ZKAPAuthorizerStorageServer",
    "ZKAPAuthorizerStorageClient",
]

# Re-export the implementations from their private modules.
from ._storage_client import ZKAPAuthorizerStorageClient
from ._storage_server import LeaseRenewalRequired, ZKAPAuthorizerStorageServer
from .storage_common import MorePassesRequired
38 | content: 39 | application/json: 40 | schema: 41 | $ref: "#/components/schemas/ReplicaConfiguration" 42 | 43 | components: 44 | schemas: 45 | Error: 46 | type: "object" 47 | properties: 48 | details: 49 | type: "string" 50 | description: >- 51 | A free-form text field which may give further details about 52 | the failure. 53 | 54 | ReplicaConfiguration: 55 | type: "object" 56 | properties: 57 | recovery-capability: 58 | type: "string" 59 | description: >- 60 | The Tahoe-LAFS read-only capability for the recovery data. 61 | 62 | This is the capability which can be submitted in order to initiate 63 | a recovery from the replica. 64 | 65 | RecoveryStatus: 66 | type: "object" 67 | properties: 68 | "stage": 69 | type: "string" 70 | description: >- 71 | A short string indicating where the system is in the recovery 72 | process right now. 73 | 74 | "failure_reason": 75 | type: "string" 76 | description: >- 77 | If recovery has failed, a human-meaningful description of the 78 | reason for the failure. 79 | 80 | responses: 81 | ErrorResponse: 82 | description: >- 83 | Something broken. 84 | content: 85 | application/json: 86 | schema: 87 | $ref: "#/components/schemas/Error" 88 | -------------------------------------------------------------------------------- /src/_zkapauthorizer/config.py: -------------------------------------------------------------------------------- 1 | # Copyright 2019 PrivateStorage.io, LLC 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
"""
Helpers for reading values from the Tahoe-LAFS configuration.
"""

__all__ = [
    "REPLICA_RWCAP_BASENAME",
    "Config",
    "EmptyConfig",
    "empty_config",
    "read_duration",
    "read_node_url",
]

from datetime import timedelta
from typing import Protocol, TypeVar, Union, cast

from allmydata.node import _Config as Config
from attrs import define
from hyperlink import DecodedURL
from twisted.python.filepath import FilePath

from . import NAME

_T = TypeVar("_T")

# The basename of the replica read-write capability file in the node's private
# directory, if replication is configured.
REPLICA_RWCAP_BASENAME = NAME + ".replica-rwcap"

# The version number in NAME doesn't match the version here because the
# database is persistent state and we need to be sure to load the older
# version even if we signal an API compatibility break by bumping the version
# number elsewhere.  Consider this version number part of a different scheme
# where we're versioning our ability to open the database at all.  The schema
# inside the database is versioned by yet another mechanism.
CONFIG_DB_NAME = "privatestorageio-zkapauthz-v1.sqlite3"


class TahoeConfig(Protocol):
    """
    A representation of the configuration for a Tahoe-LAFS node.
    """

    def get_config(
        self,
        section: str,
        option: str,
        default: object = object(),
        boolean: bool = False,
    ) -> object:
        """
        Read an option from a section of the configuration.

        :param section: The name of the configuration section.
        :param option: The name of the option within that section.
        :param default: The value to return if the option is absent.
        :param boolean: Whether to interpret the value as a boolean.
        """

    def get_private_path(self, name: str) -> str:
        """
        Construct a path beneath the private directory of the node this
        configuration belongs to.

        :param name: A path relative to the private directory.
        """


@define
class EmptyConfig:
    """
    Weakly pretend to be a Tahoe-LAFS configuration object with no
    configuration.
    """

    _basedir: FilePath = FilePath(".")  # type: ignore[no-untyped-call]

    def get_config(
        self,
        section: str,
        option: str,
        default: object = object(),
        boolean: bool = False,
    ) -> object:
        # With no configuration at all, every lookup falls back to the
        # caller-supplied default.
        return default

    def get_private_path(self, name: str) -> str:
        private = self._basedir.child("private")  # type: ignore[no-untyped-call]
        child = private.child(name)
        child_str = cast(str, child.path)
        return child_str


# A shared do-nothing configuration instance, convenient for tests and
# default arguments.
empty_config = EmptyConfig()


def read_node_url(config: Config) -> DecodedURL:
    """
    Get the root of the node's HTTP API.

    :param config: The node configuration from which to find the
        ``node.url`` file.

    :return: The URL read from the node's ``node.url`` file.
    """
    return DecodedURL.from_text(
        FilePath(config.get_config_path("node.url"))  # type: ignore[no-untyped-call]
        .getContent()
        .decode("ascii")
        .strip()
    )


def read_duration(cfg: Config, option: str, default: _T) -> Union[timedelta, _T]:
    """
    Read an integer number of seconds from the ZKAPAuthorizer section of a
    Tahoe-LAFS config.

    :param cfg: The Tahoe-LAFS config object to consult.
    :param option: The name of the option to read.
    :param default: The value to return if the option is missing.

    :return: ``default`` if the option is missing, otherwise the parsed
        duration as a ``timedelta``.
    """
    section_name = "storageclient.plugins." + NAME
    value_str = cfg.get_config(
        section=section_name,
        option=option,
        default=None,
    )
    if value_str is None:
        return default
    return timedelta(seconds=int(value_str))
47 | """ 48 | return text.replace("%", "%%") 49 | 50 | 51 | def config_string_from_sections( 52 | divided_sections: Iterable[dict[str, dict[str, str]]] 53 | ) -> str: 54 | """ 55 | Get the .ini-syntax unicode string representing the given configuration 56 | values. 57 | 58 | :param [dict] divided_sections: The configuration to use to generate the 59 | string. Each ``dict`` maps a top-level section name to a ``dict`` of 60 | key/value pairs. Dictionaries may have overlapping top-level 61 | sections, in which case the section items are merged (for collisions, 62 | last value wins). 63 | """ 64 | sections = _merge_dictionaries(divided_sections) 65 | return "".join( 66 | list( 67 | "[{name}]\n{items}\n".format( 68 | name=name, 69 | items="\n".join( 70 | "{key} = {value}".format(key=key, value=_tahoe_config_quote(value)) 71 | for (key, value) in contents.items() 72 | ), 73 | ) 74 | for (name, contents) in sections.items() 75 | ) 76 | ) 77 | -------------------------------------------------------------------------------- /src/_zkapauthorizer/eliot.py: -------------------------------------------------------------------------------- 1 | # Copyright 2020 PrivateStorage.io, LLC 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | 15 | """ 16 | Eliot field, message, and action definitions for ZKAPAuthorizer. 
17 | """ 18 | 19 | from functools import wraps 20 | from json import JSONEncoder 21 | from typing import Any, Awaitable, Callable, Optional, TypeVar, cast 22 | 23 | from allmydata.util.eliotutil import log_call_deferred as _log_call_deferred 24 | from eliot import ActionType, Field, MessageType 25 | from eliot import log_call as _log_call 26 | from eliot import start_action 27 | from eliot.json import EliotJSONEncoder 28 | from eliot.testing import capture_logging as _capture_logging 29 | from twisted.internet.defer import Deferred 30 | from typing_extensions import ParamSpec 31 | 32 | from ._types import JSON 33 | 34 | PRIVACYPASS_MESSAGE = Field( 35 | "message", 36 | str, 37 | "The PrivacyPass request-binding data associated with a pass.", 38 | ) 39 | 40 | INVALID_REASON = Field( 41 | "reason", 42 | str, 43 | "The reason given by the server for rejecting a pass as invalid.", 44 | ) 45 | 46 | PASS_COUNT = Field( 47 | "count", 48 | int, 49 | "A number of passes.", 50 | ) 51 | 52 | GET_PASSES = MessageType( 53 | "zkapauthorizer:get-passes", 54 | [PRIVACYPASS_MESSAGE, PASS_COUNT], 55 | "An attempt to spend passes is beginning.", 56 | ) 57 | 58 | SPENT_PASSES = MessageType( 59 | "zkapauthorizer:spent-passes", 60 | [PASS_COUNT], 61 | "An attempt to spend passes has succeeded.", 62 | ) 63 | 64 | INVALID_PASSES = MessageType( 65 | "zkapauthorizer:invalid-passes", 66 | [INVALID_REASON, PASS_COUNT], 67 | "An attempt to spend passes has found some to be invalid.", 68 | ) 69 | 70 | RESET_PASSES = MessageType( 71 | "zkapauthorizer:reset-passes", 72 | [PASS_COUNT], 73 | "Some passes involved in a failed spending attempt have not definitely been spent and are being returned for future use.", 74 | ) 75 | 76 | SIGNATURE_CHECK_FAILED = MessageType( 77 | "zkapauthorizer:storage-client:signature-check-failed", 78 | [PASS_COUNT], 79 | "Some passes the client tried to use were rejected for having invalid signatures.", 80 | ) 81 | 82 | CALL_WITH_PASSES = ActionType( 83 | 
"zkapauthorizer:storage-client:call-with-passes", 84 | [PASS_COUNT], 85 | [], 86 | "A storage operation is being started which may spend some passes.", 87 | ) 88 | 89 | CURRENT_SIZES = Field( 90 | "current_sizes", 91 | dict, 92 | "A dictionary mapping the numbers of existing shares to their existing sizes.", 93 | ) 94 | 95 | TW_VECTORS_SUMMARY = Field( 96 | "tw_vectors_summary", 97 | dict, 98 | "A dictionary mapping share numbers from tw_vectors to test and write vector summaries.", 99 | ) 100 | 101 | NEW_SIZES = Field( 102 | "new_sizes", 103 | dict, 104 | "A dictionary like that of CURRENT_SIZES but for the sizes computed for the shares after applying tw_vectors.", 105 | ) 106 | 107 | NEW_PASSES = Field( 108 | "new_passes", 109 | int, 110 | "The number of passes computed as being required for the change in size.", 111 | ) 112 | 113 | MUTABLE_PASSES_REQUIRED = MessageType( 114 | "zkapauthorizer:storage:mutable-passes-required", 115 | [CURRENT_SIZES, TW_VECTORS_SUMMARY, NEW_SIZES, NEW_PASSES], 116 | "Some number of passes has been computed as the cost of updating a mutable.", 117 | ) 118 | 119 | T = TypeVar("T") 120 | P = ParamSpec("P") 121 | Q = ParamSpec("Q") 122 | 123 | JSONT = TypeVar("JSONT", bound=JSON) 124 | 125 | 126 | def log_call( 127 | action_type: Optional[str] = None, 128 | include_args: Optional[list[str]] = None, 129 | include_result: bool = True, 130 | ) -> Callable[[Callable[P, JSONT]], Callable[P, JSONT]]: 131 | return cast( 132 | Callable[[Callable[P, JSONT]], Callable[P, JSONT]], 133 | _log_call( 134 | action_type=action_type, 135 | include_args=include_args, 136 | include_result=include_result, 137 | ), 138 | ) 139 | 140 | 141 | def log_call_deferred( 142 | action_type: Optional[str] = None, 143 | ) -> Callable[[Callable[P, Deferred[JSONT]]], Callable[P, Deferred[JSONT]]]: 144 | return cast( 145 | Callable[[Callable[P, Deferred[JSONT]]], Callable[P, Deferred[JSONT]]], 146 | _log_call_deferred(action_type), 147 | ) 148 | 149 | 150 | def 
def log_call_coroutine(
    action_type: str,
) -> Callable[[Callable[P, Awaitable[JSONT]]], Callable[P, Awaitable[JSONT]]]:
    """
    Decorate a coroutine function so that each call runs inside a new Eliot
    action of the given type.

    :param action_type: The Eliot ``action_type`` to start for each call.
    """

    def decorator(
        wrapped: Callable[P, Awaitable[JSONT]]
    ) -> Callable[P, Awaitable[JSONT]]:
        @wraps(wrapped)
        async def wrapper(*args: P.args, **kwargs: P.kwargs) -> JSONT:
            # Enter the action before awaiting so the awaited work is logged
            # as part of it.
            with start_action(action_type=action_type):
                return await wrapped(*args, **kwargs)

        return wrapper

    return decorator


def capture_logging(
    assertion: Any,
    *assertionArgs: Any,
    encoder_: JSONEncoder = EliotJSONEncoder,
    **assertionKwargs: Any,
) -> Callable[[Callable[P, T]], Callable[P, T]]:
    """
    A typed wrapper around ``eliot.testing.capture_logging``.

    The upstream decorator is untyped; cast it so callers retain useful
    signatures.
    """
    decorator = _capture_logging(
        assertion, *assertionArgs, encoder_=encoder_, **assertionKwargs
    )
    return cast(Callable[[Callable[P, T]], Callable[P, T]], decorator)
18 | """ 19 | 20 | from allmydata.interfaces import Offset, RIStorageServer, StorageIndex 21 | from foolscap.constraint import Any, ByteStringConstraint, IConstraint 22 | from foolscap.copyable import Copyable, RemoteCopy 23 | from foolscap.remoteinterface import RemoteInterface, RemoteMethodSchema 24 | from foolscap.schema import DictOf, ListOf 25 | 26 | 27 | class ShareStat(Copyable, RemoteCopy): 28 | """ 29 | Represent some metadata about a share. 30 | 31 | :ivar int size: The size. in bytes, of the share. 32 | 33 | :ivar int lease_expiration: The POSIX timestamp of the time at which the 34 | lease on this share expires, or None if there is no lease. 35 | """ 36 | 37 | typeToCopy = copytype = "ShareStat" 38 | 39 | # To be a RemoteCopy it must be possible to instantiate this with no 40 | # arguments. :/ So supply defaults for these attributes. 41 | # 42 | # Also, using attrs.define here completely breaks some internal Foolscap 43 | # registration mechanism so do it the hard way. 44 | def __init__(self, size: int = 0, lease_expiration: int = 0) -> None: 45 | self.size = size 46 | self.lease_expiration = lease_expiration 47 | 48 | # The RemoteCopy interface 49 | def setCopyableState(self, state: dict[str, Any]) -> None: 50 | self.__dict__ = state 51 | 52 | def __eq__(self, other: object) -> bool: 53 | if isinstance(other, ShareStat): 54 | return (self.size, self.lease_expiration) == ( 55 | other.size, 56 | other.lease_expiration, 57 | ) 58 | return NotImplemented 59 | 60 | 61 | # The Foolscap convention seems to be to try to constrain inputs to valid 62 | # values. So we'll try to limit the number of passes a client can supply. 63 | # Foolscap may be moving away from this so we may eventually drop this as 64 | # well. Though it may still make sense on a non-Foolscap protocol (eg HTTP) 65 | # which Tahoe-LAFS may eventually support. 
# Foolscap convention is to constrain inputs to valid values, so bound how
# many passes a client may supply in one call.  Foolscap may be moving away
# from this so we may eventually drop it, though a cap could still make sense
# on a non-Foolscap protocol (eg HTTP) which Tahoe-LAFS may eventually
# support.
#
# If a pass is worth 128 KiB of storage for some amount of time, 2 ** 20
# passes is worth 128 GiB of storage for some amount of time.  That is an
# arbitrary upper limit on the size of immutable files but maybe it's large
# enough not to be an issue for a while.
#
# The reason to have any limit at all is protection against denial of service
# attacks that exhaust server memory by creating unbearably large lists.
#
# A limit of 2 ** 20 passes translates to 177 MiB (times some constant factor
# for Foolscap/Python overhead).  That should be tolerable.
_MAXIMUM_PASSES_PER_CALL = 2**20

# The length of a serialized Ristretto-flavored PrivacyPass pass: a
# combination of token preimages and unblinded token signatures, each
# base64-encoded.
_PASS_LENGTH = 177

# Turn those values into Foolscap constraint objects, following the Foolscap
# convention of CamelCase module-level names.
_Pass = ByteStringConstraint(maxLength=_PASS_LENGTH, minLength=_PASS_LENGTH)  # type: ignore[no-untyped-call]
_PassList = ListOf(_Pass, maxLength=_MAXIMUM_PASSES_PER_CALL)  # type: ignore[no-untyped-call]


def add_passes(schema: RemoteMethodSchema) -> RemoteMethodSchema:
    """
    Add a ``passes`` parameter to the given method schema.

    :param schema: An existing method schema to modify.

    :return: A schema like ``schema`` but with one additional required
        argument.
    """
    return add_arguments(schema, [("passes", _PassList)])


def add_arguments(
    schema: RemoteMethodSchema, kwargs: list[tuple[str, IConstraint]]
) -> RemoteMethodSchema:
    """
    Create a new schema like ``schema`` but with the arguments given by
    ``kwargs`` prepended to the signature.

    :param schema: The existing schema.

    :param kwargs: The arguments to prepend to the signature of ``schema``.

    :return: The new schema object.
    """
    combined = dict(schema.argConstraints)
    combined.update(kwargs)
    modified_schema = RemoteMethodSchema(**combined)  # type: ignore[no-untyped-call]
    # A RemoteMethodSchema initialized from **kwargs records its
    # ``argumentNames`` in some arbitrary, probably-incorrect order, which
    # breaks user code that passes arguments positionally.  Rebuild the
    # ordering: the newly added arguments first, then the original arguments
    # in their original order.
    new_names = [name for (name, _) in kwargs]
    modified_schema.argumentNames = new_names + schema.argumentNames
    return modified_schema
class RIPrivacyPassAuthorizedStorageServer(RemoteInterface):
    """
    An object which can store and retrieve shares, subject to pass-based
    authorization.

    This is much the same as ``allmydata.interfaces.RIStorageServer`` but
    several of its methods take an additional ``passes`` parameter.  Clients
    are expected to supply suitable passes and only after the passes have been
    validated is service provided.
    """

    __remote_name__ = "RIPrivacyPassAuthorizedStorageServer.tahoe.privatestorage.io"

    # Methods which re-use the upstream schema unchanged (no ``passes``
    # argument is added for them).
    get_version = RIStorageServer["get_version"]

    # Methods which consume storage are extended with a required ``passes``
    # argument via add_passes.
    allocate_buckets = add_passes(RIStorageServer["allocate_buckets"])

    add_lease = add_passes(RIStorageServer["add_lease"])

    get_buckets = RIStorageServer["get_buckets"]

    def share_sizes(  # type: ignore[no-untyped-def]
        storage_index_or_slot=StorageIndex,
        # Notionally, ChoiceOf(None, SetOf(int, maxLength=MAX_BUCKETS)).
        # However, support for such a construction appears to be
        # unimplemented in Foolscap.  So, instead...
        sharenums=Any(),
    ):
        """
        Get the size of the given shares in the given storage index or slot.  If a
        share has no stored state, its size is reported as 0.
        """
        # Per RemoteInterface convention, the return value declares the
        # result schema: share number -> share size.
        return DictOf(int, Offset)  # type: ignore[no-untyped-call]

    def stat_shares(  # type: ignore[no-untyped-def]
        storage_indexes_or_slots=ListOf(StorageIndex),  # type: ignore[no-untyped-call,assignment]
    ):
        """
        Get various metadata about shares in the given storage index or slot.

        :return [{int: ShareStat}]: A list of share stats.  Dictionaries in
            the list corresponds to the results for each storage index
            requested by the ``storage_indexes_or_slots`` argument.  Items in
            the dictionary give share stats for each share known to this
            server to be associated with the corresponding storage index.
            Keys are share numbers and values are the stats.
        """
        # Any() should be ShareStat but I don't know how to spell that.
        return ListOf(DictOf(int, Any()))  # type: ignore[no-untyped-call]

    slot_readv = RIStorageServer["slot_readv"]

    slot_testv_and_readv_and_writev = add_passes(
        RIStorageServer["slot_testv_and_readv_and_writev"],
    )

    advise_corrupt_share = RIStorageServer["advise_corrupt_share"]
@frozen
class PriceCalculator(object):
    """
    Compute the price, in ZKAPs, of storing file data, accounting for
    erasure-encoding expansion.

    :ivar int _shares_needed: The number of shares which are required to
        reconstruct the original data.

    :ivar int _shares_total: The total number of shares which will be
        produced in the erasure encoding process.

    :ivar int _pass_value: The bytes component of the bytes×time value of a
        single pass.
    """

    _shares_needed: int
    _shares_total: int
    _pass_value: int

    def calculate(self, sizes: Iterable[int]) -> int:
        """
        Calculate the price to store data of the given sizes for one lease
        period.

        :param sizes: The sizes of the individual data items in bytes.

        :return: The number of ZKAPs required.
        """
        total_price = 0
        for data_size in sizes:
            # Each file is erasure-encoded into ``_shares_total`` shares of
            # this size.
            one_share_size = share_size_for_data(self._shares_needed, data_size)
            total_price += required_passes(
                self._pass_value,
                [one_share_size] * self._shares_total,
            )
        return total_price
import allmydata.web.private as awp
import attr
from attrs import define, frozen
from cryptography.hazmat.primitives.constant_time import bytes_eq
from twisted.cred.checkers import ANONYMOUS
from twisted.cred.credentials import ICredentials
from twisted.cred.error import UnauthorizedLogin
from twisted.cred.portal import IRealm, Portal
from twisted.internet.defer import Deferred, fail, succeed
from twisted.python.failure import Failure
from twisted.web.guard import HTTPAuthSessionWrapper
from twisted.web.iweb import ICredentialFactory, IRequest
from twisted.web.resource import IResource
from zope.interface import implementer
from zope.interface.interface import InterfaceClass

# The import exists only for its side effect (ensuring the nevow hotfix
# mentioned above is applied); drop the name so it is not mistaken for a
# dependency on the Tahoe-LAFS Python API.
del awp

# The HTTP authorization scheme used in ``Authorization`` request headers.
SCHEME = b"tahoe-lafs"


# A checker returns either ``ANONYMOUS`` (the empty tuple) or an avatar
# identifier (bytes).
AvatarId = Union[bytes, tuple[()]]


class IToken(ICredentials):
    """
    A credential carrying the authorization token proposed by a client.
    """

    def equals(auth_token: bytes) -> bool:
        """
        Compare the proposed token against the given valid token.

        :return: ``True`` if and only if they match.
        """


@implementer(IToken)
@frozen
class Token(object):
    """
    An ``IToken`` wrapping the raw token bytes presented by a client.
    """

    proposed_token: bytes

    def equals(self, valid_token: bytes) -> bool:
        # Constant-time comparison so the check itself does not leak
        # information about the valid token via timing.
        return bytes_eq(
            valid_token,
            self.proposed_token,
        )


@frozen
class TokenChecker(object):
    """
    A Twisted Cred checker which accepts an ``IToken`` credential exactly
    when it matches the current authorization token.

    :ivar get_auth_token: A callable returning the currently-valid token.
    """

    get_auth_token: Callable[[], bytes]

    credentialInterfaces = [IToken]

    def requestAvatarId(self, credentials: Token) -> Deferred[AvatarId]:
        # Fetch the valid token on every request so configuration changes
        # take effect immediately.
        required_token = self.get_auth_token()
        if credentials.equals(required_token):
            return succeed(ANONYMOUS)
        return fail(Failure(UnauthorizedLogin()))  # type: ignore[no-untyped-call]


@implementer(ICredentialFactory)
@frozen
class TokenCredentialFactory(object):
    """
    Issue ``tahoe-lafs`` authentication challenges and decode
    ``Authorization: tahoe-lafs <token>`` responses into ``Token``
    credentials.
    """

    scheme = SCHEME
    authentication_realm = b"tahoe-lafs"

    def getChallenge(self, request: IRequest) -> dict[bytes, bytes]:
        return {b"realm": self.authentication_realm}

    def decode(self, response: bytes, request: IRequest) -> Token:
        # The whole response is the proposed token; the request is unused.
        return Token(response)
| 87 | 88 | @implementer(IRealm) 89 | @define 90 | class PrivateRealm(object): 91 | _root: IResource 92 | 93 | def _logout(self) -> None: 94 | pass 95 | 96 | def requestAvatar( 97 | self, avatarId: str, mind: object, *interfaces: InterfaceClass 98 | ) -> tuple[InterfaceClass, IResource, Callable[[], None]]: 99 | if IResource in interfaces: 100 | return (IResource, self._root, self._logout) 101 | raise NotImplementedError( 102 | "PrivateRealm supports IResource not {}".format(interfaces), 103 | ) 104 | 105 | 106 | def _create_private_tree( 107 | get_auth_token: Callable[[], bytes], vulnerable: IResource 108 | ) -> HTTPAuthSessionWrapper: 109 | realm = PrivateRealm(vulnerable) 110 | checker = TokenChecker(get_auth_token) 111 | portal = Portal(realm, [checker]) # type: ignore[no-untyped-call] 112 | credentials = TokenCredentialFactory() 113 | return HTTPAuthSessionWrapper(portal, [credentials]) # type: ignore[no-untyped-call] 114 | 115 | 116 | def create_private_tree( 117 | get_auth_token: Callable[[], bytes], vulnerable_tree: IResource 118 | ) -> HTTPAuthSessionWrapper: 119 | """ 120 | Create a new resource tree that only allows requests if they include a 121 | correct `Authorization: tahoe-lafs ` header (where 122 | `api_auth_token` matches the private configuration value). 123 | 124 | :param get_auth_token: Get the valid authorization token. 125 | 126 | :param vulnerable_tree: Create the resource hierarchy which will be 127 | protected by the authorization mechanism. 128 | """ 129 | return _create_private_tree( 130 | get_auth_token, 131 | vulnerable_tree, 132 | ) 133 | -------------------------------------------------------------------------------- /src/_zkapauthorizer/server/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright 2019 PrivateStorage.io, LLC 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 
5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | -------------------------------------------------------------------------------- /src/_zkapauthorizer/server/spending.py: -------------------------------------------------------------------------------- 1 | from typing import Sequence 2 | 3 | import attr 4 | from attrs import Factory, define, field 5 | from challenge_bypass_ristretto import PublicKey 6 | from prometheus_client import CollectorRegistry 7 | from twisted.internet.interfaces import IReactorTime 8 | from zope.interface import Interface, implementer 9 | 10 | 11 | class ISpender(Interface): 12 | """ 13 | An ``ISpender`` can records spent ZKAPs and reports double spends. 14 | """ 15 | 16 | def mark_as_spent(public_key: PublicKey, passes: Sequence[bytes]) -> None: 17 | """ 18 | Record the given ZKAPs (associated to the given public key as having 19 | been spent. 20 | 21 | This does *not* report errors and should only be used in cases when 22 | recording spending that has already happened. This can be because 23 | we could not contact the spending service when they were spent, or 24 | because we can't yet check before making changes to the node. 25 | """ 26 | 27 | 28 | @define 29 | class _SpendingData(object): 30 | spent_tokens: dict[bytes, list[bytes]] = field(init=False, default=Factory(dict)) 31 | 32 | def reset(self) -> None: 33 | self.spent_tokens.clear() 34 | 35 | 36 | @implementer(ISpender) 37 | @attr.s 38 | class RecordingSpender(object): 39 | """ 40 | An in-memory :py:`ISpender` implementation that exposes the spent tokens 41 | for testing purposes. 
42 | """ 43 | 44 | _recorder: _SpendingData = field( 45 | validator=attr.validators.instance_of(_SpendingData) 46 | ) 47 | 48 | @classmethod 49 | def make(cls) -> tuple[_SpendingData, ISpender]: 50 | recorder = _SpendingData() 51 | return recorder, cls(recorder) 52 | 53 | def mark_as_spent(self, public_key: PublicKey, passes: Sequence[bytes]) -> None: 54 | self._recorder.spent_tokens.setdefault(public_key.encode_base64(), []).extend( 55 | passes 56 | ) 57 | 58 | 59 | def get_spender(reactor: IReactorTime, registry: CollectorRegistry) -> ISpender: 60 | """ 61 | Return an :py:`ISpender` to be used with the given storage server configuration. 62 | """ 63 | recorder, spender = RecordingSpender.make() 64 | return spender 65 | -------------------------------------------------------------------------------- /src/_zkapauthorizer/spending.py: -------------------------------------------------------------------------------- 1 | # Copyright 2019 PrivateStorage.io, LLC 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | 15 | """ 16 | A module for logic controlling the manner in which ZKAPs are spent. 
17 | """ 18 | 19 | from __future__ import annotations 20 | 21 | from collections.abc import Container 22 | from typing import Callable 23 | 24 | import attr 25 | from zope.interface import Attribute, Interface, implementer 26 | 27 | from ._attrs_zope import provides 28 | from .eliot import GET_PASSES, INVALID_PASSES, RESET_PASSES, SPENT_PASSES 29 | from .model import Pass, UnblindedToken, VoucherStore 30 | 31 | 32 | class IPassGroup(Interface): 33 | """ 34 | A group of passed meant to be spent together. 35 | """ 36 | 37 | unblinded_tokens: list[UnblindedToken] = Attribute( 38 | "The unblinded signatures used to create the passes." 39 | ) 40 | passes: list[Pass] = Attribute("The passes themselves.") 41 | 42 | def split(select_indices: Container[int]) -> tuple[IPassGroup, IPassGroup]: 43 | """ 44 | Create two new ``IPassGroup`` providers. The first contains all 45 | passes in this group at the given indices. The second contains all 46 | the others. 47 | 48 | :param select_indices: The indices of the passes to include in the 49 | first resulting group. 50 | 51 | :return: The two new groups. 52 | """ 53 | 54 | def expand(by_amount: int) -> IPassGroup: 55 | """ 56 | Create a new ``IPassGroup`` provider which contains all of this 57 | group's passes and some more. 58 | 59 | :param by_amount: The number of additional passes the resulting group 60 | should contain. 61 | 62 | :return: The new group. 63 | """ 64 | 65 | def mark_spent() -> None: 66 | """ 67 | The passes have been spent successfully. Ensure none of them appear in 68 | any ``IPassGroup`` provider created in the future. 69 | """ 70 | 71 | def mark_invalid(reason: str) -> None: 72 | """ 73 | The passes could not be spent. Ensure none of them appear in any 74 | ``IPassGroup`` provider created in the future. 75 | 76 | :param reason: A short description of the reason the passes could not 77 | be spent. 78 | """ 79 | 80 | def reset() -> None: 81 | """ 82 | The passes have not been spent. 
Return them to for use in a future 83 | ``IPassGroup`` provider. 84 | 85 | :return: ``None`` 86 | """ 87 | 88 | 89 | class IPassFactory(Interface): 90 | """ 91 | An object which can create passes. 92 | """ 93 | 94 | def get(message: bytes, num_passes: int) -> IPassGroup: 95 | """ 96 | :param message: A request-binding message for the resulting passes. 97 | 98 | :param num_passes: The number of passes to request. 99 | 100 | :return: A group of passes bound to the given message and of the 101 | requested size. 102 | """ 103 | 104 | def mark_spent(unblinded_tokens: list[UnblindedToken]) -> None: 105 | """ 106 | See ``IPassGroup.mark_spent`` 107 | """ 108 | 109 | def mark_invalid(reason: str, unblinded_tokens: list[UnblindedToken]) -> None: 110 | """ 111 | See ``IPassGroup.mark_invalid`` 112 | """ 113 | 114 | def reset(unblinded_tokens: list[UnblindedToken]) -> None: 115 | """ 116 | See ``IPassGroup.reset`` 117 | """ 118 | 119 | 120 | @implementer(IPassGroup) 121 | @attr.s 122 | class PassGroup(object): 123 | """ 124 | Track the state of a group of passes intended as payment for an operation. 125 | 126 | :ivar _message: The request binding message for this group of 127 | passes. 128 | 129 | :ivar IPassFactory _factory: The factory which created this pass group. 130 | 131 | :ivar list[Pass] passes: The passes of which this group consists. 
132 | """ 133 | 134 | _message: bytes = attr.ib(validator=attr.validators.instance_of(bytes)) 135 | _factory: IPassFactory = attr.ib(validator=provides(IPassFactory)) 136 | _tokens: list[tuple[UnblindedToken, Pass]] = attr.ib( 137 | validator=attr.validators.instance_of(list) 138 | ) 139 | 140 | @property 141 | def passes(self) -> list[Pass]: 142 | return list(pass_ for (unblinded_token, pass_) in self._tokens) 143 | 144 | @property 145 | def unblinded_tokens(self) -> list[UnblindedToken]: 146 | return list(unblinded_token for (unblinded_token, pass_) in self._tokens) 147 | 148 | def split(self, select_indices: Container[int]) -> tuple[PassGroup, PassGroup]: 149 | selected = [] 150 | unselected = [] 151 | for idx, t in enumerate(self._tokens): 152 | if idx in select_indices: 153 | selected.append(t) 154 | else: 155 | unselected.append(t) 156 | return ( 157 | attr.evolve(self, tokens=selected), 158 | attr.evolve(self, tokens=unselected), 159 | ) 160 | 161 | def expand(self, by_amount: int) -> PassGroup: 162 | return self + self._factory.get(self._message, by_amount) 163 | 164 | def __add__(self, other: IPassGroup) -> PassGroup: 165 | return attr.evolve( 166 | self, 167 | tokens=self._tokens + list(zip(other.unblinded_tokens, other.passes)), 168 | ) 169 | 170 | def mark_spent(self) -> None: 171 | self._factory.mark_spent(self.unblinded_tokens) 172 | 173 | def mark_invalid(self, reason: str) -> None: 174 | self._factory.mark_invalid(reason, self.unblinded_tokens) 175 | 176 | def reset(self) -> None: 177 | self._factory.reset(self.unblinded_tokens) 178 | 179 | 180 | @implementer(IPassFactory) 181 | @attr.s 182 | class SpendingController(object): 183 | """ 184 | A ``SpendingController`` gives out ZKAPs and arranges for re-spend 185 | attempts when necessary. 
186 | """ 187 | 188 | get_unblinded_tokens: Callable[[int], list[UnblindedToken]] = attr.ib() 189 | discard_unblinded_tokens: Callable[[list[UnblindedToken]], None] = attr.ib() 190 | invalidate_unblinded_tokens: Callable[[str, list[UnblindedToken]], None] = attr.ib() 191 | reset_unblinded_tokens: Callable[[list[UnblindedToken]], None] = attr.ib() 192 | 193 | tokens_to_passes: Callable[[bytes, list[UnblindedToken]], list[Pass]] = attr.ib() 194 | 195 | @classmethod 196 | def for_store( 197 | cls, 198 | tokens_to_passes: Callable[[bytes, list[UnblindedToken]], list[Pass]], 199 | store: VoucherStore, 200 | ) -> "SpendingController": 201 | return cls( 202 | get_unblinded_tokens=store.get_unblinded_tokens, 203 | discard_unblinded_tokens=store.discard_unblinded_tokens, 204 | invalidate_unblinded_tokens=store.invalidate_unblinded_tokens, 205 | reset_unblinded_tokens=store.reset_unblinded_tokens, 206 | tokens_to_passes=tokens_to_passes, 207 | ) 208 | 209 | def get(self, message: bytes, num_passes: int) -> PassGroup: 210 | unblinded_tokens = self.get_unblinded_tokens(num_passes) 211 | passes = self.tokens_to_passes(message, unblinded_tokens) 212 | GET_PASSES.log( 213 | message=message.decode("utf-8"), 214 | count=num_passes, 215 | ) 216 | return PassGroup(message, self, list(zip(unblinded_tokens, passes))) 217 | 218 | def mark_spent(self, unblinded_tokens: list[UnblindedToken]) -> None: 219 | SPENT_PASSES.log( 220 | count=len(unblinded_tokens), 221 | ) 222 | self.discard_unblinded_tokens(unblinded_tokens) 223 | 224 | def mark_invalid(self, reason: str, unblinded_tokens: list[UnblindedToken]) -> None: 225 | INVALID_PASSES.log( 226 | reason=reason, 227 | count=len(unblinded_tokens), 228 | ) 229 | self.invalidate_unblinded_tokens(reason, unblinded_tokens) 230 | 231 | def reset(self, unblinded_tokens: list[UnblindedToken]) -> None: 232 | RESET_PASSES.log( 233 | count=len(unblinded_tokens), 234 | ) 235 | self.reset_unblinded_tokens(unblinded_tokens) 236 | 
--------------------------------------------------------------------------------
src/_zkapauthorizer/sql.py:
"""
Model SQL-related datatypes.

This is focused on SQLite3 and no doubt nevertheless incomplete.  The goal is
to support testing the replication/recovery system.
"""

from contextlib import AbstractContextManager
from datetime import datetime
from enum import Enum, auto
from sqlite3 import Connection as _SQLite3Connection
from typing import TYPE_CHECKING, Any, Iterable, Optional, Protocol, Union

from attrs import frozen
from sqlparse import parse
from typing_extensions import TypeAlias

# The Python types which this module accepts as SQLite3 column values.
SQLType: TypeAlias = Union[int, float, str, bytes, datetime, None]
# The same set of types expressed as a tuple usable with ``isinstance``.
SQLRuntimeType: tuple[type, ...] = (int, float, str, bytes, datetime, type(None))

if TYPE_CHECKING:
    # _Parameters only exists in the typeshed sqlite3 pyi files.  We can only
    # import from pyi files when we're type checking.
    #
    # Also, yes, it's private.  However, it expands to a ~30 term expression.
    # This is the lesser of two evils.
    from sqlite3.dbapi2 import _Parameters as Parameters


class AbstractCursor(Protocol):
    """
    A SQLite3 database cursor.
    """

    @property
    def lastrowid(self) -> Optional[int]: ...

    @property
    def rowcount(self) -> Optional[int]: ...

    def execute(self, statement: str, args: "Parameters", /) -> "AbstractCursor": ...

    def executemany(
        self, statement: str, args: Iterable["Parameters"]
    ) -> "AbstractCursor": ...

    def close(self) -> None: ...

    def fetchall(self) -> list[Any]: ...

    def fetchmany(self, n: int) -> list[Any]: ...

    def fetchone(self) -> Any: ...


class AbstractConnection(Protocol):
    """
    A SQLite3 database connection.
    """

    def iterdump(self) -> Iterable[str]: ...

    def cursor(self, cursorClass: None = None) -> AbstractCursor: ...

    # NOTE(review): ``sqlite3.Connection.__enter__`` returns the connection
    # itself; this annotation says it returns a context manager — confirm
    # whether the wrapper objects used here really differ.
    def __enter__(self) -> AbstractContextManager["AbstractConnection"]: ...

    def __exit__(
        self,
        exc_type: Optional[type],
        exc_value: Optional[BaseException],
        exc_tb: Optional[Any],
    ) -> bool: ...


# Short aliases used throughout the codebase.
Connection = AbstractConnection
Cursor = AbstractCursor


class UnboundConnect(Protocol):
    """
    Connect to a SQLite3 database.
    """

    def __call__(
        self,
        path: str,
        isolation_level: Optional[str] = None,
    ) -> _SQLite3Connection:
        """
        Get a new database connection.
        """


class BoundConnect(Protocol):
    """
    Connect to a certain (ie, not parameterized) SQLite3 database.
    """

    def __call__(
        self,
        isolation_level: Optional[str] = None,
    ) -> _SQLite3Connection:
        """
        Get a new database connection.
        """


class StorageAffinity(Enum):
    """
    Represent the different "storage affinities" possible for a SQLite3
    column.
    """

    INT = auto()
    TEXT = auto()
    BLOB = auto()
    REAL = auto()
    NUMERIC = auto()


@frozen
class Column:
    """
    Represent a column in a SQLite3 table.

    :ivar affinity: The expected type affinity for values in this column.  See
        https://www.sqlite.org/datatype3.html
    """

    affinity: StorageAffinity


@frozen
class Table:
    """
    Represent a table in a SQLite3 database.

    :ivar columns: The columns that make up this table.
    """

    columns: list[tuple[str, Column]]


class Statement(Protocol):
    """
    The common interface of the statement types (Insert, Update, Select,
    Delete) defined below.
    """

    @property
    def table_name(self) -> str: ...

    def statement(self) -> str: ...

    def arguments(self) -> tuple[SQLType, ...]: ...
151 | 152 | 153 | @frozen 154 | class Insert: 155 | """ 156 | Represent an insertion of one row into a table. 157 | 158 | :ivar table_name: The name of the table where the row can be inserted. 159 | 160 | :ivar table: A representation of the table itself. 161 | 162 | :ivar fields: The values which can be inserted. 163 | """ 164 | 165 | table_name: str 166 | table: Table 167 | fields: tuple[SQLType, ...] 168 | 169 | def statement(self) -> str: 170 | names = ", ".join((escape_identifier(name) for (name, _) in self.table.columns)) 171 | placeholders = ", ".join("?" * len(self.table.columns)) 172 | return ( 173 | f"INSERT INTO {escape_identifier(self.table_name)} " 174 | f"({names}) " 175 | f"VALUES ({placeholders})" 176 | ) 177 | 178 | def arguments(self) -> tuple[SQLType, ...]: 179 | return self.fields 180 | 181 | 182 | def quote_sql_value(cursor: Cursor, value: SQLType) -> str: 183 | """ 184 | Use the SQL `quote()` function to return the quoted version of `value`. 185 | 186 | :returns: the quoted value 187 | """ 188 | if isinstance(value, (int, float, datetime)): 189 | return str(value) 190 | if value is None: 191 | return "NULL" 192 | if isinstance(value, (str, bytes)): 193 | cursor.execute("SELECT quote(?);", (value,)) 194 | result = cursor.fetchall()[0][0] 195 | assert isinstance(result, str) 196 | return result 197 | raise ValueError(f"Do not know how to quote value of type {type(value)}") 198 | 199 | 200 | @frozen 201 | class Update: 202 | """ 203 | Represent an update to some rows in a table. 204 | 205 | Currently this updates all rows. 206 | 207 | :ivar table_name: The name of the table to which the update applies. 208 | 209 | :ivar table: A representation of the table itself. 210 | 211 | :ivar fields: The new values for each column in the table. 212 | """ 213 | 214 | table_name: str 215 | table: Table 216 | fields: tuple[SQLType, ...] 
217 | 218 | def statement(self) -> str: 219 | field_names = list(name for (name, _) in self.table.columns) 220 | assignments = ", ".join( 221 | f"{escape_identifier(name)} = ?" for name in field_names 222 | ) 223 | return f"UPDATE {escape_identifier(self.table_name)} SET {assignments}" 224 | 225 | def arguments(self) -> tuple[SQLType, ...]: 226 | return self.fields 227 | 228 | 229 | @frozen 230 | class Select: 231 | """ 232 | Represent a query about a certain table 233 | 234 | :ivar table_name: valid SQL identifier for a table 235 | """ 236 | 237 | table_name: str 238 | 239 | def statement(self) -> str: 240 | return f"SELECT * FROM {escape_identifier(self.table_name)}" 241 | 242 | def arguments(self) -> tuple[()]: 243 | return () 244 | 245 | 246 | @frozen 247 | class Delete: 248 | """ 249 | Represent the deletion of some rows from a table. 250 | 251 | Currently this deletes all rows. 252 | 253 | :ivar table_name: The name of the table from which to rows can be deleted. 254 | """ 255 | 256 | table_name: str 257 | 258 | def statement(self) -> str: 259 | return f"DELETE FROM {escape_identifier(self.table_name)}" 260 | 261 | def arguments(self) -> tuple[()]: 262 | return () 263 | 264 | 265 | def escape_identifier(string: str) -> str: 266 | """ 267 | Escape an arbitrary string for use as a SQLite3 identifier. 268 | """ 269 | return f"'{string}'" 270 | 271 | 272 | def column_ddl(name: str, column: Column) -> str: 273 | """ 274 | Get a column DDL fragment for a column of the given name and type. 275 | 276 | :return: *bar* in **create table foo ( bar )** 277 | """ 278 | return f"{escape_identifier(name)} {column.affinity.name}" 279 | 280 | 281 | def create_table(name: str, table: Table) -> str: 282 | """ 283 | Get a table creation DDL statement for a table of the given name and type. 
    """
    columns = ", ".join(column_ddl(name, column) for (name, column) in table.columns)
    return f"CREATE TABLE {escape_identifier(name)} ({columns})"


def statement_mutates(statement: str) -> bool:
    """
    Predicate to decide if `statement` will change the database.
    """
    # This exact statement is issued by the replication machinery itself and
    # never modifies rows, so short-circuit before parsing.
    if statement == "BEGIN IMMEDIATE TRANSACTION":
        return False
    # Parse the statement; anything other than a SELECT is assumed to mutate.
    (parsed,) = parse(statement)
    return parsed.get_type() not in {"SELECT"}
--------------------------------------------------------------------------------
src/_zkapauthorizer/tests/__init__.py:
# Copyright 2019 PrivateStorage.io, LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""
The automated unit test suite.
"""


def _configure_hypothesis() -> None:
    """
    Define Hypothesis profiles and select one based on environment
    variables.
    """
    from os import environ

    from hypothesis import HealthCheck, settings

    base = settings(
        suppress_health_check=[
            # CPU resources available to builds typically varies significantly
            # from run to run making it difficult to determine if "too slow"
            # data generation is a result of the code or the execution
            # environment.  Prevent these checks from (intermittently) failing
            # tests that are otherwise fine.
            HealthCheck.too_slow,
        ],
        # With the same reasoning, disable the test deadline.
        deadline=None,
    )

    settings.register_profile("default", base)

    settings.register_profile(
        "ci",
        base,
        # Make CI runs a little more aggressive in amount of coverage they try
        # to provide.
        max_examples=200,
    )

    settings.register_profile(
        "fast",
        base,
        max_examples=2,
    )

    settings.register_profile(
        "big",
        base,
        max_examples=10000,
        # The only rule-based state machine we have now is quite simple and
        # can probably be completely explored in about 5 steps.  Give it some
        # headroom beyond that in case I'm wrong but don't let it run to the
        # full 50 because, combined with searching for 10000 successful
        # examples this makes the stateful test take *ages* to complete.
        stateful_step_count=15,
    )

    profile_name = environ.get("ZKAPAUTHORIZER_HYPOTHESIS_PROFILE", "fast")
    settings.load_profile(profile_name)
    print("Loaded profile {}".format(profile_name))


# Run at import time so every test module gets the selected profile.
_configure_hypothesis()


def _monkeypatch_tahoe_3874() -> None:
    # Fix https://tahoe-lafs.org/trac/tahoe-lafs/ticket/3874
    from allmydata.testing.web import _FakeTahoeUriHandler
    from hyperlink import DecodedURL
    from twisted.web import http
    from twisted.web.iweb import IRequest

    # Replacement for the upstream handler; see the ticket above.
    def render_GET(self: _FakeTahoeUriHandler, request: IRequest) -> bytes:
        uri = DecodedURL.from_text(request.uri.decode("utf8"))
        capability = None
        for arg, value in uri.query:
            if arg == "uri" and value is not None:
                capability = value.encode("ascii")
        # it's legal to use the form "/uri/<capability>"
        if capability is None and request.postpath and request.postpath[0]:
            capability = request.postpath[0]

        # if we don't yet have a capability, that's an error
        if capability is None:
            request.setResponseCode(http.BAD_REQUEST)
            return b"GET /uri requires uri="

        # the user gave us a capability; if our Grid doesn't have any
        # data for it, that's an error.
        if capability not in self.data:
            request.setResponseCode(http.BAD_REQUEST)
            return "No data for '{}'".format(capability.decode("ascii")).encode("ascii")

        return self.data[capability]  # type: ignore[no-any-return]

    _FakeTahoeUriHandler.render_GET = render_GET


# Apply the patch at import time, before any test uses the fake handler.
_monkeypatch_tahoe_3874()
--------------------------------------------------------------------------------
src/_zkapauthorizer/tests/_exception.py:
# Copyright (c) 2009-2012 testtools developers.
#
# Copyright 2019 PrivateStorage.io, LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
16 | 17 | __all__ = [ 18 | "MatchesExceptionType", 19 | "Raises", 20 | "raises", 21 | ] 22 | 23 | import sys 24 | from typing import Any, Callable, Optional 25 | 26 | from testtools.content import TracebackContent 27 | from testtools.matchers import Matcher, Mismatch 28 | 29 | 30 | def _is_exception(exc: BaseException) -> bool: 31 | return isinstance(exc, BaseException) 32 | 33 | 34 | def _is_user_exception(exc: BaseException) -> bool: 35 | return isinstance(exc, Exception) 36 | 37 | 38 | class MatchesExceptionType(Matcher): 39 | """ 40 | Match an exc_info tuple against an exception type. 41 | """ 42 | 43 | def __init__(self, exception_type: type) -> None: 44 | """ 45 | Create a MatchesException that will match exc_info's for exception. 46 | 47 | :param exception: An exception type. 48 | """ 49 | Matcher.__init__(self) 50 | self.expected = exception_type 51 | 52 | def match(self, other: tuple[type, BaseException, Any]) -> Optional[Mismatch]: 53 | if type(other) != tuple: 54 | return Mismatch("{!r} is not an exc_info tuple".format(other)) 55 | expected_class = self.expected 56 | etype, evalue, etb = other 57 | if not issubclass(etype, expected_class): 58 | return Mismatch( 59 | "{!r} is an instance of {}, expected an instance of {}.".format( 60 | evalue, 61 | etype, 62 | expected_class, 63 | ), 64 | dict( 65 | traceback=TracebackContent(other, None), 66 | ), 67 | ) 68 | return None 69 | 70 | def __str__(self) -> str: 71 | return f"MatchesExceptionType({self.expected!r})" 72 | 73 | 74 | class Raises(Matcher): 75 | """Match if the matchee raises an exception when called. 76 | 77 | Exceptions which are not subclasses of Exception propogate out of the 78 | Raises.match call unless they are explicitly matched. 79 | """ 80 | 81 | def __init__(self, exception_matcher: Matcher): 82 | """ 83 | Create a Raises matcher. 84 | 85 | :param exception_matcher: Validator for the exception raised by 86 | matchee. 
The exc_info tuple for the exception raised is passed 87 | into that matcher. 88 | """ 89 | self.exception_matcher = exception_matcher 90 | 91 | def match(self, matchee: Callable[[], object]) -> Optional[Mismatch]: 92 | try: 93 | result = matchee() 94 | return Mismatch("%r returned %r" % (matchee, result)) 95 | # Catch all exceptions: Raises() should be able to match a 96 | # KeyboardInterrupt or SystemExit. 97 | except: 98 | exc_info = sys.exc_info() 99 | mismatch = self.exception_matcher.match(exc_info) 100 | exc_type = exc_info[1] 101 | assert exc_type is not None 102 | # It's safer not to keep the traceback around. 103 | del exc_info 104 | if mismatch: 105 | # The exception did not match, or no explicit matching logic was 106 | # performed. If the exception is a non-user exception then 107 | # propagate it. 108 | if _is_exception(exc_type) and not _is_user_exception(exc_type): 109 | raise 110 | return mismatch 111 | return None 112 | 113 | def __str__(self) -> str: 114 | return "Raises()" 115 | 116 | 117 | def raises(exception_type: type) -> Raises: 118 | """Make a matcher that checks that a callable raises an exception. 119 | 120 | This is a convenience function, exactly equivalent to:: 121 | 122 | return Raises(MatchesExceptionType(exception_type)) 123 | 124 | See `Raises` and `MatchesExceptionType` for more information. 
125 | """ 126 | return Raises(MatchesExceptionType(exception_type)) 127 | -------------------------------------------------------------------------------- /src/_zkapauthorizer/tests/_float_matchers.py: -------------------------------------------------------------------------------- 1 | from math import isnan, nextafter 2 | from typing import Optional 3 | 4 | from attrs import define 5 | from testtools.matchers import Mismatch 6 | 7 | 8 | def unit_of_least_precision_distance( 9 | start: float, goal: float, max_distance: int 10 | ) -> int: 11 | """ 12 | Compute the distance from ``start`` to ``goal`` in terms of floating point 13 | "unit of least precision" ("ULP"). 14 | 15 | This is roughly how many floating point values there are between ``start`` 16 | and ``goal``. 17 | 18 | :return: The distance. 19 | 20 | :raise ValueError: If the distance is greater than ``max_distance``. The 21 | cost of the distance calculation is linear on the size of the distance 22 | and the distance between two floating point values could be almost 2 23 | ** 64. You probably want to limit the amount of work done to a much 24 | smaller distance. 25 | """ 26 | # Sometimes a value is exactly an integer and may come out of some system 27 | # as an int instead of a float. We can deal with that so let it through. 28 | # Provide an early error for any other types, though. 
29 | if not isinstance(start, (int, float)) or not isinstance(goal, (int, float)): 30 | raise TypeError(f"requires ints or floats, got {start!r} and {goal!r} instead") 31 | 32 | if isnan(start) or isnan(goal): 33 | raise ValueError("Cannot find distance to or from NaN") 34 | 35 | if start == goal: 36 | return 0 37 | 38 | distance = 0 39 | while distance < max_distance: 40 | distance += 1 41 | start = nextafter(start, goal) 42 | if start == goal: 43 | return distance 44 | 45 | raise ValueError(f"{start} is more than {distance} from {goal}") 46 | 47 | 48 | @define 49 | class _MatchFloatWithinDistance(object): 50 | """ 51 | See ``matches_float_within_distance``. 52 | """ 53 | 54 | reference: float 55 | distance: int 56 | max_distance: int 57 | 58 | def match(self, actual: float) -> Optional[Mismatch]: 59 | try: 60 | distance = unit_of_least_precision_distance( 61 | self.reference, actual, self.max_distance 62 | ) 63 | except ValueError: 64 | return Mismatch( 65 | f"float {actual} is more than {self.max_distance} " 66 | f"from {self.reference} - search abandoned " 67 | f"(allowed distance is {self.distance})", 68 | ) 69 | else: 70 | if distance > self.distance: 71 | return Mismatch( 72 | f"distance from {self.reference} " 73 | f"to {actual} " 74 | f"is {distance}, " 75 | f"greater than allowed distance of {self.distance}", 76 | ) 77 | return None 78 | 79 | 80 | def matches_float_within_distance( 81 | reference: float, distance: int, max_distance: int = 100 82 | ) -> _MatchFloatWithinDistance: 83 | """ 84 | Matches a floating point value that is no more than a given distance in 85 | "unit of least precision" steps of a reference value. 86 | 87 | :param reference: The reference floating point value. 88 | :param distance: The maximum allowed distance to a matched value. 89 | 90 | :param max_distance: The maximum distance to search (to try to provide 91 | extra information when the match fails). 
def equals_database(reference: Connection) -> _Matcher:
    """
    Build a matcher for a SQLite3 connection to a database whose state equals
    that of the ``reference`` connection's database.
    """
    # We cannot simply compare dumped SQL statement strings because floats
    # need a custom comparison: SQLite3 bugs mean certain values do not
    # round-trip through the database exactly (fortunately the error is small
    # and does not accumulate).  Rather than parsing SQL text to dig the
    # floats back out, dump both databases into structured statements and
    # compare those values directly, applying the float-tolerant logic where
    # needed.
    reference_dump = list(structured_dump(reference))
    return AfterPreprocessing(
        lambda actual: list(structured_dump(actual)),
        _MatchesDump(reference_dump),
    )
@define
class _MatchStatement:
    """
    Match a single structured SQL statement.  Statements are the structured
    values ``equals_database`` deals with, not actual SQL strings.
    """

    reference: Union[str, Statement]

    def match(self, actual: Statement) -> Optional[Mismatch]:
        """
        Compare ``actual`` against the reference statement.

        :return: ``None`` on a match, otherwise a ``Mismatch`` describing the
            first difference found.
        """
        if isinstance(actual, Insert) and isinstance(self.reference, Insert):
            # Match an insert-type statement field by field so float fields
            # can get the tolerant comparison from ``_get_matcher``.
            if actual.table_name != self.reference.table_name:
                return Mismatch(
                    f"table name {actual} != {self.reference}",
                )
            if len(actual.fields) != len(self.reference.fields):
                return Mismatch(
                    f"length {len(actual.fields)} != {len(self.reference.fields)}",
                )
            for actual_field, reference_field in zip(
                actual.fields, self.reference.fields
            ):
                matcher = _get_matcher(reference_field, actual_field)
                mismatch = matcher.match(actual_field)
                if mismatch is not None:
                    return mismatch
        else:
            # Match something else by equality.
            return Equals(self.reference).match(actual)
        return None


@define
class _MatchesDump:
    """
    Match a complete database dump's worth of structured SQL statements.
    """

    reference: list[Union[str, Statement]]

    def match(self, actual: list[Union[str, Statement]]) -> Optional[Mismatch]:
        """
        Compare each actual statement against the corresponding reference
        statement, then check that the dumps are the same length.
        """
        for n, (a, r) in enumerate(zip(actual, self.reference)):
            mismatch = Annotate(f"row #{n}", _MatchStatement(r)).match(a)
            if mismatch is not None:
                return mismatch

        if len(actual) != len(self.reference):
            return Mismatch(
                f"reference has {len(self.reference)} items; "
                f"actual has {len(actual)} items",
            )

        return None
17 | """ 18 | 19 | from typing import Awaitable, Iterable, NoReturn, Type, TypeVar 20 | 21 | from attrs import define, field, frozen 22 | from foolscap.api import ( # type: ignore[attr-defined] 23 | Any, 24 | Copyable, 25 | Referenceable, 26 | RemoteInterface, 27 | ) 28 | from foolscap.copyable import CopyableSlicer, ICopyable 29 | from foolscap.referenceable import RemoteReference 30 | from twisted.internet.defer import fail, succeed 31 | from zope.interface import Interface, implementer 32 | 33 | 34 | class RIStub(RemoteInterface): 35 | pass 36 | 37 | 38 | class RIEcho(RemoteInterface): 39 | def echo(argument=Any()): # type: ignore[no-untyped-def] 40 | return Any() 41 | 42 | 43 | class StubStorageServer(object): 44 | def register_bucket_writer_close_handler(self, handler: object) -> None: 45 | pass 46 | 47 | 48 | def get_anonymous_storage_server() -> StubStorageServer: 49 | return StubStorageServer() 50 | 51 | 52 | class BrokenCopyable(Copyable): 53 | """ 54 | I don't have a ``typeToCopy`` so I can't be serialized. 55 | """ 56 | 57 | 58 | T = TypeVar("T") 59 | 60 | 61 | @implementer( 62 | RIEcho # type: ignore # zope.interface.implementer accepts interface, not ... 63 | ) 64 | class Echoer(Referenceable): 65 | def remote_echo(self, argument: T) -> T: 66 | return argument 67 | 68 | 69 | @frozen 70 | class DummyReferenceable(object): 71 | _interface: Type[Interface] 72 | 73 | def getInterface(self) -> Type[Interface]: 74 | return self._interface 75 | 76 | def doRemoteCall(self, *a: object, **kw: object) -> object: 77 | return None 78 | 79 | 80 | @define 81 | class LocalTracker(object): 82 | """ 83 | Pretend to be a tracker for a ``LocalRemote``. 
84 | """ 85 | 86 | interface: Type[RemoteInterface] 87 | interfaceName: str = field(init=False) 88 | 89 | @interfaceName.default 90 | def _interfaceName_default(self) -> str: 91 | return self.interface.__remote_name__ # type: ignore[no-any-return] 92 | 93 | def getURL(self) -> str: 94 | return "pb://abcd@127.0.0.1:12345/efgh" 95 | 96 | 97 | @define 98 | class LocalRemote(RemoteReference): 99 | """ 100 | Adapt a referenceable to behave as if it were a remote reference instead. 101 | 102 | ``foolscap.referenceable.LocalReferenceable`` is in many ways a better 103 | adapter between these interfaces but it also uses ``eventually`` which 104 | complicates matters immensely for testing. 105 | 106 | :ivar _referenceable: The object to which this provides a simulated remote 107 | interface. 108 | """ 109 | 110 | _referenceable: Referenceable 111 | check_args: bool = True 112 | tracker: LocalTracker = field() 113 | 114 | @tracker.default 115 | def _tracker_default(self) -> LocalTracker: 116 | return LocalTracker( 117 | self._referenceable.getInterface(), 118 | ) 119 | 120 | def notifyOnDisconnect( 121 | self, callback: object, *args: object, **kwargs: object 122 | ) -> NoReturn: 123 | raise NotImplementedError() 124 | 125 | def dontNotifyOnDisconnect(self, cookie: object) -> NoReturn: 126 | raise NotImplementedError() 127 | 128 | def callRemoteOnly(self, _name: Any, *args: Any, **kwargs: Any) -> NoReturn: 129 | raise NotImplementedError() 130 | 131 | def callRemote(self, _name: Any, *args: Any, **kwargs: Any) -> Awaitable[object]: 132 | """ 133 | Call the given method on the wrapped object, passing the given arguments. 134 | 135 | Arguments and return are checked for conformance to the remote 136 | interface but they are not actually serialized. 137 | 138 | :return Deferred: The result of the call on the wrapped object. 
def _check_copyables(copyables: Iterable[object]) -> None:
    """
    Verify that every copyable value in ``copyables`` can actually be sliced
    for serialization, recursing into dicts and lists to find nested
    copyables.
    """
    for value in copyables:
        if ICopyable.providedBy(value):
            # Drive the slicer to completion; it raises if the copyable is
            # broken (for example, missing ``typeToCopy``).
            list(CopyableSlicer(value).slice(False, None))
        elif isinstance(value, dict):
            _check_copyables(value.values())
        elif isinstance(value, list):
            _check_copyables(value)
3 | """ 4 | 5 | from typing import Any 6 | 7 | from attrs import define, field, frozen 8 | from cattrs.gen import make_dict_unstructure_fn, override 9 | from cattrs.preconf import json 10 | from challenge_bypass_ristretto import ( 11 | BatchDLEQProof, 12 | BlindedToken, 13 | PublicKey, 14 | SigningKey, 15 | ) 16 | from twisted.internet.address import IPv4Address 17 | from twisted.internet.defer import Deferred, maybeDeferred 18 | from twisted.internet.interfaces import IListeningPort, IReactorTCP 19 | from twisted.python.filepath import FilePath 20 | from twisted.web.iweb import IRequest 21 | from twisted.web.resource import Resource 22 | from twisted.web.server import Site 23 | 24 | from .._types import ClientConfig, ServerConfig 25 | 26 | 27 | @define 28 | class Issuer: 29 | """ 30 | Represent a running HTTP server which implements the ZKAP issuer API. 31 | 32 | :ivar port: The listening network port object. 33 | 34 | :ivar signing_key_path: The filesystem path to the file containing the 35 | Ristretto signing key used by the running server. 36 | """ 37 | 38 | port: IListeningPort 39 | signing_key_path: FilePath 40 | 41 | # Some application code uses values of type `Issuer` as "testresources" 42 | # resources values. Unfortunately after the "resource manager" creates 43 | # the "resource" it sets attributes on the "resource" (one attribute for 44 | # each dependency of the resource). Issuer has no need of these as it has 45 | # already been completely initialized by `__init__`. 46 | # 47 | # Define an extra attribute to swallow this value. Unfortunately this 48 | # also means the type can't be frozen. :/ But other than this we will try 49 | # to behave as though it is frozen. 50 | issuer_dir: Any = None 51 | 52 | @property 53 | def allowed_public_keys(self) -> list[PublicKey]: 54 | """ 55 | The public keys corresponding to the signing keys this server will 56 | use. 
57 | """ 58 | return [ 59 | PublicKey.from_signing_key( 60 | SigningKey.decode_base64(self.signing_key_path.getContent()) 61 | ) 62 | ] 63 | 64 | @property 65 | def encoded_allowed_public_keys(self) -> str: 66 | """ 67 | The allowed public keys, serialized correctly to be written to a 68 | Tahoe-LAFS configuration file. 69 | """ 70 | return ",".join( 71 | k.encode_base64().decode("ascii") for k in self.allowed_public_keys 72 | ) 73 | 74 | @property 75 | def root_url(self) -> str: 76 | """ 77 | The root resource for the ZKAP issuer HTTP API run by this server. 78 | """ 79 | address = self.port.getHost() 80 | assert isinstance(address, IPv4Address) 81 | return f"http://127.0.0.1:{address.port}/" 82 | 83 | @property 84 | def server_config(self) -> ServerConfig: 85 | """ 86 | The configuration items to add to the ZKAPAuthorizer server 87 | configuration section of the Tahoe-LAFS configuration file for a 88 | ZKAP-enabled storage server. 89 | """ 90 | return { 91 | "ristretto-issuer-root-url": self.root_url, 92 | "ristretto-signing-key-path": self.signing_key_path.asTextMode().path, 93 | } 94 | 95 | @property 96 | def client_config(self) -> ClientConfig: 97 | """ 98 | The configuration items to add to the ZKAPAuthorizer client 99 | configuration section of the Tahoe-LAFS configuration file for a 100 | ZKAP-enabled storage client. 101 | """ 102 | return { 103 | "redeemer": "ristretto", 104 | "ristretto-issuer-root-url": self.root_url, 105 | "allowed-public-keys": self.encoded_allowed_public_keys, 106 | } 107 | 108 | 109 | @frozen 110 | class RedemptionRequest: 111 | """ 112 | Represent the fields of a request to redeem a voucher for some ZKAPs. 113 | 114 | :ivar redeemVoucher: The voucher being redeemed. 115 | :ivar redeemTokens: The blinded tokens submitted for signature. 116 | :ivar redeemCounter: The number of the redemption group to which these 117 | tokens belong. 
118 | """ 119 | 120 | redeemVoucher: str 121 | redeemTokens: list[str] 122 | redeemCounter: int 123 | 124 | 125 | @frozen 126 | class Issuance: 127 | """ 128 | Represent a group of issued ZKAPs. 129 | """ 130 | 131 | signatures: list[str] 132 | proof: str 133 | public_key: str 134 | success: bool = field(init=False, default=True) 135 | 136 | 137 | def _issue(signing_key: SigningKey, req: RedemptionRequest) -> Issuance: 138 | """ 139 | Respond to a request for a voucher redemption. 140 | 141 | :param signing_key: The signing key to use create issue signatures. 142 | 143 | :param req: The redemption request carrying the blinded tokens to sign. 144 | 145 | :return: The requested signatures and a proof that they were created with 146 | ``signing_key``. 147 | """ 148 | blinded_tokens = [ 149 | BlindedToken.decode_base64(blinded_token.encode("ascii")) 150 | for blinded_token in req.redeemTokens 151 | ] 152 | 153 | signatures = list(map(signing_key.sign, blinded_tokens)) 154 | 155 | proof = BatchDLEQProof.create(signing_key, blinded_tokens, signatures) 156 | 157 | return Issuance( 158 | [sig.encode_base64().decode("ascii") for sig in signatures], 159 | proof.encode_base64().decode("ascii"), 160 | PublicKey.from_signing_key(signing_key).encode_base64().decode("ascii"), 161 | ) 162 | 163 | 164 | class Redeem(Resource): 165 | """ 166 | Implement the voucher redemption endpoint. 
167 | """ 168 | 169 | def __init__(self, signing_key: SigningKey) -> None: 170 | Resource.__init__(self) 171 | self.signing_key = signing_key 172 | 173 | def render_POST(self, request: IRequest) -> bytes: 174 | req = _converter.loads(request.content.read(), RedemptionRequest) 175 | issuance = _issue(self.signing_key, req) 176 | return _converter.dumps(issuance).encode("utf-8") 177 | 178 | 179 | def issuer(signing_key: SigningKey) -> Site: 180 | v1 = Resource() 181 | v1.putChild(b"redeem", Redeem(signing_key)) 182 | 183 | r = Resource() 184 | r.putChild(b"v1", v1) 185 | 186 | return Site(r) 187 | 188 | 189 | def run_issuer(reactor: IReactorTCP, signing_key_path: FilePath) -> Issuer: 190 | signing_key = SigningKey.decode_base64(signing_key_path.getContent()) 191 | port = reactor.listenTCP(0, issuer(signing_key), backlog=3, interface="127.0.0.1") 192 | return Issuer(port, signing_key_path) 193 | 194 | 195 | def stop_issuer(issuer: Issuer) -> Deferred[None]: 196 | return maybeDeferred(issuer.port.stopListening) # type: ignore[arg-type] 197 | 198 | 199 | _converter = json.make_converter(forbid_extra_keys=True) 200 | _issuance_hook = make_dict_unstructure_fn( 201 | Issuance, _converter, public_key=override(rename="public-key") 202 | ) 203 | _converter.register_unstructure_hook(Issuance, _issuance_hook) 204 | -------------------------------------------------------------------------------- /src/_zkapauthorizer/tests/privacypass.py: -------------------------------------------------------------------------------- 1 | # Copyright 2019 PrivateStorage.io, LLC 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 
def make_passes(
    signing_key: SigningKey, for_message: bytes, random_tokens: list[RandomToken]
) -> list[Pass]:
    """
    Create a number of cryptographically correct privacy passes.

    :param signing_key: The key to use to sign the passes.

    :param for_message: The request-binding message with which to associate
        the passes.

    :param random_tokens: The random tokens to feed in to the pass generation
        process.

    :return: The privacy passes.  The returned list has one element for each
        element of ``random_tokens``.
    """
    blinded_tokens = [token.blind() for token in random_tokens]
    signatures = [signing_key.sign(blinded) for blinded in blinded_tokens]
    # The proof lets a client verify the signatures were made with the
    # expected signing key; unblinding validates it as a side effect.
    proof = BatchDLEQProof.create(
        signing_key,
        blinded_tokens,
        signatures,
    )
    unblinded_signatures = proof.invalid_or_unblind(
        random_tokens,
        blinded_tokens,
        signatures,
        PublicKey.from_signing_key(signing_key),
    )
    # Each pass pairs a token preimage with a signature over the
    # request-binding message made with that token's verification key.
    return [
        Pass(
            unblinded.preimage().encode_base64(),
            unblinded.derive_verification_key_sha512()
            .sign_sha512(for_message)
            .encode_base64(),
        )
        for unblinded in unblinded_signatures
    ]
class Base64Tests(TestCase):
    """
    Tests for ``urlsafe_b64decode``.
    """

    @given(binary())
    def test_roundtrip(self, bytestring: bytes) -> None:
        """
        Byte strings round-trip through ``base64.urlsafe_b64encode`` and
        ``urlsafe_b64decode``.
        """
        # Round-tripping arbitrary byte strings demonstrates the decoder is a
        # left inverse of the standard library encoder.
        encoded = urlsafe_b64encode(bytestring)
        self.assertThat(
            urlsafe_b64decode(encoded),
            Equals(bytestring),
        )
def remote_reference() -> RemoteReferenceOnly:
    """
    Build a disconnected ``RemoteReferenceOnly`` carrying a plausible-looking
    fURL, for tests that only need the reference's metadata.
    """
    tub = Tub()
    tub.setLocation("127.0.0.1:12345")
    furl = tub.buildURL("efgh")

    # RemoteReferenceTracker.__init__ performs extra correctness checking
    # that requires a broker when given a URL.  Dodge that by constructing
    # with url=None and patching the url on afterwards.  Ugh.
    tracker = RemoteReferenceTracker(None, None, None, RIStub)
    tracker.url = furl

    return RemoteReferenceOnly(tracker)
78 | """ 79 | 80 | @given( 81 | ref=one_of( 82 | just(remote_reference()), 83 | just(LocalRemote(DummyReferenceable(RIStub))), 84 | ), 85 | ) 86 | def test_tracker_url(self, ref: RemoteReference) -> None: 87 | """ 88 | The URL of a remote reference can be retrieved using the tracker 89 | attribute. 90 | """ 91 | self.assertThat( 92 | ref.tracker.getURL(), 93 | MatchesAll( 94 | IsInstance(str), 95 | AfterPreprocessing( 96 | decode_furl, 97 | Always(), 98 | ), 99 | ), 100 | ) 101 | 102 | def test_arg_schema(self) -> None: 103 | """ 104 | ``LocalRemote.callRemote`` returns a ``Deferred`` that fails with a 105 | ``Violation`` if an parameter receives an argument which doesn't 106 | conform to its schema. 107 | """ 108 | ref = LocalRemote(DummyReferenceable(IHasSchema)) 109 | self.assertThat( 110 | ref.callRemote("method", None), 111 | failed( 112 | AfterPreprocessing( 113 | lambda f: f.type, 114 | Equals(Violation), 115 | ), 116 | ), 117 | ) 118 | 119 | def test_result_schema(self) -> None: 120 | """ 121 | ``LocalRemote.callRemote`` returns a ``Deferred`` that fails with a 122 | ``Violation`` if a method returns an object which doesn't conform to 123 | the method's result schema. 124 | """ 125 | ref = LocalRemote(DummyReferenceable(IHasSchema)) 126 | self.assertThat( 127 | ref.callRemote("method", 0), 128 | failed( 129 | AfterPreprocessing( 130 | lambda f: f.type, 131 | Equals(Violation), 132 | ), 133 | ), 134 | ) 135 | 136 | def test_successful_method(self) -> None: 137 | """ 138 | ``LocalRemote.callRemote`` returns a ``Deferred`` that fires with the 139 | remote method's result if the arguments and result conform to their 140 | respective schemas. 
141 | """ 142 | ref = LocalRemote(DummyReferenceable(IHasSchema)) 143 | self.assertThat( 144 | ref.callRemote("good_method", 0), 145 | succeeded(Equals(None)), 146 | ) 147 | 148 | def test_argument_serialization_failure(self) -> None: 149 | """ 150 | ``LocalRemote.callRemote`` returns a ``Deferred`` that fires with a 151 | failure if an argument cannot be serialized. 152 | """ 153 | ref = LocalRemote(DummyReferenceable(IHasSchema)) 154 | self.assertThat( 155 | ref.callRemote("whatever_method", BrokenCopyable()), 156 | failed(Always()), 157 | ) 158 | 159 | def test_result_serialization_failure(self) -> None: 160 | """ 161 | ``LocalRemote.callRemote`` returns a ``Deferred`` that fires with a 162 | failure if the method's result cannot be serialized. 163 | """ 164 | 165 | class BrokenResultReferenceable(DummyReferenceable): 166 | def doRemoteCall(self, *a: object, **kw: object) -> BrokenCopyable: 167 | return BrokenCopyable() 168 | 169 | ref = LocalRemote(BrokenResultReferenceable(IHasSchema)) 170 | self.assertThat( 171 | ref.callRemote("whatever_method", None), 172 | failed(Always()), 173 | ) 174 | 175 | 176 | class EchoerFixture(Fixture): 177 | tub: Tub 178 | furl: bytes 179 | 180 | def __init__(self) -> None: 181 | self.tub = Tub() 182 | self.tub.setLocation(b"tcp:0") 183 | 184 | def _setUp(self) -> None: 185 | self.tub.startService() 186 | self.furl = self.tub.registerReference(Echoer()) 187 | 188 | def _cleanUp(self) -> Optional[Deferred[object]]: 189 | return cast(Optional[Deferred[object]], self.tub.stopService()) 190 | 191 | 192 | class SerializationTests(TrialTestCase): 193 | """ 194 | Tests for the serialization of types used in the Foolscap API. 195 | """ 196 | 197 | @async_test 198 | async def test_sharestat(self) -> None: 199 | """ 200 | A ``ShareStat`` instance can be sent as an argument to and received in a 201 | response from a Foolscap remote method call. 
202 | """ 203 | await self._roundtrip_test(ShareStat(1, 2)) 204 | 205 | async def _roundtrip_test(self, obj: object) -> None: 206 | """ 207 | Send ``obj`` over Foolscap and receive it back again, equal to itself. 208 | """ 209 | # So sad. No Deferred support in testtools.TestCase or 210 | # fixture.Fixture, no fixture support in 211 | # twisted.trial.unittest.TestCase. 212 | fx = EchoerFixture() 213 | fx.setUp() 214 | self.addCleanup(fx._cleanUp) 215 | echoer = await fx.tub.getReference(fx.furl) 216 | received = await echoer.callRemote("echo", obj) 217 | self.assertEqual(obj, received) 218 | -------------------------------------------------------------------------------- /src/_zkapauthorizer/tests/test_private.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # Tahoe-LAFS -- secure, distributed storage grid 3 | # 4 | # Copyright © 2020 The Tahoe-LAFS Software Foundation 5 | # 6 | # Copyright 2019 PrivateStorage.io, LLC 7 | 8 | """ 9 | Tests for ``_zkapauthorizer.private``. 10 | """ 11 | 12 | from allmydata.test.web.matchers import has_response_code 13 | from testtools import TestCase 14 | from testtools.matchers import Equals 15 | from testtools.twistedsupport import succeeded 16 | from treq.client import HTTPClient 17 | from treq.testing import RequestTraversalAgent 18 | from twisted.web.http import NOT_FOUND, UNAUTHORIZED 19 | from twisted.web.http_headers import Headers 20 | from twisted.web.resource import Resource 21 | 22 | from ..private import SCHEME, create_private_tree 23 | 24 | 25 | class PrivacyTests(TestCase): 26 | """ 27 | Tests for the privacy features of the resources created by ``create_private_tree``. 
28 | """ 29 | 30 | def setUp(self) -> None: 31 | self.token = b"abcdef" 32 | self.resource = create_private_tree(lambda: self.token, Resource()) 33 | self.agent = RequestTraversalAgent(self.resource) 34 | self.client = HTTPClient(self.agent) 35 | super(PrivacyTests, self).setUp() 36 | 37 | def _authorization(self, scheme: bytes, value: bytes) -> Headers: 38 | return Headers( 39 | { 40 | "authorization": [ 41 | "{} {}".format(scheme.decode("ascii"), value.decode("ascii")), 42 | ], 43 | } 44 | ) 45 | 46 | def test_unauthorized(self) -> None: 47 | """ 48 | A request without an *Authorization* header receives an *Unauthorized* response. 49 | """ 50 | self.assertThat( 51 | self.client.head(b"http:///foo/bar"), 52 | succeeded(has_response_code(Equals(UNAUTHORIZED))), 53 | ) 54 | 55 | def test_wrong_scheme(self) -> None: 56 | """ 57 | A request with an *Authorization* header not containing the Tahoe-LAFS 58 | scheme receives an *Unauthorized* response. 59 | """ 60 | self.assertThat( 61 | self.client.head( 62 | b"http:///foo/bar", 63 | headers=self._authorization(b"basic", self.token), 64 | ), 65 | succeeded(has_response_code(Equals(UNAUTHORIZED))), 66 | ) 67 | 68 | def test_wrong_token(self) -> None: 69 | """ 70 | A request with an *Authorization* header not containing the expected token 71 | receives an *Unauthorized* response. 72 | """ 73 | self.assertThat( 74 | self.client.head( 75 | b"http:///foo/bar", 76 | headers=self._authorization(SCHEME, b"foo bar"), 77 | ), 78 | succeeded(has_response_code(Equals(UNAUTHORIZED))), 79 | ) 80 | 81 | def test_authorized(self) -> None: 82 | """ 83 | A request with an *Authorization* header containing the expected scheme 84 | and token does not receive an *Unauthorized* response. 85 | """ 86 | self.assertThat( 87 | self.client.head( 88 | b"http:///foo/bar", 89 | headers=self._authorization(SCHEME, self.token), 90 | ), 91 | # It's a made up URL so we don't get a 200, either, but a 404. 
-------------------------------------------------------------------------------- src/_zkapauthorizer/tests/test_schema.py: --------------------------------------------------------------------------------
# coding: utf-8
# Copyright 2019 PrivateStorage.io, LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""
Tests for ``_zkapauthorizer.schema``.
"""

from datetime import datetime, timezone
from typing import Optional

from fixtures import TempDir
from hypothesis import assume, given
from hypothesis.strategies import SearchStrategy
from hypothesis.strategies import datetimes as naive_datetimes
from hypothesis.strategies import integers, none, one_of
from testtools import TestCase
from testtools.matchers import Equals

from ..model import memory_connect
from ..schema import _UPGRADES, get_schema_upgrades, get_schema_version


def datetimes() -> SearchStrategy[datetime]:
    """
    Build naive datetime instances that represent times that actually happened
    or will actually happen bounded within a pretty reasonable range around
    when this code was written.
    """
    return naive_datetimes(
        allow_imaginary=False,
        # The software has not existed for very long.  There is no point
        # going very far back in time.  Also, according to authorities,
        # timezones are basically meaningless before 1900 anyway.
        min_value=datetime(1995, 1, 1, 0, 0),
        # Similarly, this upgrade is going to happen so going very far into
        # the future is also less interesting.  Also, the behavior of
        # calendars and clocks becomes harder to predict the further you go.
        # In particular, things fall apart around "spring forward" in 2038 - I
        # suppose because SQLite3 can't figure out the DST rules for any point
        # after 2 ** 31 - 1 seconds after the POSIX epoch.
        max_value=datetime(2038, 1, 1, 0, 0),
    )


class UpgradeTests(TestCase):
    """
    Tests for the schema upgrade machinery in ``_zkapauthorizer.schema``.
    """

    def test_consistency(self) -> None:
        """
        Upgrades are defined for every version up to the latest version.
        """
        self.assertThat(
            list(_UPGRADES.keys()),
            Equals(list(range(len(_UPGRADES)))),
        )

    @given(
        datetimes(),
        one_of(none(), datetimes()),
        integers(min_value=0, max_value=2**63 - 1),
    )
    def test_utc_datetimes(
        self, start: datetime, finish: Optional[datetime], count: int
    ) -> None:
        """
        The schema upgrades naive, localtime timestamps from before schema version
        7 to UTC timestamps.
        """
        # a datetime instance can be "folded".  When the clock seems to rewind
        # by an hour for DST, all of the times in that hour will seem to
        # repeat.  A datetime with fold=1 represents the 2nd time.  Since this
        # distinction is not present in the database, it is guaranteed that
        # only one or the other case can work here.  The information is lost
        # so there's nothing we can do to fix it.  We could make it an error
        # but that seems bad.  Otherwise, we can arbitrarily pick one
        # interpretation or the other.  That choice is basically up to SQLite3
        # since we're using it to do the conversion to UTC.  It happens to
        # interpret it as the first occurrence rather than the second.  So,
        # prevent the test from running with the other kind of value.
        #
        # https://docs.python.org/3/library/datetime.html#datetime.datetime.fold
        assume(not start.fold)
        assume(finish is None or not finish.fold)

        dbpath = self.useFixture(TempDir()).join("utc_datetimes")
        with memory_connect(dbpath) as conn:
            cursor = conn.cursor()
            for upgrade in get_schema_upgrades(get_schema_version(cursor)):
                cursor.execute(upgrade)
                if get_schema_version(cursor) == 6:
                    # Stop here so we can populate the database with some
                    # state that requires upgrade.
                    break

            cursor.execute(
                """
                INSERT INTO [lease-maintenance-spending] ([started], [finished], [count])
                VALUES (?, ?, ?)
                """,
                (start, finish, count),
            )

            # Finish the upgrade
            for upgrade in get_schema_upgrades(get_schema_version(cursor)):
                cursor.execute(upgrade)

            cursor.execute(
                """
                SELECT [started], [finished], [count]
                FROM [lease-maintenance-spending]
                """
            )
            actual_start, actual_finished, actual_count = cursor.fetchone()

            def expected_datetime(value: datetime) -> str:
                return (
                    value.replace(
                        # The schema upgrade throws away sub-second precision.
                        # Perhaps not ideal but in practice it doesn't matter.
                        microsecond=0
                    )
                    # Translate the naive local time into an aware UTC-zoned
                    # value.
                    .astimezone(timezone.utc).isoformat(" ")
                )

            expected_start = expected_datetime(start)
            if finish is None:
                expected_finish = None
            else:
                expected_finish = expected_datetime(finish)

            self.assertThat(
                (actual_start, actual_finished, actual_count),
                Equals((expected_start, expected_finish, count)),
            )
-------------------------------------------------------------------------------- src/_zkapauthorizer/tests/test_spending.py: --------------------------------------------------------------------------------
# Copyright 2019 PrivateStorage.io, LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""
Tests for ``_zkapauthorizer.spending``.
17 | """ 18 | 19 | from datetime import datetime 20 | from random import Random 21 | from typing import Callable 22 | 23 | from hypothesis import given 24 | from hypothesis.strategies import DataObject, data, integers, randoms 25 | from testtools import TestCase 26 | from testtools.matchers import ( 27 | AfterPreprocessing, 28 | Always, 29 | Equals, 30 | HasLength, 31 | MatchesAll, 32 | MatchesStructure, 33 | ) 34 | from testtools.twistedsupport import succeeded 35 | from twisted.internet.defer import Deferred 36 | 37 | from ..model import VoucherStore 38 | from ..spending import IPassGroup, SpendingController 39 | from .fixtures import TemporaryVoucherStore 40 | from .matchers import Matcher, Provides 41 | from .strategies import pass_counts, posix_safe_datetimes, vouchers 42 | 43 | 44 | class PassGroupTests(TestCase): 45 | """ 46 | Tests for ``IPassGroup`` and the factories that create them. 47 | """ 48 | 49 | @given(vouchers(), pass_counts(), posix_safe_datetimes()) 50 | def test_get(self, voucher: bytes, num_passes: int, now: datetime) -> None: 51 | """ 52 | ``IPassFactory.get`` returns an ``IPassGroup`` provider containing the 53 | requested number of passes. 54 | """ 55 | configless = self.useFixture(TemporaryVoucherStore(get_now=lambda: now)) 56 | # Make sure there are enough tokens for us to extract! 
57 | self.assertThat( 58 | Deferred.fromCoroutine(configless.redeem(voucher, num_passes)), 59 | succeeded(Always()), 60 | ) 61 | 62 | pass_factory = SpendingController.for_store( 63 | tokens_to_passes=configless.redeemer.tokens_to_passes, 64 | store=configless.store, 65 | ) 66 | 67 | group = pass_factory.get(b"message", num_passes) 68 | self.assertThat( 69 | group, 70 | MatchesAll( 71 | Provides([IPassGroup]), 72 | MatchesStructure( 73 | passes=HasLength(num_passes), 74 | ), 75 | ), 76 | ) 77 | 78 | def _test_token_group_operation( 79 | self, 80 | operation: Callable[[IPassGroup], None], 81 | rest_operation: Callable[[IPassGroup], None], 82 | matches_tokens: Callable[[int, IPassGroup], Matcher[VoucherStore]], 83 | voucher: bytes, 84 | num_passes: int, 85 | now: datetime, 86 | random: Random, 87 | data: DataObject, 88 | ) -> None: 89 | configless = self.useFixture(TemporaryVoucherStore(get_now=lambda: now)) 90 | # Make sure there are enough tokens for us to use! 91 | self.assertThat( 92 | Deferred.fromCoroutine(configless.redeem(voucher, num_passes)), 93 | succeeded(Always()), 94 | ) 95 | 96 | # Figure out some subset, maybe empty, of passes from the group that 97 | # we will try to operate on. 98 | group_size = data.draw(integers(min_value=0, max_value=num_passes)) 99 | indices = list(range(num_passes)) 100 | random.shuffle(indices) 101 | spent_indices = indices[:group_size] 102 | 103 | # Get some passes and perform the operation. 104 | pass_factory = SpendingController.for_store( 105 | tokens_to_passes=configless.redeemer.tokens_to_passes, 106 | store=configless.store, 107 | ) 108 | group = pass_factory.get(b"message", num_passes) 109 | spent, rest = group.split(spent_indices) 110 | 111 | # Perform the test-specified operations on the two groups. 112 | operation(spent) 113 | rest_operation(rest) 114 | 115 | # Verify the expected outcome of the operation using the supplied 116 | # matcher factory. 
117 | self.assertThat( 118 | configless.store, 119 | matches_tokens(num_passes, spent), 120 | ) 121 | 122 | @given(vouchers(), pass_counts(), posix_safe_datetimes(), randoms(), data()) 123 | def test_spent( 124 | self, 125 | voucher: bytes, 126 | num_passes: int, 127 | now: datetime, 128 | random: Random, 129 | data: DataObject, 130 | ) -> None: 131 | """ 132 | Passes in a group can be marked as successfully spent to prevent them from 133 | being re-used by a future ``get`` call. 134 | """ 135 | 136 | def matches_tokens(num_passes: int, group: IPassGroup) -> Matcher[VoucherStore]: 137 | return AfterPreprocessing( # type: ignore[no-any-return] 138 | lambda store: store.count_unblinded_tokens(), 139 | Equals(num_passes - len(group.passes)), 140 | ) 141 | 142 | self._test_token_group_operation( 143 | lambda group: group.mark_spent(), 144 | # Reset the other group so its tokens are counted above. 145 | lambda group: group.reset(), 146 | matches_tokens, 147 | voucher, 148 | num_passes, 149 | now, 150 | random, 151 | data, 152 | ) 153 | 154 | @given(vouchers(), pass_counts(), posix_safe_datetimes(), randoms(), data()) 155 | def test_invalid( 156 | self, 157 | voucher: bytes, 158 | num_passes: int, 159 | now: datetime, 160 | random: Random, 161 | data: DataObject, 162 | ) -> None: 163 | """ 164 | Passes in a group can be marked as invalid to prevent them from being 165 | re-used by a future ``get`` call. 166 | """ 167 | 168 | def matches_tokens(num_passes: int, group: IPassGroup) -> Matcher[VoucherStore]: 169 | expected = num_passes - len(group.passes) 170 | return AfterPreprocessing( # type: ignore[no-any-return] 171 | lambda store: store.count_unblinded_tokens(), 172 | Equals(expected), 173 | ) 174 | 175 | return self._test_token_group_operation( 176 | lambda group: group.mark_invalid("reason"), 177 | # Reset the rest so we can count them in our matcher. 
178 | lambda group: group.reset(), 179 | matches_tokens, 180 | voucher, 181 | num_passes, 182 | now, 183 | random, 184 | data, 185 | ) 186 | 187 | @given(vouchers(), pass_counts(), posix_safe_datetimes(), randoms(), data()) 188 | def test_reset( 189 | self, 190 | voucher: bytes, 191 | num_passes: int, 192 | now: datetime, 193 | random: Random, 194 | data: DataObject, 195 | ) -> None: 196 | """ 197 | Passes in a group can be reset to allow them to be re-used by a future 198 | ``get`` call. 199 | """ 200 | 201 | def matches_tokens(num_passes: int, group: IPassGroup) -> Matcher[VoucherStore]: 202 | return AfterPreprocessing( # type: ignore[no-any-return] 203 | # They've been reset so we should be able to re-get them. 204 | lambda store: store.get_unblinded_tokens(len(group.passes)), 205 | Equals(group.unblinded_tokens), 206 | ) 207 | 208 | return self._test_token_group_operation( 209 | lambda group: group.reset(), 210 | # Leave the other group alone so we can see what the effect of the 211 | # above reset was. 212 | lambda group: None, 213 | matches_tokens, 214 | voucher, 215 | num_passes, 216 | now, 217 | random, 218 | data, 219 | ) 220 | -------------------------------------------------------------------------------- /src/_zkapauthorizer/tests/test_sql.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | # Copyright 2022 PrivateStorage.io, LLC 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
# See the License for the specific language governing permissions and
# limitations under the License.

"""
Tests for ``_zkapauthorizer.sql``.
"""


from hypothesis import given
from hypothesis.strategies import sampled_from, tuples
from testtools import TestCase
from testtools.matchers import Equals

from ..sql import Statement, statement_mutates
from .strategies import mutations, selects, sql_identifiers


class MutateTests(TestCase):
    """
    Tests for ``statement_mutates``
    """

    @given(mutations())
    def test_mutate(self, change: Statement) -> None:
        """
        ``statement_mutates`` returns True for SQL INSERT, DELETE, and UPDATE
        statements.
        """
        self.assertThat(
            statement_mutates(change.statement()),
            Equals(True),
        )

    @given(
        # Pick a non-mutating statement factory and apply it to a generated
        # identifier to build a concrete Statement to examine.
        tuples(sampled_from([selects]), sql_identifiers()).flatmap(
            lambda x: x[0](x[1]),
        )
    )
    def test_non_mutate(self, change: Statement) -> None:
        """
        ``statement_mutates`` returns False for SQL SELECT and BEGIN IMMEDIATE
        TRANSACTION statements.
        """
        self.assertThat(
            statement_mutates(change.statement()),
            Equals(False),
        )
        self.assertThat(
            statement_mutates("BEGIN IMMEDIATE TRANSACTION"),
            Equals(False),
        )
-------------------------------------------------------------------------------- src/_zkapauthorizer/tests/test_strategies.py: --------------------------------------------------------------------------------
# Copyright 2019 PrivateStorage.io, LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""
Tests for our custom Hypothesis strategies.
"""

from allmydata.client import config_from_string
from fixtures import TempDir
from hypothesis import given, note
from hypothesis.strategies import DataObject, data, just, one_of
from testtools import TestCase

from .strategies import encoding_parameters, tahoe_config_texts


class TahoeConfigsTests(TestCase):
    """
    Tests for ``tahoe_configs``.
    """

    @given(data())
    def test_parses(self, data: DataObject) -> None:
        """
        Configurations built by the strategy can be parsed.
        """
        tempdir = self.useFixture(TempDir())
        config_text = data.draw(
            tahoe_config_texts(
                storage_client_plugins={},
                shares=one_of(
                    just((None, None, None)),
                    encoding_parameters(),
                ),
            ),
        )
        # Record the generated config so Hypothesis reports it on failure.
        note(config_text)
        config_from_string(
            tempdir.join("tahoe.ini"),
            "tub.port",
            config_text.encode("utf-8"),
        )
-------------------------------------------------------------------------------- src/_zkapauthorizer/tests/test_tokens.py: --------------------------------------------------------------------------------
# Copyright 2019 PrivateStorage.io, LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | 15 | """ 16 | Some tests for some tokens, perhaps. Just a stub for now. 17 | """ 18 | -------------------------------------------------------------------------------- /src/_zkapauthorizer/tests/testing-signing.key: -------------------------------------------------------------------------------- 1 | mkQf85V2vyLQRUYuqRb+Ke6K+M9pOtXm4MslsuCdBgg= 2 | 3 | -------------------------------------------------------------------------------- /src/_zkapauthorizer/validators.py: -------------------------------------------------------------------------------- 1 | # Copyright 2019 PrivateStorage.io, LLC 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | 15 | """ 16 | This module implements validators for ``attrs``-defined attributes. 
17 | """ 18 | 19 | from base64 import b64decode 20 | from datetime import datetime 21 | from typing import Callable, Protocol, Sequence, TypeVar 22 | 23 | from ._base64 import urlsafe_b64decode 24 | from ._types import Attribute 25 | 26 | _T = TypeVar("_T") 27 | 28 | ValidatorType = Callable[[object, Attribute[_T], _T], None] 29 | 30 | 31 | def returns_aware_datetime_validator( 32 | inst: object, attr: Attribute[Callable[[], datetime]], value: Callable[[], datetime] 33 | ) -> None: 34 | """ 35 | An attrs validator that verifies the attribute value is a function that 36 | returns a timezone-aware datetime instance for at least one call. 37 | """ 38 | if is_aware_datetime(value()): 39 | return None 40 | # Is it really a TypeError and not a ValueError? It doesn't matter and 41 | # also attrs converts anything we raise into a TypeError. 42 | raise TypeError( 43 | f"{attr.name!r} must return aware datetime instances (returned {value!r})" 44 | ) 45 | 46 | 47 | def is_aware_datetime(value: datetime) -> bool: 48 | """ 49 | :return: ``True`` if and only iff the given value is a timezone-aware 50 | datetime instance. 51 | """ 52 | return isinstance(value, datetime) and value.tzinfo is not None 53 | 54 | 55 | def aware_datetime_validator( 56 | inst: object, attr: Attribute[datetime], value: datetime 57 | ) -> None: 58 | """ 59 | An attrs validator that verifies the attribute value is a timezone-aware 60 | datetime instance. 61 | """ 62 | if is_aware_datetime(value): 63 | return None 64 | raise TypeError(f"{attr.name!r} must be an aware datetime instance (got {value!r})") 65 | 66 | 67 | def is_base64_encoded( 68 | b64decode: Callable[[bytes], bytes] = b64decode 69 | ) -> ValidatorType[bytes]: 70 | """ 71 | Return an attrs validator that verifies that the attributes is a base64 72 | encoded byte string. 
73 | """ 74 | 75 | def validate_is_base64_encoded( 76 | inst: object, attr: Attribute[bytes], value: bytes 77 | ) -> None: 78 | try: 79 | b64decode(value) 80 | except TypeError: 81 | raise TypeError( 82 | "{name!r} must be base64 encoded bytes, (got {value!r})".format( 83 | name=attr.name, 84 | value=value, 85 | ), 86 | ) 87 | 88 | return validate_is_base64_encoded 89 | 90 | 91 | def has_length(expected: int) -> ValidatorType[Sequence[_T]]: 92 | def validate_has_length( 93 | inst: object, attr: Attribute[Sequence[_T]], value: Sequence[_T] 94 | ) -> None: 95 | if len(value) != expected: 96 | raise ValueError( 97 | "{name!r} must have length {expected}, instead has length {actual}".format( 98 | name=attr.name, 99 | expected=expected, 100 | actual=len(value), 101 | ), 102 | ) 103 | 104 | return validate_has_length 105 | 106 | 107 | class Ordered(Protocol): 108 | def __gt__(self: _T, other: _T) -> bool: ... 109 | 110 | 111 | def greater_than(expected: Ordered) -> ValidatorType[Ordered]: 112 | def validate_relation( 113 | inst: object, attr: Attribute[Ordered], value: Ordered 114 | ) -> None: 115 | if value > expected: 116 | return None 117 | 118 | raise ValueError( 119 | "{name!r} must be greater than {expected}, instead it was {actual}".format( 120 | name=attr.name, 121 | expected=expected, 122 | actual=value, 123 | ), 124 | ) 125 | 126 | return validate_relation 127 | 128 | 129 | def bounded_integer(min_bound: int) -> ValidatorType[int]: 130 | def validator(inst: object, attr: Attribute[int], value: int) -> None: 131 | """ 132 | An attrs validator which checks an integer value to make sure it 133 | greater than some minimum bound. 
134 | """ 135 | if not isinstance(value, int): 136 | raise ValueError( 137 | f"{attr.name} must be an integer, instead it was {type(value)}", 138 | ) 139 | if not (value > min_bound): 140 | raise ValueError( 141 | f"{attr.name} must be greater than {min_bound}, instead it was {value}", 142 | ) 143 | 144 | return None 145 | 146 | return validator 147 | 148 | 149 | positive_integer = bounded_integer(0) 150 | non_negative_integer = bounded_integer(-1) 151 | 152 | 153 | def base64_bytes(length: int) -> ValidatorType[bytes]: 154 | def validator(inst: object, attr: Attribute[bytes], value: bytes) -> None: 155 | if not isinstance(value, bytes): 156 | raise ValueError( 157 | f"{attr.name} must be bytes, instead it was {type(value)}", 158 | ) 159 | if not is_base64_encoded(urlsafe_b64decode): 160 | raise ValueError( 161 | f"{attr.name} must be base64 encoded data", 162 | ) 163 | 164 | if len(value) != length: 165 | raise ValueError( 166 | f"{attr.name} value must have length {length}, not {len(value)}", 167 | ) 168 | 169 | return None 170 | 171 | return validator 172 | -------------------------------------------------------------------------------- /src/twisted/plugins/zkapauthorizer.py: -------------------------------------------------------------------------------- 1 | # Copyright 2019 PrivateStorage.io, LLC 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | 15 | """ 16 | A drop-in to supply plugins to the Twisted plugin system. 
17 | """ 18 | 19 | __all__ = [ 20 | "storage_server_plugin", 21 | ] 22 | 23 | from _zkapauthorizer._plugin import storage_server_plugin 24 | --------------------------------------------------------------------------------