├── .bumpversion.cfg ├── .circleci └── config.yml ├── .coveragerc ├── .gitignore ├── CODE_OF_CONDUCT.md ├── CONTRIBUTING.rst ├── LICENSE.md ├── Pipfile ├── Pipfile.lock ├── README.rst ├── docs ├── Makefile ├── examples │ └── umbral_simple_api.py ├── make.bat ├── notebooks │ └── pyUmbral Simple API.ipynb └── source │ ├── .static │ ├── PRE_image.png │ └── umbral.svg │ ├── api.rst │ ├── conf.py │ ├── index.rst │ ├── installation.rst │ └── using_pyumbral.rst ├── mypy.ini ├── pylintrc ├── setup.py ├── tests ├── __init__.py ├── conftest.py ├── metrics │ ├── reencryption_benchmark.py │ └── reencryption_firehose.py ├── test_capsule.py ├── test_capsule_frag.py ├── test_compatibility.py ├── test_curve.py ├── test_curve_point.py ├── test_curve_scalar.py ├── test_dem.py ├── test_key_frag.py ├── test_keys.py ├── test_pre.py ├── test_serializable.py ├── test_signing.py └── test_vectors.py ├── umbral ├── __about__.py ├── __init__.py ├── capsule.py ├── capsule_frag.py ├── curve.py ├── curve_point.py ├── curve_scalar.py ├── dem.py ├── errors.py ├── hashing.py ├── key_frag.py ├── keys.py ├── openssl.py ├── params.py ├── pre.py ├── serializable.py └── signing.py └── vectors ├── generate_test_vectors.py ├── vectors_cfrags.json ├── vectors_kfrags.json ├── vectors_point_operations.json ├── vectors_scalar_from_digest.json ├── vectors_scalar_operations.json └── vectors_unsafe_hash_to_point.json /.bumpversion.cfg: -------------------------------------------------------------------------------- 1 | [bumpversion] 2 | current_version = 0.3.0 3 | commit = True 4 | tag = True 5 | parse = (?P\d+)\.(?P\d+)\.(?P\d+)(-(?P[^.]*)\.(?P\d+))? 6 | serialize = 7 | {major}.{minor}.{patch}-{stage}.{devnum} 8 | {major}.{minor}.{patch} 9 | 10 | [bumpversion:part:stage] 11 | optional_value = stable 12 | first_value = stable 13 | values = 14 | alpha 15 | beta 16 | rc 17 | stable 18 | 19 | [bumpversion:part:devnum] 20 | 21 | [bumpversion:file:README.rst] 22 | 23 | [bumpversion:file:umbral/__about__.py] 24 | 25 | [bumpversion:file:docs/source/conf.py] 26 | 27 | [bumpversion:file:docs/source/index.rst] 28 | -------------------------------------------------------------------------------- /.coveragerc: -------------------------------------------------------------------------------- 1 | [run] 2 | omit = 3 | setup.py, 4 | *__init__.py 5 | parallel=True 6 | 7 | [report] 8 | exclude_lines = 9 | # Have to re-enable the standard pragma 10 | pragma: no cover 11 | 12 | # Exclude abstract methods 13 | @abstractmethod 14 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | 2 | # Created by https://www.gitignore.io/api/vim,linux,macos,python,pycharm,virtualenv 3 | 4 | ### Linux ### 5 | *~ 6 | 7 | # temporary files which can be created if a process still has a handle open of a deleted file 8 | .fuse_hidden* 9 | 10 | # KDE directory preferences 11 | .directory 12 | 13 | # Linux trash folder which might appear on any partition or disk 14 | .Trash-* 15 | 16 | # .nfs files are created when an open file is removed but is still being accessed 17 | .nfs* 18 | 19 | ### macOS ### 20 | *.DS_Store 21 | .AppleDouble 22 | .LSOverride 23 | 24 | # Icon must end with two \r 25 | Icon 26 | 27 | # Thumbnails 28 | ._* 29 | 30 | # Files that might appear in the root of a volume 31 | .DocumentRevisions-V100 32 | .fseventsd 33 | .Spotlight-V100 34 | .TemporaryItems 35 | .Trashes 36 | .VolumeIcon.icns 37 | .com.apple.timemachine.donotpresent 38 | 
39 | # Directories potentially created on remote AFP share 40 | .AppleDB 41 | .AppleDesktop 42 | Network Trash Folder 43 | Temporary Items 44 | .apdisk 45 | 46 | ### PyCharm ### 47 | # Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio and Webstorm 48 | # Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839 49 | 50 | # User-specific stuff: 51 | .idea/**/workspace.xml 52 | .idea/**/tasks.xml 53 | .idea/dictionaries 54 | 55 | # Sensitive or high-churn files: 56 | .idea/**/dataSources/ 57 | .idea/**/dataSources.ids 58 | .idea/**/dataSources.xml 59 | .idea/**/dataSources.local.xml 60 | .idea/**/sqlDataSources.xml 61 | .idea/**/dynamic.xml 62 | .idea/**/uiDesigner.xml 63 | 64 | # Gradle: 65 | .idea/**/gradle.xml 66 | .idea/**/libraries 67 | 68 | # CMake 69 | cmake-build-debug/ 70 | 71 | # Mongo Explorer plugin: 72 | .idea/**/mongoSettings.xml 73 | 74 | ## File-based project format: 75 | *.iws 76 | 77 | ## Plugin-specific files: 78 | 79 | # IntelliJ 80 | /out/ 81 | 82 | # mpeltonen/sbt-idea plugin 83 | .idea_modules/ 84 | 85 | # JIRA plugin 86 | atlassian-ide-plugin.xml 87 | 88 | # Cursive Clojure plugin 89 | .idea/replstate.xml 90 | 91 | # Ruby plugin and RubyMine 92 | /.rakeTasks 93 | 94 | # Crashlytics plugin (for Android Studio and IntelliJ) 95 | com_crashlytics_export_strings.xml 96 | crashlytics.properties 97 | crashlytics-build.properties 98 | fabric.properties 99 | 100 | ### PyCharm Patch ### 101 | # Comment Reason: https://github.com/joeblau/gitignore.io/issues/186#issuecomment-215987721 102 | 103 | # *.iml 104 | # modules.xml 105 | # .idea/misc.xml 106 | # *.ipr 107 | 108 | # Sonarlint plugin 109 | .idea/sonarlint 110 | 111 | ### Python ### 112 | # Byte-compiled / optimized / DLL files 113 | __pycache__/ 114 | *.py[cod] 115 | *$py.class 116 | 117 | # C extensions 118 | *.so 119 | 120 | # Distribution / packaging 121 | .Python 122 | build/ 123 | develop-eggs/ 124 | dist/ 125 | downloads/ 126 | eggs/ 127 | .eggs/ 128 | lib/ 129 | lib64/ 130 | parts/ 131 | sdist/ 132 | var/ 133 | wheels/ 134 | *.egg-info/ 135 | .installed.cfg 136 | *.egg 137 | 138 | # PyInstaller 139 | # Usually these files are written by a python script from a template 140 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
141 | *.manifest 142 | *.spec 143 | 144 | # Installer logs 145 | pip-log.txt 146 | pip-delete-this-directory.txt 147 | 148 | # Unit test / coverage reports 149 | reports/ 150 | htmlcov/ 151 | .tox/ 152 | .coverage 153 | .coverage.* 154 | .cache 155 | nosetests.xml 156 | coverage.xml 157 | *.cover 158 | .hypothesis/ 159 | .pytest_cache/ 160 | 161 | 162 | # Translations 163 | *.mo 164 | *.pot 165 | 166 | # Django stuff: 167 | *.log 168 | local_settings.py 169 | 170 | # Flask stuff: 171 | instance/ 172 | .webassets-cache 173 | 174 | # Scrapy stuff: 175 | .scrapy 176 | 177 | # Sphinx documentation 178 | docs/_build/ 179 | 180 | # PyBuilder 181 | target/ 182 | 183 | # Jupyter Notebook 184 | .ipynb_checkpoints 185 | 186 | # pyenv 187 | .python-version 188 | 189 | # celery beat schedule file 190 | celerybeat-schedule 191 | 192 | # SageMath parsed files 193 | *.sage.py 194 | 195 | # Environments 196 | .env 197 | .venv 198 | env/ 199 | venv/ 200 | ENV/ 201 | env.bak/ 202 | venv.bak/ 203 | 204 | # Spyder project settings 205 | .spyderproject 206 | .spyproject 207 | 208 | # Rope project settings 209 | .ropeproject 210 | 211 | # mkdocs documentation 212 | /site 213 | 214 | # mypy 215 | .mypy_cache/ 216 | 217 | ### Vim ### 218 | # swap 219 | [._]*.s[a-v][a-z] 220 | [._]*.sw[a-p] 221 | [._]s[a-v][a-z] 222 | [._]sw[a-p] 223 | # session 224 | Session.vim 225 | # temporary 226 | .netrwhist 227 | # auto-generated tag files 228 | tags 229 | 230 | ### VirtualEnv ### 231 | # Virtualenv 232 | # http://iamzed.com/2009/05/07/a-primer-on-virtualenv/ 233 | [Bb]in 234 | [Ii]nclude 235 | [Ll]ib 236 | [Ll]ib64 237 | [Ll]ocal 238 | [Mm]an 239 | [Ss]cripts 240 | [Tt]cl 241 | pyvenv.cfg 242 | pip-selfcheck.json 243 | 244 | # End of https://www.gitignore.io/api/vim,linux,macos,python,pycharm,virtualenv 245 | 246 | .idea 247 | /mypy_report/ 248 | pytest.ini 249 | /tests/metrics/.benchmarks/ 250 | tests/metrics/histograms/ 251 | .circleci/execute_build.sh 252 | /monkeytype.sqlite3 253 | -------------------------------------------------------------------------------- /CODE_OF_CONDUCT.md: -------------------------------------------------------------------------------- 1 | # Contributor Covenant Code of Conduct 2 | 3 | ## Our Pledge 4 | 5 | In the interest of fostering an open and welcoming environment, we as 6 | contributors and maintainers pledge to making participation in our project and 7 | our community a harassment-free experience for everyone, regardless of age, body 8 | size, disability, ethnicity, sex characteristics, gender identity and expression, 9 | level of experience, education, socio-economic status, nationality, personal 10 | appearance, race, religion, or sexual identity and orientation. 
11 | 12 | ## Our Standards 13 | 14 | Examples of behavior that contributes to creating a positive environment 15 | include: 16 | 17 | * Using welcoming and inclusive language 18 | * Being respectful of differing viewpoints and experiences 19 | * Gracefully accepting constructive criticism 20 | * Focusing on what is best for the community 21 | * Showing empathy towards other community members 22 | 23 | Examples of unacceptable behavior by participants include: 24 | 25 | * The use of sexualized language or imagery and unwelcome sexual attention or 26 | advances 27 | * Trolling, insulting/derogatory comments, and personal or political attacks 28 | * Public or private harassment 29 | * Publishing others' private information, such as a physical or electronic 30 | address, without explicit permission 31 | * Other conduct which could reasonably be considered inappropriate in a 32 | professional setting 33 | 34 | ## Our Responsibilities 35 | 36 | Project maintainers are responsible for clarifying the standards of acceptable 37 | behavior and are expected to take appropriate and fair corrective action in 38 | response to any instances of unacceptable behavior. 39 | 40 | Project maintainers have the right and responsibility to remove, edit, or 41 | reject comments, commits, code, wiki edits, issues, and other contributions 42 | that are not aligned to this Code of Conduct, or to ban temporarily or 43 | permanently any contributor for other behaviors that they deem inappropriate, 44 | threatening, offensive, or harmful. 45 | 46 | ## Scope 47 | 48 | This Code of Conduct applies both within project spaces and in public spaces 49 | when an individual is representing the project or its community. Examples of 50 | representing a project or community include using an official project e-mail 51 | address, posting via an official social media account, or acting as an appointed 52 | representative at an online or offline event. Representation of a project may be 53 | further defined and clarified by project maintainers. 54 | 55 | ## Enforcement 56 | 57 | Instances of abusive, harassing, or otherwise unacceptable behavior may be 58 | reported by contacting the project team at report@nucypher.com. All 59 | complaints will be reviewed and investigated and will result in a response that 60 | is deemed necessary and appropriate to the circumstances. The project team is 61 | obligated to maintain confidentiality with regard to the reporter of an incident. 62 | Further details of specific enforcement policies may be posted separately. 63 | 64 | Project maintainers who do not follow or enforce the Code of Conduct in good 65 | faith may face temporary or permanent repercussions as determined by other 66 | members of the project's leadership. 67 | 68 | ## Attribution 69 | 70 | This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, 71 | available at https://www.contributor-covenant.org/version/1/4/code-of-conduct.html 72 | 73 | [homepage]: https://www.contributor-covenant.org 74 | 75 | For answers to common questions about this code of conduct, see 76 | https://www.contributor-covenant.org/faq 77 | 78 | -------------------------------------------------------------------------------- /CONTRIBUTING.rst: -------------------------------------------------------------------------------- 1 | Contributing 2 | ============ 3 | 4 | .. 
image:: https://cdn-images-1.medium.com/max/800/1*J31AEMsTP6o_E5QOohn0Hw.png 5 | :target: https://cdn-images-1.medium.com/max/800/1*J31AEMsTP6o_E5QOohn0Hw.png 6 | 7 | 8 | Acquiring the Codebase 9 | ---------------------- 10 | 11 | .. _`pyUmbral GitHub`: https://github.com/nucypher/pyUmbral 12 | 13 | In order to contribute new code or documentation changes, you will need a local copy 14 | of the source code which is located on the `pyUmbral GitHub`_. 15 | 16 | .. note:: 17 | 18 | pyUmbral uses ``git`` for version control. Be sure you have it installed. 19 | 20 | Here is the recommended procedure for acquiring the code in preparation for 21 | contributing proposed changes: 22 | 23 | 24 | 1. Use GitHub to fork the `nucypher/pyUmbral` repository 25 | 26 | 2. Clone your fork's repository to your local machine 27 | 28 | .. code-block:: bash 29 | 30 | $ git clone https://github.com/<your-username>/pyUmbral.git 31 | 32 | 3. Change directories into ``pyUmbral`` 33 | 34 | .. code-block:: bash 35 | 36 | $ cd pyUmbral 37 | 38 | 4. Add `nucypher/pyUmbral` as an upstream remote 39 | 40 | .. code-block:: bash 41 | 42 | $ git remote add upstream https://github.com/nucypher/pyUmbral.git 43 | 44 | 5. Update your remote tracking branches 45 | 46 | .. code-block:: bash 47 | 48 | $ git remote update 49 | 50 | 6. Install pyUmbral in editable mode from your local clone 51 | 52 | .. code-block:: bash 53 | 54 | $ pip3 install -e . 55 | 56 | 57 | Running the Tests 58 | ----------------- 59 | 60 | .. _Pytest Documentation: https://docs.pytest.org/en/latest/ 61 | 62 | pyUmbral tests are written for execution with ``pytest``. 63 | For more details see the `Pytest Documentation`_. 64 | 65 | To run the tests: 66 | 67 | .. code:: bash 68 | 69 | (pyUmbral)$ pytest 70 | 71 | 72 | Making A Commit 73 | --------------- 74 | 75 | NuCypher takes pride in its commit history. 76 | 77 | When making a commit that you intend to contribute, keep your commit descriptive and succinct. 78 | Commit messages are best written in full sentences that make an attempt to accurately 79 | describe what effect the changeset represents in the simplest form. (It takes practice!) 80 | 81 | Imagine you are the one reviewing the code, commit-by-commit, as a means of understanding 82 | the thinking behind the PR's history. Does your commit history tell an honest and accurate story? 83 | 84 | We understand that different code authors have different development preferences, and others 85 | are first-time contributors to open source, so feel free to join our `Discord <https://discord.gg/xYqyEby>`_ and let us know 86 | how we can best support the submission of your proposed changes. 87 | 88 | 89 | Opening A Pull Request 90 | ---------------------- 91 | 92 | When considering including commits as part of a pull request into `nucypher/pyUmbral`, 93 | we *highly* recommend opening the pull request early, before it is finished, with 94 | the mark "[WIP]" prepended to the title. We understand PRs marked "WIP" to be subject to change, 95 | history rewrites, and CI failures. Generally we will not review a WIP PR until the "[WIP]" marker 96 | has been removed from the PR title; however, opening early does give other contributors an opportunity 97 | to provide early feedback and assists in facilitating an iterative contribution process.
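For reference, a typical end-to-end flow for preparing such a pull request might look like the following minimal sketch. The branch name ``my-feature`` is only an illustrative placeholder; use a name that describes your change, and assume the ``upstream`` remote was configured as in `Acquiring the Codebase`_.

.. code-block:: bash

    # Start from an up-to-date master
    $ git checkout master
    $ git pull upstream master

    # Work on a descriptively named feature branch
    $ git checkout -b my-feature
    $ git commit -a -m "Describe the change in a full sentence."

    # Push the branch to your fork, then open a PR against nucypher/pyUmbral,
    # prepending "[WIP]" to the title while it is still a work in progress
    $ git push origin my-feature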
98 | 99 | 100 | Pull Request Conflicts 101 | ---------------------- 102 | 103 | As an effort to preserve authorship and a cohesive commit history, we prefer if proposed contributions 104 | are rebased over master (or the appropriate branch) when a merge conflict arises, 105 | instead of making a merge commit back into the contributor's fork. 106 | 107 | Generally speaking, the preferred way to do so is with an *interactive rebase*: 108 | 109 | .. important:: 110 | 111 | Be certain you do not have uncommitted changes before continuing. 112 | 113 | 1. Update your remote tracking branches 114 | 115 | .. code-block:: bash 116 | 117 | $ git remote update 118 | ... (some upstream changes are reported) 119 | 120 | 2. Initiate an interactive rebase over `nucypher/pyUmbral@master` 121 | 122 | .. note:: 123 | 124 | This example specifies the remote name ``upstream`` for the NuCypher organizational repository as 125 | used in the `Acquiring the Codebase`_ section. 126 | 127 | .. code-block:: bash 128 | 129 | $ git rebase -i upstream/master 130 | ... (edit & save rebase TODO list) 131 | 132 | 3. Resolve Conflicts 133 | 134 | .. code-block:: bash 135 | 136 | $ git status 137 | ... (resolve local conflict) 138 | $ git add path/to/resolved/conflict/file.py 139 | $ git rebase --continue 140 | ... ( repeat as needed ) 141 | 142 | 143 | 4. Push Rebased History 144 | 145 | After resolving all conflicts, you will need to force push to your fork's repository, since the commits 146 | are rewritten. 147 | 148 | .. warning:: 149 | 150 | Force pushing will override any changes on the remote you push to; proceed with caution. 151 | 152 | .. code-block:: bash 153 | 154 | $ git push origin my-branch -f 155 | 156 | 157 | Building Documentation 158 | ---------------------- 159 | 160 | .. note:: 161 | 162 | ``sphinx`` is a non-standard dependency that can be installed 163 | by running ``pip install -e .[docs]`` from the project directory. 164 | 165 | 166 | .. _Read The Docs: https://pyumbral.readthedocs.io/en/latest/ 167 | 168 | Documentation for ``pyUmbral`` is hosted on `Read The Docs`_, and is automatically built without intervention by 169 | following the release procedure. However, you may want to build the documentation HTML locally for development. 170 | 171 | To build the documentation locally: 172 | 173 | .. code:: bash 174 | 175 | (pyUmbral)$ cd pyUmbral/docs/ 176 | (pyUmbral)$ make html 177 | 178 | 179 | If the build is successful, the resulting html output can be found in ``pyUmbral/docs/build/html``; 180 | opening ``pyUmbral/docs/build/html/index.html`` in a web browser is a reasonable next step. 181 | 182 | 183 | Issuing a New Release 184 | --------------------- 185 | 186 | .. note:: 187 | 188 | ``bumpversion`` is a non-standard dependency that can be installed by running ``pip install -e .[deployment]`` or ``pip install bumpversion``. 189 | 190 | .. important:: 191 | 192 | Ensure your local tree is based on ``master`` and has no uncommitted changes. 193 | 194 | 1. Increment the desired version part (options are ``major``, ``minor``, ``patch``, ``stage``, ``devnum``), for example: 195 | 196 | .. code:: bash 197 | 198 | (pyUmbral)$ bumpversion devnum 199 | 200 | 2. Ensure you have the intended history and incremented version tag: 201 | 202 | .. code:: bash 203 | 204 | (pyUmbral)$ git log 205 | 206 | 3. Push the resulting tagged commit to the originating remote by tag and branch to ensure they remain synchronized. 207 | 208 | ..
code:: bash 209 | 210 | (pyUmbral)$ git push origin master && git push origin <tag> 211 | 212 | 4. Push the tag directly upstream by its name to trigger the publication webhooks on CircleCI: 213 | 214 | .. code:: bash 215 | 216 | (pyUmbral)$ git push upstream <tag> 217 | 218 | 5. Monitor the triggered deployment build on CircleCI for manual approval. 219 | 6. Open a pull request with the resulting history in order to update ``master``. 220 | -------------------------------------------------------------------------------- /Pipfile: -------------------------------------------------------------------------------- 1 | [[source]] 2 | url = "https://pypi.python.org/simple" 3 | verify_ssl = true 4 | name = "pypi" 5 | 6 | [packages] 7 | setuptools = "*" 8 | cryptography = "~=3.0" 9 | pynacl = "~=1.0" 10 | 11 | [dev-packages] 12 | bumpversion = "*" 13 | # Pytest Plugins 14 | pytest = "*" 15 | pytest-mypy = "*" 16 | pytest-cov = "*" 17 | pytest-benchmark = {version = "*",extras = ["histogram"]} 18 | # Pytest Plugin Subdeps 19 | mypy = "*" 20 | coverage = "*" 21 | codecov = "*" 22 | # Testing libraries 23 | nbval = "*" 24 | # Docs 25 | sphinx = "~=4.0" 26 | sphinx-autobuild = "*" 27 | sphinx_rtd_theme = "*" 28 | # Overrides vulnerable versions allowed by codecov and sphinx: 29 | requests = ">=2.20.0" 30 | 31 | [pipenv] 32 | allow_prereleases = true 33 | -------------------------------------------------------------------------------- /README.rst: -------------------------------------------------------------------------------- 1 | .. role:: bash(code) 2 | :language: bash 3 | 4 | ========= 5 | pyUmbral 6 | ========= 7 | 8 | .. start-badges 9 | 10 | |version| |circleci| |commits-since| |docs| |discord| 11 | 12 | .. |docs| image:: https://readthedocs.org/projects/pyumbral/badge/?style=flat 13 | :target: https://readthedocs.org/projects/pyumbral 14 | :alt: Documentation Status 15 | 16 | .. |discord| image:: https://img.shields.io/discord/411401661714792449.svg?logo=discord 17 | :target: https://discord.gg/xYqyEby 18 | :alt: Discord 19 | 20 | .. |circleci| image:: https://img.shields.io/circleci/project/github/nucypher/pyUmbral.svg?logo=circleci 21 | :target: https://circleci.com/gh/nucypher/pyUmbral/tree/master 22 | :alt: CircleCI build status 23 | 24 | .. |version| image:: https://img.shields.io/pypi/v/umbral.svg 25 | :alt: PyPI Package latest release 26 | :target: https://pypi.org/project/umbral 27 | 28 | .. |commits-since| image:: https://img.shields.io/github/commits-since/nucypher/pyumbral/v0.3.0.svg 29 | :alt: Commits since latest release 30 | :target: https://github.com/nucypher/pyUmbral/compare/v0.3.0...master 31 | 32 | .. end-badges 33 | 34 | pyUmbral is the reference implementation of the Umbral_ threshold proxy re-encryption scheme. 35 | It is open-source, built with Python, and uses OpenSSL_ and Cryptography.io_. 36 | 37 | Using Umbral, Alice (the data owner) can *delegate decryption rights* to Bob for 38 | any ciphertext intended for her, through a re-encryption process performed by a 39 | set of semi-trusted proxies or *Ursulas*. When a threshold of these proxies 40 | participate by performing re-encryption, Bob is able to combine these independent 41 | re-encryptions and decrypt the original message using his private key. 42 | 43 | .. image:: docs/source/.static/umbral.svg 44 | :width: 400 px 45 | :align: center 46 | 47 | pyUmbral is the cryptographic engine behind nucypher_, 48 | a proxy re-encryption network to empower privacy in decentralized systems. 49 | 50 | ..
_Umbral: https://github.com/nucypher/umbral-doc/blob/master/umbral-doc.pdf 51 | .. _Cryptography.io: https://cryptography.io/en/latest/ 52 | .. _OpenSSL: https://www.openssl.org/ 53 | .. _nucypher: https://github.com/nucypher/nucypher 54 | 55 | Usage 56 | ===== 57 | 58 | **Key Generation** 59 | 60 | As in any public-key cryptosystem, users need a pair of public and private keys. 61 | Additionally, users that delegate access to their data (like Alice, in this example) need a signing keypair. 62 | 63 | .. code-block:: python 64 | 65 | from umbral import SecretKey, Signer 66 | 67 | # Generate Umbral keys for Alice. 68 | alices_secret_key = SecretKey.random() 69 | alices_public_key = alices_secret_key.public_key() 70 | 71 | alices_signing_key = SecretKey.random() 72 | alices_signer = Signer(alices_signing_key) 73 | alices_verifying_key = alices_signing_key.public_key() 74 | 75 | # Generate Umbral keys for Bob. 76 | bobs_secret_key = SecretKey.random() 77 | bobs_public_key = bobs_secret_key.public_key() 78 | 79 | 80 | **Encryption** 81 | 82 | Now let's encrypt data with Alice's public key. 83 | Invocation of ``encrypt`` returns both a ``capsule`` and a ``ciphertext``. 84 | Note that anyone with Alice's public key can perform this operation. 85 | 86 | Since data was encrypted with Alice's public key, 87 | Alice can open the capsule and decrypt the ciphertext with her private key. 88 | 89 | 90 | .. code-block:: python 91 | 92 | from umbral import encrypt, decrypt_original 93 | 94 | # Encrypt data with Alice's public key. 95 | plaintext = b'Proxy Re-Encryption is cool!' 96 | capsule, ciphertext = encrypt(alices_public_key, plaintext) 97 | 98 | # Decrypt data with Alice's private key. 99 | cleartext = decrypt_original(alices_secret_key, capsule, ciphertext) 100 | 101 | 102 | **Re-Encryption Key Fragments** 103 | 104 | When Alice wants to grant Bob access to open her encrypted messages, 105 | she creates *re-encryption key fragments*, or *"kfrags"*, 106 | which are then sent to N proxies or *Ursulas*. 107 | 108 | .. code-block:: python 109 | 110 | from umbral import generate_kfrags 111 | 112 | # Alice generates "M of N" re-encryption key fragments (or "KFrags") for Bob. 113 | # In this example, 10 out of 20. 114 | kfrags = generate_kfrags(delegating_sk=alices_secret_key, 115 | receiving_pk=bobs_public_key, 116 | signer=alices_signer, 117 | threshold=10, 118 | shares=20) 119 | 120 | 121 | **Re-Encryption** 122 | 123 | Bob asks several Ursulas to re-encrypt the capsule so he can open it. 124 | Each Ursula performs re-encryption on the capsule using the ``kfrag`` 125 | provided by Alice, obtaining this way a "capsule fragment", or ``cfrag``. 126 | 127 | Bob collects the resulting cfrags from several Ursulas. 128 | Bob must gather at least ``threshold`` cfrags in order to activate the capsule. 129 | 130 | .. code-block:: python 131 | 132 | from umbral import reencrypt 133 | 134 | # Several Ursulas perform re-encryption, and Bob collects the resulting `cfrags`. 135 | cfrags = list() # Bob's cfrag collection 136 | for kfrag in kfrags[:10]: 137 | cfrag = reencrypt(capsule=capsule, kfrag=kfrag) 138 | cfrags.append(cfrag) # Bob collects a cfrag 139 | 140 | 141 | **Decryption by Bob** 142 | 143 | Finally, Bob activates the capsule by attaching at least ``threshold`` cfrags, 144 | and then decrypts the re-encrypted ciphertext. 145 | 146 | ..
code-block:: python 147 | 148 | from umbral import decrypt_reencrypted 149 | 150 | bob_cleartext = decrypt_reencrypted(receiving_sk=bobs_secret_key, 151 | delegating_pk=alices_public_key, 152 | capsule=capsule, 153 | verified_cfrags=cfrags, 154 | ciphertext=ciphertext) 155 | assert bob_cleartext == plaintext 156 | 157 | See more detailed usage examples in the docs_ directory. 158 | 159 | .. _docs : https://github.com/nucypher/pyUmbral/tree/master/docs 160 | 161 | 162 | Quick Installation 163 | ================== 164 | 165 | To install pyUmbral, simply use ``pip``: 166 | 167 | .. code-block:: bash 168 | 169 | $ pip3 install umbral 170 | 171 | 172 | Alternatively, you can check out the repo and install it from there. 173 | The NuCypher team uses ``pipenv`` for managing pyUmbral's dependencies. 174 | The recommended installation procedure is as follows: 175 | 176 | .. code-block:: bash 177 | 178 | $ sudo pip3 install pipenv 179 | $ pipenv install 180 | 181 | Post-installation, you can activate the project virtual environment 182 | in your current terminal session by running ``pipenv shell``. 183 | 184 | For more information on ``pipenv``, find the official documentation here: https://docs.pipenv.org/. 185 | 186 | 187 | Academic Whitepaper 188 | ==================== 189 | 190 | The Umbral scheme academic whitepaper and cryptographic specifications 191 | are available on GitHub_. 192 | 193 | "Umbral: A Threshold Proxy Re-Encryption Scheme" 194 | *by David Nuñez*. 195 | https://github.com/nucypher/umbral-doc/blob/master/umbral-doc.pdf 196 | 197 | .. _GitHub: https://github.com/nucypher/umbral-doc/ 198 | 199 | 200 | Support & Contribute 201 | ===================== 202 | 203 | - Issue Tracker: https://github.com/nucypher/pyUmbral/issues 204 | - Source Code: https://github.com/nucypher/pyUmbral 205 | 206 | 207 | Security 208 | ======== 209 | 210 | If you identify vulnerabilities with *any* nucypher code, 211 | please email security@nucypher.com with relevant information to your findings. 212 | We will work with researchers to coordinate vulnerability disclosure between our partners 213 | and users to ensure successful mitigation of vulnerabilities. 214 | 215 | Throughout the reporting process, 216 | we expect researchers to honor an embargo period that may vary depending on the severity of the disclosure. 217 | This ensures that we have the opportunity to fix any issues, identify further issues (if any), and inform our users. 218 | 219 | Sometimes vulnerabilities are of a more sensitive nature and require extra precautions. 220 | We are happy to work together to use a more secure medium, such as Signal. 221 | Email security@nucypher.com and we will coordinate a communication channel that we're both comfortable with. 222 | -------------------------------------------------------------------------------- /docs/Makefile: -------------------------------------------------------------------------------- 1 | # Minimal makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line. 5 | SPHINXOPTS = 6 | SPHINXBUILD = sphinx-build 7 | SPHINXPROJ = pyUmbral 8 | SOURCEDIR = source 9 | BUILDDIR = build 10 | 11 | # Put it first so that "make" without argument is like "make help". 12 | help: 13 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 14 | 15 | .PHONY: help Makefile 16 | 17 | # Catch-all target: route all unknown targets to Sphinx using the new 18 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
19 | %: Makefile 20 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) -------------------------------------------------------------------------------- /docs/examples/umbral_simple_api.py: -------------------------------------------------------------------------------- 1 | import random 2 | from umbral import ( 3 | SecretKey, Signer, CapsuleFrag, 4 | encrypt, generate_kfrags, reencrypt, decrypt_original, decrypt_reencrypted) 5 | 6 | # Generate an Umbral key pair 7 | # --------------------------- 8 | # First, Let's generate two asymmetric key pairs for Alice: 9 | # A delegating key pair and a Signing key pair. 10 | 11 | alices_secret_key = SecretKey.random() 12 | alices_public_key = alices_secret_key.public_key() 13 | 14 | alices_signing_key = SecretKey.random() 15 | alices_verifying_key = alices_signing_key.public_key() 16 | alices_signer = Signer(alices_signing_key) 17 | 18 | # Encrypt some data for Alice 19 | # --------------------------- 20 | # Now let's encrypt data with Alice's public key. 21 | # Invocation of `pre.encrypt` returns both the `ciphertext`, 22 | # and a `capsule`. Anyone with Alice's public key can perform 23 | # this operation. 24 | 25 | plaintext = b'Proxy Re-encryption is cool!' 26 | capsule, ciphertext = encrypt(alices_public_key, plaintext) 27 | print(ciphertext) 28 | 29 | # Decrypt data for Alice 30 | # ---------------------- 31 | # Since data was encrypted with Alice's public key, 32 | # Alice can open the capsule and decrypt the ciphertext with her private key. 33 | 34 | cleartext = decrypt_original(alices_secret_key, capsule, ciphertext) 35 | print(cleartext) 36 | 37 | # Bob Exists 38 | # ----------- 39 | 40 | bobs_secret_key = SecretKey.random() 41 | bobs_public_key = bobs_secret_key.public_key() 42 | 43 | # Bob receives a capsule through a side channel (s3, ipfs, Google cloud, etc) 44 | bob_capsule = capsule 45 | 46 | # Attempt Bob's decryption (fail) 47 | try: 48 | fail_decrypted_data = decrypt_original(bobs_secret_key, bob_capsule, ciphertext) 49 | except ValueError: 50 | print("Decryption failed! Bob doesn't has access granted yet.") 51 | 52 | # Alice grants access to Bob by generating kfrags 53 | # ----------------------------------------------- 54 | # When Alice wants to grant Bob access to open her encrypted messages, 55 | # she creates *threshold split re-encryption keys*, or *"kfrags"*, 56 | # which are next sent to N proxies or *Ursulas*. 57 | # She uses her private key, and Bob's public key, and she sets a minimum 58 | # threshold of 10, for 20 total shares 59 | 60 | kfrags = generate_kfrags(delegating_sk=alices_secret_key, 61 | receiving_pk=bobs_public_key, 62 | signer=alices_signer, 63 | threshold=10, 64 | shares=20) 65 | 66 | # Ursulas perform re-encryption 67 | # ------------------------------ 68 | # Bob asks several Ursulas to re-encrypt the capsule so he can open it. 69 | # Each Ursula performs re-encryption on the capsule using the `kfrag` 70 | # provided by Alice, obtaining this way a "capsule fragment", or `cfrag`. 71 | # Let's mock a network or transport layer by sampling `threshold` random `kfrags`, 72 | # one for each required Ursula. 73 | 74 | kfrags = random.sample(kfrags, # All kfrags from above 75 | 10) # M - Threshold 76 | 77 | # Bob collects the resulting `cfrags` from several Ursulas. 78 | # Bob must gather at least `threshold` `cfrags` in order to open the capsule. 
79 | 80 | cfrags = list() # Bob's cfrag collection 81 | for kfrag in kfrags: 82 | cfrag = reencrypt(capsule=capsule, kfrag=kfrag) 83 | cfrags.append(cfrag) # Bob collects a cfrag 84 | 85 | assert len(cfrags) == 10 86 | 87 | # Bob checks the capsule fragments 88 | # -------------------------------- 89 | # If Bob received the capsule fragments in serialized form, 90 | # he can verify that they are valid and really originate from Alice, 91 | # using Alice's public keys. 92 | 93 | suspicious_cfrags = [CapsuleFrag.from_bytes(bytes(cfrag)) for cfrag in cfrags] 94 | 95 | cfrags = [cfrag.verify(capsule, 96 | verifying_pk=alices_verifying_key, 97 | delegating_pk=alices_public_key, 98 | receiving_pk=bobs_public_key, 99 | ) 100 | for cfrag in suspicious_cfrags] 101 | 102 | # Bob opens the capsule 103 | # ------------------------------------ 104 | # Finally, Bob decrypts the re-encrypted ciphertext using his key. 105 | 106 | bob_cleartext = decrypt_reencrypted(receiving_sk=bobs_secret_key, 107 | delegating_pk=alices_public_key, 108 | capsule=bob_capsule, 109 | verified_cfrags=cfrags, 110 | ciphertext=ciphertext) 111 | print(bob_cleartext) 112 | assert bob_cleartext == plaintext 113 | -------------------------------------------------------------------------------- /docs/make.bat: -------------------------------------------------------------------------------- 1 | @ECHO OFF 2 | 3 | pushd %~dp0 4 | 5 | REM Command file for Sphinx documentation 6 | 7 | if "%SPHINXBUILD%" == "" ( 8 | set SPHINXBUILD=sphinx-build 9 | ) 10 | set SOURCEDIR=source 11 | set BUILDDIR=build 12 | set SPHINXPROJ=pyUmbral 13 | 14 | if "%1" == "" goto help 15 | 16 | %SPHINXBUILD% >NUL 2>NUL 17 | if errorlevel 9009 ( 18 | echo. 19 | echo.The 'sphinx-build' command was not found. Make sure you have Sphinx 20 | echo.installed, then set the SPHINXBUILD environment variable to point 21 | echo.to the full path of the 'sphinx-build' executable. Alternatively you 22 | echo.may add the Sphinx directory to PATH. 23 | echo. 
24 | echo.If you don't have Sphinx installed, grab it from 25 | echo.http://sphinx-doc.org/ 26 | exit /b 1 27 | ) 28 | 29 | %SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% 30 | goto end 31 | 32 | :help 33 | %SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% 34 | 35 | :end 36 | popd 37 | -------------------------------------------------------------------------------- /docs/notebooks/pyUmbral Simple API.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "# pyUmbral Python API" 8 | ] 9 | }, 10 | { 11 | "cell_type": "markdown", 12 | "metadata": {}, 13 | "source": [ 14 | "## Generate Umbral Keys for Alice\n", 15 | "First, Let's generate two asymmetric key pairs for Alice:\n", 16 | "A *delegating* key pair and a *signing* key pair.\n" 17 | ] 18 | }, 19 | { 20 | "cell_type": "code", 21 | "execution_count": 2, 22 | "metadata": {}, 23 | "outputs": [], 24 | "source": [ 25 | "from umbral import SecretKey, Signer\n", 26 | "\n", 27 | "\n", 28 | "# Alice's Keys\n", 29 | "alices_private_key = SecretKey.random()\n", 30 | "alices_public_key = alices_private_key.public_key()\n", 31 | "\n", 32 | "alices_signing_key = SecretKey.random()\n", 33 | "alices_verifying_key = alices_signing_key.public_key()\n", 34 | "alices_signer = Signer(alices_signing_key)" 35 | ] 36 | }, 37 | { 38 | "cell_type": "markdown", 39 | "metadata": {}, 40 | "source": [ 41 | "## Encrypt some data for Alice\n", 42 | "Now let's encrypt data with Alice's public key. Invocation of `pre.encrypt` returns both the `ciphertext`,\n", 43 | "and a `capsule`. Anyone with Alice's public key can perform this operation." 44 | ] 45 | }, 46 | { 47 | "cell_type": "code", 48 | "execution_count": 3, 49 | "metadata": { 50 | "tags": [ 51 | "nbval-ignore-output" 52 | ] 53 | }, 54 | "outputs": [ 55 | { 56 | "name": "stdout", 57 | "output_type": "stream", 58 | "text": [ 59 | "b'\\xfb\\xc3T\\xb2\\x89=\\x08X\\xb1<\\xd0G/\\xab\\x8c\\xac\\x7f\\xd4)\\xcbB\\xcb^\\x99;P\\x9c\\xbf\\xaaf\\x03\\xdd\\n\\x1f$\\x1b\\xfb\\x88\\xfa\\xcd\\xe2\\x11\\x8d\\xcf\\xe5\\x88\\xaf\\x00\\xfe\\xcb\\x9d\\xf83\\x17\\x9b\\xdd\\xba\\xab\\x8b\\x08\\xbe\\xb1M\\x80\\xf1" 159 | ] 160 | }, 161 | { 162 | "cell_type": "markdown", 163 | "metadata": {}, 164 | "source": [ 165 | "## Alice grants access to Bob by generating KFrags \n", 166 | "When Alice wants to grant Bob access to open her encrypted messages, she creates *re-encryption key fragments*, or \"kfrags\", which are next sent to N proxies or *Ursulas*. She uses her private key, and Bob's public key, and she sets a minimum threshold of 10, for 20 total shares\n" 167 | ] 168 | }, 169 | { 170 | "cell_type": "code", 171 | "execution_count": 7, 172 | "metadata": {}, 173 | "outputs": [], 174 | "source": [ 175 | "from umbral import generate_kfrags\n", 176 | "\n", 177 | "\n", 178 | "M, N = 10, 20 # the threshold and the total number of fragments\n", 179 | "kfrags = generate_kfrags(delegating_sk=alices_private_key,\n", 180 | " receiving_pk=bobs_public_key,\n", 181 | " signer=alices_signer,\n", 182 | " threshold=M,\n", 183 | " shares=N)" 184 | ] 185 | }, 186 | { 187 | "cell_type": "markdown", 188 | "metadata": {}, 189 | "source": [ 190 | "\n", 191 | "## Ursulas Re-encrypt; Bob attaches fragments to `capsule`\n", 192 | "Bob asks several Ursulas to re-encrypt the capsule so he can open it. 
Each Ursula performs re-encryption on the capsule using the `kfrag` provided by Alice, obtaining this way a \"capsule fragment\", or `cfrag`. Let's mock a network or transport layer by sampling `M` random `kfrags`, one for each required Ursula. Bob collects the resulting `cfrags` from several Ursulas. He must gather at least `M` `cfrags` in order to activate the capsule.\n" 193 | ] 194 | }, 195 | { 196 | "cell_type": "code", 197 | "execution_count": 8, 198 | "metadata": {}, 199 | "outputs": [], 200 | "source": [ 201 | "import random\n", 202 | "kfrags = random.sample(kfrags, # All kfrags from above\n", 203 | " M) # Threshold\n", 204 | "\n", 205 | "\n", 206 | "from umbral import reencrypt\n", 207 | "\n", 208 | "\n", 209 | "cfrags = list() # Bob's cfrag collection\n", 210 | "for kfrag in kfrags:\n", 211 | " cfrag = reencrypt(capsule=capsule, kfrag=kfrag)\n", 212 | " cfrags.append(cfrag) # Bob collects a cfrag" 213 | ] 214 | }, 215 | { 216 | "cell_type": "markdown", 217 | "metadata": {}, 218 | "source": [ 219 | "## Bob checks the capsule fragments\n", 220 | "If Bob received the capsule fragments in serialized form, he can verify that they are valid and really originate from Alice, using Alice's public keys." 221 | ] 222 | }, 223 | { 224 | "cell_type": "code", 225 | "execution_count": 10, 226 | "metadata": { 227 | "scrolled": true 228 | }, 229 | "outputs": [], 230 | "source": [ 231 | "from umbral import CapsuleFrag\n", 232 | "\n", 233 | "suspicious_cfrags = [CapsuleFrag.from_bytes(bytes(cfrag)) for cfrag in cfrags]\n", 234 | "\n", 235 | "cfrags = [cfrag.verify(capsule,\n", 236 | " verifying_pk=alices_verifying_key,\n", 237 | " delegating_pk=alices_public_key,\n", 238 | " receiving_pk=bobs_public_key,\n", 239 | " )\n", 240 | " for cfrag in suspicious_cfrags]" 241 | ] 242 | }, 243 | { 244 | "cell_type": "markdown", 245 | "metadata": {}, 246 | "source": [ 247 | "## Bob opens the capsule; Decrypts data from Alice.\n", 248 | "Finally, Bob decrypts the re-encrypted ciphertext using his secret key." 
249 | ] 250 | }, 251 | { 252 | "cell_type": "code", 253 | "execution_count": 12, 254 | "metadata": {}, 255 | "outputs": [ 256 | { 257 | "name": "stdout", 258 | "output_type": "stream", 259 | "text": [ 260 | "b'Proxy Re-encryption is cool!'\n" 261 | ] 262 | } 263 | ], 264 | "source": [ 265 | "from umbral import decrypt_reencrypted\n", 266 | "\n", 267 | "bob_cleartext = decrypt_reencrypted(receiving_sk=bobs_private_key,\n", 268 | " delegating_pk=alices_public_key,\n", 269 | " capsule=capsule,\n", 270 | " verified_cfrags=cfrags,\n", 271 | " ciphertext=ciphertext)\n", 272 | "\n", 273 | "print(bob_cleartext)\n", 274 | "assert bob_cleartext == plaintext" 275 | ] 276 | }, 277 | { 278 | "cell_type": "code", 279 | "execution_count": null, 280 | "metadata": {}, 281 | "outputs": [], 282 | "source": [] 283 | } 284 | ], 285 | "metadata": { 286 | "kernelspec": { 287 | "display_name": "Python 3", 288 | "language": "python", 289 | "name": "python3" 290 | }, 291 | "language_info": { 292 | "codemirror_mode": { 293 | "name": "ipython", 294 | "version": 3 295 | }, 296 | "file_extension": ".py", 297 | "mimetype": "text/x-python", 298 | "name": "python", 299 | "nbconvert_exporter": "python", 300 | "pygments_lexer": "ipython3", 301 | "version": "3.8.5" 302 | } 303 | }, 304 | "nbformat": 4, 305 | "nbformat_minor": 2 306 | } 307 | -------------------------------------------------------------------------------- /docs/source/.static/PRE_image.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/nucypher/pyUmbral/b2abccafa6dc007fd4d3de60c83bc8f1a857e885/docs/source/.static/PRE_image.png -------------------------------------------------------------------------------- /docs/source/api.rst: -------------------------------------------------------------------------------- 1 | Public API 2 | ========== 3 | 4 | .. automodule:: umbral 5 | 6 | Keys 7 | ---- 8 | 9 | .. autoclass:: SecretKey() 10 | :members: 11 | :show-inheritance: 12 | 13 | .. autoclass:: PublicKey() 14 | :members: 15 | :special-members: __eq__, __hash__ 16 | :show-inheritance: 17 | 18 | .. autoclass:: SecretKeyFactory() 19 | :members: 20 | :show-inheritance: 21 | 22 | .. autoclass:: Signer 23 | :members: 24 | 25 | .. autoclass:: Signature() 26 | :members: 27 | :special-members: __eq__, __hash__ 28 | :show-inheritance: 29 | 30 | Intermediate objects 31 | -------------------- 32 | 33 | .. autoclass:: Capsule() 34 | :special-members: __eq__, __hash__ 35 | :show-inheritance: 36 | 37 | .. autoclass:: KeyFrag() 38 | :members: verify 39 | :special-members: __eq__, __hash__ 40 | :show-inheritance: 41 | 42 | .. autoclass:: VerifiedKeyFrag() 43 | :members: 44 | :special-members: __eq__, __hash__ 45 | :show-inheritance: 46 | 47 | .. autoclass:: CapsuleFrag() 48 | :members: 49 | :special-members: __eq__, __hash__ 50 | :show-inheritance: 51 | 52 | .. autoclass:: VerifiedCapsuleFrag() 53 | :members: 54 | :special-members: __eq__, __hash__ 55 | :show-inheritance: 56 | 57 | Encryption, re-encryption and decryption 58 | ---------------------------------------- 59 | 60 | .. autofunction:: encrypt 61 | 62 | .. autofunction:: decrypt_original 63 | 64 | .. autofunction:: generate_kfrags 65 | 66 | .. autofunction:: reencrypt 67 | 68 | .. autofunction:: decrypt_reencrypted 69 | 70 | Utilities 71 | --------- 72 | 73 | .. autoclass:: umbral.VerificationError 74 | :show-inheritance: 75 | 76 | .. autoclass:: umbral.serializable.HasSerializedSize 77 | :members: 78 | 79 | .. 
autoclass:: umbral.serializable.Serializable 80 | :special-members: __bytes__ 81 | :show-inheritance: 82 | 83 | .. autoclass:: umbral.serializable.SerializableSecret 84 | :members: 85 | :show-inheritance: 86 | 87 | .. autoclass:: umbral.serializable.Deserializable 88 | :members: 89 | :show-inheritance: 90 | -------------------------------------------------------------------------------- /docs/source/conf.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # 3 | # Configuration file for the Sphinx documentation builder. 4 | # 5 | # This file does only contain a selection of the most common options. For a 6 | # full list see the documentation: 7 | # http://www.sphinx-doc.org/en/stable/config 8 | 9 | # -- Path setup -------------------------------------------------------------- 10 | 11 | # If extensions (or modules to document with autodoc) are in another directory, 12 | # add these directories to sys.path here. If the directory is relative to the 13 | # documentation root, use os.path.abspath to make it absolute, like shown here. 14 | # 15 | # import os 16 | # import sys 17 | # sys.path.insert(0, os.path.abspath('.')) 18 | 19 | 20 | # -- Project information ----------------------------------------------------- 21 | 22 | project = 'pyUmbral' 23 | copyright = u'2019, Michael Egorov, Justin Myles Holmes, David Nuñez, John Pacific, Kieran Prasch, Bogdan Opanchuk' 24 | author = u'Michael Egorov, Justin Myles Holmes, David Nuñez, John Pacific, Kieran Prasch, Bogdan Opanchuk' 25 | 26 | # The full version, including alpha/beta/rc tags 27 | release = '0.3.0' 28 | 29 | 30 | # -- General configuration --------------------------------------------------- 31 | 32 | # If your documentation needs a minimal Sphinx version, state it here. 33 | # 34 | # needs_sphinx = '1.0' 35 | 36 | # Add any Sphinx extension module names here, as strings. They can be 37 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom 38 | # ones. 39 | extensions = [ 40 | 'sphinx.ext.autodoc', 41 | 'sphinx.ext.mathjax', 42 | 'sphinx.ext.viewcode', 43 | 'sphinx.ext.doctest', 44 | ] 45 | 46 | # Add any paths that contain templates here, relative to this directory. 47 | templates_path = ['.templates'] 48 | 49 | # The suffix(es) of source filenames. 50 | # You can specify multiple suffix as a list of string: 51 | # 52 | # source_suffix = ['.rst', '.md'] 53 | source_suffix = '.rst' 54 | 55 | # The master toctree document. 56 | master_doc = 'index' 57 | 58 | # The language for content autogenerated by Sphinx. Refer to documentation 59 | # for a list of supported languages. 60 | # 61 | # This is also used if you do content translation via gettext catalogs. 62 | # Usually you set "language" from the command line for these cases. 63 | language = None 64 | 65 | # List of patterns, relative to source directory, that match files and 66 | # directories to ignore when looking for source files. 67 | # This pattern also affects html_static_path and html_extra_path . 68 | exclude_patterns = [] 69 | 70 | # The name of the Pygments (syntax highlighting) style to use. 71 | pygments_style = 'sphinx' 72 | 73 | 74 | # -- Options for HTML output ------------------------------------------------- 75 | 76 | # The theme to use for HTML and HTML Help pages. See the documentation for 77 | # a list of builtin themes. 78 | # 79 | html_theme = 'sphinx_rtd_theme' 80 | 81 | # Theme options are theme-specific and customize the look and feel of a theme 82 | # further. 
For a list of options available for each theme, see the 83 | # documentation. 84 | # 85 | # html_theme_options = {} 86 | 87 | # Add any paths that contain custom static files (such as style sheets) here, 88 | # relative to this directory. They are copied after the builtin static files, 89 | # so a file named "default.css" will overwrite the builtin "default.css". 90 | html_static_path = ['.static'] 91 | 92 | # Custom sidebar templates, must be a dictionary that maps document names 93 | # to template names. 94 | # 95 | # The default sidebars (for documents that don't match any pattern) are 96 | # defined by theme itself. Builtin themes are using these templates by 97 | # default: ``['localtoc.html', 'relations.html', 'sourcelink.html', 98 | # 'searchbox.html']``. 99 | # 100 | # html_sidebars = {} 101 | 102 | 103 | # -- Options for HTMLHelp output --------------------------------------------- 104 | 105 | # Output file base name for HTML help builder. 106 | htmlhelp_basename = 'pyUmbraldoc' 107 | 108 | 109 | # -- Options for LaTeX output ------------------------------------------------ 110 | 111 | latex_elements = { 112 | # The paper size ('letterpaper' or 'a4paper'). 113 | # 114 | # 'papersize': 'letterpaper', 115 | 116 | # The font size ('10pt', '11pt' or '12pt'). 117 | # 118 | # 'pointsize': '10pt', 119 | 120 | # Additional stuff for the LaTeX preamble. 121 | # 122 | # 'preamble': '', 123 | 124 | # Latex figure (float) alignment 125 | # 126 | # 'figure_align': 'htbp', 127 | } 128 | 129 | # Grouping the document tree into LaTeX files. List of tuples 130 | # (source start file, target name, title, 131 | # author, documentclass [howto, manual, or own class]). 132 | latex_documents = [ 133 | (master_doc, 'pyUmbral.tex', 'pyUmbral Documentation', 134 | u'Michael Egorov, Justin Myles Holmes, David Nuñez, John Pacific, Kieran Prasch, Bogdan Opanchuk', 'manual'), 135 | ] 136 | 137 | 138 | # -- Options for manual page output ------------------------------------------ 139 | 140 | # One entry per manual page. List of tuples 141 | # (source start file, name, description, authors, manual section). 142 | man_pages = [ 143 | (master_doc, 'pyumbral', 'pyUmbral Documentation', 144 | [author], 1) 145 | ] 146 | 147 | 148 | # -- Options for Texinfo output ---------------------------------------------- 149 | 150 | # Grouping the document tree into Texinfo files. List of tuples 151 | # (source start file, target name, title, author, 152 | # dir menu entry, description, category) 153 | texinfo_documents = [ 154 | (master_doc, 'pyUmbral', 'pyUmbral Documentation', 155 | author, 'pyUmbral', 'One line description of project.', 156 | 'Miscellaneous'), 157 | ] 158 | 159 | 160 | # -- Options for Epub output ------------------------------------------------- 161 | 162 | # Bibliographic Dublin Core info. 163 | epub_title = project 164 | epub_author = author 165 | epub_publisher = author 166 | epub_copyright = copyright 167 | 168 | # The unique identifier of the text. This can be a ISBN number 169 | # or the project homepage. 170 | # 171 | # epub_identifier = '' 172 | 173 | # A unique identification for the text. 174 | # 175 | # epub_uid = '' 176 | 177 | # A list of files that should not be packed into the epub file. 
178 | epub_exclude_files = ['search.html'] 179 | 180 | 181 | # -- Extension configuration ------------------------------------------------- 182 | -------------------------------------------------------------------------------- /docs/source/index.rst: -------------------------------------------------------------------------------- 1 | .. pyUmbral documentation master file 2 | created Thu Feb 15 12:47:25 2018. 3 | 4 | ======== 5 | pyUmbral 6 | ======== 7 | .. start-badges 8 | 9 | |version| |circleci| |commits-since| |docs| |discord| 10 | 11 | .. |docs| image:: https://readthedocs.org/projects/pyumbral/badge/?style=flat 12 | :target: https://readthedocs.org/projects/pyumbral 13 | :alt: Documentation Status 14 | 15 | .. |discord| image:: https://img.shields.io/discord/411401661714792449.svg?logo=discord 16 | :target: https://discord.gg/xYqyEby 17 | :alt: Discord 18 | 19 | .. |circleci| image:: https://img.shields.io/circleci/project/github/nucypher/pyUmbral.svg?logo=circleci 20 | :target: https://circleci.com/gh/nucypher/pyUmbral/tree/master 21 | :alt: CircleCI build status 22 | 23 | .. |version| image:: https://img.shields.io/pypi/v/umbral.svg 24 | :alt: PyPI Package latest release 25 | :target: https://pypi.org/project/umbral 26 | 27 | .. |commits-since| image:: https://img.shields.io/github/commits-since/nucypher/pyumbral/v0.3.0.svg 28 | :alt: Commits since latest release 29 | :target: https://github.com/nucypher/pyUmbral/compare/v0.3.0...master 30 | 31 | .. end-badges 32 | 33 | pyUmbral is the reference implementation of the Umbral_ threshold proxy re-encryption scheme. 34 | It is open-source, built with Python, and uses OpenSSL_ via Cryptography.io_, and libsodium_ via PyNaCl_. 35 | 36 | Using Umbral, Alice (the data owner) can *delegate decryption rights* to Bob for 37 | any ciphertext intended to her, through a re-encryption process performed by a 38 | set of semi-trusted proxies or *Ursulas*. When a threshold of these proxies 39 | participate by performing re-encryption, Bob is able to combine these independent 40 | re-encryptions and decrypt the original message using his private key. 41 | 42 | .. image:: .static/umbral.svg 43 | :width: 400 px 44 | :align: center 45 | 46 | pyUmbral is the cryptographic engine behind nucypher_, 47 | a proxy re-encryption network to empower privacy in decentralized systems. 48 | 49 | .. _Umbral: https://github.com/nucypher/umbral-doc/blob/master/umbral-doc.pdf 50 | .. _Cryptography.io: https://cryptography.io/en/latest/ 51 | .. _OpenSSL: https://www.openssl.org/ 52 | .. _nucypher: https://github.com/nucypher/nucypher 53 | .. _libsodium: https://github.com/jedisct1/libsodium 54 | .. _PyNaCl: https://pynacl.readthedocs.io/en/latest/ 55 | 56 | .. toctree:: 57 | :maxdepth: 3 58 | :caption: Table of Contents: 59 | 60 | installation 61 | using_pyumbral 62 | api 63 | 64 | 65 | Academic Whitepaper 66 | ==================== 67 | 68 | The Umbral scheme academic whitepaper and cryptographic specifications 69 | are available on GitHub_. 70 | 71 | "Umbral: A Threshold Proxy Re-Encryption Scheme" 72 | *by David Nuñez*. 73 | https://github.com/nucypher/umbral-doc/blob/master/umbral-doc.pdf 74 | 75 | .. 
_GitHub: https://github.com/nucypher/umbral-doc/ 76 | 77 | 78 | Support & Contribute 79 | ===================== 80 | 81 | - Issue Tracker: https://github.com/nucypher/pyUmbral/issues 82 | - Source Code: https://github.com/nucypher/pyUmbral 83 | 84 | 85 | Security 86 | ======== 87 | 88 | If you identify vulnerabilities with _any_ nucypher code, 89 | please email security@nucypher.com with relevant information to your findings. 90 | We will work with researchers to coordinate vulnerability disclosure between our partners 91 | and users to ensure successful mitigation of vulnerabilities. 92 | 93 | Throughout the reporting process, 94 | we expect researchers to honor an embargo period that may vary depending on the severity of the disclosure. 95 | This ensures that we have the opportunity to fix any issues, identify further issues (if any), and inform our users. 96 | 97 | Sometimes vulnerabilities are of a more sensitive nature and require extra precautions. 98 | We are happy to work together to use a more secure medium, such as Signal. 99 | Email security@nucypher.com and we will coordinate a communication channel that we're both comfortable with. 100 | 101 | 102 | Indices and Tables 103 | ================== 104 | 105 | * :ref:`genindex` 106 | * :ref:`modindex` 107 | * :ref:`search` 108 | -------------------------------------------------------------------------------- /docs/source/installation.rst: -------------------------------------------------------------------------------- 1 | Installing pyUmbral 2 | ==================== 3 | 4 | 5 | Using pip 6 | ------------------------- 7 | 8 | The easiest way to install pyUmbral is using ``pip``: 9 | 10 | .. code-block:: bash 11 | 12 | $ pip3 install umbral 13 | 14 | 15 | Build from source code 16 | ------------------------- 17 | 18 | pyUmbral is maintained on GitHub: https://github.com/nucypher/pyUmbral. 19 | 20 | Clone the repository to download the source code. 21 | 22 | .. code-block:: bash 23 | 24 | $ git clone https://github.com/nucypher/pyUmbral.git 25 | 26 | Once you have acquired the source code, you can... 27 | 28 | *...embed pyUmbral modules into your own codebase...* 29 | 30 | .. code-block:: python 31 | 32 | from umbral import pre, keys, config 33 | 34 | *...install pyUmbral with pipenv...* 35 | 36 | .. code-block:: bash 37 | 38 | $ pipenv install . 39 | 40 | *...or install it with python-pip...* 41 | 42 | .. code-block:: bash 43 | 44 | $ pip3 install . 45 | 46 | 47 | Install dependencies 48 | --------------------- 49 | 50 | The NuCypher team uses pipenv for managing pyUmbral's dependencies. 51 | The recommended installation procedure is as follows: 52 | 53 | .. code-block:: bash 54 | 55 | $ sudo pip3 install pipenv 56 | $ pipenv install 57 | 58 | Post-installation, you can activate the pyUmbral's virtual environment 59 | in your current terminal session by running :code:`pipenv shell`. 60 | 61 | If your installation is successful, the following command will succeed without error. 62 | 63 | .. code-block:: bash 64 | 65 | $ pipenv run python 66 | >>> import umbral 67 | 68 | For more information on pipenv, The official documentation is located here: https://docs.pipenv.org/. 69 | 70 | 71 | Development Installation 72 | ------------------------- 73 | 74 | If you want to participate in developing pyUmbral, you'll probably want to run the test suite and / or 75 | build the documentation, and for that, you must install some additional development requirements. 76 | 77 | .. 
code-block:: bash 78 | 79 | $ pipenv install --dev --three 80 | 81 | 82 | To build the documentation locally: 83 | 84 | .. code-block:: bash 85 | 86 | $ pipenv run make html --directory=docs 87 | 88 | -------------------------------------------------------------------------------- /docs/source/using_pyumbral.rst: -------------------------------------------------------------------------------- 1 | ============== 2 | Using pyUmbral 3 | ============== 4 | 5 | .. image:: .static/PRE_image.png 6 | 7 | 8 | .. testsetup:: capsule_story 9 | 10 | import sys 11 | import os 12 | sys.path.append(os.path.abspath(os.getcwd())) 13 | 14 | 15 | Elliptic Curves 16 | =============== 17 | 18 | The matter of which curve to use is the subject of some debate. If you aren't sure, you might start here: 19 | https://safecurves.cr.yp.to/ 20 | 21 | A number of curves are available in the Cryptography.io_ library, on which pyUmbral depends. 22 | You can find them in the ``cryptography.hazmat.primitives.asymmetric.ec`` module. 23 | 24 | .. _Cryptography.io: https://cryptography.io/en/latest/ 25 | 26 | .. important:: 27 | 28 | Be careful when choosing a curve - the security of your application depends on it. 29 | 30 | We provide curve ``SECP256K1`` as a default because it is the basis for a number of crypto-blockchain projects; 31 | we don't otherwise endorse its security. 32 | We additionally support curves ``SECP256R1`` (also known as "NIST P-256") and ``SECP384R1`` ("NIST P-384"), but they cannot currently be selected via the public API. 33 | 34 | 35 | Encryption 36 | ========== 37 | 38 | 39 | Generate an Umbral key pair 40 | ----------------------------- 41 | First, let's generate two asymmetric key pairs for Alice: 42 | A delegating key pair and a signing key pair. 43 | 44 | .. doctest:: capsule_story 45 | 46 | >>> from umbral import SecretKey, Signer 47 | 48 | >>> alices_secret_key = SecretKey.random() 49 | >>> alices_public_key = alices_secret_key.public_key() 50 | 51 | >>> alices_signing_key = SecretKey.random() 52 | >>> alices_verifying_key = alices_signing_key.public_key() 53 | >>> alices_signer = Signer(alices_signing_key) 54 | 55 | 56 | Encrypt with a public key 57 | -------------------------- 58 | Now let's encrypt data with Alice's public key. 59 | Invocation of :py:func:`umbral.encrypt` returns both a ``capsule`` and a ``ciphertext``. 60 | Note that anyone with Alice's public key can perform this operation. 61 | 62 | 63 | .. doctest:: capsule_story 64 | 65 | >>> from umbral import encrypt 66 | >>> plaintext = b'Proxy Re-encryption is cool!' 67 | >>> capsule, ciphertext = encrypt(alices_public_key, plaintext) 68 | 69 | 70 | Decrypt with a private key 71 | --------------------------- 72 | Since data was encrypted with Alice's public key, 73 | Alice can open the capsule and decrypt the ciphertext with her private key. 74 | 75 | .. doctest:: capsule_story 76 | 77 | >>> from umbral import decrypt_original 78 | >>> cleartext = decrypt_original(alices_secret_key, capsule, ciphertext) 79 | 80 | 81 | Threshold Re-Encryption 82 | ================================== 83 | 84 | Bob Exists 85 | ----------- 86 | 87 | .. 
doctest:: capsule_story 88 | 89 | >>> bobs_secret_key = SecretKey.random() 90 | >>> bobs_public_key = bobs_secret_key.public_key() 91 | 92 | 93 | Alice grants access to Bob by generating kfrags 94 | ----------------------------------------------- 95 | When Alice wants to grant Bob access to view her encrypted data, 96 | she creates *re-encryption key fragments*, or *"kfrags"*, 97 | which are then sent to N proxies, or *Ursulas*. 98 | 99 | Alice must specify ``shares`` (the total number of kfrags), 100 | and a ``threshold`` (the minimum number of kfrags needed to activate a capsule). 101 | In the following example, Alice creates 20 kfrags, 102 | but Bob needs to get only 10 re-encryptions to activate the capsule. 103 | 104 | .. doctest:: capsule_story 105 | 106 | >>> from umbral import generate_kfrags 107 | >>> kfrags = generate_kfrags(delegating_sk=alices_secret_key, 108 | ... receiving_pk=bobs_public_key, 109 | ... signer=alices_signer, 110 | ... threshold=10, 111 | ... shares=20) 112 | 113 | 114 | Bob receives a capsule 115 | ----------------------- 116 | Next, let's pretend to send Bob the capsule 117 | through a side channel like 118 | S3, IPFS, Google Cloud, Sneakernet, etc. 119 | 120 | .. code-block:: python 121 | 122 | # Bob receives the capsule through a side-channel: IPFS, Sneakernet, etc. 123 | capsule = ...  # placeholder for the received capsule 124 | 125 | 126 | Bob fails to open the capsule 127 | ------------------------------- 128 | If Bob attempts to open a capsule that was not encrypted for his public key, 129 | or re-encrypted for him by an Ursula, he will not be able to open it. 130 | 131 | .. doctest:: capsule_story 132 | 133 | >>> fail = decrypt_original(delegating_sk=bobs_secret_key, 134 | ... capsule=capsule, 135 | ... ciphertext=ciphertext) 136 | Traceback (most recent call last): 137 | ... 138 | ValueError 139 | 140 | 141 | Ursulas perform re-encryption 142 | ------------------------------ 143 | Bob asks several Ursulas to re-encrypt the capsule so he can open it. 144 | Each Ursula performs re-encryption on the capsule using the ``kfrag`` 145 | provided by Alice, thereby obtaining a "capsule fragment", or ``cfrag``. 146 | Let's mock a network or transport layer by sampling ``threshold`` random kfrags, 147 | one for each required Ursula. 148 | 149 | Bob collects the resulting cfrags from several Ursulas. 150 | Bob must gather at least ``threshold`` cfrags in order to open the capsule. 151 | 152 | 153 | .. doctest:: capsule_story 154 | 155 | >>> import random 156 | >>> kfrags = random.sample(kfrags, # All kfrags from above 157 | ... 10) # threshold 158 | 159 | >>> from umbral import reencrypt 160 | >>> cfrags = list() # Bob's cfrag collection 161 | >>> for kfrag in kfrags: 162 | ... cfrag = reencrypt(capsule=capsule, kfrag=kfrag) 163 | ... cfrags.append(cfrag) # Bob collects a cfrag 164 | 165 | .. doctest:: capsule_story 166 | :hide: 167 | 168 | >>> assert len(cfrags) == 10 169 | 170 | 171 | Decryption 172 | ================================== 173 | 174 | Bob checks the capsule fragments 175 | -------------------------------- 176 | If Bob received the capsule fragments in serialized form, 177 | he can verify that they are valid and really originate from Alice, 178 | using Alice's public keys. 179 | 180 | .. doctest:: capsule_story 181 | 182 | >>> from umbral import CapsuleFrag 183 | >>> suspicious_cfrags = [CapsuleFrag.from_bytes(bytes(cfrag)) for cfrag in cfrags] 184 | >>> cfrags = [cfrag.verify(capsule, 185 | ... verifying_pk=alices_verifying_key, 186 | ... 
delegating_pk=alices_public_key, 187 | ... receiving_pk=bobs_public_key, 188 | ... ) 189 | ... for cfrag in suspicious_cfrags] 190 | 191 | 192 | Bob opens the capsule 193 | --------------------- 194 | Finally, Bob decrypts the re-encrypted ciphertext using his key. 195 | 196 | .. doctest:: capsule_story 197 | 198 | >>> from umbral import decrypt_reencrypted 199 | >>> cleartext = decrypt_reencrypted(receiving_sk=bobs_secret_key, 200 | ... delegating_pk=alices_public_key, 201 | ... capsule=capsule, 202 | ... verified_cfrags=cfrags, 203 | ... ciphertext=ciphertext) 204 | 205 | 206 | .. doctest:: capsule_story 207 | :hide: 208 | 209 | >>> assert cleartext == plaintext 210 | -------------------------------------------------------------------------------- /mypy.ini: -------------------------------------------------------------------------------- 1 | [mypy] 2 | python_version=3.6 3 | verbosity=0 4 | ignore_missing_imports=True 5 | [mypy-umbral.*] 6 | disallow_untyped_defs=False 7 | check_untyped_defs=False 8 | disallow_untyped_calls=False 9 | [mypy-umbral.openssl] 10 | disallow_untyped_defs=False -------------------------------------------------------------------------------- /pylintrc: -------------------------------------------------------------------------------- 1 | [MASTER] 2 | 3 | # Specify a configuration file. 4 | #rcfile= 5 | 6 | # Python code to execute, usually for sys.path manipulation such as 7 | # pygtk.require(). 8 | #init-hook= 9 | 10 | # Profiled execution. 11 | profile=no 12 | 13 | # Add files or directories to the blacklist. They should be base names, not 14 | # paths. 15 | ignore=CVS 16 | 17 | # Pickle collected data for later comparisons. 18 | persistent=yes 19 | 20 | # List of plugins (as comma separated values of python modules names) to load, 21 | # usually to register additional checkers. 22 | load-plugins= 23 | 24 | 25 | [MESSAGES CONTROL] 26 | 27 | # Enable the message, report, category or checker with the given id(s). You can 28 | # either give multiple identifier separated by comma (,) or put this option 29 | # multiple time. 30 | #enable= 31 | 32 | # Disable the message, report, category or checker with the given id(s). You 33 | # can either give multiple identifier separated by comma (,) or put this option 34 | # multiple time (only on the command line, not in the configuration file where 35 | # it should appear only once). 36 | 37 | # disable warnings: 38 | # - C0103 (we have a lot of short names) 39 | # - C0114 (flat package, no need for module docstrings) 40 | # - C0116 (we have many short self-documenting functions) 41 | disable=C0103,C0114,C0116 42 | 43 | 44 | [REPORTS] 45 | 46 | # Set the output format. Available formats are text, parseable, colorized, msvs 47 | # (visual studio) and html 48 | output-format=text 49 | 50 | # Include message's id in output 51 | include-ids=no 52 | 53 | # Put messages in a separate file for each module / package specified on the 54 | # command line instead of printing them on stdout. Reports (if any) will be 55 | # written in a file name "pylint_global.[txt|html]". 56 | files-output=no 57 | 58 | # Tells whether to display a full report or only the messages 59 | reports=yes 60 | 61 | # Python expression which should return a note less than 10 (10 is the highest 62 | # note). You have access to the variables errors warning, statement which 63 | # respectively contain the number of errors / warnings messages and the total 64 | # number of statements analyzed. This is used by the global evaluation report 65 | # (RP0004). 
66 | evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10) 67 | 68 | # Add a comment according to your evaluation note. This is used by the global 69 | # evaluation report (RP0004). 70 | comment=no 71 | 72 | 73 | [BASIC] 74 | 75 | # Required attributes for module, separated by a comma 76 | required-attributes= 77 | 78 | # List of builtins function names that should not be used, separated by a comma 79 | bad-functions=map,filter,apply,input 80 | 81 | # Regular expression which should only match correct module names 82 | module-rgx=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$ 83 | 84 | # Regular expression which should only match correct module level names 85 | const-rgx=(([A-Z_][A-Z0-9_]*)|(__.*__))$ 86 | 87 | # Regular expression which should only match correct class names 88 | class-rgx=[A-Z_][a-zA-Z0-9]+$ 89 | 90 | # Regular expression which should only match correct function names 91 | function-rgx=[a-z_][a-z0-9_]{2,30}$ 92 | 93 | # Regular expression which should only match correct method names 94 | method-rgx=[a-z_][a-z0-9_]{2,30}$ 95 | 96 | # Regular expression which should only match correct instance attribute names 97 | attr-rgx=[a-z_][a-z0-9_]{2,30}$ 98 | 99 | # Regular expression which should only match correct argument names 100 | argument-rgx=[a-z_][a-z0-9_]{2,30}$ 101 | 102 | # Regular expression which should only match correct variable names 103 | variable-rgx=[a-z_][a-z0-9_]{2,30}$ 104 | 105 | # Regular expression which should only match correct list comprehension / 106 | # generator expression variable names 107 | inlinevar-rgx=[A-Za-z_][A-Za-z0-9_]*$ 108 | 109 | # Good variable names which should always be accepted, separated by a comma 110 | good-names=i,j,k,ex,Run,_ 111 | 112 | # Bad variable names which should always be refused, separated by a comma 113 | bad-names=foo,bar,baz,toto,tutu,tata 114 | 115 | # Regular expression which should only match functions or classes name which do 116 | # not require a docstring 117 | no-docstring-rgx=__.*__ 118 | 119 | 120 | [FORMAT] 121 | 122 | # Maximum number of characters on a single line. 123 | max-line-length=100 124 | 125 | # Maximum number of lines in a module 126 | max-module-lines=1000 127 | 128 | # String used as indentation unit. This is usually " " (4 spaces) or "\t" (1 129 | # tab). 130 | indent-string=' ' 131 | 132 | 133 | [MISCELLANEOUS] 134 | 135 | # List of note tags to take in consideration, separated by a comma. 136 | notes=FIXME,XXX,TODO 137 | 138 | 139 | [SIMILARITIES] 140 | 141 | # Minimum lines number of a similarity. 142 | min-similarity-lines=4 143 | 144 | # Ignore comments when computing similarities. 145 | ignore-comments=yes 146 | 147 | # Ignore docstrings when computing similarities. 148 | ignore-docstrings=yes 149 | 150 | 151 | [TYPECHECK] 152 | 153 | # Tells whether missing members accessed in mixin class should be ignored. A 154 | # mixin class is detected if its name ends with "mixin" (case insensitive). 155 | ignore-mixin-members=yes 156 | 157 | # List of classes names for which member attributes should not be checked 158 | # (useful for classes with attributes dynamically set). 159 | ignored-classes=SQLObject 160 | 161 | # When zope mode is activated, add a predefined set of Zope acquired attributes 162 | # to generated-members. 163 | zope=no 164 | 165 | # List of members which are set dynamically and missed by pylint inference 166 | # system, and so shouldn't trigger E0201 when accessed. Python regular 167 | # expressions are accepted. 
168 | generated-members=REQUEST,acl_users,aq_parent 169 | 170 | 171 | [VARIABLES] 172 | 173 | # Tells whether we should check for unused import in __init__ files. 174 | init-import=no 175 | 176 | # A regular expression matching the beginning of the name of dummy variables 177 | # (i.e. not used). 178 | dummy-variables-rgx=_|dummy 179 | 180 | # List of additional names supposed to be defined in builtins. Remember that 181 | # you should avoid to define new builtins when possible. 182 | additional-builtins= 183 | 184 | 185 | [CLASSES] 186 | 187 | # List of interface methods to ignore, separated by a comma. This is used for 188 | # instance to not check methods defines in Zope's Interface base class. 189 | ignore-iface-methods=isImplementedBy,deferred,extends,names,namesAndDescriptions,queryDescriptionFor,getBases,getDescriptionFor,getDoc,getName,getTaggedValue,getTaggedValueTags,isEqualOrExtendedBy,setTaggedValue,isImplementedByInstancesOf,adaptWith,is_implemented_by 190 | 191 | # List of method names used to declare (i.e. assign) instance attributes. 192 | defining-attr-methods=__init__,__new__,setUp 193 | 194 | # List of valid names for the first argument in a class method. 195 | valid-classmethod-first-arg=cls 196 | 197 | 198 | [DESIGN] 199 | 200 | # Maximum number of arguments for function / method 201 | max-args=5 202 | 203 | # Argument names that match this expression will be ignored. Default to name 204 | # with leading underscore 205 | ignored-argument-names=_.* 206 | 207 | # Maximum number of locals for function / method body 208 | max-locals=20 209 | 210 | # Maximum number of return / yield for function / method body 211 | max-returns=6 212 | 213 | # Maximum number of branch for function / method body 214 | max-branchs=12 215 | 216 | # Maximum number of statements in function / method body 217 | max-statements=50 218 | 219 | # Maximum number of parents for a class (see R0901). 220 | max-parents=7 221 | 222 | # Maximum number of attributes for a class (see R0902). 223 | max-attributes=7 224 | 225 | # Minimum number of public methods for a class (see R0903). 226 | min-public-methods=2 227 | 228 | # Maximum number of public methods for a class (see R0904). 229 | max-public-methods=20 230 | 231 | 232 | [IMPORTS] 233 | 234 | # Deprecated modules which should not be used, separated by a comma 235 | deprecated-modules=regsub,string,TERMIOS,Bastion,rexec 236 | 237 | # Create a graph of every (i.e. internal and external) dependencies in the 238 | # given file (report RP0402 must not be disabled) 239 | import-graph= 240 | 241 | # Create a graph of external dependencies in the given file (report RP0402 must 242 | # not be disabled) 243 | ext-import-graph= 244 | 245 | # Create a graph of internal dependencies in the given file (report RP0402 must 246 | # not be disabled) 247 | int-import-graph= 248 | 249 | 250 | [EXCEPTIONS] 251 | 252 | # Exceptions that will emit a warning when being caught. Defaults to 253 | # "Exception" 254 | overgeneral-exceptions=Exception 255 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | 4 | """ 5 | This file is part of pyUmbral. 6 | 7 | pyUmbral is free software: you can redistribute it and/or modify 8 | it under the terms of the GNU General Public License as published by 9 | the Free Software Foundation, either version 3 of the License, or 10 | (at your option) any later version. 
11 | 12 | pyUmbral is distributed in the hope that it will be useful, 13 | but WITHOUT ANY WARRANTY; without even the implied warranty of 14 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 15 | GNU General Public License for more details. 16 | 17 | You should have received a copy of the GNU General Public License 18 | along with pyUmbral. If not, see . 19 | """ 20 | 21 | import os 22 | import sys 23 | 24 | from setuptools import setup, find_packages 25 | from setuptools.command.install import install 26 | 27 | 28 | BASE_DIR = os.path.dirname(__file__) 29 | 30 | ABOUT = dict() 31 | with open(os.path.join(BASE_DIR, "umbral", "__about__.py")) as f: 32 | exec(f.read(), ABOUT) 33 | 34 | 35 | with open(os.path.join(BASE_DIR, "README.rst")) as f: 36 | long_description = f.read() 37 | 38 | 39 | class VerifyVersionCommand(install): 40 | """Custom command to verify that the git tag matches our version""" 41 | description = 'verify that the git tag matches our version' 42 | 43 | def run(self): 44 | tag = os.getenv('CIRCLE_TAG') 45 | if tag.startswith('v'): 46 | tag = tag[1:] 47 | 48 | version = ABOUT['__version__'] 49 | if version.startswith('v'): 50 | version = version[1:] 51 | 52 | if tag != version: 53 | info = "Git tag: {0} does not match the version of this app: {1}".format( 54 | os.getenv('CIRCLE_TAG'), ABOUT['__version__'] 55 | ) 56 | sys.exit(info) 57 | 58 | 59 | INSTALL_REQUIRES = [ 60 | 'setuptools', 61 | 'cryptography~=3.0', 62 | 'pynacl~=1.0', 63 | ] 64 | 65 | DEV_INSTALL_REQUIRES = [ 66 | 'pytest', 67 | 'pytest-mypy', 68 | 'pytest-cov', 69 | 'coverage', 70 | 'codecov', 71 | 'nbval', 72 | 'mypy', 73 | 'bumpversion', 74 | ] 75 | 76 | EXTRAS_REQUIRE = { 77 | 'testing': DEV_INSTALL_REQUIRES, 78 | 'docs': [ 79 | 'sphinx~=4.0', 80 | 'sphinx-autobuild', 81 | 'sphinx_rtd_theme', 82 | ], 83 | 'benchmarks': ['pytest-benchmark'], 84 | } 85 | 86 | 87 | setup(name=ABOUT['__title__'], 88 | url=ABOUT['__url__'], 89 | version=ABOUT['__version__'], 90 | author=ABOUT['__author__'], 91 | author_email=ABOUT['__email__'], 92 | description=ABOUT['__summary__'], 93 | long_description=long_description, 94 | extras_require=EXTRAS_REQUIRE, 95 | install_requires=INSTALL_REQUIRES, 96 | setup_requires=['pytest-runner'], # required for setup.py test 97 | packages=find_packages(exclude=['tests']), 98 | classifiers=[ 99 | "Development Status :: 2 - Pre-Alpha", 100 | "Intended Audience :: Science/Research", 101 | "License :: OSI Approved :: GNU General Public License v3 (GPLv3)", 102 | "Natural Language :: English", 103 | "Programming Language :: Python :: Implementation", 104 | "Programming Language :: Python :: 3 :: Only", 105 | "Programming Language :: Python :: 3.6", 106 | "Programming Language :: Python :: 3.7", 107 | "Topic :: Scientific/Engineering", 108 | ], 109 | python_requires='>=3', 110 | cmdclass={'verify': VerifyVersionCommand} 111 | ) 112 | -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/nucypher/pyUmbral/b2abccafa6dc007fd4d3de60c83bc8f1a857e885/tests/__init__.py -------------------------------------------------------------------------------- /tests/conftest.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | from umbral import SecretKey, Signer, generate_kfrags, encrypt 4 | 5 | 6 | @pytest.fixture 7 | def alices_keys(): 8 | delegating_sk = SecretKey.random() 9 | 
signing_sk = SecretKey.random() 10 | return delegating_sk, signing_sk 11 | 12 | 13 | @pytest.fixture 14 | def bobs_keys(): 15 | sk = SecretKey.random() 16 | pk = sk.public_key() 17 | return sk, pk 18 | 19 | 20 | @pytest.fixture 21 | def verification_keys(alices_keys, bobs_keys): 22 | delegating_sk, signing_sk = alices_keys 23 | _receiving_sk, receiving_pk = bobs_keys 24 | 25 | verifying_pk = signing_sk.public_key() 26 | delegating_pk = delegating_sk.public_key() 27 | 28 | return verifying_pk, delegating_pk, receiving_pk 29 | 30 | 31 | @pytest.fixture 32 | def kfrags(alices_keys, bobs_keys): 33 | delegating_sk, signing_sk = alices_keys 34 | receiving_sk, receiving_pk = bobs_keys 35 | yield generate_kfrags(delegating_sk=delegating_sk, 36 | signer=Signer(signing_sk), 37 | receiving_pk=receiving_pk, 38 | threshold=6, shares=10) 39 | 40 | 41 | @pytest.fixture(scope='session') 42 | def message(): 43 | message = (b"dnunez [9:30 AM]" 44 | b"@Tux we had this super fruitful discussion last night with @jMyles @michwill @KPrasch" 45 | b"to sum up: the symmetric ciphertext is now called the 'Chimney'." 46 | b"the chimney of the capsule, of course" 47 | b"tux [9:32 AM]" 48 | b"wat") 49 | return message 50 | 51 | 52 | @pytest.fixture 53 | def capsule_and_ciphertext(alices_keys, message): 54 | delegating_sk, _signing_sk = alices_keys 55 | capsule, ciphertext = encrypt(delegating_sk.public_key(), message) 56 | return capsule, ciphertext 57 | 58 | 59 | @pytest.fixture 60 | def capsule(capsule_and_ciphertext): 61 | capsule, ciphertext = capsule_and_ciphertext 62 | return capsule 63 | -------------------------------------------------------------------------------- /tests/metrics/reencryption_benchmark.py: -------------------------------------------------------------------------------- 1 | import os 2 | import time 3 | 4 | import pytest 5 | 6 | import umbral as umbral_py 7 | 8 | try: 9 | import umbral_pre as umbral_rs 10 | except ImportError: 11 | umbral_rs = None 12 | 13 | 14 | def pytest_generate_tests(metafunc): 15 | if 'umbral' in metafunc.fixturenames: 16 | implementations = [umbral_py] 17 | ids = ['python'] 18 | if umbral_rs is not None: 19 | implementations.append(umbral_rs) 20 | ids.append('rust') 21 | metafunc.parametrize('umbral', implementations, ids=ids) 22 | 23 | 24 | # Faster 25 | # (M, N) # | 26 | FRAG_VALUES = ((1, 1), # | 27 | (2, 3), # | 28 | (5, 8), # | 29 | (6, 10), # | 30 | (10, 30), # | 31 | # (20, 30), # | # FIXME: CircleCi build killed 32 | # (10, 100) # | 33 | # | 34 | ) # | 35 | # Slower 36 | 37 | 38 | def __standard_encryption_api(umbral) -> tuple: 39 | 40 | delegating_sk = umbral.SecretKey.random() 41 | delegating_pk = delegating_sk.public_key() 42 | 43 | signing_sk = umbral.SecretKey.random() 44 | signer = umbral.Signer(signing_sk) 45 | 46 | receiving_sk = umbral.SecretKey.random() 47 | receiving_pk = receiving_sk.public_key() 48 | 49 | plain_data = os.urandom(32) 50 | capsule, ciphertext = umbral.encrypt(delegating_pk, plain_data) 51 | 52 | return delegating_sk, receiving_pk, signer, ciphertext, capsule 53 | 54 | 55 | # 56 | # KFrag Generation Benchmarks 57 | # 58 | 59 | 60 | @pytest.mark.benchmark(group="Reencryption Key Generation Performance", 61 | disable_gc=True, 62 | warmup=True, 63 | warmup_iterations=10) 64 | @pytest.mark.parametrize("m, n", FRAG_VALUES) 65 | def test_generate_kfrags_performance(benchmark, m: int, n: int, umbral) -> None: 66 | 67 | def __setup(): 68 | delegating_sk, receiving_pk, signer, ciphertext, capsule = __standard_encryption_api(umbral) 69 | return 
(delegating_sk, receiving_pk, signer, m, n, True, True), {} 70 | 71 | benchmark.pedantic(umbral.generate_kfrags, setup=__setup, rounds=1000) 72 | assert True # ensure function finishes and succeeds. 73 | 74 | 75 | # 76 | # Reencryption Benchmarks 77 | # 78 | 79 | @pytest.mark.benchmark(group="Reencryption Performance", 80 | timer=time.perf_counter, 81 | disable_gc=True, 82 | warmup=True, 83 | warmup_iterations=10) 84 | @pytest.mark.parametrize("m, n", ((6, 10), )) 85 | def test_random_frag_reencryption_performance(benchmark, m: int, n: int, umbral) -> None: 86 | 87 | def __setup(): 88 | delegating_sk, receiving_pk, signer, ciphertext, capsule = __standard_encryption_api(umbral) 89 | kfrags = umbral.generate_kfrags(delegating_sk, receiving_pk, signer, m, n, True, True) 90 | one_kfrag, *remaining_kfrags = kfrags 91 | return (capsule, one_kfrag), {} 92 | 93 | benchmark.pedantic(umbral.reencrypt, setup=__setup, rounds=1000) 94 | assert True # ensure function finishes and succeeds. 95 | 96 | 97 | @pytest.mark.benchmark(group="Reencryption Performance", 98 | timer=time.perf_counter, 99 | disable_gc=True, 100 | min_time=0.00005, 101 | max_time=0.005, 102 | min_rounds=7, 103 | warmup=True, 104 | warmup_iterations=10) 105 | @pytest.mark.parametrize("m, n", ((6, 10), )) 106 | def test_single_frag_reencryption_performance(benchmark, m: int, n: int, umbral) -> None: 107 | 108 | delegating_sk, receiving_pk, signer, ciphertext, capsule = __standard_encryption_api(umbral) 109 | kfrags = umbral.generate_kfrags(delegating_sk, receiving_pk, signer, m, n, True, True) 110 | one_kfrag, *remaining_kfrags = kfrags 111 | args, kwargs = (capsule, one_kfrag), {} 112 | 113 | benchmark.pedantic(umbral.reencrypt, args=args, kwargs=kwargs, iterations=20, rounds=100) 114 | assert True # ensure function finishes and succeeds. 115 | -------------------------------------------------------------------------------- /tests/metrics/reencryption_firehose.py: -------------------------------------------------------------------------------- 1 | import os 2 | import sys 3 | 4 | sys.path.append(os.path.abspath(os.getcwd())) 5 | 6 | from typing import Tuple, List 7 | 8 | import umbral 9 | 10 | 11 | REENCRYPTIONS = 1000 12 | 13 | 14 | def __produce_kfrags_and_capsule(m: int, n: int) -> Tuple[List[umbral.KeyFrag], umbral.Capsule]: 15 | 16 | delegating_sk = umbral.SecretKey.random() 17 | delegating_pk = delegating_sk.public_key() 18 | 19 | signing_sk = umbral.SecretKey.random() 20 | signer = umbral.Signer(signing_sk) 21 | 22 | receiving_sk = umbral.SecretKey.random() 23 | receiving_pk = receiving_sk.public_key() 24 | 25 | plain_data = os.urandom(32) 26 | capsule, ciphertext = umbral.encrypt(delegating_pk, plain_data) 27 | 28 | kfrags = umbral.generate_kfrags(delegating_sk, receiving_pk, signer, m, n) 29 | 30 | return kfrags, capsule 31 | 32 | 33 | def firehose(m: int=6, n: int=10) -> None: 34 | 35 | print("Making kfrags...") 36 | kfrags, capsule = __produce_kfrags_and_capsule(m=m, n=n) 37 | one_kfrag, *remaining_kfrags = kfrags 38 | 39 | print('Re-encrypting...') 40 | successful_reencryptions = 0 41 | for iteration in range(int(REENCRYPTIONS)): 42 | 43 | _cfrag = umbral.reencrypt(capsule, one_kfrag) # <<< REENCRYPTION HAPPENS HERE 44 | 45 | successful_reencryptions += 1 46 | if iteration % 20 == 0: 47 | print('Performed {} Re-encryptions...'.format(iteration)) 48 | 49 | failure_message = "A Reencryption failed. 
{} of {} succeeded".format(successful_reencryptions, REENCRYPTIONS) 50 | assert successful_reencryptions == REENCRYPTIONS, failure_message 51 | print("Successfully performed {} reencryptions".format(successful_reencryptions), end='\n') 52 | 53 | 54 | if __name__ == "__main__": 55 | firehose() # do 56 | -------------------------------------------------------------------------------- /tests/test_capsule.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | from umbral import ( 4 | Capsule, 5 | SecretKey, 6 | Signer, 7 | encrypt, 8 | decrypt_original, 9 | reencrypt, 10 | decrypt_reencrypted, 11 | generate_kfrags 12 | ) 13 | from umbral.curve_point import CurvePoint 14 | 15 | 16 | def test_capsule_serialization(alices_keys): 17 | 18 | delegating_sk, _signing_sk = alices_keys 19 | delegating_pk = delegating_sk.public_key() 20 | 21 | capsule, _key = Capsule.from_public_key(delegating_pk) 22 | new_capsule = Capsule.from_bytes(bytes(capsule)) 23 | 24 | assert capsule == new_capsule 25 | 26 | # Deserializing a bad capsule triggers verification error 27 | capsule.point_e = CurvePoint.random() 28 | capsule_bytes = bytes(capsule) 29 | 30 | with pytest.raises(ValueError): 31 | Capsule.from_bytes(capsule_bytes) 32 | 33 | 34 | def test_capsule_is_hashable(alices_keys): 35 | 36 | delegating_sk, _signing_sk = alices_keys 37 | delegating_pk = delegating_sk.public_key() 38 | 39 | capsule1, key1 = Capsule.from_public_key(delegating_pk) 40 | capsule2, key2 = Capsule.from_public_key(delegating_pk) 41 | 42 | assert capsule1 != capsule2 43 | assert key1 != key2 44 | assert hash(capsule1) != hash(capsule2) 45 | 46 | new_capsule = Capsule.from_bytes(bytes(capsule1)) 47 | assert hash(new_capsule) == hash(capsule1) 48 | 49 | 50 | def test_open_original(alices_keys): 51 | 52 | delegating_sk, _signing_sk = alices_keys 53 | delegating_pk = delegating_sk.public_key() 54 | 55 | capsule, key = Capsule.from_public_key(delegating_pk) 56 | key_back = capsule.open_original(delegating_sk) 57 | assert key == key_back 58 | 59 | 60 | def test_open_reencrypted(alices_keys, bobs_keys): 61 | 62 | threshold = 6 63 | shares = 10 64 | 65 | delegating_sk, signing_sk = alices_keys 66 | receiving_sk, receiving_pk = bobs_keys 67 | 68 | signer = Signer(signing_sk) 69 | delegating_pk = delegating_sk.public_key() 70 | 71 | capsule, key = Capsule.from_public_key(delegating_pk) 72 | kfrags = generate_kfrags(delegating_sk=delegating_sk, 73 | signer=signer, 74 | receiving_pk=receiving_pk, 75 | threshold=threshold, 76 | shares=shares) 77 | 78 | cfrags = [reencrypt(capsule, kfrag).cfrag for kfrag in kfrags] 79 | key_back = capsule.open_reencrypted(receiving_sk, delegating_pk, cfrags[:threshold]) 80 | assert key_back == key 81 | 82 | # No cfrags at all 83 | with pytest.raises(ValueError, match="Empty CapsuleFrag sequence"): 84 | capsule.open_reencrypted(receiving_sk, delegating_pk, []) 85 | 86 | # Not enough cfrags 87 | with pytest.raises(ValueError, match="Internal validation failed"): 88 | capsule.open_reencrypted(receiving_sk, delegating_pk, cfrags[:threshold-1]) 89 | 90 | # Repeating cfrags 91 | with pytest.raises(ValueError, match="Some of the CapsuleFrags are repeated"): 92 | capsule.open_reencrypted(receiving_sk, delegating_pk, [cfrags[0]] + cfrags[:threshold-1]) 93 | 94 | # Mismatched cfrags 95 | kfrags2 = generate_kfrags(delegating_sk=delegating_sk, 96 | signer=signer, 97 | receiving_pk=receiving_pk, 98 | threshold=threshold, 99 | shares=shares) 100 | cfrags2 = [reencrypt(capsule, 
kfrag).cfrag for kfrag in kfrags2] 101 | with pytest.raises(ValueError, match="CapsuleFrags are not pairwise consistent"): 102 | capsule.open_reencrypted(receiving_sk, delegating_pk, [cfrags2[0]] + cfrags[:threshold-1]) 103 | 104 | 105 | def test_capsule_str(capsule): 106 | s = str(capsule) 107 | assert 'Capsule' in s 108 | -------------------------------------------------------------------------------- /tests/test_capsule_frag.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | from umbral import encrypt, reencrypt, CapsuleFrag, VerifiedCapsuleFrag, Capsule, VerificationError 4 | from umbral.curve_point import CurvePoint 5 | 6 | 7 | def test_cfrag_serialization(verification_keys, capsule, kfrags): 8 | 9 | verifying_pk, delegating_pk, receiving_pk = verification_keys 10 | 11 | for kfrag in kfrags: 12 | cfrag = reencrypt(capsule, kfrag) 13 | cfrag_bytes = bytes(cfrag) 14 | 15 | new_cfrag = CapsuleFrag.from_bytes(cfrag_bytes) 16 | 17 | verified_cfrag = new_cfrag.verify(capsule, 18 | verifying_pk=verifying_pk, 19 | delegating_pk=delegating_pk, 20 | receiving_pk=receiving_pk, 21 | ) 22 | assert verified_cfrag == cfrag 23 | 24 | # Wrong delegating key 25 | with pytest.raises(VerificationError): 26 | new_cfrag.verify(capsule, 27 | verifying_pk=verifying_pk, 28 | delegating_pk=receiving_pk, 29 | receiving_pk=receiving_pk, 30 | ) 31 | 32 | # Wrong receiving key 33 | with pytest.raises(VerificationError): 34 | new_cfrag.verify(capsule, 35 | verifying_pk=verifying_pk, 36 | delegating_pk=delegating_pk, 37 | receiving_pk=delegating_pk, 38 | ) 39 | 40 | # Wrong signing key 41 | with pytest.raises(VerificationError): 42 | new_cfrag.verify(capsule, 43 | verifying_pk=receiving_pk, 44 | delegating_pk=delegating_pk, 45 | receiving_pk=receiving_pk, 46 | ) 47 | 48 | 49 | def test_cfrag_with_wrong_capsule(verification_keys, kfrags, capsule_and_ciphertext, message): 50 | 51 | capsule, ciphertext = capsule_and_ciphertext 52 | verifying_pk, delegating_pk, receiving_pk = verification_keys 53 | 54 | capsule_alice1 = capsule 55 | capsule_alice2, _unused_key2 = Capsule.from_public_key(delegating_pk) 56 | 57 | cfrag = reencrypt(capsule_alice2, kfrags[0]) 58 | cfrag = CapsuleFrag.from_bytes(bytes(cfrag)) # de-verify 59 | 60 | with pytest.raises(VerificationError): 61 | cfrag.verify(capsule_alice1, 62 | verifying_pk=verifying_pk, 63 | delegating_pk=delegating_pk, 64 | receiving_pk=receiving_pk, 65 | ) 66 | 67 | 68 | def test_cfrag_with_wrong_data(verification_keys, kfrags, capsule_and_ciphertext, message): 69 | 70 | capsule, ciphertext = capsule_and_ciphertext 71 | verifying_pk, delegating_pk, receiving_pk = verification_keys 72 | 73 | cfrag = reencrypt(capsule, kfrags[0]) 74 | 75 | # Let's put random garbage in one of the cfrags 76 | cfrag = CapsuleFrag.from_bytes(bytes(cfrag)) # de-verify 77 | cfrag.point_e1 = CurvePoint.random() 78 | cfrag.point_v1 = CurvePoint.random() 79 | 80 | with pytest.raises(VerificationError): 81 | cfrag.verify(capsule, 82 | verifying_pk=verifying_pk, 83 | delegating_pk=delegating_pk, 84 | receiving_pk=receiving_pk, 85 | ) 86 | 87 | 88 | def test_cfrag_is_hashable(verification_keys, capsule, kfrags): 89 | 90 | verifying_pk, delegating_pk, receiving_pk = verification_keys 91 | 92 | cfrag0 = reencrypt(capsule, kfrags[0]) 93 | cfrag1 = reencrypt(capsule, kfrags[1]) 94 | 95 | assert hash(cfrag0) != hash(cfrag1) 96 | 97 | new_cfrag = CapsuleFrag.from_bytes(bytes(cfrag0)) 98 | assert hash(new_cfrag) != hash(cfrag0) 99 | 100 | verified_cfrag 
= new_cfrag.verify(capsule, 101 | verifying_pk=verifying_pk, 102 | delegating_pk=delegating_pk, 103 | receiving_pk=receiving_pk, 104 | ) 105 | 106 | assert hash(verified_cfrag) == hash(cfrag0) 107 | 108 | 109 | def test_cfrag_str(capsule, kfrags): 110 | cfrag0 = reencrypt(capsule, kfrags[0]) 111 | s = str(cfrag0) 112 | assert 'VerifiedCapsuleFrag' in s 113 | 114 | s = str(CapsuleFrag.from_bytes(bytes(cfrag0))) 115 | assert "VerifiedCapsuleFrag" not in s 116 | assert "CapsuleFrag" in s 117 | 118 | 119 | def test_from_verified_bytes(capsule, kfrags): 120 | verified_cfrag = reencrypt(capsule, kfrags[0]) 121 | cfrag_bytes = bytes(verified_cfrag) 122 | verified_cfrag_back = VerifiedCapsuleFrag.from_verified_bytes(cfrag_bytes) 123 | assert verified_cfrag == verified_cfrag_back 124 | 125 | 126 | def test_serialized_size(capsule, kfrags): 127 | verified_cfrag = reencrypt(capsule, kfrags[0]) 128 | cfrag = CapsuleFrag.from_bytes(bytes(verified_cfrag)) 129 | assert verified_cfrag.serialized_size() == cfrag.serialized_size() 130 | -------------------------------------------------------------------------------- /tests/test_compatibility.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | try: 4 | import umbral_pre as umbral_rs 5 | except ImportError: 6 | umbral_rs = None 7 | 8 | import umbral as umbral_py 9 | 10 | 11 | def pytest_generate_tests(metafunc): 12 | if 'implementations' in metafunc.fixturenames: 13 | implementations = [(umbral_py, umbral_py)] 14 | ids = ['python -> python'] 15 | if umbral_rs is not None: 16 | implementations.extend([(umbral_py, umbral_rs), (umbral_rs, umbral_py)]) 17 | ids.extend(['python -> rust', 'rust -> python']) 18 | 19 | metafunc.parametrize('implementations', implementations, ids=ids) 20 | 21 | 22 | def _create_keypair(umbral): 23 | sk = umbral.SecretKey.random() 24 | pk = sk.public_key() 25 | return sk.to_secret_bytes(), bytes(pk) 26 | 27 | 28 | def _restore_keys(umbral, sk_bytes, pk_bytes): 29 | sk = umbral.SecretKey.from_bytes(sk_bytes) 30 | pk_from_sk = sk.public_key() 31 | pk_from_bytes = umbral.PublicKey.from_bytes(pk_bytes) 32 | assert pk_from_sk == pk_from_bytes 33 | 34 | 35 | def test_keys(implementations): 36 | umbral1, umbral2 = implementations 37 | 38 | # On client 1 39 | sk_bytes, pk_bytes = _create_keypair(umbral1) 40 | 41 | # On client 2 42 | _restore_keys(umbral2, sk_bytes, pk_bytes) 43 | 44 | 45 | def _create_sk_factory_and_sk(umbral, skf_label, key_label): 46 | skf = umbral.SecretKeyFactory.random() 47 | derived_skf = skf.make_factory(skf_label) 48 | sk = derived_skf.make_key(key_label) 49 | return skf.to_secret_bytes(), derived_skf.to_secret_bytes(), sk.to_secret_bytes() 50 | 51 | 52 | def _check_sk_is_same(umbral, skf_label, key_label, skf_bytes, derived_skf_bytes, sk_bytes): 53 | skf = umbral.SecretKeyFactory.from_bytes(skf_bytes) 54 | 55 | derived_skf_restored = umbral.SecretKeyFactory.from_bytes(derived_skf_bytes) 56 | derived_skf_generated = skf.make_factory(skf_label) 57 | assert derived_skf_generated.to_secret_bytes() == derived_skf_restored.to_secret_bytes() 58 | 59 | sk_restored = umbral.SecretKey.from_bytes(sk_bytes) 60 | sk_generated = derived_skf_generated.make_key(key_label) 61 | assert sk_restored.to_secret_bytes() == sk_generated.to_secret_bytes() 62 | 63 | 64 | def test_secret_key_factory(implementations): 65 | umbral1, umbral2 = implementations 66 | skf_label = b'skf label' 67 | key_label = b'key label' 68 | 69 | skf_bytes, derived_skf_bytes, sk_bytes = 
_create_sk_factory_and_sk(umbral1, skf_label, key_label) 70 | _check_sk_is_same(umbral2, skf_label, key_label, skf_bytes, derived_skf_bytes, sk_bytes) 71 | 72 | 73 | def _encrypt(umbral, plaintext, pk_bytes): 74 | pk = umbral.PublicKey.from_bytes(pk_bytes) 75 | capsule, ciphertext = umbral.encrypt(pk, plaintext) 76 | return bytes(capsule), ciphertext 77 | 78 | 79 | def _decrypt_original(umbral, sk_bytes, capsule_bytes, ciphertext): 80 | capsule = umbral.Capsule.from_bytes(bytes(capsule_bytes)) 81 | sk = umbral.SecretKey.from_bytes(sk_bytes) 82 | return umbral.decrypt_original(sk, capsule, ciphertext) 83 | 84 | 85 | def test_encrypt_decrypt(implementations): 86 | 87 | umbral1, umbral2 = implementations 88 | plaintext = b'peace at dawn' 89 | 90 | # On client 1 91 | sk_bytes, pk_bytes = _create_keypair(umbral1) 92 | 93 | # On client 2 94 | capsule_bytes, ciphertext = _encrypt(umbral2, plaintext, pk_bytes) 95 | 96 | # On client 1 97 | plaintext_decrypted = _decrypt_original(umbral1, sk_bytes, capsule_bytes, ciphertext) 98 | 99 | assert plaintext_decrypted == plaintext 100 | 101 | 102 | def _generate_kfrags(umbral, delegating_sk_bytes, receiving_pk_bytes, 103 | signing_sk_bytes, threshold, shares): 104 | 105 | delegating_sk = umbral.SecretKey.from_bytes(delegating_sk_bytes) 106 | receiving_pk = umbral.PublicKey.from_bytes(receiving_pk_bytes) 107 | signing_sk = umbral.SecretKey.from_bytes(signing_sk_bytes) 108 | 109 | kfrags = umbral.generate_kfrags(delegating_sk=delegating_sk, 110 | receiving_pk=receiving_pk, 111 | signer=umbral.Signer(signing_sk), 112 | threshold=threshold, 113 | shares=shares, 114 | sign_delegating_key=True, 115 | sign_receiving_key=True, 116 | ) 117 | 118 | return [bytes(kfrag) for kfrag in kfrags] 119 | 120 | 121 | def _verify_kfrags(umbral, kfrags_bytes, verifying_pk_bytes, delegating_pk_bytes, receiving_pk_bytes): 122 | kfrags = [umbral.KeyFrag.from_bytes(kfrag_bytes) for kfrag_bytes in kfrags_bytes] 123 | verifying_pk = umbral.PublicKey.from_bytes(verifying_pk_bytes) 124 | delegating_pk = umbral.PublicKey.from_bytes(delegating_pk_bytes) 125 | receiving_pk = umbral.PublicKey.from_bytes(receiving_pk_bytes) 126 | return [kfrag.verify(verifying_pk=verifying_pk, 127 | delegating_pk=delegating_pk, 128 | receiving_pk=receiving_pk) for kfrag in kfrags] 129 | 130 | 131 | def test_kfrags(implementations): 132 | 133 | umbral1, umbral2 = implementations 134 | 135 | threshold = 2 136 | shares = 3 137 | plaintext = b'peace at dawn' 138 | 139 | # On client 1 140 | 141 | receiving_sk_bytes, receiving_pk_bytes = _create_keypair(umbral1) 142 | delegating_sk_bytes, delegating_pk_bytes = _create_keypair(umbral1) 143 | signing_sk_bytes, verifying_pk_bytes = _create_keypair(umbral1) 144 | kfrags_bytes = _generate_kfrags(umbral1, delegating_sk_bytes, receiving_pk_bytes, 145 | signing_sk_bytes, threshold, shares) 146 | 147 | # On client 2 148 | 149 | _verify_kfrags(umbral2, kfrags_bytes, verifying_pk_bytes, delegating_pk_bytes, receiving_pk_bytes) 150 | 151 | 152 | def _reencrypt(umbral, verifying_pk_bytes, delegating_pk_bytes, receiving_pk_bytes, 153 | capsule_bytes, kfrags_bytes, threshold): 154 | capsule = umbral.Capsule.from_bytes(bytes(capsule_bytes)) 155 | verified_kfrags = _verify_kfrags(umbral, kfrags_bytes, 156 | verifying_pk_bytes, delegating_pk_bytes, receiving_pk_bytes) 157 | cfrags = [umbral.reencrypt(capsule, kfrag) for kfrag in verified_kfrags[:threshold]] 158 | return [bytes(cfrag) for cfrag in cfrags] 159 | 160 | 161 | def _decrypt_reencrypted(umbral, receiving_sk_bytes, 
delegating_pk_bytes, verifying_pk_bytes, 162 | capsule_bytes, cfrags_bytes, ciphertext): 163 | 164 | receiving_sk = umbral.SecretKey.from_bytes(receiving_sk_bytes) 165 | receiving_pk = receiving_sk.public_key() 166 | delegating_pk = umbral.PublicKey.from_bytes(delegating_pk_bytes) 167 | verifying_pk = umbral.PublicKey.from_bytes(verifying_pk_bytes) 168 | 169 | capsule = umbral.Capsule.from_bytes(bytes(capsule_bytes)) 170 | cfrags = [umbral.CapsuleFrag.from_bytes(cfrag_bytes) for cfrag_bytes in cfrags_bytes] 171 | 172 | verified_cfrags = [cfrag.verify(capsule, 173 | verifying_pk=verifying_pk, 174 | delegating_pk=delegating_pk, 175 | receiving_pk=receiving_pk, 176 | ) 177 | for cfrag in cfrags] 178 | 179 | # Decryption by Bob 180 | plaintext = umbral.decrypt_reencrypted(receiving_sk=receiving_sk, 181 | delegating_pk=delegating_pk, 182 | capsule=capsule, 183 | verified_cfrags=verified_cfrags, 184 | ciphertext=ciphertext, 185 | ) 186 | 187 | return plaintext 188 | 189 | 190 | def test_reencrypt(implementations): 191 | 192 | umbral1, umbral2 = implementations 193 | 194 | threshold = 2 195 | shares = 3 196 | plaintext = b'peace at dawn' 197 | 198 | # On client 1 199 | 200 | receiving_sk_bytes, receiving_pk_bytes = _create_keypair(umbral1) 201 | delegating_sk_bytes, delegating_pk_bytes = _create_keypair(umbral1) 202 | signing_sk_bytes, verifying_pk_bytes = _create_keypair(umbral1) 203 | 204 | capsule_bytes, ciphertext = _encrypt(umbral1, plaintext, delegating_pk_bytes) 205 | 206 | kfrags_bytes = _generate_kfrags(umbral1, delegating_sk_bytes, receiving_pk_bytes, 207 | signing_sk_bytes, threshold, shares) 208 | 209 | # On client 2 210 | 211 | cfrags_bytes = _reencrypt(umbral2, verifying_pk_bytes, delegating_pk_bytes, receiving_pk_bytes, 212 | capsule_bytes, kfrags_bytes, threshold) 213 | 214 | # On client 1 215 | 216 | plaintext_reencrypted = _decrypt_reencrypted(umbral1, 217 | receiving_sk_bytes, delegating_pk_bytes, verifying_pk_bytes, 218 | capsule_bytes, cfrags_bytes, ciphertext) 219 | 220 | assert plaintext_reencrypted == plaintext 221 | 222 | 223 | def _sign_message(umbral, sk_bytes, message): 224 | sk = umbral.SecretKey.from_bytes(sk_bytes) 225 | signer = umbral.Signer(sk) 226 | assert signer.verifying_key() == sk.public_key() 227 | return bytes(signer.sign(message)) 228 | 229 | 230 | def _verify_message(umbral, pk_bytes, signature_bytes, message): 231 | pk = umbral.PublicKey.from_bytes(pk_bytes) 232 | signature = umbral.Signature.from_bytes(signature_bytes) 233 | return signature.verify(pk, message) 234 | 235 | 236 | def test_signer(implementations): 237 | 238 | umbral1, umbral2 = implementations 239 | 240 | message = b'peace at dawn' 241 | 242 | sk_bytes, pk_bytes = _create_keypair(umbral1) 243 | 244 | signature1_bytes = _sign_message(umbral1, sk_bytes, message) 245 | signature2_bytes = _sign_message(umbral2, sk_bytes, message) 246 | 247 | # Signatures are random, so we can't compare them. 
248 | # Cross-verify instead 249 | 250 | assert _verify_message(umbral1, pk_bytes, signature2_bytes, message) 251 | assert _verify_message(umbral2, pk_bytes, signature1_bytes, message) 252 | 253 | 254 | def _measure_sizes(umbral): 255 | 256 | sized_types = [ 257 | umbral.SecretKey, 258 | umbral.SecretKeyFactory, 259 | umbral.PublicKey, 260 | umbral.Signature, 261 | umbral.Capsule, 262 | umbral.KeyFrag, 263 | umbral.VerifiedKeyFrag, 264 | umbral.CapsuleFrag, 265 | umbral.VerifiedCapsuleFrag, 266 | ] 267 | 268 | return {tp.__name__: tp.serialized_size() for tp in sized_types} 269 | 270 | 271 | def test_serialization_size(implementations): 272 | 273 | umbral1, umbral2 = implementations 274 | 275 | sizes1 = _measure_sizes(umbral1) 276 | sizes2 = _measure_sizes(umbral2) 277 | 278 | assert sizes1 == sizes2 279 | -------------------------------------------------------------------------------- /tests/test_curve.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | from umbral.openssl import Curve, bn_to_int, point_to_affine_coords 4 | from umbral.curve import CURVE, SECP256K1 5 | 6 | 7 | def test_supported_curves(): 8 | 9 | # Ensure we have the correct number of supported curves hardcoded 10 | number_of_supported_curves = 1 11 | assert len(Curve._supported_curves) == number_of_supported_curves 12 | 13 | # Manually ensure the `_supported_curves` dict contains only valid supported curves 14 | assert Curve._supported_curves[714] == 'secp256k1' 15 | 16 | 17 | def test_create_by_nid(): 18 | 19 | nid, name = 714, 'secp256k1' 20 | 21 | # Supported 22 | _curve_714 = Curve(nid=nid) 23 | assert _curve_714.nid == nid 24 | assert _curve_714.name == name 25 | 26 | # Unsupported 27 | with pytest.raises(NotImplementedError): 28 | Curve(711) 29 | 30 | 31 | def test_create_by_name(): 32 | 33 | nid, name = 714, 'secp256k1' 34 | 35 | # Supported 36 | _curve_secp256k1 = Curve.from_name(name) 37 | assert _curve_secp256k1.name == name 38 | assert _curve_secp256k1.nid == nid 39 | 40 | # Unsupported 41 | with pytest.raises(NotImplementedError): 42 | Curve.from_name('abcd123e4') 43 | 44 | 45 | def test_curve_constants(): 46 | 47 | test_secp256k1 = SECP256K1 48 | 49 | assert CURVE == SECP256K1 50 | 51 | # Test the hardcoded curve NIDs are correct: 52 | assert test_secp256k1.nid == 714 53 | 54 | # Ensure all supported curves can be initialized 55 | for nid, name in Curve._supported_curves.items(): 56 | by_nid, by_name = Curve(nid=nid), Curve.from_name(name) 57 | assert by_nid.name == name 58 | assert by_name.nid == nid 59 | 60 | 61 | def test_curve_str(): 62 | for nid in Curve._supported_curves: 63 | curve = Curve(nid=nid) 64 | s = str(curve) 65 | assert str(curve.nid) in s 66 | assert str(curve.name) in s 67 | 68 | 69 | def _curve_info(curve: Curve): 70 | assert bn_to_int(curve.bn_order) == curve.order 71 | return dict(order=curve.order, 72 | field_element_size=curve.field_element_size, 73 | scalar_size=curve.scalar_size, 74 | generator=point_to_affine_coords(curve, curve.point_generator)) 75 | 76 | 77 | def test_secp256k1(): 78 | info = _curve_info(SECP256K1) 79 | assert info['order'] == 0xFFFFFFFF_FFFFFFFF_FFFFFFFF_FFFFFFFE_BAAEDCE6_AF48A03B_BFD25E8C_D0364141 80 | assert info['field_element_size'] == 32 81 | assert info['scalar_size'] == 32 82 | assert info['generator'] == ( 83 | 0x79BE667E_F9DCBBAC_55A06295_CE870B07_029BFCDB_2DCE28D9_59F2815B_16F81798, 84 | 0x483ADA77_26A3C465_5DA4FBFC_0E1108A8_FD17B448_A6855419_9C47D08F_FB10D4B8) 85 | 
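The curve constants pinned down by the assertions above can also be inspected interactively. The snippet below is a minimal sketch that reuses only the names already imported in ``tests/test_curve.py`` (``CURVE``, ``SECP256K1``, ``bn_to_int``, ``point_to_affine_coords``) to print the same parameters.

.. code-block:: python

    from umbral.curve import CURVE, SECP256K1
    from umbral.openssl import bn_to_int, point_to_affine_coords

    # secp256k1 is currently the only (and therefore the default) supported curve.
    assert CURVE == SECP256K1

    print(SECP256K1.name, SECP256K1.nid)         # 'secp256k1', 714
    print(SECP256K1.scalar_size)                 # 32 (bytes per scalar)
    print(SECP256K1.field_element_size)          # 32 (bytes per field element)
    print(hex(SECP256K1.order))                  # the group order asserted above
    print(point_to_affine_coords(SECP256K1, SECP256K1.point_generator))  # generator (x, y)

    # Sanity check mirrored from _curve_info(): the OpenSSL BIGNUM order
    # round-trips to the same Python integer.
    assert bn_to_int(SECP256K1.bn_order) == SECP256K1.order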
-------------------------------------------------------------------------------- /tests/test_curve_point.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | from umbral.openssl import ErrorInvalidCompressedPoint, ErrorInvalidPointEncoding 4 | from umbral.curve_point import CurvePoint 5 | from umbral.curve import CURVE 6 | 7 | 8 | def test_random(): 9 | p1 = CurvePoint.random() 10 | p2 = CurvePoint.random() 11 | assert isinstance(p1, CurvePoint) 12 | assert isinstance(p2, CurvePoint) 13 | assert p1 != p2 14 | 15 | 16 | def test_generator_point(): 17 | """http://www.secg.org/SEC2-Ver-1.0.pdf Section 2.7.1""" 18 | g1 = CurvePoint.generator() 19 | 20 | g_compressed = 0x0279BE667EF9DCBBAC55A06295CE870B07029BFCDB2DCE28D959F2815B16F81798 21 | g_compressed_bytes = g_compressed.to_bytes(CURVE.field_element_size + 1, byteorder='big') 22 | g2 = CurvePoint.from_bytes(g_compressed_bytes) 23 | 24 | assert g1 == g2 25 | 26 | 27 | def test_invalid_serialized_points(): 28 | 29 | field_order = 2**256 - 0x1000003D1 30 | 31 | # A point on secp256k1 32 | x = 17004608369308732328368332205668001941491834793934321461466076545247324070015 33 | y = 69725941631324401609944843130171147910924748427773762412028916504484868631573 34 | 35 | # Check it 36 | assert (y**2 - x**3 - 7) % field_order == 0 37 | 38 | # Should load 39 | point_data = b'\x03' + x.to_bytes(CURVE.field_element_size, 'big') 40 | p = CurvePoint.from_bytes(point_data) 41 | 42 | # Make it invalid 43 | bad_x = x - 1 44 | assert (y**2 - bad_x**3 - 7) % field_order != 0 45 | 46 | bad_x_data = b'\x03' + bad_x.to_bytes(CURVE.field_element_size, 'big') 47 | with pytest.raises(ErrorInvalidCompressedPoint): 48 | CurvePoint.from_bytes(bad_x_data) 49 | 50 | # Valid x, invalid prefix 51 | bad_format = b'\xff' + x.to_bytes(CURVE.field_element_size, 'big') 52 | with pytest.raises(ErrorInvalidPointEncoding): 53 | CurvePoint.from_bytes(bad_format) 54 | 55 | 56 | def test_serialize_point_at_infinity(): 57 | 58 | p = CurvePoint.random() 59 | point_at_infinity = p - p 60 | 61 | bytes_point_at_infinity = bytes(point_at_infinity) 62 | assert bytes_point_at_infinity == b'\x00' 63 | 64 | 65 | def test_to_affine(): 66 | p = CurvePoint.generator() 67 | x_ref = 0x79BE667E_F9DCBBAC_55A06295_CE870B07_029BFCDB_2DCE28D9_59F2815B_16F81798 68 | y_ref = 0x483ADA77_26A3C465_5DA4FBFC_0E1108A8_FD17B448_A6855419_9C47D08F_FB10D4B8 69 | assert p.to_affine() == (x_ref, y_ref) 70 | 71 | 72 | def test_identity_to_affine(): 73 | p = CurvePoint.generator() 74 | identity = p - p 75 | with pytest.raises(ValueError): 76 | identity.to_affine() 77 | -------------------------------------------------------------------------------- /tests/test_curve_scalar.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | from umbral.curve import CURVE 4 | from umbral.curve_scalar import CurveScalar 5 | from umbral.hashing import Hash 6 | 7 | 8 | def test_random(): 9 | r1 = CurveScalar.random_nonzero() 10 | r2 = CurveScalar.random_nonzero() 11 | assert r1 != r2 12 | assert not r1.is_zero() 13 | assert not r2.is_zero() 14 | 15 | 16 | def test_from_and_to_int(): 17 | zero = CurveScalar.from_int(0) 18 | assert zero.is_zero() 19 | assert int(zero) == 0 20 | 21 | one = CurveScalar.one() 22 | assert not one.is_zero() 23 | assert int(one) == 1 24 | 25 | big_int = CURVE.order - 2 26 | big_scalar = CurveScalar.from_int(big_int) 27 | assert int(big_scalar) == big_int 28 | 29 | # normalization check 30 | with 
pytest.raises(ValueError): 31 | CurveScalar.from_int(CURVE.order) 32 | 33 | # disable normalization check 34 | too_big = CurveScalar.from_int(CURVE.order, check_normalization=False) 35 | 36 | 37 | def test_from_digest(): 38 | digest = Hash(b'asdf') 39 | digest.update(b'some info') 40 | s1 = CurveScalar.from_digest(digest) 41 | 42 | digest = Hash(b'asdf') 43 | digest.update(b'some info') 44 | s2 = CurveScalar.from_digest(digest) 45 | 46 | assert s1 == s2 47 | assert int(s1) == int(s2) 48 | 49 | 50 | def test_eq(): 51 | random = CurveScalar.random_nonzero() 52 | same = CurveScalar.from_int(int(random)) 53 | different = CurveScalar.random_nonzero() 54 | assert random == same 55 | assert random == int(same) 56 | assert random != different 57 | assert random != int(different) 58 | 59 | 60 | def test_serialization_rotations_of_1(): 61 | 62 | size_in_bytes = CURVE.scalar_size 63 | for i in range(size_in_bytes): 64 | lonely_one = 1 << i 65 | bn = CurveScalar.from_int(lonely_one) 66 | lonely_one_in_bytes = lonely_one.to_bytes(size_in_bytes, 'big') 67 | 68 | # Check serialization 69 | assert bytes(bn) == lonely_one_in_bytes 70 | 71 | # Check deserialization 72 | assert CurveScalar.from_bytes(lonely_one_in_bytes) == bn 73 | 74 | 75 | def test_invalid_deserialization(): 76 | size_in_bytes = CURVE.scalar_size 77 | 78 | # All-ones bytestring is invalid (since it's greater than the order) 79 | lots_of_ones = b'\xFF' * size_in_bytes 80 | with pytest.raises(ValueError): 81 | CurveScalar.from_bytes(lots_of_ones) 82 | 83 | # Serialization of `order` is invalid since it's not strictly lower than 84 | # the order of the curve 85 | order = CURVE.order 86 | with pytest.raises(ValueError): 87 | CurveScalar.from_bytes(order.to_bytes(size_in_bytes, 'big')) 88 | 89 | # On the other hand, serialization of `order - 1` is valid 90 | order -= 1 91 | CurveScalar.from_bytes(order.to_bytes(size_in_bytes, 'big')) 92 | 93 | 94 | def test_add(): 95 | r1 = CurveScalar.random_nonzero() 96 | r2 = CurveScalar.random_nonzero() 97 | r1i = int(r1) 98 | r2i = int(r2) 99 | assert r1 + r2 == (r1i + r2i) % CURVE.order 100 | assert r1 + r2i == (r1i + r2i) % CURVE.order 101 | 102 | 103 | def test_sub(): 104 | r1 = CurveScalar.random_nonzero() 105 | r2 = CurveScalar.random_nonzero() 106 | r1i = int(r1) 107 | r2i = int(r2) 108 | assert r1 - r2 == (r1i - r2i) % CURVE.order 109 | assert r1 - r2i == (r1i - r2i) % CURVE.order 110 | 111 | 112 | def test_mul(): 113 | r1 = CurveScalar.random_nonzero() 114 | r2 = CurveScalar.random_nonzero() 115 | r1i = int(r1) 116 | r2i = int(r2) 117 | assert r1 * r2 == (r1i * r2i) % CURVE.order 118 | assert r1 * r2i == (r1i * r2i) % CURVE.order 119 | 120 | 121 | def test_invert(): 122 | r1 = CurveScalar.random_nonzero() 123 | r1i = int(r1) 124 | r1inv = r1.invert() 125 | assert r1 * r1inv == CurveScalar.one() 126 | assert (r1i * int(r1inv)) % CURVE.order == 1 127 | 128 | -------------------------------------------------------------------------------- /tests/test_dem.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | import os 3 | 4 | from umbral.dem import DEM 5 | 6 | 7 | def test_encrypt_decrypt(): 8 | 9 | key = os.urandom(DEM.KEY_SIZE) 10 | dem = DEM(key) 11 | 12 | plaintext = b'peace at dawn' 13 | 14 | ciphertext0 = dem.encrypt(plaintext) 15 | ciphertext1 = dem.encrypt(plaintext) 16 | 17 | assert ciphertext0 != plaintext 18 | assert ciphertext1 != plaintext 19 | 20 | # Ciphertext should be different even with same plaintext. 
21 | assert ciphertext0 != ciphertext1 22 | 23 | # Nonce should be different 24 | assert ciphertext0[:DEM.NONCE_SIZE] != ciphertext1[:DEM.NONCE_SIZE] 25 | 26 | cleartext0 = dem.decrypt(ciphertext0) 27 | cleartext1 = dem.decrypt(ciphertext1) 28 | 29 | assert cleartext0 == plaintext 30 | assert cleartext1 == plaintext 31 | 32 | 33 | def test_malformed_ciphertext(): 34 | 35 | key = os.urandom(DEM.KEY_SIZE) 36 | dem = DEM(key) 37 | 38 | plaintext = b'peace at dawn' 39 | ciphertext = dem.encrypt(plaintext) 40 | 41 | # So short it we can tell right away it doesn't even contain a nonce 42 | with pytest.raises(ValueError, match="The ciphertext must include the nonce"): 43 | dem.decrypt(ciphertext[:DEM.NONCE_SIZE-1]) 44 | 45 | # Too short to contain a tag 46 | with pytest.raises(ValueError, match="The authentication tag is missing or malformed"): 47 | dem.decrypt(ciphertext[:DEM.NONCE_SIZE + DEM.TAG_SIZE - 1]) 48 | 49 | # Too long 50 | with pytest.raises(ValueError): 51 | dem.decrypt(ciphertext + b'abcd') 52 | 53 | 54 | def test_encrypt_decrypt_associated_data(): 55 | key = os.urandom(32) 56 | aad = b'secret code 1234' 57 | 58 | dem = DEM(key) 59 | 60 | plaintext = b'peace at dawn' 61 | 62 | ciphertext0 = dem.encrypt(plaintext, authenticated_data=aad) 63 | ciphertext1 = dem.encrypt(plaintext, authenticated_data=aad) 64 | 65 | assert ciphertext0 != plaintext 66 | assert ciphertext1 != plaintext 67 | 68 | assert ciphertext0 != ciphertext1 69 | 70 | assert ciphertext0[:DEM.NONCE_SIZE] != ciphertext1[:DEM.NONCE_SIZE] 71 | 72 | cleartext0 = dem.decrypt(ciphertext0, authenticated_data=aad) 73 | cleartext1 = dem.decrypt(ciphertext1, authenticated_data=aad) 74 | 75 | assert cleartext0 == plaintext 76 | assert cleartext1 == plaintext 77 | 78 | # Attempt decryption with invalid associated data 79 | with pytest.raises(ValueError): 80 | cleartext2 = dem.decrypt(ciphertext0, authenticated_data=b'wrong data') 81 | -------------------------------------------------------------------------------- /tests/test_key_frag.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | from umbral import KeyFrag, Signer, VerificationError 4 | from umbral.key_frag import KeyFragID, KeyFragBase, VerifiedKeyFrag 5 | from umbral.curve_scalar import CurveScalar 6 | 7 | 8 | def test_kfrag_serialization(verification_keys, kfrags): 9 | 10 | verifying_pk, delegating_pk, receiving_pk = verification_keys 11 | 12 | for kfrag in kfrags: 13 | kfrag_bytes = bytes(kfrag) 14 | new_kfrag = KeyFrag.from_bytes(kfrag_bytes) 15 | 16 | new_kfrag = new_kfrag.verify(verifying_pk=verifying_pk, 17 | delegating_pk=delegating_pk, 18 | receiving_pk=receiving_pk) 19 | 20 | assert new_kfrag == kfrag 21 | 22 | 23 | def test_kfrag_verification(verification_keys, kfrags): 24 | 25 | verifying_pk, delegating_pk, receiving_pk = verification_keys 26 | 27 | # Wrong signature 28 | kfrag = kfrags[0] 29 | kfrag.kfrag.id = KeyFragID.random() 30 | kfrag_bytes = bytes(kfrag) 31 | new_kfrag = KeyFrag.from_bytes(kfrag_bytes) 32 | with pytest.raises(VerificationError): 33 | new_kfrag.verify(verifying_pk=verifying_pk, 34 | delegating_pk=delegating_pk, 35 | receiving_pk=receiving_pk) 36 | 37 | # Wrong key 38 | kfrag = kfrags[1] 39 | kfrag.kfrag.key = CurveScalar.random_nonzero() 40 | kfrag_bytes = bytes(kfrag) 41 | new_kfrag = KeyFrag.from_bytes(kfrag_bytes) 42 | with pytest.raises(VerificationError): 43 | new_kfrag.verify(verifying_pk=verifying_pk, 44 | delegating_pk=delegating_pk, 45 | receiving_pk=receiving_pk) 46 | 47 | 
48 | @pytest.mark.parametrize('sign_delegating_key', 49 | [False, True], 50 | ids=['dont_sign_delegating_key', 'sign_delegating_key']) 51 | @pytest.mark.parametrize('sign_receiving_key', 52 | [False, True], 53 | ids=['dont_sign_receiving_key', 'sign_receiving_key']) 54 | def test_kfrag_signing(alices_keys, bobs_keys, sign_delegating_key, sign_receiving_key): 55 | 56 | delegating_sk, signing_sk = alices_keys 57 | _receiving_sk, receiving_pk = bobs_keys 58 | 59 | verifying_pk = signing_sk.public_key() 60 | delegating_pk = delegating_sk.public_key() 61 | 62 | base = KeyFragBase(delegating_sk=delegating_sk, 63 | receiving_pk=receiving_pk, 64 | signer=Signer(signing_sk), 65 | threshold=6) 66 | 67 | kfrag = KeyFrag.from_base(base=base, 68 | sign_delegating_key=sign_delegating_key, 69 | sign_receiving_key=sign_receiving_key) 70 | 71 | # serialize/deserialize to make sure sign_* fields are serialized correctly 72 | kfrag = KeyFrag.from_bytes(bytes(kfrag)) 73 | 74 | for pass_delegating_key, pass_receiving_key in [(d, r) for d in (False, True) for r in (False, True)]: 75 | 76 | delegating_key_ok = (not sign_delegating_key) or pass_delegating_key 77 | receiving_key_ok = (not sign_receiving_key) or pass_receiving_key 78 | should_verify = delegating_key_ok and receiving_key_ok 79 | 80 | verification_passed = True 81 | try: 82 | kfrag.verify(verifying_pk=verifying_pk, 83 | delegating_pk=delegating_pk if pass_delegating_key else None, 84 | receiving_pk=receiving_pk if pass_receiving_key else None) 85 | except VerificationError: 86 | verification_passed = False 87 | 88 | assert verification_passed == should_verify 89 | 90 | 91 | def test_wrong_threshold(alices_keys, bobs_keys): 92 | delegating_sk, signing_sk = alices_keys 93 | _receiving_sk, receiving_pk = bobs_keys 94 | 95 | with pytest.raises(ValueError): 96 | KeyFragBase(delegating_sk=delegating_sk, 97 | receiving_pk=receiving_pk, 98 | signer=Signer(signing_sk), 99 | threshold=0) 100 | 101 | 102 | def test_kfrag_is_hashable(verification_keys, kfrags): 103 | 104 | verifying_pk, delegating_pk, receiving_pk = verification_keys 105 | 106 | assert hash(kfrags[0]) != hash(kfrags[1]) 107 | 108 | new_kfrag = KeyFrag.from_bytes(bytes(kfrags[0])) 109 | 110 | # Not verified yet 111 | assert hash(new_kfrag) != hash(kfrags[0]) 112 | 113 | verified_kfrag = new_kfrag.verify(verifying_pk=verifying_pk, 114 | delegating_pk=delegating_pk, 115 | receiving_pk=receiving_pk) 116 | 117 | assert hash(verified_kfrag) == hash(kfrags[0]) 118 | 119 | 120 | def test_kfrag_str(kfrags): 121 | s = str(kfrags[0]) 122 | assert "VerifiedKeyFrag" in s 123 | 124 | s = str(KeyFrag.from_bytes(bytes(kfrags[0]))) 125 | assert "VerifiedKeyFrag" not in s 126 | assert "KeyFrag" in s 127 | 128 | 129 | def test_from_verified_bytes(kfrags): 130 | kfrag_bytes = bytes(kfrags[0]) 131 | verified_kfrag = VerifiedKeyFrag.from_verified_bytes(kfrag_bytes) 132 | assert verified_kfrag == kfrags[0] 133 | 134 | 135 | def test_serialized_size(kfrags): 136 | verified_kfrag = kfrags[0] 137 | kfrag = KeyFrag.from_bytes(bytes(verified_kfrag)) 138 | assert verified_kfrag.serialized_size() == kfrag.serialized_size() 139 | -------------------------------------------------------------------------------- /tests/test_keys.py: -------------------------------------------------------------------------------- 1 | import os 2 | import string 3 | 4 | import pytest 5 | 6 | from umbral.keys import PublicKey, SecretKey, SecretKeyFactory 7 | 8 | 9 | def test_gen_key(): 10 | sk = SecretKey.random() 11 | assert type(sk) == SecretKey 12 | 13 | pk =
sk.public_key() 14 | assert type(pk) == PublicKey 15 | 16 | pk2 = sk.public_key() 17 | assert pk == pk2 18 | 19 | 20 | def test_secret_scalar(): 21 | sk = SecretKey.random() 22 | assert sk.secret_scalar() == sk._scalar_key 23 | 24 | 25 | def test_derive_key_from_label(): 26 | factory = SecretKeyFactory.random() 27 | 28 | label = b"my_healthcare_information" 29 | 30 | sk1 = factory.make_key(label) 31 | assert type(sk1) == SecretKey 32 | 33 | pk1 = sk1.public_key() 34 | assert type(pk1) == PublicKey 35 | 36 | # Check that key derivation is reproducible 37 | sk2 = factory.make_key(label) 38 | pk2 = sk2.public_key() 39 | assert sk1.to_secret_bytes() == sk2.to_secret_bytes() 40 | assert pk1 == pk2 41 | 42 | # Different labels on the same master secret create different keys 43 | label = b"my_tax_information" 44 | sk3 = factory.make_key(label) 45 | pk3 = sk3.public_key() 46 | assert sk1 != sk3 47 | 48 | 49 | def test_derive_skf_from_label(): 50 | root = SecretKeyFactory.random() 51 | 52 | skf_label = b"Alice" 53 | 54 | skf = root.make_factory(skf_label) 55 | assert type(skf) == SecretKeyFactory 56 | 57 | skf_same = root.make_factory(skf_label) 58 | assert skf.to_secret_bytes() == skf_same.to_secret_bytes() 59 | 60 | # Just in case, check that they produce the same secret keys too. 61 | key_label = b"my_healthcare_information" 62 | key = skf.make_key(key_label) 63 | key_same = skf_same.make_key(key_label) 64 | assert key.to_secret_bytes() == key_same.to_secret_bytes() 65 | 66 | # Different label produces a different factory 67 | skf_different = root.make_factory(b"Bob") 68 | assert skf.to_secret_bytes() != skf_different.to_secret_bytes() 69 | 70 | 71 | def test_from_secure_randomness(): 72 | 73 | seed = os.urandom(SecretKeyFactory.seed_size()) 74 | skf = SecretKeyFactory.from_secure_randomness(seed) 75 | assert type(skf) == SecretKeyFactory 76 | 77 | # Check that it can produce keys 78 | sk = skf.make_key(b"key label") 79 | 80 | # Wrong seed size 81 | 82 | with pytest.raises(ValueError, match=f"Expected {len(seed)} bytes, got {len(seed) + 1}"): 83 | SecretKeyFactory.from_secure_randomness(seed + b'a') 84 | 85 | with pytest.raises(ValueError, match=f"Expected {len(seed)} bytes, got {len(seed) - 1}"): 86 | SecretKeyFactory.from_secure_randomness(seed[:-1]) 87 | 88 | 89 | def test_secret_key_serialization(): 90 | sk = SecretKey.random() 91 | encoded_key = sk.to_secret_bytes() 92 | decoded_key = SecretKey.from_bytes(encoded_key) 93 | assert sk.to_secret_bytes() == decoded_key.to_secret_bytes() 94 | 95 | 96 | def test_secret_key_str(): 97 | sk = SecretKey.random() 98 | s = str(sk) 99 | assert s == "SecretKey:..." 100 | 101 | 102 | def test_secret_key_hash(): 103 | sk = SecretKey.random() 104 | # Insecure Python hash, shouldn't be available. 105 | with pytest.raises(RuntimeError): 106 | hash(sk) 107 | 108 | 109 | def test_secret_key_factory_str(): 110 | skf = SecretKeyFactory.random() 111 | s = str(skf) 112 | assert s == "SecretKeyFactory:..." 113 | 114 | 115 | def test_secret_key_factory_hash(): 116 | skf = SecretKeyFactory.random() 117 | # Insecure Python hash, shouldn't be available. 
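    # (SecretKeyFactory.__hash__, like SecretKey.__hash__, is defined in umbral/keys.py
    # to raise RuntimeError("Hashing secret objects is not secure").)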
118 | with pytest.raises(RuntimeError): 119 | hash(skf) 120 | 121 | 122 | def test_public_key_serialization(): 123 | sk = SecretKey.random() 124 | pk = sk.public_key() 125 | 126 | encoded_key = bytes(pk) 127 | decoded_key = PublicKey.from_bytes(encoded_key) 128 | assert pk == decoded_key 129 | 130 | 131 | def test_public_key_point(): 132 | pk = SecretKey.random().public_key() 133 | assert bytes(pk) == bytes(pk.point()) 134 | 135 | 136 | def test_public_key_str(): 137 | pk = SecretKey.random().public_key() 138 | s = str(pk) 139 | assert 'PublicKey' in s 140 | 141 | 142 | def test_secret_key_factory_serialization(): 143 | factory = SecretKeyFactory.random() 144 | 145 | encoded_factory = factory.to_secret_bytes() 146 | decoded_factory = SecretKeyFactory.from_bytes(encoded_factory) 147 | 148 | label = os.urandom(32) 149 | sk1 = factory.make_key(label) 150 | sk2 = decoded_factory.make_key(label) 151 | assert sk1.to_secret_bytes() == sk2.to_secret_bytes() 152 | 153 | 154 | def test_public_key_is_hashable(): 155 | sk = SecretKey.random() 156 | pk = sk.public_key() 157 | 158 | sk2 = SecretKey.random() 159 | pk2 = sk2.public_key() 160 | assert hash(pk) != hash(pk2) 161 | 162 | pk3 = PublicKey.from_bytes(bytes(pk)) 163 | assert hash(pk) == hash(pk3) 164 | -------------------------------------------------------------------------------- /tests/test_pre.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | from umbral import ( 4 | SecretKey, 5 | Signer, 6 | KeyFrag, 7 | CapsuleFrag, 8 | encrypt, 9 | generate_kfrags, 10 | decrypt_original, 11 | reencrypt, 12 | decrypt_reencrypted, 13 | ) 14 | 15 | 16 | def test_public_key_encryption(alices_keys): 17 | delegating_sk, _ = alices_keys 18 | delegating_pk = delegating_sk.public_key() 19 | plaintext = b'peace at dawn' 20 | capsule, ciphertext = encrypt(delegating_pk, plaintext) 21 | plaintext_decrypted = decrypt_original(delegating_sk, capsule, ciphertext) 22 | assert plaintext == plaintext_decrypted 23 | 24 | # Wrong secret key 25 | sk = SecretKey.random() 26 | with pytest.raises(ValueError): 27 | decrypt_original(sk, capsule, ciphertext) 28 | 29 | 30 | SIMPLE_API_PARAMETERS = ( 31 | # (shares, threshold) 32 | (1, 1), 33 | (6, 1), 34 | (6, 4), 35 | (6, 6), 36 | (50, 30) 37 | ) 38 | 39 | @pytest.mark.parametrize("shares, threshold", SIMPLE_API_PARAMETERS) 40 | def test_simple_api(shares, threshold): 41 | """ 42 | This test models the main interactions between actors (i.e., Alice, 43 | Bob, Data Source, and Ursulas) and artifacts (i.e., public and private keys, 44 | ciphertexts, capsules, KFrags, CFrags, etc). 45 | 46 | The test covers all the main stages of data sharing: 47 | key generation, delegation, encryption, decryption by 48 | Alice, re-encryption by Ursula, and decryption by Bob. 
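    Only ``threshold`` of the ``shares`` CFrags are needed for decryption;
    accordingly, only ``cfrags[:threshold]`` are passed to ``decrypt_reencrypted``.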
49 | """ 50 | 51 | # Key Generation (Alice) 52 | delegating_sk = SecretKey.random() 53 | delegating_pk = delegating_sk.public_key() 54 | 55 | signing_sk = SecretKey.random() 56 | signer = Signer(signing_sk) 57 | verifying_pk = signing_sk.public_key() 58 | 59 | # Key Generation (Bob) 60 | receiving_sk = SecretKey.random() 61 | receiving_pk = receiving_sk.public_key() 62 | 63 | # Encryption by an unnamed data source 64 | plaintext = b'peace at dawn' 65 | capsule, ciphertext = encrypt(delegating_pk, plaintext) 66 | 67 | # Decryption by Alice 68 | plaintext_decrypted = decrypt_original(delegating_sk, capsule, ciphertext) 69 | assert plaintext_decrypted == plaintext 70 | 71 | # Split Re-Encryption Key Generation (aka Delegation) 72 | kfrags = generate_kfrags(delegating_sk=delegating_sk, 73 | receiving_pk=receiving_pk, 74 | signer=signer, 75 | threshold=threshold, 76 | shares=shares) 77 | 78 | # Bob requests re-encryption to some set of M ursulas 79 | cfrags = [reencrypt(capsule, kfrag) for kfrag in kfrags] 80 | 81 | # Decryption by Bob 82 | plaintext_reenc = decrypt_reencrypted(receiving_sk=receiving_sk, 83 | delegating_pk=delegating_pk, 84 | capsule=capsule, 85 | verified_cfrags=cfrags[:threshold], 86 | ciphertext=ciphertext, 87 | ) 88 | 89 | assert plaintext_reenc == plaintext 90 | 91 | 92 | def test_reencrypt_unverified_kfrag(capsule, kfrags): 93 | kfrag = KeyFrag.from_bytes(bytes(kfrags[0])) 94 | with pytest.raises(TypeError): 95 | reencrypt(capsule, kfrag) 96 | 97 | 98 | def test_decrypt_unverified_cfrag(verification_keys, bobs_keys, capsule_and_ciphertext, kfrags): 99 | verifying_pk, delegating_pk, receiving_pk = verification_keys 100 | receiving_sk, _receiving_pk = bobs_keys 101 | capsule, ciphertext = capsule_and_ciphertext 102 | 103 | cfrags = [reencrypt(capsule, kfrag) for kfrag in kfrags] 104 | cfrags[0] = CapsuleFrag.from_bytes(bytes(cfrags[0])) 105 | with pytest.raises(TypeError): 106 | plaintext_reenc = decrypt_reencrypted(receiving_sk=receiving_sk, 107 | delegating_pk=delegating_pk, 108 | capsule=capsule, 109 | verified_cfrags=cfrags, 110 | ciphertext=ciphertext, 111 | ) 112 | 113 | 114 | def test_wrong_shares(alices_keys, bobs_keys): 115 | delegating_sk, signing_sk = alices_keys 116 | _receiving_sk, receiving_pk = bobs_keys 117 | 118 | # Trying to create less kfrags than the threshold 119 | with pytest.raises(ValueError): 120 | generate_kfrags(delegating_sk=delegating_sk, 121 | signer=Signer(signing_sk), 122 | receiving_pk=receiving_pk, 123 | threshold=3, 124 | shares=2) 125 | -------------------------------------------------------------------------------- /tests/test_serializable.py: -------------------------------------------------------------------------------- 1 | import re 2 | 3 | import pytest 4 | 5 | from umbral.serializable import Serializable, Deserializable, bool_bytes, bool_from_exact_bytes 6 | 7 | 8 | class A(Serializable, Deserializable): 9 | 10 | def __init__(self, val: int): 11 | assert 0 <= val < 2**32 12 | self.val = val 13 | 14 | @classmethod 15 | def serialized_size(cls): 16 | return 4 17 | 18 | @classmethod 19 | def _from_exact_bytes(cls, data): 20 | return cls(int.from_bytes(data, byteorder='big')) 21 | 22 | def __bytes__(self): 23 | return self.val.to_bytes(self.serialized_size(), byteorder='big') 24 | 25 | def __eq__(self, other): 26 | return isinstance(other, A) and self.val == other.val 27 | 28 | 29 | class B(Serializable, Deserializable): 30 | 31 | def __init__(self, val: int): 32 | assert 0 <= val < 2**16 33 | self.val = val 34 | 35 | @classmethod 36 | 
def serialized_size(cls): 37 | return 2 38 | 39 | @classmethod 40 | def _from_exact_bytes(cls, data): 41 | return cls(int.from_bytes(data, byteorder='big')) 42 | 43 | def __bytes__(self): 44 | return self.val.to_bytes(self.serialized_size(), byteorder='big') 45 | 46 | def __eq__(self, other): 47 | return isinstance(other, B) and self.val == other.val 48 | 49 | 50 | class C(Serializable, Deserializable): 51 | 52 | def __init__(self, a: A, b: B): 53 | self.a = a 54 | self.b = b 55 | 56 | @classmethod 57 | def serialized_size(cls): 58 | return A.serialized_size() + B.serialized_size() 59 | 60 | @classmethod 61 | def _from_exact_bytes(cls, data): 62 | components = cls._split(data, A, B) 63 | return cls(*components) 64 | 65 | def __bytes__(self): 66 | return bytes(self.a) + bytes(self.b) 67 | 68 | def __eq__(self, other): 69 | return isinstance(other, C) and self.a == other.a and self.b == other.b 70 | 71 | 72 | def test_normal_operation(): 73 | a = A(2**32 - 123) 74 | b = B(2**16 - 456) 75 | c = C(a, b) 76 | c_back = C.from_bytes(bytes(c)) 77 | assert c_back == c 78 | 79 | 80 | def test_too_many_bytes(): 81 | a = A(2**32 - 123) 82 | b = B(2**16 - 456) 83 | c = C(a, b) 84 | with pytest.raises(ValueError, match="Expected 6 bytes, got 7"): 85 | C.from_bytes(bytes(c) + b'\x00') 86 | 87 | 88 | def test_not_enough_bytes(): 89 | a = A(2**32 - 123) 90 | b = B(2**16 - 456) 91 | c = C(a, b) 92 | # Will happen on deserialization of B - 1 byte missing 93 | with pytest.raises(ValueError, match="Expected 6 bytes, got 5"): 94 | C.from_bytes(bytes(c)[:-1]) 95 | 96 | 97 | def test_bool_bytes(): 98 | assert bool_from_exact_bytes(bool_bytes(True)) == True 99 | assert bool_from_exact_bytes(bool_bytes(False)) == False 100 | error_msg = re.escape("Incorrectly serialized boolean; expected b'\\x00' or b'\\x01', got b'z'") 101 | with pytest.raises(ValueError, match=error_msg): 102 | bool_from_exact_bytes(b'z') 103 | 104 | 105 | def test_split_bool(): 106 | a = A(2**32 - 123) 107 | b = True 108 | data = bytes(a) + bool_bytes(b) 109 | a_back, b_back = Deserializable._split(data, A, bool) 110 | assert a_back == a 111 | assert b_back == b 112 | -------------------------------------------------------------------------------- /tests/test_signing.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | from umbral.keys import PublicKey, SecretKey 4 | from umbral.signing import Signature, Signer 5 | from umbral.hashing import Hash 6 | 7 | 8 | @pytest.mark.parametrize('execution_number', range(20)) # Run this test 20 times. 9 | def test_sign_and_verify(execution_number): 10 | sk = SecretKey.random() 11 | pk = sk.public_key() 12 | signer = Signer(sk) 13 | 14 | message = b"peace at dawn" + str(execution_number).encode() 15 | 16 | signature = signer.sign(message) 17 | assert signature.verify(pk, message) 18 | 19 | 20 | @pytest.mark.parametrize('execution_number', range(20)) # Run this test 20 times. 
21 | def test_sign_serialize_and_verify(execution_number): 22 | sk = SecretKey.random() 23 | pk = sk.public_key() 24 | signer = Signer(sk) 25 | 26 | message = b"peace at dawn" + str(execution_number).encode() 27 | 28 | signature = signer.sign(message) 29 | 30 | signature_bytes = bytes(signature) 31 | signature_restored = Signature.from_bytes(signature_bytes) 32 | 33 | assert signature_restored.verify(pk, message) 34 | 35 | 36 | def test_verification_fail(): 37 | sk = SecretKey.random() 38 | pk = sk.public_key() 39 | signer = Signer(sk) 40 | 41 | message = b"peace at dawn" 42 | signature = signer.sign(message) 43 | 44 | # wrong message 45 | wrong_message = b"no peace at dawn" 46 | assert not signature.verify(pk, wrong_message) 47 | 48 | # bad signature 49 | signature_bytes = bytes(signature) 50 | signature_bytes = b'\x00' + signature_bytes[1:] 51 | signature_restored = Signature.from_bytes(signature_bytes) 52 | 53 | assert not signature_restored.verify(pk, message) 54 | 55 | 56 | def test_signature_str(): 57 | sk = SecretKey.random() 58 | pk = sk.public_key() 59 | signer = Signer(sk) 60 | signature = signer.sign(b'peace at dawn') 61 | s = str(signature) 62 | assert 'Signature' in s 63 | 64 | 65 | def test_signature_is_hashable(): 66 | sk = SecretKey.random() 67 | pk = sk.public_key() 68 | signer = Signer(sk) 69 | 70 | message = b'peace at dawn' 71 | message2 = b'no peace at dawn' 72 | 73 | signature = signer.sign(message) 74 | signature2 = signer.sign(message2) 75 | 76 | assert hash(signature) != hash(signature2) 77 | 78 | signature_restored = Signature.from_bytes(bytes(signature)) 79 | assert signature == signature_restored 80 | assert hash(signature) == hash(signature_restored) 81 | 82 | # Different hash, since signing involves some randomness 83 | signature3 = signer.sign(message) 84 | assert hash(signature) != hash(signature3) 85 | 86 | 87 | def test_signer_str(): 88 | signer = Signer(SecretKey.random()) 89 | s = str(signer) 90 | assert s == "Signer:..." 91 | 92 | 93 | def test_signer_hash(): 94 | signer = Signer(SecretKey.random()) 95 | # Insecure Python hash, shouldn't be available. 96 | with pytest.raises(RuntimeError): 97 | hash(signer) 98 | 99 | 100 | def test_signer_bytes(): 101 | signer = Signer(SecretKey.random()) 102 | # Shouldn't be able to serialize. 
103 | with pytest.raises(RuntimeError): 104 | bytes(signer) 105 | 106 | 107 | def test_signer_pubkey(): 108 | sk = SecretKey.random() 109 | pk = sk.public_key() 110 | signer = Signer(sk) 111 | assert signer.verifying_key() == pk 112 | -------------------------------------------------------------------------------- /tests/test_vectors.py: -------------------------------------------------------------------------------- 1 | import json 2 | import os 3 | 4 | from umbral import ( 5 | Capsule, KeyFrag, CapsuleFrag, SecretKey, PublicKey, encrypt, generate_kfrags, reencrypt) 6 | from umbral.curve_scalar import CurveScalar 7 | from umbral.curve_point import CurvePoint 8 | from umbral.hashing import Hash, unsafe_hash_to_point 9 | from umbral.dem import DEM, kdf 10 | 11 | 12 | def test_scalar_operations(): 13 | 14 | vector_file = os.path.join('vectors', 'vectors_scalar_operations.json') 15 | try: 16 | with open(vector_file) as f: 17 | vector_suite = json.load(f) 18 | except OSError: 19 | raise 20 | 21 | bn1 = CurveScalar.from_bytes(bytes.fromhex(vector_suite['first operand'])) 22 | bn2 = CurveScalar.from_bytes(bytes.fromhex(vector_suite['second operand'])) 23 | 24 | expected = dict() 25 | for op_result in vector_suite['vectors']: 26 | result = bytes.fromhex(op_result['result']) 27 | expected[op_result['operation']] = CurveScalar.from_bytes(result) 28 | 29 | test = [('Addition', bn1 + bn2), 30 | ('Subtraction', bn1 - bn2), 31 | ('Multiplication', bn1 * bn2), 32 | ('Inverse', bn1.invert()), 33 | ] 34 | 35 | for (operation, result) in test: 36 | assert result == expected[operation], 'Error in {}'.format(operation) 37 | 38 | def test_scalar_hash(): 39 | 40 | vector_file = os.path.join('vectors', 'vectors_scalar_from_digest.json') 41 | try: 42 | with open(vector_file) as f: 43 | vector_suite = json.load(f) 44 | except OSError: 45 | raise 46 | 47 | for vector in vector_suite['vectors']: 48 | hash_input = [bytes.fromhex(item['bytes']) for item in vector['input']] 49 | expected = CurveScalar.from_bytes(bytes.fromhex(vector['output'])) 50 | 51 | digest = Hash(b'some_dst') 52 | for input_ in hash_input: 53 | digest.update(input_) 54 | scalar = CurveScalar.from_digest(digest) 55 | assert scalar == expected 56 | 57 | 58 | def test_point_operations(): 59 | 60 | vector_file = os.path.join('vectors', 'vectors_point_operations.json') 61 | try: 62 | with open(vector_file) as f: 63 | vector_suite = json.load(f) 64 | except OSError: 65 | raise 66 | 67 | point1 = CurvePoint.from_bytes(bytes.fromhex(vector_suite['first CurvePoint operand'])) 68 | point2 = CurvePoint.from_bytes(bytes.fromhex(vector_suite['second CurvePoint operand'])) 69 | bn1 = CurveScalar.from_bytes(bytes.fromhex(vector_suite['CurveScalar operand'])) 70 | 71 | expected = dict() 72 | for op_result in vector_suite['vectors']: 73 | expected[op_result['operation']] = bytes.fromhex(op_result['result']) 74 | 75 | test = [('Addition', point1 + point2), 76 | ('Subtraction', point1 - point2), 77 | ('Multiplication', point1 * bn1), 78 | ('Inversion', -point1), 79 | ] 80 | 81 | for (operation, result) in test: 82 | assert result == CurvePoint.from_bytes(expected[operation]), 'Error in {}'.format(operation) 83 | 84 | test = [('To_affine.X', point1.to_affine()[0]), 85 | ('To_affine.Y', point1.to_affine()[1]), 86 | ] 87 | 88 | for (operation, result) in test: 89 | assert result == int.from_bytes(expected[operation], 'big'), 'Error in {}'.format(operation) 90 | 91 | assert kdf(bytes(point1), DEM.KEY_SIZE) == expected['kdf'] 92 | 93 | 94 | def 
test_unsafe_hash_to_point(): 95 | 96 | vector_file = os.path.join('vectors', 'vectors_unsafe_hash_to_point.json') 97 | try: 98 | with open(vector_file) as f: 99 | vector_suite = json.load(f) 100 | except OSError: 101 | raise 102 | 103 | for item in vector_suite['vectors']: 104 | data = bytes.fromhex(item['data']) 105 | dst = bytes.fromhex(item['dst']) 106 | expected = CurvePoint.from_bytes(bytes.fromhex(item['point'])) 107 | assert expected == unsafe_hash_to_point(dst=dst, data=data) 108 | 109 | 110 | def test_kfrags(): 111 | 112 | vector_file = os.path.join('vectors', 'vectors_kfrags.json') 113 | try: 114 | with open(vector_file) as f: 115 | vector_suite = json.load(f) 116 | except OSError: 117 | raise 118 | 119 | verifying_pk = PublicKey.from_bytes(bytes.fromhex(vector_suite['verifying_pk'])) 120 | delegating_pk = PublicKey.from_bytes(bytes.fromhex(vector_suite['delegating_pk'])) 121 | receiving_pk = PublicKey.from_bytes(bytes.fromhex(vector_suite['receiving_pk'])) 122 | 123 | for json_kfrag in vector_suite['vectors']: 124 | kfrag = KeyFrag.from_bytes(bytes.fromhex(json_kfrag['kfrag'])) 125 | assert kfrag.verify(verifying_pk=verifying_pk, 126 | delegating_pk=delegating_pk, 127 | receiving_pk=receiving_pk), \ 128 | 'Invalid KeyFrag {}'.format(bytes(kfrag).hex()) 129 | 130 | 131 | def test_cfrags(): 132 | 133 | vector_file = os.path.join('vectors', 'vectors_cfrags.json') 134 | try: 135 | with open(vector_file) as f: 136 | vector_suite = json.load(f) 137 | except OSError: 138 | raise 139 | 140 | capsule = Capsule.from_bytes(bytes.fromhex(vector_suite['capsule'])) 141 | 142 | verifying_pk = PublicKey.from_bytes(bytes.fromhex(vector_suite['verifying_pk'])) 143 | delegating_pk = PublicKey.from_bytes(bytes.fromhex(vector_suite['delegating_pk'])) 144 | receiving_pk = PublicKey.from_bytes(bytes.fromhex(vector_suite['receiving_pk'])) 145 | 146 | kfrags_n_cfrags = [(KeyFrag.from_bytes(bytes.fromhex(json_kfrag['kfrag'])), 147 | CapsuleFrag.from_bytes(bytes.fromhex(json_kfrag['cfrag']))) 148 | for json_kfrag in vector_suite['vectors']] 149 | 150 | for kfrag, cfrag in kfrags_n_cfrags: 151 | verified_kfrag = kfrag.verify(verifying_pk=verifying_pk, 152 | delegating_pk=delegating_pk, 153 | receiving_pk=receiving_pk) 154 | 155 | new_cfrag = reencrypt(capsule, verified_kfrag).cfrag 156 | assert new_cfrag.point_e1 == cfrag.point_e1 157 | assert new_cfrag.point_v1 == cfrag.point_v1 158 | assert new_cfrag.kfrag_id == cfrag.kfrag_id 159 | assert new_cfrag.precursor == cfrag.precursor 160 | new_cfrag.verify(capsule, 161 | verifying_pk=verifying_pk, 162 | delegating_pk=delegating_pk, 163 | receiving_pk=receiving_pk, 164 | ) 165 | -------------------------------------------------------------------------------- /umbral/__about__.py: -------------------------------------------------------------------------------- 1 | from __future__ import absolute_import, division, print_function 2 | 3 | __all__ = [ 4 | "__title__", "__summary__", "__version__", "__author__", 5 | "__email__", "__license__", "__copyright__", "__url__" 6 | ] 7 | 8 | __title__ = "umbral" 9 | 10 | __url__ = "https://github.com/nucypher/pyUmbral" 11 | 12 | __summary__ = 'NuCypher\'s Umbral Proxy Re-Encryption Implementation' 13 | 14 | __version__ = "0.3.0" 15 | 16 | __author__ = "NuCypher" 17 | 18 | __email__ = "dev@nucypher.com" 19 | 20 | __license__ = "GNU General Public License, Version 3" 21 | 22 | __copyright__ = 'Copyright (C) 2019 NuCypher' 23 | -------------------------------------------------------------------------------- /umbral/__init__.py: 
-------------------------------------------------------------------------------- 1 | from .__about__ import ( 2 | __author__, __license__, __summary__, __title__, __version__, __copyright__, __email__, __url__ 3 | ) 4 | 5 | from .capsule import Capsule 6 | from .capsule_frag import CapsuleFrag, VerifiedCapsuleFrag 7 | from .errors import VerificationError 8 | from .key_frag import KeyFrag, VerifiedKeyFrag 9 | from .keys import SecretKey, PublicKey, SecretKeyFactory 10 | from .pre import encrypt, decrypt_original, decrypt_reencrypted, reencrypt, generate_kfrags 11 | from .signing import Signature, Signer 12 | 13 | __all__ = [ 14 | "SecretKey", 15 | "PublicKey", 16 | "SecretKeyFactory", 17 | "Signature", 18 | "Signer", 19 | "Capsule", 20 | "KeyFrag", 21 | "VerifiedKeyFrag", 22 | "CapsuleFrag", 23 | "VerifiedCapsuleFrag", 24 | "VerificationError", 25 | "encrypt", 26 | "decrypt_original", 27 | "generate_kfrags", 28 | "reencrypt", 29 | "decrypt_reencrypted", 30 | ] 31 | -------------------------------------------------------------------------------- /umbral/capsule.py: -------------------------------------------------------------------------------- 1 | from typing import TYPE_CHECKING, Tuple, Sequence 2 | 3 | from .curve_point import CurvePoint 4 | from .curve_scalar import CurveScalar 5 | from .hashing import hash_capsule_points, hash_to_polynomial_arg, hash_to_shared_secret 6 | from .keys import PublicKey, SecretKey 7 | from .serializable import Serializable, Deserializable 8 | if TYPE_CHECKING: # pragma: no cover 9 | from .capsule_frag import CapsuleFrag 10 | 11 | 12 | def lambda_coeff(xs: Sequence[CurveScalar], i: int) -> CurveScalar: 13 | res = CurveScalar.one() 14 | for j, xs_j in enumerate(xs): 15 | if j != i: 16 | inv_diff = (xs_j - xs[i]).invert() 17 | res = (res * xs_j) * inv_diff 18 | return res 19 | 20 | 21 | class Capsule(Serializable, Deserializable): 22 | """ 23 | Encapsulated symmetric key. 24 | """ 25 | 26 | def __init__(self, point_e: CurvePoint, point_v: CurvePoint, signature: CurveScalar): 27 | self.point_e = point_e 28 | self.point_v = point_v 29 | self.signature = signature 30 | 31 | _COMPONENT_TYPES = CurvePoint, CurvePoint, CurveScalar 32 | _SERIALIZED_SIZE = sum(tp.serialized_size() for tp in _COMPONENT_TYPES) 33 | 34 | @classmethod 35 | def serialized_size(cls): 36 | return cls._SERIALIZED_SIZE 37 | 38 | @classmethod 39 | def _from_exact_bytes(cls, data: bytes): 40 | capsule = cls(*cls._split(data, *cls._COMPONENT_TYPES)) 41 | if not capsule._verify(): 42 | raise ValueError("Capsule self-verification failed. 
Serialized data may be damaged.") 43 | return capsule 44 | 45 | def __bytes__(self): 46 | return bytes(self.point_e) + bytes(self.point_v) + bytes(self.signature) 47 | 48 | @classmethod 49 | def from_public_key(cls, delegating_pk: PublicKey) -> Tuple['Capsule', CurvePoint]: 50 | g = CurvePoint.generator() 51 | 52 | priv_r = CurveScalar.random_nonzero() 53 | pub_r = g * priv_r 54 | 55 | priv_u = CurveScalar.random_nonzero() 56 | pub_u = g * priv_u 57 | 58 | h = hash_capsule_points(pub_r, pub_u) 59 | s = priv_u + (priv_r * h) 60 | 61 | shared_key = delegating_pk._point_key * (priv_r + priv_u) 62 | 63 | return cls(point_e=pub_r, point_v=pub_u, signature=s), shared_key 64 | 65 | def open_original(self, delegating_sk: SecretKey) -> CurvePoint: 66 | return (self.point_e + self.point_v) * delegating_sk.secret_scalar() 67 | 68 | def open_reencrypted(self, 69 | receiving_sk: SecretKey, 70 | delegating_pk: PublicKey, 71 | cfrags: Sequence['CapsuleFrag'], 72 | ) -> CurvePoint: 73 | 74 | if len(cfrags) == 0: 75 | raise ValueError("Empty CapsuleFrag sequence") 76 | 77 | precursor = cfrags[0].precursor 78 | 79 | if len(set(cfrags)) != len(cfrags): 80 | raise ValueError("Some of the CapsuleFrags are repeated") 81 | 82 | if not all(cfrag.precursor == precursor for cfrag in cfrags[1:]): 83 | raise ValueError("CapsuleFrags are not pairwise consistent") 84 | 85 | pub_key = receiving_sk.public_key().point() 86 | dh_point = precursor * receiving_sk.secret_scalar() 87 | 88 | # Combination of CFrags via Shamir's Secret Sharing reconstruction 89 | lc = [hash_to_polynomial_arg(precursor, pub_key, dh_point, cfrag.kfrag_id) 90 | for cfrag in cfrags] 91 | 92 | e_primes = [] 93 | v_primes = [] 94 | for i, cfrag in enumerate(cfrags): 95 | lambda_i = lambda_coeff(lc, i) 96 | e_primes.append(cfrag.point_e1 * lambda_i) 97 | v_primes.append(cfrag.point_v1 * lambda_i) 98 | e_prime = sum(e_primes[1:], e_primes[0]) 99 | v_prime = sum(v_primes[1:], v_primes[0]) 100 | 101 | # Secret value 'd' is what makes Umbral non-interactive 102 | d = hash_to_shared_secret(precursor, pub_key, dh_point) 103 | 104 | s = self.signature 105 | h = hash_capsule_points(self.point_e, self.point_v) 106 | 107 | orig_pub_key = delegating_pk.point() 108 | 109 | # TODO: check for d == 0? Or just let it fail?
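        # The comparison below is a consistency check against the capsule signature:
        # for a valid capsule g * s == v + e * h, so a correct reconstruction satisfies
        # delegating_pk * (s / d) == e_prime * h + v_prime.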
110 | inv_d = d.invert() 111 | if orig_pub_key * (s * inv_d) != (e_prime * h) + v_prime: 112 | raise ValueError("Internal validation failed") 113 | 114 | return (e_prime + v_prime) * d 115 | 116 | def _components(self): 117 | return (self.point_e, self.point_v, self.signature) 118 | 119 | def _verify(self) -> bool: 120 | g = CurvePoint.generator() 121 | e, v, s = self._components() 122 | h = hash_capsule_points(e, v) 123 | return g * s == v + (e * h) 124 | 125 | def __eq__(self, other): 126 | return self._components() == other._components() 127 | 128 | def __hash__(self): 129 | return hash((self.__class__, bytes(self))) 130 | 131 | def __str__(self): 132 | return f"{self.__class__.__name__}:{bytes(self).hex()[:16]}" 133 | -------------------------------------------------------------------------------- /umbral/capsule_frag.py: -------------------------------------------------------------------------------- 1 | from typing import Optional, Tuple, Type 2 | 3 | from .capsule import Capsule 4 | from .curve_point import CurvePoint 5 | from .curve_scalar import CurveScalar 6 | from .errors import VerificationError 7 | from .hashing import hash_to_cfrag_verification, kfrag_signature_message 8 | from .keys import PublicKey 9 | from .key_frag import KeyFrag, KeyFragID 10 | from .params import PARAMETERS 11 | from .serializable import Serializable, Deserializable, HasSerializedSize 12 | from .signing import Signature 13 | 14 | 15 | class CapsuleFragProof(Serializable, Deserializable): 16 | 17 | def __init__(self, 18 | point_e2: CurvePoint, 19 | point_v2: CurvePoint, 20 | kfrag_commitment: CurvePoint, 21 | kfrag_pok: CurvePoint, 22 | signature: CurveScalar, 23 | kfrag_signature: Signature, 24 | ): 25 | 26 | self.point_e2 = point_e2 27 | self.point_v2 = point_v2 28 | self.kfrag_commitment = kfrag_commitment 29 | self.kfrag_pok = kfrag_pok 30 | self.signature = signature 31 | self.kfrag_signature = kfrag_signature 32 | 33 | def _components(self): 34 | return (self.point_e2, self.point_v2, self.kfrag_commitment, 35 | self.kfrag_pok, self.signature, self.kfrag_signature) 36 | 37 | _COMPONENT_TYPES: Tuple[Type[HasSerializedSize], ...] = ( 38 | CurvePoint, CurvePoint, CurvePoint, CurvePoint, CurveScalar, Signature) 39 | _SERIALIZED_SIZE = sum(tp.serialized_size() for tp in _COMPONENT_TYPES) 40 | 41 | def __eq__(self, other): 42 | return self._components() == other._components() 43 | 44 | @classmethod 45 | def serialized_size(cls): 46 | return cls._SERIALIZED_SIZE 47 | 48 | @classmethod 49 | def _from_exact_bytes(cls, data): 50 | return cls(*cls._split(data, *cls._COMPONENT_TYPES)) 51 | 52 | def __bytes__(self): 53 | return b''.join(bytes(comp) for comp in self._components()) 54 | 55 | @classmethod 56 | def from_kfrag_and_cfrag(cls, 57 | capsule: Capsule, 58 | kfrag: KeyFrag, 59 | cfrag_e1: CurvePoint, 60 | cfrag_v1: CurvePoint, 61 | ) -> 'CapsuleFragProof': 62 | 63 | params = PARAMETERS 64 | 65 | rk = kfrag.key 66 | t = CurveScalar.random_nonzero() 67 | 68 | # Here are the formulaic constituents shared with `CapsuleFrag.verify()`. 
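        # What follows is a Schnorr-style (Chaum-Pedersen/DLEQ) proof that the same
        # re-encryption key `rk` relates (e, e1), (v, v1) and (u, u1): commitments
        # e2 = e*t, v2 = v*t, u2 = u*t for a fresh random t, a Fiat-Shamir challenge h
        # over all nine points, and the response z3 = t + rk*h.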
69 | 70 | e = capsule.point_e 71 | v = capsule.point_v 72 | 73 | e1 = cfrag_e1 74 | v1 = cfrag_v1 75 | 76 | u = params.u 77 | u1 = kfrag.proof.commitment 78 | 79 | e2 = e * t 80 | v2 = v * t 81 | u2 = u * t 82 | 83 | h = hash_to_cfrag_verification([e, e1, e2, v, v1, v2, u, u1, u2]) 84 | 85 | ### 86 | 87 | z3 = t + rk * h 88 | 89 | return cls(point_e2=e2, 90 | point_v2=v2, 91 | kfrag_commitment=u1, 92 | kfrag_pok=u2, 93 | signature=z3, 94 | kfrag_signature=kfrag.proof.signature_for_receiver, 95 | ) 96 | 97 | 98 | class CapsuleFrag(Serializable, Deserializable): 99 | """ 100 | Re-encrypted fragment of :py:class:`Capsule`. 101 | """ 102 | 103 | def __init__(self, 104 | point_e1: CurvePoint, 105 | point_v1: CurvePoint, 106 | kfrag_id: KeyFragID, 107 | precursor: CurvePoint, 108 | proof: CapsuleFragProof, 109 | ): 110 | 111 | self.point_e1 = point_e1 112 | self.point_v1 = point_v1 113 | self.kfrag_id = kfrag_id 114 | self.precursor = precursor 115 | self.proof = proof 116 | 117 | def _components(self): 118 | return (self.point_e1, self.point_v1, self.kfrag_id, self.precursor, self.proof) 119 | 120 | _COMPONENT_TYPES: Tuple[Type[HasSerializedSize], ...] = ( 121 | CurvePoint, CurvePoint, KeyFragID, CurvePoint, CapsuleFragProof) 122 | _SERIALIZED_SIZE = sum(tp.serialized_size() for tp in _COMPONENT_TYPES) 123 | 124 | def __eq__(self, other): 125 | return self._components() == other._components() 126 | 127 | def __hash__(self): 128 | return hash((self.__class__, bytes(self))) 129 | 130 | def __str__(self): 131 | return f"{self.__class__.__name__}:{bytes(self).hex()[:16]}" 132 | 133 | @classmethod 134 | def serialized_size(cls): 135 | return cls._SERIALIZED_SIZE 136 | 137 | @classmethod 138 | def _from_exact_bytes(cls, data): 139 | return cls(*cls._split(data, *cls._COMPONENT_TYPES)) 140 | 141 | def __bytes__(self): 142 | return b''.join(bytes(comp) for comp in self._components()) 143 | 144 | @classmethod 145 | def reencrypted(cls, capsule: Capsule, kfrag: KeyFrag) -> 'CapsuleFrag': 146 | rk = kfrag.key 147 | e1 = capsule.point_e * rk 148 | v1 = capsule.point_v * rk 149 | proof = CapsuleFragProof.from_kfrag_and_cfrag(capsule, kfrag, e1, v1) 150 | 151 | return cls(point_e1=e1, 152 | point_v1=v1, 153 | kfrag_id=kfrag.id, 154 | precursor=kfrag.precursor, 155 | proof=proof, 156 | ) 157 | 158 | def verify(self, 159 | capsule: Capsule, 160 | verifying_pk: PublicKey, 161 | delegating_pk: PublicKey, 162 | receiving_pk: PublicKey, 163 | ) -> 'VerifiedCapsuleFrag': 164 | """ 165 | Verifies the validity of this fragment. 166 | """ 167 | 168 | params = PARAMETERS 169 | 170 | # Here are the formulaic constituents shared with 171 | # `CapsuleFragProof.from_kfrag_and_cfrag`. 
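        # Verification recomputes the Fiat-Shamir challenge h from the same nine points,
        # checks the signature over the kfrag metadata, and then checks the three
        # response equations (for e, v and u) against the proof commitments below.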
172 | 173 | e = capsule.point_e 174 | v = capsule.point_v 175 | 176 | e1 = self.point_e1 177 | v1 = self.point_v1 178 | 179 | u = params.u 180 | u1 = self.proof.kfrag_commitment 181 | 182 | e2 = self.proof.point_e2 183 | v2 = self.proof.point_v2 184 | u2 = self.proof.kfrag_pok 185 | 186 | h = hash_to_cfrag_verification([e, e1, e2, v, v1, v2, u, u1, u2]) 187 | 188 | ### 189 | 190 | precursor = self.precursor 191 | kfrag_id = self.kfrag_id 192 | 193 | kfrag_message = kfrag_signature_message(kfrag_id=self.kfrag_id, 194 | commitment=self.proof.kfrag_commitment, 195 | precursor=self.precursor, 196 | maybe_delegating_pk=delegating_pk, 197 | maybe_receiving_pk=receiving_pk) 198 | 199 | if not self.proof.kfrag_signature.verify(verifying_pk, kfrag_message): 200 | raise VerificationError("Invalid KeyFrag signature") 201 | 202 | z = self.proof.signature 203 | 204 | # TODO: if one or more of the values here are incorrect, 205 | # we'll get the wrong `h` (since they're all hashed into it), 206 | # so perhaps it's enough to check only one of these equations. 207 | # See https://github.com/nucypher/rust-umbral/issues/46 for details. 208 | correct_reencryption_of_e = e * z == e2 + e1 * h 209 | correct_reencryption_of_v = v * z == v2 + v1 * h 210 | correct_rk_commitment = u * z == u2 + u1 * h 211 | 212 | if not (correct_reencryption_of_e and correct_reencryption_of_v and correct_rk_commitment): 213 | raise VerificationError("Failed to verify reencryption proof") 214 | 215 | return VerifiedCapsuleFrag(self) 216 | 217 | 218 | class VerifiedCapsuleFrag(Serializable): 219 | """ 220 | Verified capsule frag, good for decryption. 221 | Can be cast to ``bytes``, but cannot be deserialized from bytes directly. 222 | It can only be obtained from :py:meth:`CapsuleFrag.verify`. 223 | """ 224 | 225 | def __init__(self, cfrag: CapsuleFrag): 226 | self.cfrag = cfrag 227 | 228 | def __bytes__(self): 229 | return bytes(self.cfrag) 230 | 231 | @classmethod 232 | def serialized_size(cls): 233 | return CapsuleFrag.serialized_size() 234 | 235 | @classmethod 236 | def from_verified_bytes(cls, data) -> 'VerifiedCapsuleFrag': 237 | """ 238 | Restores a verified capsule frag directly from serialized bytes, 239 | skipping :py:meth:`CapsuleFrag.verify` call. 240 | 241 | Intended for internal storage; 242 | make sure that the bytes come from a trusted source. 243 | """ 244 | cfrag = CapsuleFrag.from_bytes(data) 245 | return cls(cfrag) 246 | 247 | def __eq__(self, other): 248 | return self.cfrag == other.cfrag 249 | 250 | def __hash__(self): 251 | return hash((self.__class__, bytes(self))) 252 | 253 | def __str__(self): 254 | return f"{self.__class__.__name__}:{bytes(self).hex()[:16]}" 255 | -------------------------------------------------------------------------------- /umbral/curve.py: -------------------------------------------------------------------------------- 1 | from . import openssl 2 | 3 | SECP256K1 = openssl.Curve.from_name('secp256k1') 4 | 5 | CURVE = SECP256K1 6 | -------------------------------------------------------------------------------- /umbral/curve_point.py: -------------------------------------------------------------------------------- 1 | from typing import Tuple 2 | 3 | from . import openssl 4 | from .curve import CURVE 5 | from .curve_scalar import CurveScalar 6 | from .serializable import Serializable, Deserializable 7 | 8 | 9 | class CurvePoint(Serializable, Deserializable): 10 | """ 11 | Represents an OpenSSL EC_POINT except more Pythonic. 
12 | """ 13 | 14 | def __init__(self, backend_point) -> None: 15 | self._backend_point = backend_point 16 | 17 | @classmethod 18 | def generator(cls) -> 'CurvePoint': 19 | return cls(CURVE.point_generator) 20 | 21 | @classmethod 22 | def random(cls) -> 'CurvePoint': 23 | """ 24 | Returns a CurvePoint object with a cryptographically secure EC_POINT based 25 | on the provided curve. 26 | """ 27 | return cls.generator() * CurveScalar.random_nonzero() 28 | 29 | def to_affine(self) -> Tuple[int, int]: 30 | """ 31 | Returns a tuple of Python ints in the format of (x, y) that represents 32 | the point in the curve. 33 | """ 34 | return openssl.point_to_affine_coords(CURVE, self._backend_point) 35 | 36 | @classmethod 37 | def serialized_size(cls): 38 | return CURVE.field_element_size + 1 # compressed point size 39 | 40 | @classmethod 41 | def _from_exact_bytes(cls, data: bytes): 42 | """ 43 | Returns a CurvePoint object from the given byte data on the curve provided. 44 | """ 45 | return cls(openssl.point_from_bytes(CURVE, data)) 46 | 47 | def __bytes__(self) -> bytes: 48 | """ 49 | Returns the CurvePoint serialized as bytes in the compressed form. 50 | """ 51 | return openssl.point_to_bytes_compressed(CURVE, self._backend_point) 52 | 53 | def __eq__(self, other): 54 | """ 55 | Compares two EC_POINTS for equality. 56 | """ 57 | return openssl.point_eq(CURVE, self._backend_point, other._backend_point) 58 | 59 | def __mul__(self, other: CurveScalar) -> 'CurvePoint': 60 | """ 61 | Performs an EC_POINT_mul on an EC_POINT and a BIGNUM. 62 | """ 63 | return CurvePoint(openssl.point_mul_bn(CURVE, self._backend_point, other._backend_bignum)) 64 | 65 | def __add__(self, other: 'CurvePoint') -> 'CurvePoint': 66 | """ 67 | Performs an EC_POINT_add on two EC_POINTS. 68 | """ 69 | return CurvePoint(openssl.point_add(CURVE, self._backend_point, other._backend_point)) 70 | 71 | def __sub__(self, other: 'CurvePoint') -> 'CurvePoint': 72 | """ 73 | Performs subtraction by adding the inverse of the `other` to the point. 74 | """ 75 | return self + (-other) 76 | 77 | def __neg__(self) -> 'CurvePoint': 78 | """ 79 | Computes the additive inverse of a CurvePoint, by performing an 80 | EC_POINT_invert on itself. 81 | """ 82 | return CurvePoint(openssl.point_neg(CURVE, self._backend_point)) 83 | -------------------------------------------------------------------------------- /umbral/curve_scalar.py: -------------------------------------------------------------------------------- 1 | from typing import TYPE_CHECKING, Union, Tuple 2 | 3 | from . import openssl 4 | from .curve import CURVE 5 | from .serializable import Serializable, Deserializable 6 | if TYPE_CHECKING: # pragma: no cover 7 | from .hashing import Hash 8 | 9 | 10 | class CurveScalar(Serializable, Deserializable): 11 | """ 12 | Represents an OpenSSL Bignum modulo the order of a curve. Some of these 13 | operations will only work with prime numbers. 14 | 15 | By default, the underlying OpenSSL BIGNUM has BN_FLG_CONSTTIME set for 16 | constant time operations. 17 | """ 18 | 19 | def __init__(self, backend_bignum): 20 | self._backend_bignum = backend_bignum 21 | 22 | @classmethod 23 | def random_nonzero(cls) -> 'CurveScalar': 24 | """ 25 | Returns a CurveScalar object with a cryptographically secure OpenSSL BIGNUM. 26 | """ 27 | return cls(openssl.bn_random_nonzero(CURVE.bn_order)) 28 | 29 | @classmethod 30 | def from_int(cls, num: int, check_normalization: bool = True) -> 'CurveScalar': 31 | """ 32 | Returns a CurveScalar object from a given integer on a curve. 
33 | """ 34 | modulus = CURVE.bn_order if check_normalization else None 35 | conv_bn = openssl.bn_from_int(num, check_modulus=modulus) 36 | return cls(conv_bn) 37 | 38 | @classmethod 39 | def from_digest(cls, digest: 'Hash') -> 'CurveScalar': 40 | # TODO (#39): this is used in Umbral scheme itself, 41 | # and needs to be able to return a guaranteed nonzero scalar. 42 | # Currently just matching what we have in rust-umbral 43 | # (taking bytes modulo curve order). 44 | # Can produce zeros! 45 | return cls(openssl.bn_from_bytes(digest.finalize(), apply_modulus=CURVE.bn_order)) 46 | 47 | @classmethod 48 | def serialized_size(cls): 49 | return CURVE.scalar_size 50 | 51 | @classmethod 52 | def _from_exact_bytes(cls, data: bytes): 53 | return cls(openssl.bn_from_bytes(data, check_modulus=CURVE.bn_order)) 54 | 55 | def __bytes__(self) -> bytes: 56 | """ 57 | Returns the CurveScalar as bytes. 58 | """ 59 | return openssl.bn_to_bytes(self._backend_bignum, CURVE.scalar_size) 60 | 61 | def __int__(self) -> int: 62 | """ 63 | Converts the CurveScalar to a Python int. 64 | """ 65 | return openssl.bn_to_int(self._backend_bignum) 66 | 67 | def __eq__(self, other) -> bool: 68 | """ 69 | Compares the two BIGNUMS or int. 70 | """ 71 | if isinstance(other, int): 72 | other = CurveScalar.from_int(other) 73 | return openssl.bn_cmp(self._backend_bignum, other._backend_bignum) == 0 74 | 75 | @classmethod 76 | def one(cls): 77 | return cls(openssl.bn_one()) 78 | 79 | def is_zero(self): 80 | return openssl.bn_is_zero(self._backend_bignum) 81 | 82 | def __mul__(self, other: Union[int, 'CurveScalar']) -> 'CurveScalar': 83 | """ 84 | Performs a BN_mod_mul between two BIGNUMS. 85 | """ 86 | if isinstance(other, int): 87 | other = CurveScalar.from_int(other) 88 | return CurveScalar(openssl.bn_mul(self._backend_bignum, 89 | other._backend_bignum, 90 | CURVE.bn_order)) 91 | 92 | def __add__(self, other : Union[int, 'CurveScalar']) -> 'CurveScalar': 93 | """ 94 | Performs a BN_mod_add on two BIGNUMs. 95 | """ 96 | if isinstance(other, int): 97 | other = CurveScalar.from_int(other) 98 | return CurveScalar(openssl.bn_add(self._backend_bignum, 99 | other._backend_bignum, 100 | CURVE.bn_order)) 101 | 102 | def __sub__(self, other : Union[int, 'CurveScalar']) -> 'CurveScalar': 103 | """ 104 | Performs a BN_mod_sub on two BIGNUMS. 105 | """ 106 | if isinstance(other, int): 107 | other = CurveScalar.from_int(other) 108 | return CurveScalar(openssl.bn_sub(self._backend_bignum, 109 | other._backend_bignum, 110 | CURVE.bn_order)) 111 | 112 | def invert(self) -> 'CurveScalar': 113 | """ 114 | Performs a BN_mod_inverse. 115 | WARNING: Only in constant time if BN_FLG_CONSTTIME is set on the BN. 
116 | """ 117 | return CurveScalar(openssl.bn_invert(self._backend_bignum, CURVE.bn_order)) 118 | -------------------------------------------------------------------------------- /umbral/dem.py: -------------------------------------------------------------------------------- 1 | import os 2 | from typing import Optional 3 | 4 | from cryptography.hazmat.primitives.kdf.hkdf import HKDF 5 | from cryptography.hazmat.primitives import hashes 6 | 7 | import nacl 8 | from nacl.bindings.crypto_aead import ( 9 | crypto_aead_xchacha20poly1305_ietf_encrypt as xchacha_encrypt, 10 | crypto_aead_xchacha20poly1305_ietf_decrypt as xchacha_decrypt, 11 | crypto_aead_xchacha20poly1305_ietf_KEYBYTES as XCHACHA_KEY_SIZE, 12 | crypto_aead_xchacha20poly1305_ietf_NPUBBYTES as XCHACHA_NONCE_SIZE, 13 | crypto_aead_xchacha20poly1305_ietf_ABYTES as XCHACHA_TAG_SIZE, 14 | ) 15 | 16 | from . import openssl 17 | 18 | 19 | def kdf(data: bytes, 20 | key_length: int, 21 | salt: Optional[bytes] = None, 22 | info: Optional[bytes] = None, 23 | ) -> bytes: 24 | 25 | hkdf = HKDF(algorithm=hashes.SHA256(), 26 | length=key_length, 27 | salt=salt, 28 | info=info, 29 | backend=openssl.backend) 30 | return hkdf.derive(data) 31 | 32 | 33 | class DEM: 34 | 35 | KEY_SIZE = XCHACHA_KEY_SIZE 36 | NONCE_SIZE = XCHACHA_NONCE_SIZE 37 | TAG_SIZE = XCHACHA_TAG_SIZE 38 | 39 | def __init__(self, 40 | key_material: bytes, 41 | salt: Optional[bytes] = None, 42 | info: Optional[bytes] = None, 43 | ): 44 | self._key = kdf(key_material, self.KEY_SIZE, salt, info) 45 | 46 | def encrypt(self, plaintext: bytes, authenticated_data: bytes = b"") -> bytes: 47 | nonce = os.urandom(self.NONCE_SIZE) 48 | ciphertext = xchacha_encrypt(plaintext, authenticated_data, nonce, self._key) 49 | return nonce + ciphertext 50 | 51 | def decrypt(self, nonce_and_ciphertext: bytes, authenticated_data: bytes = b"") -> bytes: 52 | 53 | if len(nonce_and_ciphertext) < self.NONCE_SIZE: 54 | raise ValueError("The ciphertext must include the nonce") 55 | 56 | nonce = nonce_and_ciphertext[:self.NONCE_SIZE] 57 | ciphertext = nonce_and_ciphertext[self.NONCE_SIZE:] 58 | 59 | # Prevent an out of bounds error deep in NaCl 60 | if len(ciphertext) < self.TAG_SIZE: 61 | raise ValueError("The authentication tag is missing or malformed") 62 | 63 | try: 64 | return xchacha_decrypt(ciphertext, authenticated_data, nonce, self._key) 65 | except nacl.exceptions.CryptoError as e: 66 | raise ValueError("Decryption of ciphertext failed: " 67 | "either someone tampered with the ciphertext or " 68 | "you are using an incorrect decryption key.") from e 69 | -------------------------------------------------------------------------------- /umbral/errors.py: -------------------------------------------------------------------------------- 1 | class VerificationError(Exception): 2 | """ 3 | Integrity of the data cannot be verified, see the message for details. 
4 | """ 5 | -------------------------------------------------------------------------------- /umbral/hashing.py: -------------------------------------------------------------------------------- 1 | from typing import TYPE_CHECKING, Optional, Iterable, Union, List, cast 2 | 3 | from cryptography.hazmat.primitives import hashes 4 | 5 | from .openssl import backend, ErrorInvalidCompressedPoint 6 | from .curve import CURVE 7 | from .curve_scalar import CurveScalar 8 | from .curve_point import CurvePoint 9 | from .serializable import Serializable, bool_bytes 10 | 11 | if TYPE_CHECKING: # pragma: no cover 12 | from .key_frag import KeyFragID 13 | from .keys import PublicKey 14 | 15 | 16 | class Hash: 17 | 18 | OUTPUT_SIZE = 32 19 | 20 | def __init__(self, dst: Optional[bytes] = None): 21 | self._backend_hash_algorithm = hashes.SHA256() 22 | self._hash = hashes.Hash(self._backend_hash_algorithm, backend=backend) 23 | 24 | if dst is not None: 25 | len_dst = len(dst).to_bytes(4, byteorder='big') 26 | self.update(len_dst + dst) 27 | 28 | def update(self, data: Union[bytes, Serializable]) -> None: 29 | self._hash.update(bytes(data)) 30 | 31 | def finalize(self) -> bytes: 32 | return self._hash.finalize() 33 | 34 | 35 | def hash_to_polynomial_arg(precursor: CurvePoint, 36 | pubkey: CurvePoint, 37 | dh_point: CurvePoint, 38 | kfrag_id: 'KeyFragID', 39 | ) -> CurveScalar: 40 | digest = Hash(b"POLYNOMIAL_ARG") 41 | digest.update(precursor) 42 | digest.update(pubkey) 43 | digest.update(dh_point) 44 | digest.update(kfrag_id) 45 | return CurveScalar.from_digest(digest) 46 | 47 | 48 | def hash_capsule_points(e: CurvePoint, v: CurvePoint) -> CurveScalar: 49 | digest = Hash(b"CAPSULE_POINTS") 50 | digest.update(e) 51 | digest.update(v) 52 | return CurveScalar.from_digest(digest) 53 | 54 | 55 | def hash_to_shared_secret(precursor: CurvePoint, 56 | pubkey: CurvePoint, 57 | dh_point: CurvePoint 58 | ) -> CurveScalar: 59 | digest = Hash(b"SHARED_SECRET") 60 | digest.update(precursor) 61 | digest.update(pubkey) 62 | digest.update(dh_point) 63 | return CurveScalar.from_digest(digest) 64 | 65 | 66 | def hash_to_cfrag_verification(points: Iterable[CurvePoint]) -> CurveScalar: 67 | digest = Hash(b"CFRAG_VERIFICATION") 68 | for point in points: 69 | digest.update(point) 70 | return CurveScalar.from_digest(digest) 71 | 72 | 73 | def kfrag_signature_message(kfrag_id: 'KeyFragID', 74 | commitment: CurvePoint, 75 | precursor: CurvePoint, 76 | maybe_delegating_pk: Optional['PublicKey'], 77 | maybe_receiving_pk: Optional['PublicKey'], 78 | ) -> bytes: 79 | 80 | # Have to convert to bytes manually because `mypy` is not smart enough to resolve types. 81 | 82 | delegating_part = ([bool_bytes(True), bytes(maybe_delegating_pk)] 83 | if maybe_delegating_pk 84 | else [bool_bytes(False)]) 85 | cast(List[Serializable], delegating_part) 86 | 87 | receiving_part = ([bool_bytes(True), bytes(maybe_receiving_pk)] 88 | if maybe_receiving_pk 89 | else [bool_bytes(False)]) 90 | 91 | components = ([bytes(kfrag_id), bytes(commitment), bytes(precursor)] + 92 | delegating_part + 93 | receiving_part) 94 | 95 | return b''.join(components) 96 | 97 | 98 | def unsafe_hash_to_point(dst: bytes, data: bytes) -> CurvePoint: 99 | """ 100 | Hashes arbitrary data into a valid EC point of the specified curve, 101 | using the try-and-increment method. 102 | 103 | WARNING: Do not use when the input data is secret, as this implementation is not 104 | in constant time, and hence, it is not safe with respect to timing attacks. 
105 | """ 106 | 107 | len_data = len(data).to_bytes(4, byteorder='big') 108 | data_with_len = len_data + data 109 | sign = b'\x02' 110 | 111 | # We use an internal 32-bit counter as additional input 112 | for i in range(2**32): 113 | ibytes = i.to_bytes(4, byteorder='big') 114 | digest = Hash(dst) 115 | digest.update(data_with_len + ibytes) 116 | point_data = digest.finalize()[:CURVE.field_element_size] 117 | 118 | compressed_point = sign + point_data 119 | 120 | try: 121 | return CurvePoint.from_bytes(compressed_point) 122 | except ErrorInvalidCompressedPoint: 123 | # If it is not a valid point, continue on 124 | pass 125 | 126 | # Only happens with probability 2^(-32) 127 | raise ValueError('Could not hash input into the curve') # pragma: no cover 128 | -------------------------------------------------------------------------------- /umbral/keys.py: -------------------------------------------------------------------------------- 1 | import os 2 | from typing import Tuple 3 | 4 | from .curve_scalar import CurveScalar 5 | from .curve_point import CurvePoint 6 | from .dem import kdf 7 | from .hashing import Hash 8 | from .serializable import Serializable, SerializableSecret, Deserializable 9 | 10 | 11 | class SecretKey(SerializableSecret, Deserializable): 12 | """ 13 | Umbral secret (private) key. 14 | """ 15 | 16 | def __init__(self, scalar_key: CurveScalar): 17 | self._scalar_key = scalar_key 18 | # Precached public key. 19 | # We are assuming here that there will be on average more 20 | # derivations of a public key from a secret key than secret key instantiations. 21 | self._public_key = PublicKey(CurvePoint.generator() * self._scalar_key) 22 | 23 | @classmethod 24 | def random(cls) -> 'SecretKey': 25 | """ 26 | Generates a random secret key and returns it. 27 | """ 28 | return cls(CurveScalar.random_nonzero()) 29 | 30 | def public_key(self) -> 'PublicKey': 31 | """ 32 | Returns the associated public key. 33 | """ 34 | return self._public_key 35 | 36 | def __str__(self): 37 | return f"{self.__class__.__name__}:..." 38 | 39 | def __hash__(self): 40 | raise RuntimeError("Hashing secret objects is not secure") 41 | 42 | def secret_scalar(self) -> CurveScalar: 43 | return self._scalar_key 44 | 45 | @classmethod 46 | def serialized_size(cls): 47 | return CurveScalar.serialized_size() 48 | 49 | @classmethod 50 | def _from_exact_bytes(cls, data: bytes): 51 | return cls(CurveScalar._from_exact_bytes(data)) 52 | 53 | def to_secret_bytes(self) -> bytes: 54 | return bytes(self._scalar_key) 55 | 56 | 57 | class PublicKey(Serializable, Deserializable): 58 | """ 59 | Umbral public key. 60 | 61 | Created using :py:meth:`SecretKey.public_key`. 
62 | """ 63 | 64 | def __init__(self, point_key: CurvePoint): 65 | self._point_key = point_key 66 | 67 | def point(self) -> CurvePoint: 68 | return self._point_key 69 | 70 | @classmethod 71 | def serialized_size(cls): 72 | return CurvePoint.serialized_size() 73 | 74 | @classmethod 75 | def _from_exact_bytes(cls, data: bytes): 76 | return cls(CurvePoint._from_exact_bytes(data)) 77 | 78 | def __bytes__(self) -> bytes: 79 | return bytes(self._point_key) 80 | 81 | def __str__(self): 82 | return f"{self.__class__.__name__}:{bytes(self).hex()[:16]}" 83 | 84 | def __eq__(self, other): 85 | return self._point_key == other._point_key 86 | 87 | def __hash__(self) -> int: 88 | return hash((self.__class__, bytes(self))) 89 | 90 | 91 | class SecretKeyFactory(SerializableSecret, Deserializable): 92 | """ 93 | This class handles keyring material for Umbral, by allowing deterministic 94 | derivation of :py:class:`SecretKey` objects based on labels. 95 | 96 | Don't use this key material directly as a key. 97 | """ 98 | 99 | _KEY_SEED_SIZE = 32 100 | _DERIVED_KEY_SIZE = 64 101 | 102 | def __init__(self, key_seed: bytes): 103 | self.__key_seed = key_seed 104 | 105 | @classmethod 106 | def random(cls) -> 'SecretKeyFactory': 107 | """ 108 | Creates a random factory. 109 | """ 110 | return cls(os.urandom(cls._KEY_SEED_SIZE)) 111 | 112 | @classmethod 113 | def seed_size(cls): 114 | """ 115 | Returns the seed size required by 116 | :py:meth:`~SecretKeyFactory.from_secure_randomness`. 117 | """ 118 | return cls._KEY_SEED_SIZE 119 | 120 | @classmethod 121 | def from_secure_randomness(cls, seed: bytes) -> 'SecretKeyFactory': 122 | """ 123 | Creates a secret key factory using the given random bytes 124 | (of size :py:meth:`~SecretKeyFactory.seed_size`). 125 | 126 | .. warning:: 127 | 128 | Make sure the given seed has been obtained 129 | from a cryptographically secure source of randomness! 130 | """ 131 | if len(seed) != cls.seed_size(): 132 | raise ValueError(f"Expected {cls.seed_size()} bytes, got {len(seed)}") 133 | return cls(seed) 134 | 135 | def make_key(self, label: bytes) -> SecretKey: 136 | """ 137 | Creates a :py:class:`SecretKey` deterministically from the given label. 138 | """ 139 | tag = b"KEY_DERIVATION/" + label 140 | key = kdf(self.__key_seed, self._DERIVED_KEY_SIZE, info=tag) 141 | 142 | digest = Hash(tag) 143 | digest.update(key) 144 | scalar_key = CurveScalar.from_digest(digest) 145 | 146 | return SecretKey(scalar_key) 147 | 148 | def make_factory(self, label: bytes) -> 'SecretKeyFactory': 149 | """ 150 | Creates a :py:class:`SecretKeyFactory` deterministically from the given label. 151 | """ 152 | tag = b"FACTORY_DERIVATION/" + label 153 | key_seed = kdf(self.__key_seed, self._KEY_SEED_SIZE, info=tag) 154 | return SecretKeyFactory(key_seed) 155 | 156 | @classmethod 157 | def serialized_size(cls): 158 | return cls._KEY_SEED_SIZE 159 | 160 | @classmethod 161 | def _from_exact_bytes(cls, data: bytes): 162 | return cls(data) 163 | 164 | def to_secret_bytes(self) -> bytes: 165 | return bytes(self.__key_seed) 166 | 167 | def __str__(self): 168 | return f"{self.__class__.__name__}:..." 
169 | 170 | def __hash__(self): 171 | raise RuntimeError("Hashing secret objects is not secure") 172 | -------------------------------------------------------------------------------- /umbral/params.py: -------------------------------------------------------------------------------- 1 | from .hashing import unsafe_hash_to_point 2 | 3 | 4 | class Parameters: 5 | 6 | def __init__(self): 7 | self.u = unsafe_hash_to_point(b'PARAMETERS', b'POINT_U') 8 | 9 | 10 | PARAMETERS = Parameters() 11 | -------------------------------------------------------------------------------- /umbral/pre.py: -------------------------------------------------------------------------------- 1 | from typing import Tuple, Optional, Sequence, List 2 | 3 | from .capsule import Capsule 4 | from .capsule_frag import VerifiedCapsuleFrag, CapsuleFrag 5 | from .dem import DEM 6 | from .keys import PublicKey, SecretKey 7 | from .key_frag import VerifiedKeyFrag, KeyFrag, KeyFragBase 8 | from .signing import Signer 9 | 10 | 11 | def encrypt(delegating_pk: PublicKey, plaintext: bytes) -> Tuple[Capsule, bytes]: 12 | """ 13 | Generates and encapsulates a symmetric key and uses it to encrypt the given plaintext. 14 | 15 | Returns the KEM Capsule and the ciphertext. 16 | """ 17 | capsule, key_seed = Capsule.from_public_key(delegating_pk) 18 | dem = DEM(bytes(key_seed)) 19 | ciphertext = dem.encrypt(plaintext, authenticated_data=bytes(capsule)) 20 | return capsule, ciphertext 21 | 22 | 23 | def decrypt_original(delegating_sk: SecretKey, capsule: Capsule, ciphertext: bytes) -> bytes: 24 | """ 25 | Opens the capsule using the delegator's key used for encryption and gets what's inside. 26 | We hope that's a symmetric key, which we use to decrypt the ciphertext 27 | and return the resulting cleartext. 28 | """ 29 | key_seed = capsule.open_original(delegating_sk) 30 | dem = DEM(bytes(key_seed)) 31 | return dem.decrypt(ciphertext, authenticated_data=bytes(capsule)) 32 | 33 | 34 | def generate_kfrags(delegating_sk: SecretKey, 35 | receiving_pk: PublicKey, 36 | signer: Signer, 37 | threshold: int, 38 | shares: int, 39 | sign_delegating_key: bool = True, 40 | sign_receiving_key: bool = True, 41 | ) -> List[VerifiedKeyFrag]: 42 | """ 43 | Generates ``shares`` key fragments to pass to proxies for re-encryption. 44 | At least ``threshold`` of them will be needed for decryption. 45 | If ``sign_delegating_key`` or ``sign_receiving_key`` are ``True``, 46 | the corresponding keys will have to be provided to :py:meth:`KeyFrag.verify`. 47 | """ 48 | 49 | base = KeyFragBase(delegating_sk, receiving_pk, signer, threshold) 50 | 51 | # Technically we could allow it, but what would be the use of these kfrags? 52 | if shares < threshold: 53 | raise ValueError(f"Creating less kfrags ({shares}) " 54 | f"than threshold ({threshold}) makes them useless") 55 | 56 | kfrags = [KeyFrag.from_base(base, sign_delegating_key, sign_receiving_key) 57 | for _ in range(shares)] 58 | 59 | # Make them verified - we know they're good. 60 | return [VerifiedKeyFrag(kfrag) for kfrag in kfrags] 61 | 62 | 63 | def reencrypt(capsule: Capsule, kfrag: VerifiedKeyFrag) -> VerifiedCapsuleFrag: 64 | """ 65 | Creates a capsule fragment using the given key fragment. 66 | Capsule fragments can later be used to decrypt the ciphertext. 67 | """ 68 | # We could let duck typing do its work, 69 | # but it's better to make a common error more understandable. 
70 | if isinstance(kfrag, KeyFrag) and not isinstance(kfrag, VerifiedKeyFrag): 71 | raise TypeError("KeyFrag must be verified before reencryption") 72 | 73 | return VerifiedCapsuleFrag(CapsuleFrag.reencrypted(capsule, kfrag.kfrag)) 74 | 75 | 76 | def decrypt_reencrypted(receiving_sk: SecretKey, 77 | delegating_pk: PublicKey, 78 | capsule: Capsule, 79 | verified_cfrags: Sequence[VerifiedCapsuleFrag], 80 | ciphertext: bytes, 81 | ) -> bytes: 82 | """ 83 | Decrypts the ciphertext using the original capsule and the reencrypted capsule fragments. 84 | """ 85 | # We could let duck typing do its work, 86 | # but it's better to make a common error more understandable. 87 | for cfrag in verified_cfrags: 88 | if isinstance(cfrag, CapsuleFrag) and not isinstance(cfrag, VerifiedCapsuleFrag): 89 | raise TypeError("All CapsuleFrags must be verified before decryption") 90 | 91 | cfrags = [vcfrag.cfrag for vcfrag in verified_cfrags] 92 | key_seed = capsule.open_reencrypted(receiving_sk, delegating_pk, cfrags) 93 | dem = DEM(bytes(key_seed)) 94 | return dem.decrypt(ciphertext, authenticated_data=bytes(capsule)) 95 | -------------------------------------------------------------------------------- /umbral/serializable.py: -------------------------------------------------------------------------------- 1 | from abc import abstractmethod, ABC 2 | from typing import Tuple, Type, List, Any, TypeVar 3 | 4 | 5 | class HasSerializedSize(ABC): 6 | """ 7 | A base serialization mixin, denoting a type with a constant-size serialized representation. 8 | """ 9 | 10 | @classmethod 11 | @abstractmethod 12 | def serialized_size(cls) -> int: 13 | """ 14 | Returns the size in bytes of the serialized representation of this object 15 | (obtained with ``bytes()`` or ``to_secret_bytes()``). 16 | """ 17 | raise NotImplementedError 18 | 19 | 20 | class Deserializable(HasSerializedSize): 21 | """ 22 | A mixin for composable deserialization. 23 | """ 24 | 25 | Self = TypeVar('Self', bound='Deserializable') 26 | 27 | @classmethod 28 | def from_bytes(cls: Type[Self], data: bytes) -> Self: 29 | """ 30 | Restores the object from serialized bytes. 31 | """ 32 | expected_size = cls.serialized_size() 33 | if len(data) != expected_size: 34 | raise ValueError(f"Expected {expected_size} bytes, got {len(data)}") 35 | return cls._from_exact_bytes(data) 36 | 37 | @staticmethod 38 | def _split(data: bytes, *types: Type) -> List[Any]: 39 | """ 40 | Given a list of ``Deserializable`` types, attempts to deserialize them from the bytestring 41 | one by one and returns the list of the resulting objects and the remaining bytestring. 42 | """ 43 | objs = [] 44 | pos = 0 45 | 46 | for tp in types: 47 | 48 | if issubclass(tp, bool): 49 | size = bool_serialized_size() 50 | else: 51 | size = tp.serialized_size() 52 | 53 | chunk = data[pos:pos+size] 54 | 55 | if issubclass(tp, bool): 56 | obj = bool_from_exact_bytes(chunk) 57 | else: 58 | obj = tp._from_exact_bytes(chunk) 59 | 60 | objs.append(obj) 61 | pos += size 62 | 63 | return objs 64 | 65 | @classmethod 66 | @abstractmethod 67 | def _from_exact_bytes(cls: Type[Self], data: bytes) -> Self: 68 | """ 69 | Deserializes the object from a bytestring of exactly the expected length 70 | (defined by ``serialized_size()``). 71 | """ 72 | raise NotImplementedError 73 | 74 | 75 | class Serializable(HasSerializedSize): 76 | """ 77 | A mixin for composable serialization. 78 | """ 79 | 80 | @abstractmethod 81 | def __bytes__(self): 82 | """ 83 | Serializes the object into bytes. 
84 | """ 85 | raise NotImplementedError 86 | 87 | 88 | class SerializableSecret(HasSerializedSize): 89 | """ 90 | A mixin for composable serialization of objects containing secret data. 91 | """ 92 | 93 | @abstractmethod 94 | def to_secret_bytes(self): 95 | """ 96 | Serializes the object into bytes. 97 | This bytestring is secret, handle with care! 98 | """ 99 | raise NotImplementedError 100 | 101 | 102 | def bool_serialized_size() -> int: 103 | return 1 104 | 105 | 106 | def bool_bytes(b: bool) -> bytes: 107 | return b'\x01' if b else b'\x00' 108 | 109 | 110 | def bool_from_exact_bytes(data: bytes) -> bool: 111 | if data == b'\x01': 112 | b = True 113 | elif data == b'\x00': 114 | b = False 115 | else: 116 | raise ValueError("Incorrectly serialized boolean; " 117 | f"expected b'\\x00' or b'\\x01', got {repr(data)}") 118 | return b 119 | -------------------------------------------------------------------------------- /umbral/signing.py: -------------------------------------------------------------------------------- 1 | from . import openssl 2 | from .curve import CURVE 3 | from .curve_scalar import CurveScalar 4 | from .hashing import Hash 5 | from .keys import SecretKey, PublicKey 6 | from .serializable import Serializable, Deserializable 7 | 8 | 9 | def digest_for_signing(message: bytes) -> Hash: 10 | # Not using a DST here to make life easier for third-party verifiers 11 | digest = Hash() 12 | digest.update(message) 13 | return digest 14 | 15 | 16 | class Signer: 17 | """ 18 | An object possessing the capability to create signatures. 19 | For safety reasons serialization is prohibited. 20 | """ 21 | 22 | def __init__(self, secret_key: SecretKey): 23 | self.__secret_key = secret_key 24 | 25 | def sign_digest(self, digest: Hash) -> 'Signature': 26 | 27 | secret_bn = self.__secret_key.secret_scalar()._backend_bignum 28 | r_int, s_int = openssl.ecdsa_sign(curve=CURVE, 29 | secret_bn=secret_bn, 30 | prehashed_message=digest.finalize(), 31 | hash_algorithm=digest._backend_hash_algorithm) 32 | 33 | # Normalize s. This is a non-malleability measure, which OpenSSL doesn't do. 34 | # See Bitcoin's BIP-0062 for more details: 35 | # https://github.com/bitcoin/bips/blob/master/bip-0062.mediawiki#Low_S_values_in_signatures 36 | 37 | # s is public, so no constant-timeness required here 38 | if s_int > (CURVE.order >> 1): 39 | s_int = CURVE.order - s_int 40 | 41 | # Already normalized, don't waste time 42 | r = CurveScalar.from_int(r_int, check_normalization=False) 43 | s = CurveScalar.from_int(s_int, check_normalization=False) 44 | 45 | return Signature(r, s) 46 | 47 | def sign(self, message: bytes) -> 'Signature': 48 | """ 49 | Hashes and signs the message. 50 | """ 51 | return self.sign_digest(digest_for_signing(message)) 52 | 53 | def verifying_key(self) -> PublicKey: 54 | """ 55 | Returns the public verification key corresponding to the secret key used for signing. 56 | """ 57 | return self.__secret_key.public_key() 58 | 59 | def __str__(self): 60 | return f"{self.__class__.__name__}:..." 61 | 62 | def __hash__(self): 63 | raise RuntimeError(f"{self.__class__.__name__} objects do not support hashing") 64 | 65 | def __bytes__(self): 66 | raise RuntimeError(f"{self.__class__.__name__} objects do not support serialization") 67 | 68 | 69 | class Signature(Serializable, Deserializable): 70 | """ 71 | Wrapper for ECDSA signatures. 
72 | """ 73 | 74 | def __init__(self, r: CurveScalar, s: CurveScalar): 75 | self.r = r 76 | self.s = s 77 | 78 | def verify_digest(self, verifying_pk: PublicKey, digest: Hash) -> bool: 79 | return openssl.ecdsa_verify(curve=CURVE, 80 | sig_r=int(self.r), 81 | sig_s=int(self.s), 82 | public_point=verifying_pk.point()._backend_point, 83 | prehashed_message=digest.finalize(), 84 | hash_algorithm=digest._backend_hash_algorithm) 85 | 86 | def verify(self, verifying_pk: PublicKey, message: bytes) -> bool: 87 | """ 88 | Returns ``True`` if the ``message`` was signed by someone possessing the secret counterpart 89 | to ``verifying_pk``. 90 | """ 91 | digest = digest_for_signing(message) 92 | return self.verify_digest(verifying_pk, digest) 93 | 94 | @classmethod 95 | def serialized_size(cls): 96 | return CurveScalar.serialized_size() * 2 97 | 98 | @classmethod 99 | def _from_exact_bytes(cls, data: bytes): 100 | return cls(*cls._split(data, CurveScalar, CurveScalar)) 101 | 102 | def __bytes__(self): 103 | return bytes(self.r) + bytes(self.s) 104 | 105 | def __str__(self): 106 | return f"{self.__class__.__name__}:{bytes(self).hex()[:16]}" 107 | 108 | def __eq__(self, other): 109 | return self.r == other.r and self.s == other.s 110 | 111 | def __hash__(self) -> int: 112 | return hash((self.__class__, bytes(self))) 113 | -------------------------------------------------------------------------------- /vectors/generate_test_vectors.py: -------------------------------------------------------------------------------- 1 | import json 2 | import os 3 | 4 | from umbral import ( 5 | SecretKey, Signer, KeyFrag, CapsuleFrag, 6 | encrypt, generate_kfrags, reencrypt) 7 | from umbral.curve_scalar import CurveScalar 8 | from umbral.curve_point import CurvePoint 9 | from umbral.hashing import Hash, unsafe_hash_to_point 10 | from umbral.dem import DEM, kdf 11 | 12 | 13 | ####################### 14 | # Auxiliary functions # 15 | ####################### 16 | 17 | def hexlify(data): 18 | if isinstance(data, int): 19 | return hex(data)[2:] 20 | else: 21 | return bytes(data).hex() 22 | 23 | 24 | def create_test_vector_file(vector, filename, generate_again=False): 25 | path = os.path.join(os.path.dirname(__file__), filename) 26 | 27 | mode = 'w' if generate_again else 'x' 28 | try: 29 | with open(path, mode) as f: 30 | json.dump(vector, f, indent=2) 31 | except FileExistsError: 32 | pass 33 | 34 | 35 | # If True, this will overwrite existing test vector files with new randomly generated instances 36 | generate_again = True 37 | 38 | ######### 39 | # SETUP # 40 | ######### 41 | 42 | # We create also some Umbral objects for later 43 | delegating_sk = SecretKey.random() 44 | receiving_sk = SecretKey.random() 45 | signing_sk = SecretKey.random() 46 | 47 | verifying_pk = signing_sk.public_key() 48 | delegating_pk = delegating_sk.public_key() 49 | receiving_pk = receiving_sk.public_key() 50 | 51 | kfrags = generate_kfrags(delegating_sk=delegating_sk, 52 | receiving_pk=receiving_pk, 53 | signer=Signer(signing_sk), 54 | threshold=6, 55 | shares=10, 56 | ) 57 | 58 | plain_data = b'peace at dawn' 59 | 60 | capsule, ciphertext = encrypt(delegating_pk, plain_data) 61 | 62 | cfrag = CapsuleFrag.from_bytes(bytes(reencrypt(capsule, kfrags[0]))) 63 | points = [capsule.point_e, cfrag.point_e1, cfrag.proof.point_e2, 64 | capsule.point_v, cfrag.point_v1, cfrag.proof.point_v2, 65 | cfrag.proof.kfrag_commitment, cfrag.proof.kfrag_pok] 66 | 67 | z = cfrag.proof.signature 68 | 69 | 70 | ########################### 71 | # CurveScalar arithmetics 
# 72 | ########################### 73 | 74 | # Let's generate two random CurveScalars 75 | bn1 = CurveScalar.random_nonzero() 76 | bn2 = CurveScalar.random_nonzero() 77 | 78 | # Expected results for some binary operations 79 | expected = [('Addition', bn1 + bn2), 80 | ('Subtraction', bn1 - bn2), 81 | ('Multiplication', bn1 * bn2), 82 | ('Inverse', bn1.invert()), 83 | ] 84 | 85 | expected = [{'operation': op, 'result': hexlify(result)} for (op, result) in expected] 86 | 87 | # Definition of test vector 88 | vector_suite = { 89 | 'name': 'Test vectors for CurveScalar operations', 90 | 'params': 'default', 91 | 'first operand': hexlify(bn1), 92 | 'second operand': hexlify(bn2), 93 | 'vectors': expected 94 | } 95 | 96 | json_file = 'vectors_scalar_operations.json' 97 | 98 | create_test_vector_file(vector_suite, json_file, generate_again=generate_again) 99 | 100 | 101 | 102 | ############################### 103 | # CurveScalar.from_digest() # 104 | ############################### 105 | 106 | # Test vectors for different kinds of inputs (bytes, CurvePoints, CurveScalars, etc.) 107 | inputs = ([b''], 108 | [b'abc'], 109 | [capsule.point_e], 110 | [z], 111 | [capsule.point_e, z], 112 | points, 113 | ) 114 | 115 | vectors = list() 116 | for input_to_hash in inputs: 117 | digest = Hash(b'some_dst') 118 | for input_ in input_to_hash: 119 | digest.update(input_) 120 | scalar = CurveScalar.from_digest(digest) 121 | json_input = [{'class': data.__class__.__name__, 122 | 'bytes': hexlify(data), 123 | } for data in input_to_hash] 124 | 125 | json_input = {'input': json_input, 'output': hexlify(scalar) } 126 | 127 | vectors.append(json_input) 128 | 129 | vector_suite = { 130 | 'name' : 'Test vectors for umbral.curvebn.CurveScalar.from_digest()', 131 | 'params' : 'default', 132 | 'vectors' : vectors 133 | } 134 | 135 | create_test_vector_file(vector_suite, 'vectors_scalar_from_digest.json', generate_again=generate_again) 136 | #print(json.dumps(vector_suite, indent=2)) 137 | 138 | 139 | ############### 140 | # CurvePoints # 141 | ############### 142 | 143 | point1 = CurvePoint.random() 144 | point2 = CurvePoint.random() 145 | 146 | # Expected results for some CurvePoint operations 147 | expected = [('Addition', point1 + point2), 148 | ('Subtraction', point1 - point2), 149 | ('Multiplication', point1 * bn1), 150 | ('Inversion', -point1), 151 | ('To_affine.X', point1.to_affine()[0]), 152 | ('To_affine.Y', point1.to_affine()[1]), 153 | ('kdf', kdf(bytes(point1), DEM.KEY_SIZE)), 154 | ] 155 | 156 | expected = [{'operation': op, 'result': hexlify(result)} for (op, result) in expected] 157 | 158 | # Definition of test vector 159 | vector_suite = { 160 | 'name': 'Test vectors for CurvePoint operations', 161 | 'params': 'default', 162 | 'first CurvePoint operand': hexlify(point1), 163 | 'second CurvePoint operand': hexlify(point2), 164 | 'CurveScalar operand': hexlify(bn1), 165 | 'vectors': expected 166 | } 167 | 168 | json_file = 'vectors_point_operations.json' 169 | 170 | create_test_vector_file(vector_suite, json_file, generate_again=generate_again) 171 | 172 | 173 | ######################## 174 | # unsafe_hash_to_point # 175 | ######################## 176 | 177 | inputs = (b'', 178 | b'abc', 179 | b'NuCypher', 180 | b'Nucypher', 181 | ) 182 | 183 | vectors = list() 184 | for data in inputs: 185 | for dst in inputs: 186 | point = unsafe_hash_to_point(dst=dst, data=data) 187 | json_input = {'data': hexlify(data), 188 | 'dst': hexlify(dst), 189 | 'point': hexlify(point), 190 | } 191 | 192 | 
vectors.append(json_input) 193 | 194 | vector_suite = { 195 | 'name': 'Test vectors for unsafe_hash_to_point()', 196 | 'params': 'default', 197 | 'vectors': vectors 198 | } 199 | 200 | create_test_vector_file(vector_suite, 'vectors_unsafe_hash_to_point.json', generate_again=generate_again) 201 | #print(json.dumps(vector_suite, indent=2)) 202 | 203 | 204 | ########## 205 | # KFrags # 206 | ########## 207 | 208 | vectors = list() 209 | for kfrag in kfrags: 210 | kfrag = KeyFrag.from_bytes(bytes(kfrag)) 211 | kfrag.verify(verifying_pk, delegating_pk, receiving_pk) 212 | 213 | json_input = {'kfrag': hexlify(kfrag)} 214 | 215 | vectors.append(json_input) 216 | 217 | vector_suite = { 218 | 'name': 'Test vectors for KFrags', 219 | 'description': ('This is a collection of KFrags generated under the ' 220 | 'enclosed delegating, verifying and receiving keys. ' 221 | 'Each of them must deserialize correctly and the ' 222 | 'call to verify() must succeed.'), 223 | 'params': 'default', 224 | 'verifying_pk': hexlify(verifying_pk), 225 | 'delegating_pk': hexlify(delegating_pk), 226 | 'receiving_pk': hexlify(receiving_pk), 227 | 'vectors': vectors 228 | } 229 | 230 | #print(json.dumps(vector_suite, indent=2)) 231 | create_test_vector_file(vector_suite, 'vectors_kfrags.json', generate_again=generate_again) 232 | 233 | 234 | ########## 235 | # CFrags # 236 | ########## 237 | 238 | vectors = list() 239 | 240 | for kfrag in kfrags: 241 | cfrag = reencrypt(capsule, kfrag) 242 | json_input = {'kfrag': hexlify(kfrag), 'cfrag': hexlify(cfrag)} 243 | vectors.append(json_input) 244 | 245 | vector_suite = { 246 | 'name': 'Test vectors for CFrags', 247 | 'description': ('This is a collection of CFrags, originated from the ' 248 | 'enclosed Capsule, under the enclosed delegating, ' 249 | 'verifying and receiving keys. Each CFrag must deserialize ' 250 | 'correctly and can be replicated with a call to ' 251 | '`reencrypt(kfrag, capsule)`'), 252 | 'params': 'default', 253 | 'capsule': hexlify(capsule), 254 | 'verifying_pk': hexlify(verifying_pk), 255 | 'delegating_pk': hexlify(delegating_pk), 256 | 'receiving_pk': hexlify(receiving_pk), 257 | 'vectors': vectors 258 | } 259 | 260 | #print(json.dumps(vector_suite, indent=2)) 261 | create_test_vector_file(vector_suite, 'vectors_cfrags.json', generate_again=generate_again) 262 | -------------------------------------------------------------------------------- /vectors/vectors_kfrags.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "Test vectors for KFrags", 3 | "description": "This is a collection of KFrags generated under the enclosed delegating, verifying and receiving keys. 
Each of them must deserialize correctly and the call to verify() must succeed.", 4 | "params": "default", 5 | "verifying_pk": "030b95b3f249297824b32d3391392d62a9aff32e8698fa78c7e8ce4a9d17071f56", 6 | "delegating_pk": "02d67029bb92522059225d190038230c23466e28d132d48f714f9098168a562b8a", 7 | "receiving_pk": "03b0d0243e8954b408047eee3b09b5ed132ccc25ec70e99fc74b6e9f54e5ecf9c7", 8 | "vectors": [ 9 | { 10 | "kfrag": "2565903141941fecb01fb92dfbdd66a0a1d4eb94d3226beb2f7adcbb8282ffd0b7934320225868d8f9ace7784fd3ae0c20a09c5a2b698fae0554d71b1f016670036523005d1234abcd44d36704620cfcff444edac7a64dfc256281966351a7803b026cb94f302809d19aacc81da19f2156db9c610498310b930d7787d8a2366dadd3d977c6befcff3088016d5d9050f4932cb4a9ce483dd7cf27e1f157f8d23f598248d5027a8961a09c874c82a8c479fbf074675790edff37e6927a7d4ea269e600685dc9469c87e6d9d469656b2f40b33e6d6813ec3519fc065a4ffc0eeb8abd324c1914451341cdb7abd7c5aa01adade220b2894be5a33caa9ee7d3b4637bdccf0101" 11 | }, 12 | { 13 | "kfrag": "ae85e679cf44b1877ff36e48c4965144724f17cc7605f53e50007aabbc2c7ab3871d04e6b5666b81a99ba0c6a70ba8675f2122a39eaec04a62c8496d5149ac9d036523005d1234abcd44d36704620cfcff444edac7a64dfc256281966351a7803b02e3e7722bffcb6674987b0089f6b8ac0d6cb220d9661edb5ed9dca5fcd33d5bdb1e41b92c9a8e4346b2dd1072389e4ce36a6230eecb59b7e0e9d84f35bf14f0f23014a1263e17d07a94c5d9c248c692686c745df3c5d11566b9ca17ec62e59a5265749ef2701a58483bd106be985c3e4a03750e3c0c21424cf7f59682224c4f6e29c242904ade5ac377974eaa662b6ed6eed13fd30509b9cb332b4fa89043a2a70101" 14 | }, 15 | { 16 | "kfrag": "0bb747aeafa005ceb7be0d1f7fac93094150c5d1c4a08b7705dfe98d38a901ddc7eb5aad9d622148f2c229f53d46b8a17dab61b37eb19bdd6c062fa81d725c93036523005d1234abcd44d36704620cfcff444edac7a64dfc256281966351a7803b038ab4abc7c62834517b7a296199e43c62da3dfa6c4ecadb4a3aa7aac5f4d9f301cdaacf4115acea35edd9858ee8a04a05cbcf346374ca4a3daa54da89754e018222f869edbd7224f49fd705b53cd11af90b078d77b58b272e1e39a10be6a9966db3d3ba5ba267c360b96cf2aaeb28e87f87f76f7216af4e1594b67f71e8ba195f4c305d98fe469e738caf771745f3865ebf7fd114efc09f8b69383bae8020ae2f0101" 17 | }, 18 | { 19 | "kfrag": "4ace898ce8231afe709556f0816b7c3b18a9aaa50fd6afe4596cc32b5cad26a68634f36bf1374f4a915477f9cdb0b8ae2ad6d4b2fc149c693a2cef1a248452c5036523005d1234abcd44d36704620cfcff444edac7a64dfc256281966351a7803b027c2de1ca923a4f1c7783d42dd0d145efeb1fb8b352168a255ec4661a7ad5532b9653705171ebc24147bb8c9f97500b25f2a3a5304e3d8dfc91d2a1f8dc0d4a5e29044bc078fbe2d8c9b6e500a70af07fbce661f4ecb86cdb2c847b11c136782100962b6d129d9cdb3fa930980c8fe7326fc0897e5a40cdf01f2984fa017c19fb399a0157286cd368653d27522d8775557009377051c9b56b69b014d051ae8b1a0101" 20 | }, 21 | { 22 | "kfrag": "ccc346110f4ab3fa3b0aeca1630e077537ac5e2c74f31a6d09de1a74e5db4f044a1e816cd104fdfabc92bbb40c6f9361071260c13a14b866097749062af79d83036523005d1234abcd44d36704620cfcff444edac7a64dfc256281966351a7803b03c18e0774891c9d7bb8502e35a2dc2aabeb461f4f0cf43a78a75786869007d60895a31ea32286ff5516b3510559e1e7e3bbbebcedd1c7540b002764e6901816244a073e19a1d9c5e460fbec5dbd33717e9958c89f6adab0484cbd40d1163b8ea71a7208d0de602d4d12c024c0974568fc2aaf23aafb1405157cbcf97738fc9dc0076b9f77d53e20bab18afba1ef40232b55335dbf33ccc365df277fe1a8ecaa530101" 23 | }, 24 | { 25 | "kfrag": 
"3166d84565cc545cd994957f2aee5009edf3611b5b348e6d39eb27c02198f3cf722cd82f01fd45953cc999741fedf34ea363a229b4414340dbacd0a3fbefb13c036523005d1234abcd44d36704620cfcff444edac7a64dfc256281966351a7803b02e307917315eab38d97ba09a5fda8c278b4678ad9c8486b17b79739fdd2c9b088ff59ee77f0f2a5455e4d31439fe3652a284839c2c0fd1e989b5f80179501873b13b84ac39b7f9f0de069eedfa3c79d4781dfcceba1c344e0f003f4053ba938465b0e56d5066adea34ff1365e77358dc2a44a319d9a0a7edc283d05790cf9a8152bfab400aefd27bd41374b761de99974a94c4d1076d78269cadb4011cb06b1180101" 26 | }, 27 | { 28 | "kfrag": "71c8673926b3df970da991a3d53cd6cff27963ac7d88ad2921e84ae47e8f3c930112609fb5f87ef70ebdbdec2d9cc58fd1f171bea4b93fb4a57de058666fdcc5036523005d1234abcd44d36704620cfcff444edac7a64dfc256281966351a7803b033c83e00f04744ed4ac3a6cadc64568dfe526e579f56f5c26e5749a0fe795b0339700c611b793210edfdc823f5332ed4640f6cf09e419336f3e13b71b29de065f7cd643768ebea6137a612bfb8328cb0868ac6e001e9bf8270817446b15027a1d6e6b066214a61c379b54d71b608774172181d09ff6b14ab43dae3ad68fb18abe2fad7b818fe6bee1c27158d5bc3c85c114611c43bfe52e4f6969b9a53c7db1240101" 29 | }, 30 | { 31 | "kfrag": "4e27b679bb415c4fb50a84e342b7a6405c03b015199f8178d825be10c304976bcb91e3038cf7f69cf437fab1a1d55a7ae61710248d1cd45ce31055492c9f9d2c036523005d1234abcd44d36704620cfcff444edac7a64dfc256281966351a7803b024f2493f41095fec4b28b7dc3d9a91ce656ac98f23f35e4ff0e87d27e422bb445a01e102936f9c672c72adf3dd9a659875b93a8a4fa5ce35dc27fae52e3b7274c110a3cf0dfc988a3dfeda1621831686c00c7c164747a484f3f254ffd05a453f7b84772259002ac4555054edfc31a58a597ef47e8dcbca0860acc2668d646d5411ff0e2b9f5bfe651197a66927dbd57a973d7b18c1e1ffc0ad29c6ccb9c07698e0101" 32 | }, 33 | { 34 | "kfrag": "7a359c5e31c6bf94bfc09eaa7c199ccda576fdda37d74360d19ddf71930e9423ffb95215a64396ac14f860092c15a7079d58c725484016226a85967c55166602036523005d1234abcd44d36704620cfcff444edac7a64dfc256281966351a7803b02417fb0a631dc12df3aa4a94c5f838bb2a08aee9c98efae3d5dec6f10d8addd579a3d0eedb2d7826fb6bb8a9fe9b17696ecffd03070afec2f12a315d1dc344470300b9d4641d4cb30760463639a8949444fe5fc08a44d4f36e15640d7945020e6f372b8fce1db831621f1dc7049c5d63140c95dd9ed48de402d09e0eb9a96afbd4a8e539d507cb863bbd5b4cf939271809e4850bbcfb5ca1ff3fef7c377f433490101" 35 | }, 36 | { 37 | "kfrag": "df55683a69e4136205a61c895959d86eb64840bb2175d5d15e4491b27e6a38bc80dc89e8f1631175607e6c67a34a758020c74693337cf3714a6cf78d355da17a036523005d1234abcd44d36704620cfcff444edac7a64dfc256281966351a7803b03ab6ffe8768ecd246956362065ca48488183f96c3ee5957be7a7ce02f3ef9cb12bab3cf7756f986806106919c948a4b191bbf84df94503a2b0a81a5141cb9e1e17de0ee2599d0bbafe972bd9e567eece93c262fc634ed2690a1914abda632fac2ae3cc293a58554a55dbc50c0e111c6e9bb0208631833da5e66b1adb2aa1ecdd615c09098fba7af4b03763b846e1f5358a6bac27c36eadd6cd2c98fd58c277e0b0101" 38 | } 39 | ] 40 | } -------------------------------------------------------------------------------- /vectors/vectors_point_operations.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "Test vectors for CurvePoint operations", 3 | "params": "default", 4 | "first CurvePoint operand": "03945c237f3b4cab9638d89bba3a098d37a8f4c981788969c2846e988748315bfd", 5 | "second CurvePoint operand": "03d495c93def84d2b7c709d5896d64339129a28db5a6a2cd4c686c895da538228e", 6 | "CurveScalar operand": "0a686fbc44fdc4713b7901c88fe416cc9b187501e671a4c70781742f3bcef1ec", 7 | "vectors": [ 8 | { 9 | "operation": "Addition", 10 | "result": "02338b0a976b3701353558685966d2b166174dbce6d697d2e50069833a06157bf9" 11 | }, 12 | { 13 | "operation": "Subtraction", 14 | "result": 
"02864a1ee4df38dffb51940819fca1a89624521ed14b8ec873e4b701fa6d8c1f5b" 15 | }, 16 | { 17 | "operation": "Multiplication", 18 | "result": "03e7882ab867f7006915d16738ef9adebc2a1946fea829478e7e444e6113ecf1f1" 19 | }, 20 | { 21 | "operation": "Inversion", 22 | "result": "02945c237f3b4cab9638d89bba3a098d37a8f4c981788969c2846e988748315bfd" 23 | }, 24 | { 25 | "operation": "To_affine.X", 26 | "result": "945c237f3b4cab9638d89bba3a098d37a8f4c981788969c2846e988748315bfd" 27 | }, 28 | { 29 | "operation": "To_affine.Y", 30 | "result": "fd0576e382f8f0ce0849c72789c3bd2fe2ee453efc606ff8815108e734e088ef" 31 | }, 32 | { 33 | "operation": "kdf", 34 | "result": "26f9fa1e3d2bd1fe3b14ea0a6e1276a214b1f5c59ca01cb3b74a7cb74e327f1c" 35 | } 36 | ] 37 | } -------------------------------------------------------------------------------- /vectors/vectors_scalar_from_digest.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "Test vectors for umbral.curvebn.CurveScalar.from_digest()", 3 | "params": "default", 4 | "vectors": [ 5 | { 6 | "input": [ 7 | { 8 | "class": "bytes", 9 | "bytes": "" 10 | } 11 | ], 12 | "output": "42184a0ea1e39037cad1ed7f3bb0cd8b7fe978e6d8b94f965e47d582cbdb8208" 13 | }, 14 | { 15 | "input": [ 16 | { 17 | "class": "bytes", 18 | "bytes": "616263" 19 | } 20 | ], 21 | "output": "02e2c58350c30e80f9deea1ae19e21a0baa7761f4448c792f205b8e9b7ac1ab3" 22 | }, 23 | { 24 | "input": [ 25 | { 26 | "class": "CurvePoint", 27 | "bytes": "02558f1de19a58e73a94e8fbbc6d3b1de2d312d90746ea74cb29f046943bf57871" 28 | } 29 | ], 30 | "output": "5dfe037b1041c4f4d89bed6305061d5d0f7f996f51cab49af958c9e635c47792" 31 | }, 32 | { 33 | "input": [ 34 | { 35 | "class": "CurveScalar", 36 | "bytes": "7a44f4f0e25258ed69f205b3770070c557d288c69a3cc453b2a42205d8c1c196" 37 | } 38 | ], 39 | "output": "93134c97fd051748346bf8ee6dc9f3dd920effc2faa81f7c243d0565b10ebe5e" 40 | }, 41 | { 42 | "input": [ 43 | { 44 | "class": "CurvePoint", 45 | "bytes": "02558f1de19a58e73a94e8fbbc6d3b1de2d312d90746ea74cb29f046943bf57871" 46 | }, 47 | { 48 | "class": "CurveScalar", 49 | "bytes": "7a44f4f0e25258ed69f205b3770070c557d288c69a3cc453b2a42205d8c1c196" 50 | } 51 | ], 52 | "output": "3cd3272ccea4e738abe18c100656a6ed2ba30e6e5723c1205641c8ba72aef03c" 53 | }, 54 | { 55 | "input": [ 56 | { 57 | "class": "CurvePoint", 58 | "bytes": "02558f1de19a58e73a94e8fbbc6d3b1de2d312d90746ea74cb29f046943bf57871" 59 | }, 60 | { 61 | "class": "CurvePoint", 62 | "bytes": "025e6a08eb9376adfea3a92e05fea213c493fe051461fdf5639a7108e8687eeacf" 63 | }, 64 | { 65 | "class": "CurvePoint", 66 | "bytes": "030d7ea7752848f5af2aa01bde8b45e180089fc7cdbc60b59235207a6527773d73" 67 | }, 68 | { 69 | "class": "CurvePoint", 70 | "bytes": "02906780e9484aec2102a01a157f10ced5aec952cd00631d94d5ea2edfa9b68083" 71 | }, 72 | { 73 | "class": "CurvePoint", 74 | "bytes": "03fcfdb46bf83a68e0d674e7d5b7c0365c8fa05dd418f2ba1a4aea2abcbcd12a19" 75 | }, 76 | { 77 | "class": "CurvePoint", 78 | "bytes": "02d45ec4ea9bf9d0acfba0422c6d4cfb087bd2f0084127eb90debdd94e391927f7" 79 | }, 80 | { 81 | "class": "CurvePoint", 82 | "bytes": "026cb94f302809d19aacc81da19f2156db9c610498310b930d7787d8a2366dadd3" 83 | }, 84 | { 85 | "class": "CurvePoint", 86 | "bytes": "03711b30de53e38ba240d34e796e09b8eabe11c385a02a6f87eb8512e1c3fff690" 87 | } 88 | ], 89 | "output": "fb39d851bd5f661406a4f2101b18023aadba39b966f605402179d95c07696f16" 90 | } 91 | ] 92 | } -------------------------------------------------------------------------------- /vectors/vectors_scalar_operations.json: 
-------------------------------------------------------------------------------- 1 | { 2 | "name": "Test vectors for CurveScalar operations", 3 | "params": "default", 4 | "first operand": "0a686fbc44fdc4713b7901c88fe416cc9b187501e671a4c70781742f3bcef1ec", 5 | "second operand": "6af63df254cc9dd5728e098ffe9ad3cc1e0252b2e3562522907cab1a41656895", 6 | "vectors": [ 7 | { 8 | "operation": "Addition", 9 | "result": "755eadae99ca6246ae070b588e7eea98b91ac7b4c9c7c9e997fe1f497d345a81" 10 | }, 11 | { 12 | "operation": "Subtraction", 13 | "result": "9f7231c9f031269bc8eaf838914942ff37c4ff35b2641fe036d727a1ca9fca98" 14 | }, 15 | { 16 | "operation": "Multiplication", 17 | "result": "a0bc896003f0e4feca2176f978b2cfa99ca73af19bf38782064bc137d9f00169" 18 | }, 19 | { 20 | "operation": "Inverse", 21 | "result": "e84f604508a66bfa6df07529238588040aee19b38c68330c031715f478426873" 22 | } 23 | ] 24 | } -------------------------------------------------------------------------------- /vectors/vectors_unsafe_hash_to_point.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "Test vectors for unsafe_hash_to_point()", 3 | "params": "default", 4 | "vectors": [ 5 | { 6 | "data": "", 7 | "dst": "", 8 | "point": "0215ec7bf0b50732b49f8228e07d24365338f9e3ab994b00af08e5a3bffe55fd8b" 9 | }, 10 | { 11 | "data": "", 12 | "dst": "616263", 13 | "point": "0297427e8f434c897d66d7ad40b51e0a11f8bdfed31e724ca4ac86c14fb5e2668f" 14 | }, 15 | { 16 | "data": "", 17 | "dst": "4e75437970686572", 18 | "point": "025aa967c88c73854f4f4d9286a7ce4292898dfa42c7593b75cf823cd1b6dada96" 19 | }, 20 | { 21 | "data": "", 22 | "dst": "4e75637970686572", 23 | "point": "02f3fcae4fb3596fbb34feaf3e2fff938b177b55d89c66f728e51fef220d9b702f" 24 | }, 25 | { 26 | "data": "616263", 27 | "dst": "", 28 | "point": "0204c19746f60b6c4abbce9dbe31f2e0df9b22d8130cc0844cbf67db154d944db3" 29 | }, 30 | { 31 | "data": "616263", 32 | "dst": "616263", 33 | "point": "024c70ca862edba77a8265ee46e0137729826a79721855888bf7791feea42b9990" 34 | }, 35 | { 36 | "data": "616263", 37 | "dst": "4e75437970686572", 38 | "point": "02357334755ceedaef03cb81b6dbbebd8399e0cf40a122a586069ae241e34fc869" 39 | }, 40 | { 41 | "data": "616263", 42 | "dst": "4e75637970686572", 43 | "point": "0256ec5dbf81d55fbdad3c2095177982a068bb0043dd2cf2834cc6a53e538157bf" 44 | }, 45 | { 46 | "data": "4e75437970686572", 47 | "dst": "", 48 | "point": "02b0cd14ef08638d57804c768d3b0a171461268f6faede586751f2919bdd7490b6" 49 | }, 50 | { 51 | "data": "4e75437970686572", 52 | "dst": "616263", 53 | "point": "02a1c3e1c00f45a059fcf7749e31c5206388aa72bcc7c10195907e9c70c2a0a700" 54 | }, 55 | { 56 | "data": "4e75437970686572", 57 | "dst": "4e75437970686572", 58 | "point": "02100c656eed3ed2e175e5430bbd644ac86f24fa69fc1c5b3fd65ece562b480764" 59 | }, 60 | { 61 | "data": "4e75437970686572", 62 | "dst": "4e75637970686572", 63 | "point": "0227a46bc66817fcaa803535a1c109674d300de5df0d8d11f6588325cf6cedf2b1" 64 | }, 65 | { 66 | "data": "4e75637970686572", 67 | "dst": "", 68 | "point": "02483315691815818fa1f1804406fc4246940cc8cb39405401e2aa5fd8d94bfa64" 69 | }, 70 | { 71 | "data": "4e75637970686572", 72 | "dst": "616263", 73 | "point": "02dc01829e4725f8cacf6990c12ab0a5f837770b21e41bdd9964bb0f1ad52fcc31" 74 | }, 75 | { 76 | "data": "4e75637970686572", 77 | "dst": "4e75437970686572", 78 | "point": "02b6653e2ed79579380104598cf83fc2b119dd8b91afae2a2a8077ffdca0b212ad" 79 | }, 80 | { 81 | "data": "4e75637970686572", 82 | "dst": "4e75637970686572", 83 | "point": 
"02192de02d9c15a52d90ef7192794a2fc925c09f7dcdb4b584b8c7fab33bbda1df" 84 | } 85 | ] 86 | } --------------------------------------------------------------------------------