├── .cargo └── audit.toml ├── .github ├── scripts │ └── cargo.sh └── workflows │ ├── benches.yml │ └── ci.yml ├── .gitignore ├── .pre-commit-config.yaml ├── CHANGELOG.md ├── Cargo.toml ├── LICENSE.md ├── README.md ├── benches ├── BENCHMARKS_classic.md ├── BENCHMARKS_hybridized.md ├── benches.rs └── generate.sh ├── bib └── CoverCrypt.pdf ├── deny.toml ├── examples ├── decrypt.rs ├── encrypt.rs └── runme.rs └── src ├── abe_policy ├── access_policy.rs ├── access_structure.rs ├── attribute.rs ├── dimension.rs ├── mod.rs ├── rights.rs └── tests.rs ├── ae.rs ├── api.rs ├── core ├── kem.rs ├── kem │ └── mlkem.rs ├── mod.rs ├── nike.rs ├── nike │ ├── p256.rs │ └── r25519.rs ├── primitives.rs ├── serialization │ └── mod.rs └── tests.rs ├── data_struct ├── README.md ├── dictionary.rs ├── error.rs ├── mod.rs ├── revision_map.rs └── revision_vec.rs ├── encrypted_header.rs ├── error.rs ├── lib.rs ├── test_utils ├── mod.rs ├── non_regression.rs └── tests_data │ ├── legacy_policy.json │ ├── non_regression_vector.json │ ├── policy_v1.json │ └── policy_v2.json └── traits.rs /.cargo/audit.toml: -------------------------------------------------------------------------------- 1 | [advisories] 2 | # Waiting for https://github.com/Argyle-Software/kyber/pull/110 3 | ignore = [ 4 | "RUSTSEC-2023-0079", # pqc-kyber 5 | ] 6 | -------------------------------------------------------------------------------- /.github/scripts/cargo.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -ex 4 | 5 | cargo fmt --check 6 | 7 | cargo clippy --no-deps --all-targets -- -D warnings 8 | 9 | cargo install --locked cargo-deny && cargo deny check 10 | 11 | cargo clippy --no-deps --all-targets --no-default-features --features curve25519,mlkem-768 -- -D warnings 12 | cargo clippy --no-deps --all-targets --no-default-features --features p-256,mlkem-512 -- -D warnings 13 | cargo clippy --no-deps --all-targets --no-default-features --features p-256,mlkem-768 -- -D warnings 14 | 15 | cargo test 16 | cargo test --no-default-features --features curve25519,mlkem-768 17 | cargo test --no-default-features --features p-256,mlkem-512 18 | cargo test --no-default-features --features p-256,mlkem-768 19 | -------------------------------------------------------------------------------- /.github/workflows/benches.yml: -------------------------------------------------------------------------------- 1 | --- 2 | name: Benches on demand 3 | 4 | # on: push 5 | on: workflow_dispatch 6 | 7 | jobs: 8 | bench-classic: 9 | uses: Cosmian/reusable_workflows/.github/workflows/cargo-bench.yml@develop 10 | with: 11 | toolchain: stable 12 | features: test-utils 13 | force: true 14 | -------------------------------------------------------------------------------- /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | --- 2 | name: CI checks 3 | 4 | on: push 5 | 6 | jobs: 7 | cargo-test: 8 | runs-on: ubuntu-latest 9 | steps: 10 | - uses: actions/checkout@v1 11 | - uses: dtolnay/rust-toolchain@master 12 | with: 13 | toolchain: stable 14 | components: rustfmt, clippy 15 | - name: Run test script 16 | run: | 17 | bash .github/scripts/cargo.sh 18 | cargo-publish: 19 | needs: 20 | - cargo-test 21 | uses: Cosmian/reusable_workflows/.github/workflows/cargo-publish.yml@develop 22 | if: startsWith(github.ref, 'refs/tags/') 23 | with: 24 | toolchain: stable 25 | secrets: inherit 26 | cleanup: 27 | needs: 28 | - cargo-test 29 | uses: 
Cosmian/reusable_workflows/.github/workflows/cleanup_cache.yml@develop 30 | secrets: inherit 31 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | /target* 2 | .cargo_check 3 | *nix* 4 | /*.sh 5 | /.vscode 6 | Cargo.lock 7 | **/.#* 8 | **/#*# 9 | **/*~ 10 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | # Pre-requisites: 3 | # pip install pre-commit conventional-pre-commit 4 | # pre-commit install 5 | # pre-commit install --install-hooks -t commit-msg 6 | # pre-commit autoupdate 7 | 8 | # See https://pre-commit.com for more information 9 | # See https://pre-commit.com/hooks.html for more hooks 10 | exclude: datasets|tests_data 11 | repos: 12 | - repo: https://github.com/compilerla/conventional-pre-commit 13 | rev: v2.1.1 14 | hooks: 15 | - id: conventional-pre-commit 16 | stages: [commit-msg] 17 | args: [] # optional: list of Conventional Commits types to allow e.g. [feat, fix, ci, chore, test] 18 | 19 | - repo: https://github.com/pre-commit/mirrors-prettier 20 | rev: v3.0.0-alpha.4 21 | hooks: 22 | - id: prettier 23 | stages: [commit] 24 | exclude_types: 25 | - yaml 26 | - markdown 27 | 28 | - repo: https://github.com/igorshubovych/markdownlint-cli 29 | rev: v0.33.0 30 | hooks: 31 | - id: markdownlint-fix 32 | args: [--disable=MD004, --disable=MD013, --disable=MD024, --disable=MD041] 33 | 34 | - repo: https://github.com/Lucas-C/pre-commit-hooks-nodejs 35 | rev: v1.1.2 36 | hooks: 37 | - id: htmlhint 38 | - id: dockerfile_lint 39 | - id: markdown-toc 40 | 41 | - repo: https://github.com/jumanjihouse/pre-commit-hook-yamlfmt 42 | rev: 0.2.2 43 | hooks: 44 | - id: yamlfmt 45 | args: [--mapping, '2', --sequence, '4', --offset, '2'] 46 | 47 | - repo: https://github.com/crate-ci/typos 48 | rev: typos-dict-v0.9.14 49 | hooks: 50 | - id: typos 51 | 52 | - repo: https://github.com/Lucas-C/pre-commit-hooks 53 | rev: v1.4.1 54 | hooks: 55 | - id: forbid-crlf 56 | - id: remove-crlf 57 | - id: forbid-tabs 58 | exclude: ^.git/ 59 | - id: remove-tabs 60 | exclude: ^.git/ 61 | 62 | - repo: https://github.com/sirosen/texthooks 63 | rev: 0.4.0 64 | hooks: 65 | - id: fix-smartquotes 66 | - id: fix-ligatures 67 | - id: fix-spaces 68 | - id: forbid-bidi-controls 69 | - id: macro-expand 70 | 71 | - repo: https://github.com/jumanjihouse/pre-commit-hooks 72 | rev: 3.0.0 73 | hooks: 74 | - id: git-check 75 | - id: shellcheck 76 | 77 | - repo: https://github.com/doublify/pre-commit-rust 78 | rev: v1.0 79 | hooks: 80 | - id: cargo-check 81 | 82 | - repo: https://github.com/Cosmian/git-hooks.git 83 | rev: v1.0.36 84 | hooks: 85 | - id: stable-cargo-format 86 | # - id: dprint-toml-fix 87 | # - id: cargo-upgrade 88 | # - id: cargo-update 89 | - id: cargo-machete 90 | - id: cargo-test 91 | - id: cargo-test-doc 92 | - id: clippy-autofix-all-targets 93 | - id: clippy-all-targets 94 | - id: stable-cargo-format 95 | - id: cargo-dry-publish 96 | args: [--allow-dirty] 97 | 98 | - repo: https://github.com/pre-commit/pre-commit-hooks 99 | rev: v4.4.0 100 | hooks: 101 | - id: check-added-large-files 102 | - id: check-ast 103 | - id: check-byte-order-marker 104 | - id: check-builtin-literals 105 | - id: check-case-conflict 106 | - id: check-docstring-first 107 | - id: check-json 108 | - id: check-merge-conflict 109 | - id: check-symlinks 110 | - id: 
check-toml 111 | - id: check-vcs-permalinks 112 | - id: check-xml 113 | - id: check-yaml 114 | - id: debug-statements 115 | - id: destroyed-symlinks 116 | - id: detect-private-key 117 | - id: double-quote-string-fixer 118 | - id: end-of-file-fixer 119 | - id: file-contents-sorter 120 | - id: fix-byte-order-marker 121 | - id: fix-encoding-pragma 122 | - id: mixed-line-ending 123 | args: [--fix=lf] 124 | - id: name-tests-test 125 | - id: requirements-txt-fixer 126 | - id: sort-simple-yaml 127 | - id: trailing-whitespace 128 | 129 | - repo: https://github.com/psf/black 130 | rev: 22.12.0 131 | hooks: 132 | - id: black 133 | # avoid clash with `double-quote-string-fixer` 134 | args: [--skip-string-normalization] 135 | -------------------------------------------------------------------------------- /Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "cosmian_cover_crypt" 3 | version = "15.0.0" 4 | authors = [ 5 | "Théophile Brezot ", 6 | "Bruno Grieder ", 7 | "Hugo Rosenkranz-Costa ", 8 | "Emmanuel COSTE ", 9 | ] 10 | documentation = "https://docs.rs/cosmian_cover_crypt/" 11 | edition = "2021" 12 | license = "BUSL-1.1" 13 | repository = "https://github.com/Cosmian/cover_crypt" 14 | description = "Key Policy attribute encryption based on subset cover" 15 | 16 | [lib] 17 | crate-type = ["lib", "cdylib", "staticlib"] 18 | name = "cosmian_cover_crypt" 19 | 20 | # The cdylib is only interesting if the `--features ffi` flag is set on build 21 | # This does not seem to be actionable conditionally https://github.com/rust-lang/cargo/issues/4881 22 | 23 | [[bench]] 24 | name = "benches" 25 | harness = false 26 | required-features = ["test-utils"] 27 | 28 | [[example]] 29 | name = "encrypt" 30 | required-features = ["test-utils"] 31 | 32 | [[example]] 33 | name = "decrypt" 34 | required-features = ["test-utils"] 35 | 36 | [[example]] 37 | name = "runme" 38 | required-features = ["test-utils"] 39 | 40 | [features] 41 | default = ["mlkem-512", "curve25519"] 42 | mlkem-512 = [] 43 | mlkem-768 = [] 44 | p-256 = ["elliptic-curve", "p256", "subtle"] 45 | curve25519 = ["cosmian_crypto_core/curve25519"] 46 | test-utils = [] 47 | 48 | [dependencies] 49 | cosmian_crypto_core = { version = "10.0.1", default-features = false, features = [ 50 | "ser", 51 | "sha3", 52 | "aes", 53 | ] } 54 | elliptic-curve = { version = "0.13.8", optional = true } 55 | ml-kem = { version = "0.2.1", features = ["zeroize"] } 56 | p256 = { version = "0.13.2", optional = true } 57 | serde = { version = "1.0", features = ["derive"] } 58 | serde_json = { version = "1.0", features = ["preserve_order"] } 59 | subtle = { version = "2.6.1", optional = true } 60 | tiny-keccak = { version = "2.0.2", features = ["kmac", "sha3"] } 61 | zeroize = "1.6.0" 62 | 63 | [dev-dependencies] 64 | base64 = { version = "0.21.0" } 65 | criterion = { version = "0.5", features = [ 66 | "html_reports", 67 | ], default-features = false } 68 | -------------------------------------------------------------------------------- /LICENSE.md: -------------------------------------------------------------------------------- 1 | License text copyright (c) 2020 MariaDB Corporation Ab, All Rights Reserved. 2 | "Business Source License" is a trademark of MariaDB Corporation Ab. 3 | 4 | Parameters 5 | 6 | Licensor: Cosmian Tech SAS. 7 | Licensed Work: Cosmian KMS version 4.11.3 or later. 8 | The Licensed Work is (c) 2024 Cosmian Tech SAS. 
9 | Additional Use Grant: You may use the Licensed Work in production, provided 10 | your total use of does not exceed a total of 4 vCPUS on virtual 11 | machines or 2 physical cores on bare metal machines. This use 12 | does not include offering the Licensed Work to third parties. 13 | Change Date: Four years from the date the Licensed Work is published. 14 | Change License: MPL 2.0 15 | 16 | For information about alternative licensing arrangements for the Licensed Work, 17 | please contact contact@cosmian.com. 18 | 19 | Notice 20 | 21 | Business Source License 1.1 22 | 23 | Terms 24 | 25 | The Licensor hereby grants you the right to copy, modify, create derivative 26 | works, redistribute, and make non-production use of the Licensed Work. The 27 | Licensor may make an Additional Use Grant, above, permitting limited production use. 28 | 29 | Effective on the Change Date, or the fourth anniversary of the first publicly 30 | available distribution of a specific version of the Licensed Work under this 31 | License, whichever comes first, the Licensor hereby grants you rights under 32 | the terms of the Change License, and the rights granted in the paragraph 33 | above terminate. 34 | 35 | If your use of the Licensed Work does not comply with the requirements 36 | currently in effect as described in this License, you must purchase a 37 | commercial license from the Licensor, its affiliated entities, or authorized 38 | resellers, or you must refrain from using the Licensed Work. 39 | 40 | All copies of the original and modified Licensed Work, and derivative works 41 | of the Licensed Work, are subject to this License. This License applies 42 | separately for each version of the Licensed Work and the Change Date may vary 43 | for each version of the Licensed Work released by Licensor. 44 | 45 | You must conspicuously display this License on each original or modified copy 46 | of the Licensed Work. If you receive the Licensed Work in original or 47 | modified form from a third party, the terms and conditions set forth in this 48 | License apply to your use of that work. 49 | 50 | Any use of the Licensed Work in violation of this License will automatically 51 | terminate your rights under this License for the current and all other 52 | versions of the Licensed Work. 53 | 54 | This License does not grant you any right in any trademark or logo of 55 | Licensor or its affiliates (provided that you may use a trademark or logo of 56 | Licensor as expressly required by this License). 57 | 58 | TO THE EXTENT PERMITTED BY APPLICABLE LAW, THE LICENSED WORK IS PROVIDED ON 59 | AN "AS IS" BASIS. LICENSOR HEREBY DISCLAIMS ALL WARRANTIES AND CONDITIONS, 60 | EXPRESS OR IMPLIED, INCLUDING (WITHOUT LIMITATION) WARRANTIES OF 61 | MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NON-INFRINGEMENT, AND 62 | TITLE. 63 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Covercrypt 2 | 3 | ![Build status](https://github.com/Cosmian/cover_crypt/actions/workflows/ci.yml/badge.svg) 4 | ![latest version](https://img.shields.io/crates/v/cosmian_cover_crypt.svg) 5 | 6 | Implementation of the [CoverCrypt](bib/CoverCrypt.pdf) algorithm which allows 7 | creating ciphertexts for a set of attributes and issuing user keys with access 8 | policies over these attributes. 
9 | 10 | 11 | 12 | - [Getting started](#getting-started) 13 | - [Building and testing](#building-and-testing) 14 | - [Features](#features) 15 | * [Key generation](#key-generation) 16 | * [Policies and coordinates](#policies-and-coordinates) 17 | * [Serialization](#serialization) 18 | * [Symmetric key encapsulation](#symmetric-key-encapsulation) 19 | * [Secret key decapsulation](#secret-key-decapsulation) 20 | - [Benchmarks](#benchmarks) 21 | - [Documentation](#documentation) 22 | 23 | 24 | 25 | ## Getting started 26 | 27 | See [`examples/runme.rs`](./examples/runme.rs) for a code sample that 28 | introduces the main CoverCrypt functionalities. It can be run using 29 | `cargo run --example runme`. 30 | 31 | ## Building and testing 32 | 33 | To build the core only, run: 34 | 35 | ```bash 36 | cargo build --release 37 | ``` 38 | 39 | To build everything: 40 | 41 | ```bash 42 | cargo build --release --all-features 43 | ``` 44 | 45 | The code contains numerous tests that you can run using: 46 | 47 | ```bash 48 | cargo test --release --all-features 49 | ``` 50 | 51 | Benchmarks can be run using (one can pass any feature flag): 52 | 53 | ```bash 54 | bash ./benches/generate.sh 55 | ``` 56 | 57 | ## Features 58 | 59 | In CoverCrypt, messages are encrypted using a symmetric scheme. The right 60 | management is performed by a novel asymmetric scheme used to encapsulate a 61 | symmetric key for a set of attributes. This encapsulation is stored in an 62 | object called encrypted header, along with the symmetric ciphertext. 63 | 64 | This design brings several advantages: 65 | 66 | - the central authority has a unique key to protect (the master secret key); 67 | - encapsulation can be performed without the need to store any sensitive 68 | information (public cryptography); 69 | - encryption is as fast as symmetric schemes can be. 70 | 71 | CoverCrypt encryption is post-quantum secure (with a post-quantum security 72 | level of 128 bits): 73 | 74 | - all encapsulations can be hybridized using INDCPA-KYBER, the INDCPA (a 75 | security level) version of the NIST standard for the post-quantum KEM, 76 | [Kyber](https://ieeexplore.ieee.org/stamp/stamp.jsp?tp=&arnumber=8406610); 77 | the formal proof of the security can be found in the [CoverCrypt 78 | paper](#documentation). 79 | - the actual data is encrypted using AES-GCM with a 256-bit key. 80 | 81 | The CoverCrypt scheme also ensures that: 82 | 83 | - user secret keys are unique; 84 | - user secret keys are traceable (under some assumption, cf 85 | [CoverCrypt paper](#documentation)). 86 | 87 | ### Key generation 88 | 89 | Asymmetric keys must be generated beforehand. This is the role of a central 90 | authority, which is in charge of: 91 | 92 | - generating and updating the master keys according to the right policy; 93 | - generate and update user secret keys according to its rights. 94 | 95 | The CoverCrypt API exposes 4 functions: 96 | 97 | - `CoverCrypt::generate_master_keys`: generate master keys 98 | - `CoverCrypt::update_master_keys`: update the master keys 99 | - `CoverCrypt::generate_user_secret_key`: create a user secret key 100 | - `CoverCrypt::refresh_user_secret_key`: update a user secret key 101 | 102 | The key generations may be long if the policy contains many rights or if there 103 | are many users. But this is usually run once at setup. Key update and refresh 104 | stay fast if the changes are small. 105 | 106 | ### Policies and coordinates 107 | 108 | CoverCrypt is an attribute-based encryption algorithm. 
This means that an 109 | encrypted header produced for the attributes `France` and `Top Secret` can only 110 | be decrypted by the user holding a key corresponding to these attributes. 111 | 112 | In order to transform this high-level view into encapsulations, the following 113 | objects are defined: 114 | 115 | - **policy**: defines all possible rights; a policy is built from a set of 116 | axes which are composed of sets of attributes. 117 | - **encryption policy**: subset of the policy used to encrypt; an encryption 118 | policy is expressed as a boolean expression of attributes. 119 | - **user policy**: subset of the policy for which a user key enables 120 | decryption; a user policy is expressed as a boolean expression of attributes. 121 | - **coordinate**: combination of one attribute from each policy axis. 122 | 123 | When generating the master keys, the global policy is converted into the set of 124 | all possible coordinates and a keypair is generated for each one of these 125 | coordinates. The master public key holds all the public key of all these 126 | keypairs and the master secret key holds the secret key of all these keypairs. 127 | 128 | When encrypting for a given encryption policy, this policy is converted into a 129 | set of coordinates. Then, one key encapsulation is generated per coordinate using 130 | the corresponding public sub-key in the master public key. 131 | 132 | Similarly, when generating a user secret key for a given user policy, this 133 | policy is converted into the set of corresponding coordinates and the user 134 | receives the secret sub-key associated to each coordinates. 135 | 136 | **Example**: the following policy is composed of two axes. The `Security` axis 137 | composed of three attributes and the `Country` axis composed of 4 attributes. 138 | 139 | ```txt 140 | Policy: { 141 | Security: { // <- first axis 142 | None, 143 | Medium, 144 | High 145 | }, 146 | Country: { // <- second axis 147 | France, 148 | Germany, 149 | UK, 150 | Spain 151 | } 152 | } 153 | ``` 154 | 155 | The encryption policy `Security::Medium && ( Country::France || 156 | Country::Spain)` would be converted into two coordinates. The encryption policy 157 | `Security::High` would be expanded into `Security::High && (Country::France || 158 | ... || Country::Spain)` then converted into 4 coordinates. 
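The snippet below is a minimal sketch of this mechanism, adapted from `benches/benches.rs` and `examples/runme.rs`. It relies on the example access structure produced by the `cc_keygen` helper (dimensions `SEC`, `CTR` and `DPT`), which is only available with the `test-utils` feature; the attribute names therefore differ from the `Security`/`Country` example above.

```rust
use cosmian_cover_crypt::{api::Covercrypt, cc_keygen, traits::KemAc, AccessPolicy};

fn main() {
    // Instantiate the scheme and generate master keys over the example
    // access structure shipped with the test utilities.
    let cc = Covercrypt::default();
    let (mut msk, mpk) = cc_keygen(&cc, true).unwrap();

    // This encryption policy covers two coordinates, so the resulting
    // encapsulation contains two key encapsulations.
    let eap = AccessPolicy::parse("SEC::TOP && (DPT::MKG || DPT::FIN)").unwrap();
    let (key, enc) = cc.encaps(&mpk, &eap).unwrap();
    assert_eq!(enc.count(), 2);

    // A user key whose coordinates intersect those of the encapsulation
    // recovers the symmetric key.
    let uap = AccessPolicy::parse("SEC::TOP && CTR::FR && DPT::MKG").unwrap();
    let usk = cc.generate_user_secret_key(&mut msk, &uap).unwrap();
    assert_eq!(Some(key), cc.decaps(&usk, &enc).unwrap());
}
```
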
159 | 160 | ### Serialization 161 | 162 | The size of the serialized keys and encapsulation is given by the following formulas: 163 | 164 | - master secret key: 165 | $$3 \cdot L_{sk} + \texttt{LEB128sizeof}(|\mathcal{P}|) + \sum\limits_{p~\in~\mathcal{P}} \left( \texttt{LEB128sizeof}(\texttt{sizeof}(p)) + \texttt{sizeof}(p) + 1 + L_{sk} + \delta_{p,~h} \cdot L_{sk}^{pq}\right)$$ 166 | - public key: 167 | $$2 \cdot L_{pk} + \texttt{LEB128sizeof}(|\mathcal{P}|) + \sum\limits_{p~\in~\mathcal{P}} \left( \texttt{LEB128sizeof}(\texttt{sizeof}(p)) + \texttt{sizeof}(p) + 1 + L_{pk} + \delta_{p,~h} \cdot L_{pk}^{pq}\right)$$ 168 | - user secret key: 169 | $$2 \cdot L_{sk} + \texttt{LEB128sizeof}(n_{p}) + \sum\limits_{p~\in~coordinates} \left( 1 + L_{sk} + \delta_{p,~h} \cdot L_{sk}^{pq}\right)$$ 170 | - encapsulation: 171 | $$2 \cdot L_{pk} + T + \texttt{LEB128sizeof}(n_{p}) + \sum\limits_{p~\in~coordinates} \left(1 + \delta_{p,~c} \cdot L_{pk} + \delta_{p,~h} \cdot L_c^{pq}\right)$$ 172 | - encrypted header (encapsulation and symmetrically encrypted metadata): 173 | $$\texttt{sizeof}(encapsulation) + \texttt{LEB128sizeof} \left(C_{overhead} + \texttt{sizeof}(metadata)\right) + C_{overhead} + \texttt{sizeof}(metadata)$$ 174 | 175 | where: 176 | 177 | - $|\mathcal{P}|$ is the number of coordinates related to the encryption policy 178 | - $\delta_{p,~c} = 1$ if $p$ is a classic coordinate, 0 otherwise 179 | - $\delta_{p,~h} = 1 - \delta_{p,~c}$ (i.e. 1 if $p$ is a hybridized coordinate, 180 | 0 otherwise) 181 | - $\texttt{sizeof}: n \rightarrow$ size of $n$ in bytes 182 | - $\texttt{LEB128sizeof}: n \rightarrow \left\lceil \frac{8 \cdot \texttt{sizeof}(n)}{7}\right\rceil$ 183 | 184 | **NOTE**: For our implementation `CoverCryptX25519Aes256`: 185 | 186 | - Curve25519 public key length: $L_{pk} = 32~\textnormal{bytes}$ (compressed 187 | Ristretto representation) 188 | - Curve25519 secret key length: $L_{sk} = 32~\textnormal{bytes}$ 189 | - INDCPA-Kyber public key length: $L_{pk}^{pq} = 1184$ 190 | - INDCPA-Kyber secret key length: $L_{sk}^{pq} = 1152$ 191 | - INDCPA-Kyber ciphertext length: $L_c^{pq} = 1088$ 192 | - EAKEM tag length: $T = 16~\textnormal{bytes}$ 193 | - Symmetric encryption overhead: $C_{overhead} = 28~\textnormal{bytes}$ (16 bytes for the MAC tag and 12 bytes for the nonce) 194 | 195 | ### Symmetric key encapsulation 196 | 197 | This is the core of the CoverCrypt scheme. It allows creating a symmetric key 198 | and its encapsulation for a given set of rights. 199 | 200 | To ease the management of the encapsulations, an object `EncryptedHeader`is 201 | provided in the API. An encrypted header holds an encapsulation and a symmetric 202 | ciphertext of an optional additional data. This additional data can be useful 203 | to store metadata. 204 | 205 | Classic implementation sizes: 206 | 207 | | Nb. of coordinates | Encapsulation size (in bytes) | User decryption key size (in bytes) | 208 | |-------------------|-------------------------------|-------------------------------------| 209 | | 1 | 130 | 98 | 210 | | 2 | 163 | 131 | 211 | | 3 | 196 | 164 | 212 | | 4 | 229 | 197 | 213 | | 5 | 262 | 230 | 214 | 215 | Post-quantum implementation sizes: 216 | 217 | | Nb. 
of coordinates | Encapsulation size (in bytes) | User decryption key size (in bytes) | 218 | |-------------------|-------------------------------|-------------------------------------| 219 | | 1 | 1186 | 1250 | 220 | | 2 | 2275 | 2435 | 221 | | 3 | 3364 | 3620 | 222 | | 4 | 4453 | 4805 | 223 | | 5 | 5542 | 5990 | 224 | 225 | **Note**: encapsulations grow bigger with the size of the target set of rights 226 | and so does the encapsulation time. 227 | 228 | ### Secret key decapsulation 229 | 230 | A user can retrieve the symmetric key needed to decrypt a CoverCrypt ciphertext 231 | by decrypting the associated `EncryptedHeader`. This is only possible if the 232 | user secret keys contains the appropriate rights. 233 | 234 | ## Benchmarks 235 | 236 | The benchmarks presented in this section are run on a Intel(R) Xeon(R) Platinum 8171M CPU @ 2.60GHz. 237 | 238 | - [CoverCrypt classic implementation](./benches/BENCHMARKS_classic.md) 239 | - [CoverCrypt post-quantum implementation](./benches/BENCHMARKS_hybridized.md) 240 | 241 | ## Documentation 242 | 243 | A formal description and proof of the CoverCrypt scheme is given in [this paper](./bib/CoverCrypt.pdf). 244 | It also contains an interesting discussion about the implementation. 245 | 246 | The developer documentation can be found on [doc.rs](https://docs.rs/cosmian_cover_crypt/latest/cosmian_cover_crypt/index.html) 247 | -------------------------------------------------------------------------------- /benches/BENCHMARKS_classic.md: -------------------------------------------------------------------------------- 1 | # Benchmarks for Covercrypt: classic implementation (pre-quantum) 2 | 3 | ## Table of Contents 4 | 5 | - [Overview](#overview) 6 | - [Benchmark Results](#benchmark-results) 7 | - [Header encryption](#header-encryption) 8 | - [Header encryption and decryption](#header-encryption-and-decryption) 9 | - [Key serialization](#key-serialization) 10 | - [Header serialization](#header-serialization) 11 | 12 | ## Overview 13 | 14 | This is a benchmark comparison report. 
15 | 16 | ## Benchmark Results 17 | 18 | ### Header encryption 19 | 20 | | | `1 partition(s), 1 access` | `2 partition(s), 1 access` | `3 partition(s), 1 access` | `4 partition(s), 1 access` | `5 partition(s), 1 access` | 21 | |:-------|:------------------------------------|:------------------------------------|:------------------------------------|:------------------------------------|:------------------------------------ | 22 | | | `133.32 us` (**1.00x**) | `182.22 us` (*1.37x slower*) | `230.37 us` (*1.73x slower*) | `278.18 us` (*2.09x slower*) | `335.55 us` (*2.52x slower*) | 23 | 24 | ### Header encryption and decryption 25 | 26 | | | `ciphertexts with 1 partition(s), usk with 1 partitions` | `ciphertexts with 2 partition(s), usk with 1 partitions` | `ciphertexts with 3 partition(s), usk with 1 partitions` | `ciphertexts with 4 partition(s), usk with 1 partitions` | `ciphertexts with 5 partition(s), usk with 1 partitions` | `ciphertexts with 1 partition(s), usk with 2 partitions` | `ciphertexts with 2 partition(s), usk with 2 partitions` | `ciphertexts with 3 partition(s), usk with 2 partitions` | `ciphertexts with 4 partition(s), usk with 2 partitions` | `ciphertexts with 5 partition(s), usk with 2 partitions` | `ciphertexts with 1 partition(s), usk with 3 partitions` | `ciphertexts with 2 partition(s), usk with 3 partitions` | `ciphertexts with 3 partition(s), usk with 3 partitions` | `ciphertexts with 4 partition(s), usk with 3 partitions` | `ciphertexts with 5 partition(s), usk with 3 partitions` | `ciphertexts with 1 partition(s), usk with 4 partitions` | `ciphertexts with 2 partition(s), usk with 4 partitions` | `ciphertexts with 3 partition(s), usk with 4 partitions` | `ciphertexts with 4 partition(s), usk with 4 partitions` | `ciphertexts with 5 partition(s), usk with 4 partitions` | `ciphertexts with 1 partition(s), usk with 5 partitions` | `ciphertexts with 2 partition(s), usk with 5 partitions` | `ciphertexts with 3 partition(s), usk with 5 partitions` | `ciphertexts with 4 partition(s), usk with 5 partitions` | `ciphertexts with 5 partition(s), usk with 5 partitions` | 27 | 
|:-------|:------------------------------------------------------------------|:------------------------------------------------------------------|:------------------------------------------------------------------|:------------------------------------------------------------------|:------------------------------------------------------------------|:------------------------------------------------------------------|:------------------------------------------------------------------|:------------------------------------------------------------------|:------------------------------------------------------------------|:------------------------------------------------------------------|:------------------------------------------------------------------|:------------------------------------------------------------------|:------------------------------------------------------------------|:------------------------------------------------------------------|:------------------------------------------------------------------|:------------------------------------------------------------------|:------------------------------------------------------------------|:------------------------------------------------------------------|:------------------------------------------------------------------|:------------------------------------------------------------------|:------------------------------------------------------------------|:------------------------------------------------------------------|:------------------------------------------------------------------|:------------------------------------------------------------------|:------------------------------------------------------------------ | 28 | | | `236.02 us` (**1.00x**) | `293.92 us` (*1.25x slower*) | `356.80 us` (*1.51x slower*) | `419.65 us` (*1.78x slower*) | `480.53 us` (*2.04x slower*) | `229.46 us` (**1.03x faster**) | `310.51 us` (*1.32x slower*) | `406.11 us` (*1.72x slower*) | `484.32 us` (*2.05x slower*) | `565.05 us` (*2.39x slower*) | `227.64 us` (**1.04x faster**) | `339.10 us` (*1.44x slower*) | `450.91 us` (*1.91x slower*) | `543.62 us` (*2.30x slower*) | `645.11 us` (*2.73x slower*) | `312.85 us` (*1.33x slower*) | `446.54 us` (*1.89x slower*) | `572.54 us` (*2.43x slower*) | `689.37 us` (*2.92x slower*) | `810.78 us` (*3.44x slower*) | `231.57 us` (**1.02x faster**) | `375.80 us` (*1.59x slower*) | `527.09 us` (*2.23x slower*) | `661.97 us` (*2.80x slower*) | `814.49 us` (*3.45x slower*) | 29 | 30 | ### Key serialization 31 | 32 | | | `MSK` | `MPK` | `USK 1 partition` | 33 | |:-------|:--------------------------|:----------------------------------|:--------------------------------- | 34 | | | `801.07 ns` (**1.00x**) | `83.76 us` (*104.55x slower*) | `118.50 ns` (**6.76x faster**) | 35 | 36 | ### Header serialization 37 | 38 | | | `1 partition(s)` | `2 partition(s)` | `3 partition(s)` | `4 partition(s)` | `5 partition(s)` | 39 | |:-------|:--------------------------|:--------------------------------|:--------------------------------|:--------------------------------|:-------------------------------- | 40 | | | `10.27 us` (**1.00x**) | `10.25 us` (**1.00x faster**) | `10.40 us` (**1.01x slower**) | `10.41 us` (**1.01x slower**) | `10.53 us` (**1.02x slower**) | 41 | 42 | --- 43 | Made with [criterion-table](https://github.com/nu11ptr/criterion-table) 44 | -------------------------------------------------------------------------------- /benches/BENCHMARKS_hybridized.md: 
-------------------------------------------------------------------------------- 1 | # Benchmarks for Covercrypt post-quantum implementation 2 | 3 | ## Table of Contents 4 | 5 | - [Overview](#overview) 6 | - [Benchmark Results](#benchmark-results) 7 | - [Header encryption](#header-encryption) 8 | - [Header encryption and decryption](#header-encryption-and-decryption) 9 | - [Key serialization](#key-serialization) 10 | - [Header serialization](#header-serialization) 11 | 12 | ## Overview 13 | 14 | This is a benchmark comparison report. 15 | 16 | ## Benchmark Results 17 | 18 | ### Header encryption 19 | 20 | | | `1 partition(s), 1 access` | `2 partition(s), 1 access` | `3 partition(s), 1 access` | `4 partition(s), 1 access` | `5 partition(s), 1 access` | 21 | |:-------|:------------------------------------|:------------------------------------|:------------------------------------|:------------------------------------|:------------------------------------ | 22 | | | `176.93 us` (**1.00x**) | `266.21 us` (*1.50x slower*) | `369.05 us` (*2.09x slower*) | `466.62 us` (*2.64x slower*) | `569.79 us` (*3.22x slower*) | 23 | 24 | ### Header encryption and decryption 25 | 26 | | | `ciphertexts with 1 partition(s), usk with 1 partitions` | `ciphertexts with 2 partition(s), usk with 1 partitions` | `ciphertexts with 3 partition(s), usk with 1 partitions` | `ciphertexts with 4 partition(s), usk with 1 partitions` | `ciphertexts with 5 partition(s), usk with 1 partitions` | `ciphertexts with 1 partition(s), usk with 2 partitions` | `ciphertexts with 2 partition(s), usk with 2 partitions` | `ciphertexts with 3 partition(s), usk with 2 partitions` | `ciphertexts with 4 partition(s), usk with 2 partitions` | `ciphertexts with 5 partition(s), usk with 2 partitions` | `ciphertexts with 1 partition(s), usk with 3 partitions` | `ciphertexts with 2 partition(s), usk with 3 partitions` | `ciphertexts with 3 partition(s), usk with 3 partitions` | `ciphertexts with 4 partition(s), usk with 3 partitions` | `ciphertexts with 5 partition(s), usk with 3 partitions` | `ciphertexts with 1 partition(s), usk with 4 partitions` | `ciphertexts with 2 partition(s), usk with 4 partitions` | `ciphertexts with 3 partition(s), usk with 4 partitions` | `ciphertexts with 4 partition(s), usk with 4 partitions` | `ciphertexts with 5 partition(s), usk with 4 partitions` | `ciphertexts with 1 partition(s), usk with 5 partitions` | `ciphertexts with 2 partition(s), usk with 5 partitions` | `ciphertexts with 3 partition(s), usk with 5 partitions` | `ciphertexts with 4 partition(s), usk with 5 partitions` | `ciphertexts with 5 partition(s), usk with 5 partitions` | 27 | 
|:-------|:------------------------------------------------------------------|:------------------------------------------------------------------|:------------------------------------------------------------------|:------------------------------------------------------------------|:------------------------------------------------------------------|:------------------------------------------------------------------|:------------------------------------------------------------------|:------------------------------------------------------------------|:------------------------------------------------------------------|:------------------------------------------------------------------|:------------------------------------------------------------------|:------------------------------------------------------------------|:------------------------------------------------------------------|:------------------------------------------------------------------|:------------------------------------------------------------------|:------------------------------------------------------------------|:------------------------------------------------------------------|:------------------------------------------------------------------|:------------------------------------------------------------------|:------------------------------------------------------------------|:------------------------------------------------------------------|:------------------------------------------------------------------|:------------------------------------------------------------------|:------------------------------------------------------------------|:------------------------------------------------------------------ | 28 | | | `309.26 us` (**1.00x**) | `423.71 us` (*1.37x slower*) | `550.71 us` (*1.78x slower*) | `665.59 us` (*2.15x slower*) | `807.60 us` (*2.61x slower*) | `350.83 us` (*1.13x slower*) | `501.92 us` (*1.62x slower*) | `650.93 us` (*2.10x slower*) | `801.53 us` (*2.59x slower*) | `998.96 us` (*3.23x slower*) | `304.14 us` (**1.02x faster**) | `489.87 us` (*1.58x slower*) | `668.58 us` (*2.16x slower*) | `837.40 us` (*2.71x slower*) | `1.05 ms` (*3.39x slower*) | `404.11 us` (*1.31x slower*) | `616.11 us` (*1.99x slower*) | `827.60 us` (*2.68x slower*) | `1.02 ms` (*3.31x slower*) | `1.22 ms` (*3.94x slower*) | `363.74 us` (*1.18x slower*) | `579.54 us` (*1.87x slower*) | `852.07 us` (*2.76x slower*) | `1.05 ms` (*3.40x slower*) | `1.32 ms` (*4.28x slower*) | 29 | 30 | ### Key serialization 31 | 32 | | | `MSK` | `MPK` | `USK 1 partition` | 33 | |:-------|:-------------------------|:---------------------------------|:---------------------------------- | 34 | | | `12.48 us` (**1.00x**) | `101.83 us` (*8.16x slower*) | `900.00 ns` (**13.87x faster**) | 35 | 36 | ### Header serialization 37 | 38 | | | `1 partition(s)` | `2 partition(s)` | `3 partition(s)` | `4 partition(s)` | `5 partition(s)` | 39 | |:-------|:--------------------------|:--------------------------------|:--------------------------------|:--------------------------------|:-------------------------------- | 40 | | | `10.55 us` (**1.00x**) | `11.62 us` (**1.10x slower**) | `12.03 us` (*1.14x slower*) | `12.91 us` (*1.22x slower*) | `13.27 us` (*1.26x slower*) | 41 | 42 | --- 43 | Made with [criterion-table](https://github.com/nu11ptr/criterion-table) 44 | -------------------------------------------------------------------------------- /benches/benches.rs: -------------------------------------------------------------------------------- 1 | use 
cosmian_cover_crypt::{api::Covercrypt, cc_keygen, traits::KemAc, AccessPolicy}; 2 | use criterion::{criterion_group, criterion_main, BatchSize, Criterion}; 3 | 4 | const C_ENC_APS: [(&str, usize); 5] = [ 5 | ("SEC::LOW && (DPT::MKG) ", 1), 6 | ("SEC::LOW && (DPT::MKG || DPT::FIN) ", 2), 7 | ("SEC::LOW && (DPT::MKG || DPT::FIN || DPT::DEV) ", 3), 8 | ( 9 | "SEC::LOW && (DPT::MKG || DPT::FIN || DPT::DEV || DPT::HR) ", 10 | 4, 11 | ), 12 | ( 13 | "SEC::LOW && (DPT::MKG || DPT::FIN || DPT::DEV || DPT::HR || DPT::RD) ", 14 | 5, 15 | ), 16 | ]; 17 | 18 | const C_USK_APS: [(&str, usize); 5] = [ 19 | ("SEC::LOW && CTR::FR && DPT::MKG", 8), 20 | ("SEC::LOW && CTR::FR && (DPT::MKG || DPT::FIN)", 12), 21 | ( 22 | "SEC::LOW && CTR::FR && (DPT::MKG || DPT::FIN || DPT::DEV)", 23 | 16, 24 | ), 25 | ( 26 | "SEC::LOW && CTR::FR && (DPT::MKG || DPT::FIN || DPT::DEV || DPT::HR)", 27 | 20, 28 | ), 29 | ( 30 | "SEC::LOW && CTR::FR && (DPT::MKG || DPT::FIN || DPT::DEV || DPT::HR || DPT::RD)", 31 | 24, 32 | ), 33 | ]; 34 | 35 | const H_ENC_APS: [(&str, usize); 5] = [ 36 | ("SEC::TOP && (DPT::MKG) ", 1), 37 | ("SEC::TOP && (DPT::MKG || DPT::FIN) ", 2), 38 | ("SEC::TOP && (DPT::MKG || DPT::FIN || DPT::DEV) ", 3), 39 | ( 40 | "SEC::TOP && (DPT::MKG || DPT::FIN || DPT::DEV || DPT::HR) ", 41 | 4, 42 | ), 43 | ( 44 | "SEC::TOP && (DPT::MKG || DPT::FIN || DPT::DEV || DPT::HR || DPT::RD) ", 45 | 5, 46 | ), 47 | ]; 48 | 49 | const H_USK_APS: [(&str, usize); 5] = [ 50 | ("SEC::TOP && CTR::FR && DPT::MKG", 12), 51 | ("SEC::TOP && CTR::FR && (DPT::MKG || DPT::FIN)", 18), 52 | ( 53 | "SEC::TOP && CTR::FR && (DPT::MKG || DPT::FIN || DPT::DEV)", 54 | 24, 55 | ), 56 | ( 57 | "SEC::TOP && CTR::FR && (DPT::MKG || DPT::FIN || DPT::DEV || DPT::HR)", 58 | 30, 59 | ), 60 | ( 61 | "SEC::TOP && CTR::FR && (DPT::MKG || DPT::FIN || DPT::DEV || DPT::HR || DPT::RD)", 62 | 36, 63 | ), 64 | ]; 65 | 66 | macro_rules! gen_enc { 67 | ($cc:ident, $mpk:ident, $ap:ident, $cnt:ident) => {{ 68 | let (k, enc) = $cc 69 | .encaps(&$mpk, &AccessPolicy::parse($ap).unwrap()) 70 | .unwrap(); 71 | assert_eq!(enc.count(), $cnt); 72 | (k, enc) 73 | }}; 74 | } 75 | 76 | macro_rules! 
gen_usk { 77 | ($cc:ident, $msk:ident, $ap:ident, $cnt:ident) => {{ 78 | let usk = $cc 79 | .generate_user_secret_key(&mut $msk, &AccessPolicy::parse($ap).unwrap()) 80 | .unwrap(); 81 | assert_eq!(usk.count(), $cnt); 82 | usk 83 | }}; 84 | } 85 | 86 | fn bench_classical_encapsulation(c: &mut Criterion) { 87 | let cc = Covercrypt::default(); 88 | let (_, mpk) = cc_keygen(&cc, true).unwrap(); 89 | 90 | { 91 | let mut group = c.benchmark_group("Classic encapsulation"); 92 | for (enc_ap, cnt_enc) in C_ENC_APS { 93 | let _ = gen_enc!(cc, mpk, enc_ap, cnt_enc); 94 | let eap = AccessPolicy::parse(enc_ap).unwrap(); 95 | group.bench_function(format!("{:?} encs", cnt_enc), |b| { 96 | b.iter(|| cc.encaps(&mpk, &eap).unwrap()) 97 | }); 98 | } 99 | } 100 | } 101 | 102 | fn bench_classical_decapsulation(c: &mut Criterion) { 103 | let cc = Covercrypt::default(); 104 | let (mut msk, mpk) = cc_keygen(&cc, true).unwrap(); 105 | 106 | { 107 | let mut group = c.benchmark_group("Decapsulation"); 108 | for (enc_ap, cnt_enc) in C_ENC_APS { 109 | let eap = AccessPolicy::parse(enc_ap).unwrap(); 110 | for (usk_ap, cnt_secret) in C_USK_APS { 111 | let uap = AccessPolicy::parse(usk_ap).unwrap(); 112 | 113 | let usk = gen_usk!(cc, msk, usk_ap, cnt_secret); 114 | let (k, enc) = gen_enc!(cc, mpk, enc_ap, cnt_enc); 115 | assert_eq!(Some(k), cc.decaps(&usk, &enc).unwrap()); 116 | 117 | group.bench_function( 118 | format!("{:?} encs vs {:?} secrets", cnt_enc, cnt_secret), 119 | |b| { 120 | b.iter_batched( 121 | || { 122 | ( 123 | cc.generate_user_secret_key(&mut msk, &uap).unwrap(), 124 | cc.encaps(&mpk, &eap).unwrap(), 125 | ) 126 | }, 127 | |(usk, (_, enc))| cc.decaps(&usk, &enc).unwrap(), 128 | BatchSize::SmallInput, 129 | ) 130 | }, 131 | ); 132 | } 133 | } 134 | } 135 | } 136 | 137 | fn bench_hybridized_encapsulation(c: &mut Criterion) { 138 | let cc = Covercrypt::default(); 139 | let (_, mpk) = cc_keygen(&cc, true).unwrap(); 140 | 141 | { 142 | let mut group = c.benchmark_group("Hybridized encapsulation"); 143 | for (enc_ap, cnt_enc) in H_ENC_APS { 144 | let eap = AccessPolicy::parse(enc_ap).unwrap(); 145 | let _ = gen_enc!(cc, mpk, enc_ap, cnt_enc); 146 | group.bench_function(format!("{:?} encs", cnt_enc), |b| { 147 | b.iter(|| cc.encaps(&mpk, &eap).unwrap()) 148 | }); 149 | } 150 | } 151 | } 152 | 153 | fn bench_hybridized_decapsulation(c: &mut Criterion) { 154 | let cc = Covercrypt::default(); 155 | let (mut msk, mpk) = cc_keygen(&cc, true).unwrap(); 156 | 157 | { 158 | let mut group = c.benchmark_group("Hybridized Decapsulation"); 159 | for (enc_ap, enc_cnt) in H_ENC_APS { 160 | let eap = AccessPolicy::parse(enc_ap).unwrap(); 161 | for (usk_ap, usk_cnt) in H_USK_APS { 162 | let uap = AccessPolicy::parse(usk_ap).unwrap(); 163 | let usk = gen_usk!(cc, msk, usk_ap, usk_cnt); 164 | let (k, enc) = gen_enc!(cc, mpk, enc_ap, enc_cnt); 165 | assert_eq!(Some(k), cc.decaps(&usk, &enc).unwrap()); 166 | 167 | group.bench_function( 168 | format!("{:?} encapsulations vs {:?} secrets", enc_cnt, usk_cnt), 169 | |b| { 170 | b.iter_batched( 171 | || { 172 | ( 173 | cc.generate_user_secret_key(&mut msk, &uap).unwrap(), 174 | cc.encaps(&mpk, &eap).unwrap(), 175 | ) 176 | }, 177 | |(usk, (_, enc))| cc.decaps(&usk, &enc).unwrap(), 178 | BatchSize::SmallInput, 179 | ) 180 | }, 181 | ); 182 | } 183 | } 184 | } 185 | } 186 | 187 | criterion_group!( 188 | name = benches; 189 | config = Criterion::default().sample_size(5000); 190 | targets = 191 | bench_classical_encapsulation, 192 | bench_classical_decapsulation, 193 | 
bench_hybridized_encapsulation, 194 | bench_hybridized_decapsulation 195 | ); 196 | 197 | criterion_main!(benches); 198 | -------------------------------------------------------------------------------- /benches/generate.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -e 4 | 5 | # Usage: bash generate.sh 6 | 7 | if [[ "$1" == "generate" ]] ; then 8 | cargo install cargo-criterion 9 | cargo install criterion-table 10 | 11 | cargo criterion --features "test-utils" --message-format=json | criterion-table >benches/BENCHMARKS.md 12 | fi 13 | 14 | sed -i "s/❌ //g" benches/BENCHMARKS*.md 15 | sed -i "s/🚀 //g" benches/BENCHMARKS*.md 16 | sed -i "s/✅ //g" benches/BENCHMARKS*.md 17 | -------------------------------------------------------------------------------- /bib/CoverCrypt.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Cosmian/cover_crypt/089a548d4373dd099a57bb1c5219ad0a4cf25fe4/bib/CoverCrypt.pdf -------------------------------------------------------------------------------- /deny.toml: -------------------------------------------------------------------------------- 1 | # This template contains all of the possible sections and their default values 2 | 3 | # Note that all fields that take a lint level have these possible values: 4 | # * deny - An error will be produced and the check will fail 5 | # * warn - A warning will be produced, but the check will not fail 6 | # * allow - No warning or error will be produced, though in some cases a note 7 | # will be 8 | 9 | # The values provided in this template are the default values that will be used 10 | # when any section or field is not specified in your own configuration 11 | 12 | # Root options 13 | 14 | # The graph table configures how the dependency graph is constructed and thus 15 | # which crates the checks are performed against 16 | [graph] 17 | # If 1 or more target triples (and optionally, target_features) are specified, 18 | # only the specified targets will be checked when running `cargo deny check`. 19 | # This means, if a particular package is only ever used as a target specific 20 | # dependency, such as, for example, the `nix` crate only being used via the 21 | # `target_family = "unix"` configuration, that only having windows targets in 22 | # this list would mean the nix crate, as well as any of its exclusive 23 | # dependencies not shared by any other crates, would be ignored, as the target 24 | # list here is effectively saying which targets you are building for. 25 | targets = [ 26 | 27 | 28 | # The triple can be any string, but only the target triples built in to 29 | # rustc (as of 1.40) can be checked against actual config expressions 30 | # "x86_64-unknown-linux-musl", 31 | # You can also specify which target_features you promise are enabled for a 32 | # particular target. target_features are currently not validated against 33 | # the actual valid features supported by the target architecture. 34 | # { triple = "wasm32-unknown-unknown", features = ["atomics"] }, 35 | ] 36 | # When creating the dependency graph used as the source of truth when checks are 37 | # executed, this field can be used to prune crates from the graph, removing them 38 | # from the view of cargo-deny. 
This is an extremely heavy hammer, as if a crate 39 | # is pruned from the graph, all of its dependencies will also be pruned unless 40 | # they are connected to another crate in the graph that hasn't been pruned, 41 | # so it should be used with care. The identifiers are [Package ID Specifications] 42 | # (https://doc.rust-lang.org/cargo/reference/pkgid-spec.html) 43 | # exclude = [] 44 | # If true, metadata will be collected with `--all-features`. Note that this can't 45 | # be toggled off if true, if you want to conditionally enable `--all-features` it 46 | # is recommended to pass `--all-features` on the cmd line instead 47 | all-features = true 48 | # If true, metadata will be collected with `--no-default-features`. The same 49 | # caveat with `all-features` applies 50 | no-default-features = false 51 | # If set, these feature will be enabled when collecting metadata. If `--features` 52 | # is specified on the cmd line they will take precedence over this option. 53 | # features = [] 54 | 55 | # The output table provides options for how/if diagnostics are outputted 56 | [output] 57 | # When outputting inclusion graphs in diagnostics that include features, this 58 | # option can be used to specify the depth at which feature edges will be added. 59 | # This option is included since the graphs can be quite large and the addition 60 | # of features from the crate(s) to all of the graph roots can be far too verbose. 61 | # This option can be overridden via `--feature-depth` on the cmd line 62 | feature-depth = 1 63 | 64 | # This section is considered when running `cargo deny check advisories` 65 | # More documentation for the advisories section can be found here: 66 | # https://embarkstudios.github.io/cargo-deny/checks/advisories/cfg.html 67 | [advisories] 68 | # The path where the advisory databases are cloned/fetched into 69 | # db-path = "$CARGO_HOME/advisory-dbs" 70 | # The url(s) of the advisory databases to use 71 | # db-urls = ["https://github.com/rustsec/advisory-db"] 72 | # A list of advisory IDs to ignore. Note that ignored advisories will still 73 | # output a note when they are encountered. 74 | ignore = [ 75 | 76 | 77 | # { id = "RUSTSEC-2023-0071", reason = "rsa" }, 78 | # { id = "RUSTSEC-0000-0000", reason = "you can specify a reason the advisory is ignored" }, 79 | # "a-crate-that-is-yanked@0.1.1", # you can also ignore yanked crate versions if you wish 80 | # { crate = "a-crate-that-is-yanked@0.1.1", reason = "you can specify why you are ignoring the yanked crate" }, 81 | ] 82 | # If this is true, then cargo deny will use the git executable to fetch advisory database. 83 | # If this is false, then it uses a built-in git library. 84 | # Setting this to true can be helpful if you have special authentication requirements that cargo-deny does not support. 85 | # See Git Authentication for more information about setting up git authentication. 86 | # git-fetch-with-cli = true 87 | 88 | # This section is considered when running `cargo deny check licenses` 89 | # More documentation for the licenses section can be found here: 90 | # https://embarkstudios.github.io/cargo-deny/checks/licenses/cfg.html 91 | [licenses] 92 | # List of explicitly allowed licenses 93 | # See https://spdx.org/licenses/ for list of possible licenses 94 | # [possible values: any SPDX 3.11 short identifier (+ optional exception)]. 
95 | allow = [ 96 | "BUSL-1.1", 97 | "MIT", 98 | "Apache-2.0", 99 | "Apache-2.0 WITH LLVM-exception", 100 | "BSD-2-Clause", 101 | "BSD-3-Clause", 102 | "CC0-1.0", 103 | "Unicode-3.0", 104 | ] 105 | # The confidence threshold for detecting a license from license text. 106 | # The higher the value, the more closely the license text must be to the 107 | # canonical license text of a valid SPDX license file. 108 | # [possible values: any between 0.0 and 1.0]. 109 | confidence-threshold = 0.8 110 | # Allow 1 or more licenses on a per-crate basis, so that particular licenses 111 | # aren't accepted for every possible crate as with the normal allow list 112 | exceptions = [ 113 | 114 | 115 | # Each entry is the crate and version constraint, and its specific allow 116 | # list 117 | # { allow = ["Zlib"], crate = "adler32" }, 118 | ] 119 | 120 | # Some crates don't have (easily) machine readable licensing information, 121 | # adding a clarification entry for it allows you to manually specify the 122 | # licensing information 123 | [[licenses.clarify]] 124 | # The package spec the clarification applies to 125 | crate = "ring" 126 | # The SPDX expression for the license requirements of the crate 127 | expression = "MIT AND ISC AND OpenSSL" 128 | # One or more files in the crate's source used as the "source of truth" for 129 | # the license expression. If the contents match, the clarification will be used 130 | # when running the license check, otherwise the clarification will be ignored 131 | # and the crate will be checked normally, which may produce warnings or errors 132 | # depending on the rest of your configuration 133 | license-files = [ 134 | # Each entry is a crate relative path, and the (opaque) hash of its contents 135 | { path = "LICENSE", hash = 0xbd0eed23 }, 136 | ] 137 | 138 | [licenses.private] 139 | # If true, ignores workspace crates that aren't published, or are only 140 | # published to private registries. 141 | # To see how to mark a crate as unpublished (to the official registry), 142 | # visit https://doc.rust-lang.org/cargo/reference/manifest.html#the-publish-field. 143 | ignore = false 144 | # One or more private registries that you might publish crates to, if a crate 145 | # is only published to private registries, and ignore is true, the crate will 146 | # not have its license(s) checked 147 | registries = [ 148 | 149 | 150 | # "https://sekretz.com/registry 151 | ] 152 | 153 | # This section is considered when running `cargo deny check bans`. 154 | # More documentation about the 'bans' section can be found here: 155 | # https://embarkstudios.github.io/cargo-deny/checks/bans/cfg.html 156 | [bans] 157 | # Lint level for when multiple versions of the same crate are detected 158 | multiple-versions = "warn" 159 | # Lint level for when a crate version requirement is `*` 160 | wildcards = "allow" 161 | # The graph highlighting used when creating dotgraphs for crates 162 | # with multiple versions 163 | # * lowest-version - The path to the lowest versioned duplicate is highlighted 164 | # * simplest-path - The path to the version with the fewest edges is highlighted 165 | # * all - Both lowest-version and simplest-path are used 166 | highlight = "all" 167 | # The default lint level for `default` features for crates that are members of 168 | # the workspace that is being checked. This can be overridden by allowing/denying 169 | # `default` on a crate-by-crate basis if desired. 
170 | workspace-default-features = "allow" 171 | # The default lint level for `default` features for external crates that are not 172 | # members of the workspace. This can be overridden by allowing/denying `default` 173 | # on a crate-by-crate basis if desired. 174 | external-default-features = "allow" 175 | # List of crates that are allowed. Use with care! 176 | allow = [ 177 | 178 | 179 | # "ansi_term@0.11.0", 180 | # { crate = "ansi_term@0.11.0", reason = "you can specify a reason it is allowed" }, 181 | ] 182 | # List of crates to deny 183 | deny = [ 184 | 185 | 186 | # "ansi_term@0.11.0", 187 | # { crate = "ansi_term@0.11.0", reason = "you can specify a reason it is banned" }, 188 | # Wrapper crates can optionally be specified to allow the crate when it 189 | # is a direct dependency of the otherwise banned crate 190 | # { crate = "ansi_term@0.11.0", wrappers = ["this-crate-directly-depends-on-ansi_term"] }, 191 | ] 192 | 193 | # List of features to allow/deny 194 | # Each entry the name of a crate and a version range. If version is 195 | # not specified, all versions will be matched. 196 | # [[bans.features]] 197 | # crate = "reqwest" 198 | # Features to not allow 199 | # deny = ["json"] 200 | # Features to allow 201 | # allow = [ 202 | # "rustls", 203 | # "__rustls", 204 | # "__tls", 205 | # "hyper-rustls", 206 | # "rustls", 207 | # "rustls-pemfile", 208 | # "rustls-tls-webpki-roots", 209 | # "tokio-rustls", 210 | # "webpki-roots", 211 | # ] 212 | # If true, the allowed features must exactly match the enabled feature set. If 213 | # this is set there is no point setting `deny` 214 | # exact = true 215 | 216 | # Certain crates/versions that will be skipped when doing duplicate detection. 217 | skip = [ 218 | 219 | 220 | # "ansi_term@0.11.0", 221 | # { crate = "ansi_term@0.11.0", reason = "you can specify a reason why it can't be updated/removed" }, 222 | ] 223 | # Similarly to `skip` allows you to skip certain crates during duplicate 224 | # detection. Unlike skip, it also includes the entire tree of transitive 225 | # dependencies starting at the specified crate, up to a certain depth, which is 226 | # by default infinite. 227 | skip-tree = [ 228 | 229 | 230 | # "ansi_term@0.11.0", # will be skipped along with _all_ of its direct and transitive dependencies 231 | # { crate = "ansi_term@0.11.0", depth = 20 }, 232 | ] 233 | 234 | # This section is considered when running `cargo deny check sources`. 235 | # More documentation about the 'sources' section can be found here: 236 | # https://embarkstudios.github.io/cargo-deny/checks/sources/cfg.html 237 | [sources] 238 | # Lint level for what to happen when a crate from a crate registry that is not 239 | # in the allow list is encountered 240 | unknown-registry = "warn" 241 | # Lint level for what to happen when a crate from a git repository that is not 242 | # in the allow list is encountered 243 | unknown-git = "warn" 244 | # List of URLs for allowed crate registries. Defaults to the crates.io index 245 | # if not specified. If it is specified but empty, no registries are allowed. 
246 | allow-registry = ["https://github.com/rust-lang/crates.io-index"] 247 | # List of URLs for allowed Git repositories 248 | allow-git = ["https://github.com/Cosmian/crypto_core.git"] 249 | 250 | [sources.allow-org] 251 | # # 1 or more github.com organizations to allow git sources for 252 | # github = [""] 253 | # # 1 or more gitlab.com organizations to allow git sources for 254 | # gitlab = [""] 255 | # # 1 or more bitbucket.org organizations to allow git sources for 256 | # bitbucket = [""] 257 | -------------------------------------------------------------------------------- /examples/decrypt.rs: -------------------------------------------------------------------------------- 1 | use std::{fs::File, io::Read}; 2 | 3 | use cosmian_cover_crypt::{traits::PkeAc, XEnc}; 4 | use cosmian_crypto_core::{bytes_ser_de::Deserializer, Aes256Gcm}; 5 | 6 | fn main() { 7 | use cosmian_cover_crypt::api::Covercrypt; 8 | use cosmian_cover_crypt::UserSecretKey; 9 | use cosmian_crypto_core::bytes_ser_de::Serializable; 10 | 11 | let cc = Covercrypt::default(); 12 | 13 | let usk = UserSecretKey::deserialize(&{ 14 | let mut bytes = Vec::new(); 15 | File::open("usk.txt") 16 | .unwrap() 17 | .read_to_end(&mut bytes) 18 | .unwrap(); 19 | bytes 20 | }) 21 | .unwrap(); 22 | 23 | let ctx = { 24 | let mut bytes = Vec::new(); 25 | File::open("ctx.txt") 26 | .unwrap() 27 | .read_to_end(&mut bytes) 28 | .unwrap(); 29 | let mut de = Deserializer::new(&bytes); 30 | (de.read::().unwrap(), de.read_vec().unwrap()) 31 | }; 32 | 33 | for _ in 0..1_000_000 { 34 | PkeAc::<{ Aes256Gcm::KEY_LENGTH }, Aes256Gcm>::decrypt(&cc, &usk, &ctx) 35 | .expect("cannot decrypt hybrid header"); 36 | } 37 | } 38 | -------------------------------------------------------------------------------- /examples/encrypt.rs: -------------------------------------------------------------------------------- 1 | use std::{ 2 | fs::File, 3 | io::{Read, Write}, 4 | }; 5 | 6 | use cosmian_cover_crypt::{ 7 | api::Covercrypt, cc_keygen, traits::PkeAc, AccessPolicy, MasterPublicKey, MasterSecretKey, 8 | UserSecretKey, XEnc, 9 | }; 10 | use cosmian_crypto_core::{ 11 | bytes_ser_de::{Deserializer, Serializable, Serializer}, 12 | Aes256Gcm, 13 | }; 14 | 15 | #[allow(dead_code)] 16 | /// Generates a new USK and encrypted header and prints them. 
17 | fn generate_new(cc: &Covercrypt, msk: &mut MasterSecretKey, mpk: &MasterPublicKey) { 18 | let ap = AccessPolicy::parse("DPT::FIN && SEC::TOP").unwrap(); 19 | 20 | let usk = cc.generate_user_secret_key(msk, &ap).unwrap(); 21 | let ctx = PkeAc::<{ Aes256Gcm::KEY_LENGTH }, Aes256Gcm>::encrypt(cc, mpk, &ap, b"gotcha") 22 | .expect("cannot encrypt!"); 23 | 24 | // Ensure decryption is OK 25 | PkeAc::<{ Aes256Gcm::KEY_LENGTH }, Aes256Gcm>::decrypt(cc, &usk, &ctx).unwrap(); 26 | 27 | { 28 | File::create("./usk.txt") 29 | .unwrap() 30 | .write_all(&usk.serialize().unwrap()) 31 | .unwrap(); 32 | 33 | let usk = UserSecretKey::deserialize(&{ 34 | let mut bytes = Vec::new(); 35 | File::open("usk.txt") 36 | .unwrap() 37 | .read_to_end(&mut bytes) 38 | .unwrap(); 39 | bytes 40 | }) 41 | .unwrap(); 42 | 43 | // Ensure decryption is OK 44 | PkeAc::<{ Aes256Gcm::KEY_LENGTH }, Aes256Gcm>::decrypt(cc, &usk, &ctx).unwrap(); 45 | 46 | File::create("./ctx.txt") 47 | .unwrap() 48 | .write_all(&{ 49 | let mut ser = Serializer::new(); 50 | ser.write(&ctx.0).unwrap(); 51 | ser.write_vec(&ctx.1).unwrap(); 52 | ser.finalize() 53 | }) 54 | .unwrap(); 55 | 56 | let ctx = { 57 | let mut bytes = Vec::new(); 58 | File::open("ctx.txt") 59 | .unwrap() 60 | .read_to_end(&mut bytes) 61 | .unwrap(); 62 | let mut de = Deserializer::new(&bytes); 63 | (de.read::().unwrap(), de.read_vec().unwrap()) 64 | }; 65 | 66 | // Ensure decryption is OK 67 | PkeAc::<{ Aes256Gcm::KEY_LENGTH }, Aes256Gcm>::decrypt(cc, &usk, &ctx).unwrap(); 68 | } 69 | } 70 | 71 | fn main() { 72 | let ap = AccessPolicy::parse("DPT::FIN && SEC::TOP").unwrap(); 73 | let cc = Covercrypt::default(); 74 | let (mut _msk, mpk) = cc_keygen(&cc, false).unwrap(); 75 | 76 | // Un-comment this line to generate new usk.txt and ctx.txt files. 77 | // 78 | // generate_new(&cc, &mut _msk, &mpk); 79 | 80 | let ptx = "testing encryption/decryption".as_bytes(); 81 | 82 | for _ in 0..100 { 83 | PkeAc::<{ Aes256Gcm::KEY_LENGTH }, Aes256Gcm>::encrypt(&cc, &mpk, &ap, ptx) 84 | .expect("cannot encrypt!"); 85 | } 86 | } 87 | -------------------------------------------------------------------------------- /examples/runme.rs: -------------------------------------------------------------------------------- 1 | //! This is the demo given in `README.md` and `lib.rs` 2 | 3 | use cosmian_cover_crypt::{api::Covercrypt, test_utils::cc_keygen, AccessPolicy, EncryptedHeader}; 4 | 5 | fn main() { 6 | let cc = Covercrypt::default(); 7 | let (mut msk, mpk) = cc_keygen(&cc, false).unwrap(); 8 | 9 | // The user has a security clearance `Security Level::Top Secret`, 10 | // and belongs to the finance department (`Department::FIN`). 11 | let access_policy = 12 | AccessPolicy::parse("Security Level::Top Secret && Department::FIN").unwrap(); 13 | let mut usk = cc 14 | .generate_user_secret_key(&mut msk, &access_policy) 15 | .unwrap(); 16 | 17 | // Encrypt 18 | let (_, encrypted_header) = 19 | EncryptedHeader::generate(&cc, &mpk, &access_policy.clone(), None, None).unwrap(); 20 | 21 | // The user is able to decrypt the encrypted header. 22 | assert!(encrypted_header.decrypt(&cc, &usk, None).unwrap().is_some()); 23 | 24 | // 25 | // Rekey the user access policy. 
26 | let mpk = cc.rekey(&mut msk, &access_policy).unwrap(); 27 | 28 | let enc_policy = AccessPolicy::parse("Security Level::Top Secret").unwrap(); 29 | // Encrypt with rotated attribute 30 | let (_, new_encrypted_header) = 31 | EncryptedHeader::generate(&cc, &mpk, &enc_policy, None, None).unwrap(); 32 | 33 | // user cannot decrypt the newly encrypted header 34 | assert!(new_encrypted_header 35 | .decrypt(&cc, &usk, None) 36 | .unwrap() 37 | .is_none()); 38 | 39 | // refresh user secret key, do not grant old encryption access 40 | cc.refresh_usk(&mut msk, &mut usk, false).unwrap(); 41 | 42 | // The user with refreshed key is able to decrypt the newly encrypted header. 43 | assert!(new_encrypted_header 44 | .decrypt(&cc, &usk, None) 45 | .unwrap() 46 | .is_some()); 47 | 48 | // But it cannot decrypt old ciphertexts 49 | assert!(encrypted_header.decrypt(&cc, &usk, None).unwrap().is_none()); 50 | } 51 | -------------------------------------------------------------------------------- /src/abe_policy/access_policy.rs: -------------------------------------------------------------------------------- 1 | //! This module defines methods to parse and manipulate access policies. 2 | //! 3 | //! Access policies are boolean equations of *qualified attributes*. Attributes are 4 | //! defined as a combination of a dimension name and a component name (belonging 5 | //! to the named dimension). 6 | 7 | use std::{ 8 | collections::LinkedList, 9 | fmt::Debug, 10 | ops::{BitAnd, BitOr}, 11 | }; 12 | 13 | use crate::{abe_policy::QualifiedAttribute, Error}; 14 | 15 | /// An access policy is a boolean expression of qualified attributes. 16 | #[derive(Debug, Clone, PartialEq, Eq)] 17 | pub enum AccessPolicy { 18 | Broadcast, 19 | Term(QualifiedAttribute), 20 | Conjunction(Box, Box), 21 | Disjunction(Box, Box), 22 | } 23 | 24 | impl AccessPolicy { 25 | /// Find the corresponding closing parenthesis in the boolean expression 26 | /// given as a string. 27 | fn find_matching_closing_parenthesis(boolean_expression: &str) -> Result { 28 | let mut count = 0; 29 | for (index, c) in boolean_expression.chars().enumerate() { 30 | match c { 31 | '(' => count += 1, 32 | ')' => count -= 1, 33 | _ => {} 34 | }; 35 | if count < 0 { 36 | return Ok(index); 37 | } 38 | } 39 | Err(Error::InvalidBooleanExpression(format!( 40 | "Missing closing parenthesis in boolean expression {boolean_expression}" 41 | ))) 42 | } 43 | 44 | /// Parses the given string into an access policy. 45 | /// 46 | /// # Abstract grammar 47 | /// 48 | /// The following abstract grammar describes the valid access policies 49 | /// syntax. The brackets are used to denote optional elements, the pipes 50 | /// ('|') to denote options, slashes to denote REGEXP, and spaces to denote 51 | /// concatenation. Spaces may be interleaved with elements. They are simply 52 | /// ignored by the parser. 53 | /// 54 | /// - access_policy: [ attribute | group [ operator access_policy ]] 55 | /// - attribute: dimension [ separator component ] 56 | /// - group: ( access_policy ) 57 | /// - operator: OR | AND 58 | /// - OR: || 59 | /// - AND: && 60 | /// - separator: :: 61 | /// - dimension: /[^&|: ]+/ 62 | /// - component: /[^&|: ]+/ 63 | /// 64 | /// The REGEXP used to describe the dimension and the component stands for: 65 | /// "at least one character that is neither '&', '|', ':' nor ' '". 66 | /// 67 | /// # Precedence rule 68 | /// 69 | /// Note that the usual precedence rules hold in expressing an access 70 | /// policy: 71 | /// 1. 
grouping takes precedence over all operators; 72 | /// 2. the logical AND takes precedence over the logical OR. 73 | /// 74 | /// # Examples 75 | /// 76 | /// The following expressions define valid access policies: 77 | /// 78 | /// - "DPT::MKG && (CTR::FR || CTR::DE)" 79 | /// - "*" 80 | /// - "SEC::Low Secret && CTR::FR" 81 | /// 82 | /// Notice that the arity of the operators is two. Therefore the following 83 | /// access policy is *invalid*: 84 | /// 85 | /// - "DPT::MKG (&& CTR::FR || CTR::DE)" 86 | /// 87 | /// It is not possible to concatenate attributes. Therefore the following 88 | /// access policy is *invalid*: 89 | /// 90 | /// - "DPT::MKG DPT::FIN" 91 | pub fn parse(mut e: &str) -> Result<Self, Error> { 92 | let seeker = |c: &char| !"()|&".contains(*c); 93 | let mut q = LinkedList::<Self>::new(); 94 | loop { 95 | e = e.trim(); 96 | 97 | if e.is_empty() { 98 | if let Some(first) = q.pop_front() { 99 | return Ok(Self::conjugate(first, q.into_iter())); 100 | } else { 101 | return Err(Error::InvalidBooleanExpression( 102 | "empty string is not a valid access policy".to_string(), 103 | )); 104 | } 105 | } else if e == "*" { 106 | return Ok(Self::conjugate(Self::Broadcast, q.into_iter())); 107 | } else { 108 | match &e[..1] { 109 | "(" => { 110 | let offset = Self::find_matching_closing_parenthesis(&e[1..])?; 111 | q.push_back(Self::parse(&e[1..1 + offset]).map_err(|err| { 112 | Error::InvalidBooleanExpression(format!( 113 | "error while parsing '{e}': {err}" 114 | )) 115 | })?); 116 | e = &e[2 + offset..]; 117 | } 118 | "|" => { 119 | if e[1..].is_empty() || &e[1..2] != "|" { 120 | return Err(Error::InvalidBooleanExpression(format!( 121 | "invalid separator in: '{e}'" 122 | ))); 123 | } 124 | let base = q.pop_front().ok_or_else(|| { 125 | Error::InvalidBooleanExpression(format!("leading OR operand in '{e}'")) 126 | })?; 127 | let lhs = Self::conjugate(base, q.into_iter()); 128 | return Ok(lhs | Self::parse(&e[2..])?); 129 | } 130 | "&" => { 131 | if e[1..].is_empty() || &e[1..2] != "&" { 132 | return Err(Error::InvalidBooleanExpression(format!( 133 | "invalid leading separator in: '{e}'" 134 | ))); 135 | } 136 | if q.is_empty() { 137 | return Err(Error::InvalidBooleanExpression(format!( 138 | "leading AND operand in '{e}'" 139 | ))); 140 | } 141 | e = &e[2..]; 142 | } 143 | ")" => { 144 | return Err(Error::InvalidBooleanExpression(format!( 145 | "unmatched closing parenthesis in '{e}'" 146 | ))); 147 | } 148 | _ => { 149 | let attr: String = e.chars().take_while(seeker).collect(); 150 | q.push_back(Self::Term(QualifiedAttribute::try_from(attr.as_str())?)); 151 | e = &e[attr.len()..]; 152 | } 153 | } 154 | } 155 | } 156 | } 157 | 158 | /// Conjugate the access policies from the given iterator. 159 | fn conjugate(first: Self, policies: impl Iterator<Item = Self>) -> Self { 160 | policies.fold(first, |mut res, operand| { 161 | res = res & operand; 162 | res 163 | }) 164 | } 165 | 166 | /// Converts the access policy into the Disjunctive Normal Form (DNF) of its 167 | /// attributes. Returns the DNF as the list of its conjunctions, themselves 168 | /// represented as the list of their attributes.
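///
/// A short illustrative example (the `D1`/`D2` dimensions and their
/// attributes are hypothetical and only serve to show the rewriting):
///
/// ```
/// use cosmian_cover_crypt::AccessPolicy;
///
/// // The disjunction distributes over the conjunction, so the DNF of
/// // "D1::A && (D2::B || D2::C)" contains two conjunctions:
/// // [[D1::A, D2::B], [D1::A, D2::C]].
/// let ap = AccessPolicy::parse("D1::A && (D2::B || D2::C)").unwrap();
/// assert_eq!(ap.to_dnf().len(), 2);
/// ```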
169 | #[must_use] 170 | pub fn to_dnf(&self) -> Vec> { 171 | match self { 172 | Self::Term(attr) => vec![vec![attr.clone()]], 173 | Self::Conjunction(lhs, rhs) => { 174 | let combinations_left = lhs.to_dnf(); 175 | let combinations_right = rhs.to_dnf(); 176 | let mut res = 177 | Vec::with_capacity(combinations_left.len() * combinations_right.len()); 178 | for value_left in combinations_left { 179 | for value_right in &combinations_right { 180 | res.push([value_left.as_slice(), value_right.as_slice()].concat()); 181 | } 182 | } 183 | res 184 | } 185 | Self::Disjunction(lhs, rhs) => [lhs.to_dnf(), rhs.to_dnf()].concat(), 186 | Self::Broadcast => vec![vec![]], 187 | } 188 | } 189 | } 190 | 191 | impl BitAnd for AccessPolicy { 192 | type Output = Self; 193 | 194 | fn bitand(self, rhs: Self) -> Self::Output { 195 | if self == Self::Broadcast { 196 | rhs 197 | } else if rhs == Self::Broadcast { 198 | self 199 | } else { 200 | Self::Conjunction(Box::new(self), Box::new(rhs)) 201 | } 202 | } 203 | } 204 | 205 | impl BitOr for AccessPolicy { 206 | type Output = Self; 207 | 208 | fn bitor(self, rhs: Self) -> Self::Output { 209 | if self == Self::Broadcast { 210 | self 211 | } else if rhs == Self::Broadcast { 212 | rhs 213 | } else { 214 | Self::Disjunction(Box::new(self), Box::new(rhs)) 215 | } 216 | } 217 | } 218 | 219 | #[cfg(test)] 220 | mod tests { 221 | use super::AccessPolicy; 222 | 223 | #[test] 224 | fn test_access_policy_parsing() { 225 | // These are valid access policies. 226 | let ap = AccessPolicy::parse("(D1::A && (D2::A) || D2::B)").unwrap(); 227 | println!("{ap:#?}"); 228 | let ap = AccessPolicy::parse("D1::A && D2::A || D2::B").unwrap(); 229 | println!("{ap:#?}"); 230 | let ap = AccessPolicy::parse("D1::A && (D2::A || D2::B)").unwrap(); 231 | println!("{ap:#?}"); 232 | let ap = AccessPolicy::parse("D1::A (D2::A || D2::B)").unwrap(); 233 | println!("{ap:#?}"); 234 | assert_eq!(AccessPolicy::parse("*").unwrap(), AccessPolicy::Broadcast); 235 | assert!(AccessPolicy::parse("").is_err()); 236 | 237 | // These are invalid access policies. 238 | // TODO: make this one valid (change the parsing rule of the attribute). 239 | assert!(AccessPolicy::parse("D1").is_err()); 240 | assert!(AccessPolicy::parse("D1::A (&& D2::A || D2::B)").is_err()); 241 | assert!(AccessPolicy::parse("|| D2::B").is_err()); 242 | } 243 | } 244 | -------------------------------------------------------------------------------- /src/abe_policy/attribute.rs: -------------------------------------------------------------------------------- 1 | use std::{convert::TryFrom, fmt::Debug, ops::BitOr}; 2 | 3 | use serde::{Deserialize, Serialize}; 4 | 5 | use crate::Error; 6 | 7 | /// Hint the user about which kind of encryption to use. 8 | #[derive(Copy, Clone, Debug, Serialize, Deserialize, PartialEq, Eq)] 9 | pub enum EncryptionHint { 10 | /// Hybridized encryption should be used. 11 | Hybridized, 12 | /// Classic encryption should be used. 
13 | Classic, 14 | } 15 | 16 | impl BitOr for EncryptionHint { 17 | type Output = Self; 18 | 19 | fn bitor(self, rhs: Self) -> Self::Output { 20 | if self == Self::Hybridized || rhs == Self::Hybridized { 21 | Self::Hybridized 22 | } else { 23 | Self::Classic 24 | } 25 | } 26 | } 27 | 28 | impl EncryptionHint { 29 | #[must_use] 30 | pub fn new(is_hybridized: bool) -> Self { 31 | if is_hybridized { 32 | Self::Hybridized 33 | } else { 34 | Self::Classic 35 | } 36 | } 37 | } 38 | 39 | impl From<EncryptionHint> for bool { 40 | fn from(val: EncryptionHint) -> Self { 41 | val == EncryptionHint::Hybridized 42 | } 43 | } 44 | 45 | /// Whether to provide an encryption key in the master public key for this 46 | /// attribute. 47 | #[derive(Copy, Clone, Debug, Serialize, Deserialize, PartialEq, Eq)] 48 | pub enum AttributeStatus { 49 | EncryptDecrypt, 50 | DecryptOnly, 51 | } 52 | 53 | impl BitOr for AttributeStatus { 54 | type Output = Self; 55 | 56 | fn bitor(self, rhs: Self) -> Self::Output { 57 | if self == Self::DecryptOnly || rhs == Self::DecryptOnly { 58 | Self::DecryptOnly 59 | } else { 60 | Self::EncryptDecrypt 61 | } 62 | } 63 | } 64 | 65 | impl From<AttributeStatus> for bool { 66 | fn from(val: AttributeStatus) -> Self { 67 | val == AttributeStatus::EncryptDecrypt 68 | } 69 | } 70 | 71 | /// A qualified attribute is composed of a dimension name and an attribute name. 72 | #[derive(Hash, PartialEq, Eq, Clone, PartialOrd, Ord, Serialize, Deserialize)] 73 | #[serde(try_from = "&str", into = "String")] 74 | pub struct QualifiedAttribute { 75 | pub dimension: String, 76 | pub name: String, 77 | } 78 | 79 | impl QualifiedAttribute { 80 | /// Creates a qualified attribute with the given dimension and attribute names. 81 | #[must_use] 82 | pub fn new(dimension: &str, name: &str) -> Self { 83 | Self { 84 | dimension: dimension.to_owned(), 85 | name: name.to_owned(), 86 | } 87 | } 88 | } 89 | 90 | impl Debug for QualifiedAttribute { 91 | fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { 92 | f.write_fmt(format_args!("{}::{}", &self.dimension, &self.name)) 93 | } 94 | } 95 | 96 | impl std::fmt::Display for QualifiedAttribute { 97 | fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { 98 | write!(f, "{}::{}", self.dimension, self.name) 99 | } 100 | } 101 | 102 | impl From<QualifiedAttribute> for String { 103 | fn from(attr: QualifiedAttribute) -> Self { 104 | attr.to_string() 105 | } 106 | } 107 | 108 | impl From<(&str, &str)> for QualifiedAttribute { 109 | fn from(input: (&str, &str)) -> Self { 110 | Self { 111 | dimension: input.0.to_owned(), 112 | name: input.1.to_owned(), 113 | } 114 | } 115 | } 116 | 117 | impl From<(String, String)> for QualifiedAttribute { 118 | fn from(input: (String, String)) -> Self { 119 | Self { 120 | dimension: input.0, 121 | name: input.1, 122 | } 123 | } 124 | } 125 | 126 | impl TryFrom<&str> for QualifiedAttribute { 127 | type Error = Error; 128 | 129 | fn try_from(s: &str) -> Result<Self, Self::Error> { 130 | let (dimension, component) = s.split_once("::").ok_or_else(|| { 131 | Error::InvalidAttribute(format!("at least one separator '::' expected in {s}")) 132 | })?; 133 | 134 | if component.contains("::") { 135 | return Err(Error::InvalidAttribute(format!( 136 | "separator '::' expected only once in {s}" 137 | ))); 138 | } 139 | 140 | if dimension.is_empty() || component.is_empty() { 141 | return Err(Error::InvalidAttribute(format!( 142 | "empty dimension or empty name in {s}" 143 | ))); 144 | } 145 | 146 | Ok(Self::new(dimension.trim(), component.trim())) 147 | } 148 | } 149 |
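// A minimal sketch of tests mirroring the `TryFrom<&str>` rules above; the
// attribute names used here are purely illustrative.
#[cfg(test)]
mod tests {
    use super::QualifiedAttribute;
    use std::convert::TryFrom;

    #[test]
    fn parses_qualified_attributes() {
        assert_eq!(
            QualifiedAttribute::try_from("DPT::FIN").unwrap(),
            QualifiedAttribute::new("DPT", "FIN")
        );
        // Exactly one "::" separator is expected.
        assert!(QualifiedAttribute::try_from("DPT").is_err());
        assert!(QualifiedAttribute::try_from("DPT::FIN::X").is_err());
        // Empty dimension or component names are rejected.
        assert!(QualifiedAttribute::try_from("::FIN").is_err());
    }
}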
-------------------------------------------------------------------------------- /src/abe_policy/mod.rs: -------------------------------------------------------------------------------- 1 | mod access_policy; 2 | mod access_structure; 3 | mod attribute; 4 | mod dimension; 5 | mod rights; 6 | 7 | #[cfg(any(test, feature = "test-utils"))] 8 | mod tests; 9 | 10 | pub use access_policy::AccessPolicy; 11 | pub use access_structure::AccessStructure; 12 | pub use attribute::{AttributeStatus, EncryptionHint, QualifiedAttribute}; 13 | pub use dimension::{Attribute, Dimension}; 14 | pub use rights::Right; 15 | #[cfg(any(test, feature = "test-utils"))] 16 | pub use tests::gen_structure; 17 | 18 | #[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)] 19 | pub enum Version { 20 | V1, 21 | } 22 | -------------------------------------------------------------------------------- /src/abe_policy/rights.rs: -------------------------------------------------------------------------------- 1 | use cosmian_crypto_core::bytes_ser_de::{to_leb128_len, Deserializer, Serializable, Serializer}; 2 | use std::{hash::Hash, ops::Deref}; 3 | 4 | use crate::Error; 5 | 6 | /// A right is a combination of the IDs of its associated attributes. 7 | #[derive(Clone, Debug, Eq, PartialEq, PartialOrd, Ord, Hash)] 8 | pub struct Right(pub(crate) Vec<u8>); 9 | 10 | impl Right { 11 | /// Returns the right associated to the given point. 12 | /// 13 | /// A point is defined as a sequence of attribute IDs while a right is a compact 14 | /// representation of it that is invariant under any permutation of those IDs. 15 | pub fn from_point(mut attribute_ids: Vec<usize>) -> Result<Self, Error> { 16 | // A set of attributes has no order. Enforcing an order here allows having a unique 17 | // representation for all permutations. 18 | attribute_ids.sort_unstable(); 19 | // Allocate an upper-bound on the actual space required.
20 | let mut ser = Serializer::with_capacity(4 * attribute_ids.len()); 21 | for value in attribute_ids { 22 | ser.write_leb128_u64(u64::try_from(value)?)?; 23 | } 24 | Ok(Self(ser.finalize().to_vec())) 25 | } 26 | } 27 | 28 | impl Deref for Right { 29 | type Target = [u8]; 30 | 31 | fn deref(&self) -> &Self::Target { 32 | &self.0 33 | } 34 | } 35 | 36 | impl From> for Right { 37 | fn from(value: Vec) -> Self { 38 | Self(value) 39 | } 40 | } 41 | 42 | impl From<&[u8]> for Right { 43 | fn from(value: &[u8]) -> Self { 44 | Self(value.to_vec()) 45 | } 46 | } 47 | 48 | impl Serializable for Right { 49 | type Error = Error; 50 | 51 | fn length(&self) -> usize { 52 | to_leb128_len(self.len()) + self.len() 53 | } 54 | 55 | fn write(&self, ser: &mut Serializer) -> Result { 56 | ser.write_vec(self).map_err(Self::Error::from) 57 | } 58 | 59 | fn read(de: &mut Deserializer) -> Result { 60 | let bytes = de.read_vec()?; 61 | Ok(Self(bytes)) 62 | } 63 | } 64 | 65 | #[cfg(test)] 66 | mod tests { 67 | use cosmian_crypto_core::{bytes_ser_de::Deserializer, reexport::rand_core::CryptoRngCore}; 68 | 69 | use super::*; 70 | 71 | impl Right { 72 | pub fn random(rng: &mut impl CryptoRngCore) -> Self { 73 | let mut r = Self(vec![0; 16]); 74 | rng.fill_bytes(&mut r.0); 75 | r 76 | } 77 | } 78 | 79 | #[test] 80 | fn test_rights() -> Result<(), Error> { 81 | let mut values: Vec = vec![12, 0, usize::MAX, 1]; 82 | let r = Right::from_point(values.clone())?; 83 | values.sort_unstable(); 84 | let mut de = Deserializer::new(&r); 85 | for v in values { 86 | let val = de.read_leb128_u64().unwrap() as usize; 87 | assert_eq!(v, val); 88 | } 89 | Ok(()) 90 | } 91 | } 92 | -------------------------------------------------------------------------------- /src/abe_policy/tests.rs: -------------------------------------------------------------------------------- 1 | use crate::{abe_policy::AccessStructure, Error}; 2 | 3 | use super::EncryptionHint; 4 | 5 | pub fn gen_structure(policy: &mut AccessStructure, complete: bool) -> Result<(), Error> { 6 | policy.add_hierarchy("SEC".to_string())?; 7 | 8 | policy.add_attribute( 9 | crate::abe_policy::QualifiedAttribute { 10 | dimension: "SEC".to_string(), 11 | name: "LOW".to_string(), 12 | }, 13 | EncryptionHint::Classic, 14 | None, 15 | )?; 16 | policy.add_attribute( 17 | crate::abe_policy::QualifiedAttribute { 18 | dimension: "SEC".to_string(), 19 | name: "TOP".to_string(), 20 | }, 21 | EncryptionHint::Hybridized, 22 | Some("LOW"), 23 | )?; 24 | 25 | policy.add_anarchy("DPT".to_string())?; 26 | [ 27 | ("RD", EncryptionHint::Classic), 28 | ("HR", EncryptionHint::Classic), 29 | ("MKG", EncryptionHint::Classic), 30 | ("FIN", EncryptionHint::Classic), 31 | ("DEV", EncryptionHint::Classic), 32 | ] 33 | .into_iter() 34 | .try_for_each(|(attribute, hint)| { 35 | policy.add_attribute( 36 | crate::abe_policy::QualifiedAttribute { 37 | dimension: "DPT".to_string(), 38 | name: attribute.to_string(), 39 | }, 40 | hint, 41 | None, 42 | ) 43 | })?; 44 | 45 | if complete { 46 | policy.add_anarchy("CTR".to_string())?; 47 | [ 48 | ("EN", EncryptionHint::Classic), 49 | ("DE", EncryptionHint::Classic), 50 | ("IT", EncryptionHint::Classic), 51 | ("FR", EncryptionHint::Classic), 52 | ("SP", EncryptionHint::Classic), 53 | ] 54 | .into_iter() 55 | .try_for_each(|(attribute, hint)| { 56 | policy.add_attribute( 57 | crate::abe_policy::QualifiedAttribute { 58 | dimension: "CTR".to_string(), 59 | name: attribute.to_string(), 60 | }, 61 | hint, 62 | None, 63 | ) 64 | })?; 65 | } 66 | 67 | Ok(()) 68 | } 69 | 70 | 
#[test] 71 | fn test_edit_anarchic_attributes() { 72 | use super::QualifiedAttribute; 73 | 74 | let mut structure = AccessStructure::new(); 75 | gen_structure(&mut structure, false).unwrap(); 76 | 77 | assert_eq!(structure.attributes().count(), 7); 78 | 79 | // Try renaming Research to already used name MKG 80 | assert!(structure 81 | .rename_attribute(&QualifiedAttribute::new("DPT", "RD"), "MKG".to_string(),) 82 | .is_err()); 83 | 84 | // Rename RD to Research 85 | assert!(structure 86 | .rename_attribute( 87 | &QualifiedAttribute::new("DPT", "RD"), 88 | "Research".to_string(), 89 | ) 90 | .is_ok()); 91 | 92 | let order: Vec<_> = structure 93 | .attributes() 94 | .filter(|a| a.dimension.as_str() == "SEC") 95 | .map(|a| a.name) 96 | .collect(); 97 | 98 | assert!(order.len() == 2); 99 | 100 | // Add new attribute Sales 101 | let new_attr = QualifiedAttribute::new("DPT", "Sales"); 102 | assert!(structure 103 | .add_attribute(new_attr.clone(), EncryptionHint::Classic, None) 104 | .is_ok()); 105 | assert_eq!(structure.attributes().count(), 8); 106 | 107 | // Try adding already existing attribute HR 108 | let duplicate_attr = QualifiedAttribute::new("DPT", "HR"); 109 | assert!(structure 110 | .add_attribute(duplicate_attr, EncryptionHint::Classic, None) 111 | .is_err()); 112 | 113 | // Try adding attribute to non existing dimension 114 | let missing_dimension = QualifiedAttribute::new("Missing", "dimension"); 115 | assert!(structure 116 | .add_attribute(missing_dimension.clone(), EncryptionHint::Classic, None) 117 | .is_err()); 118 | 119 | // Remove research attribute 120 | let delete_attr = QualifiedAttribute::new("DPT", "Research"); 121 | structure.del_attribute(&delete_attr).unwrap(); 122 | assert_eq!(structure.attributes().count(), 7); 123 | 124 | // Duplicate remove 125 | assert!(structure.del_attribute(&delete_attr).is_err()); 126 | 127 | // Missing dimension remove 128 | assert!(structure.del_attribute(&missing_dimension).is_err()); 129 | 130 | // Remove all attributes from a dimension 131 | structure.del_attribute(&new_attr).unwrap(); 132 | structure 133 | .del_attribute(&QualifiedAttribute::new("DPT", "HR")) 134 | .unwrap(); 135 | structure 136 | .del_attribute(&QualifiedAttribute::new("DPT", "MKG")) 137 | .unwrap(); 138 | 139 | structure.del_dimension("DPT").unwrap(); 140 | 141 | assert_eq!(structure.dimensions().count(), 1); 142 | 143 | // Add new dimension 144 | structure.add_anarchy("DimensionTest".to_string()).unwrap(); 145 | structure 146 | .add_attribute( 147 | QualifiedAttribute::new("DimensionTest", "Attr1"), 148 | EncryptionHint::Classic, 149 | None, 150 | ) 151 | .unwrap(); 152 | structure 153 | .add_attribute( 154 | QualifiedAttribute::new("DimensionTest", "Attr2"), 155 | EncryptionHint::Classic, 156 | None, 157 | ) 158 | .unwrap(); 159 | assert_eq!(structure.dimensions().count(), 2); 160 | 161 | //// Remove the new dimension 162 | structure.del_dimension("DimensionTest").unwrap(); 163 | assert_eq!(structure.dimensions().count(), 1); 164 | 165 | //// Try removing non existing dimension 166 | assert!(structure.del_dimension("MissingDim").is_err()); 167 | } 168 | 169 | #[test] 170 | fn test_edit_hierarchic_attributes() { 171 | use super::QualifiedAttribute; 172 | 173 | let mut structure = AccessStructure::new(); 174 | gen_structure(&mut structure, false).unwrap(); 175 | 176 | assert_eq!( 177 | structure 178 | .attributes() 179 | .filter(|a| a.dimension == "SEC") 180 | .collect::>(), 181 | vec![ 182 | QualifiedAttribute { 183 | dimension: "SEC".to_string(), 184 | name: 
"LOW".to_string(), 185 | }, 186 | QualifiedAttribute { 187 | dimension: "SEC".to_string(), 188 | name: "TOP".to_string(), 189 | }, 190 | ] 191 | ); 192 | 193 | // Rename ordered dimension 194 | assert!(structure 195 | .rename_attribute(&QualifiedAttribute::new("SEC", "LOW"), "WOL".to_string(),) 196 | .is_ok()); 197 | 198 | let order = structure.attributes().map(|q| q.name).collect::>(); 199 | assert!(order.contains(&"WOL".to_string())); 200 | assert!(!order.contains(&"LOW".to_string())); 201 | 202 | //// Try modifying hierarchical dimension 203 | structure 204 | .del_attribute(&QualifiedAttribute::new("SEC", "WOL")) 205 | .unwrap(); 206 | 207 | structure 208 | .add_attribute( 209 | QualifiedAttribute::new("SEC", "MID"), 210 | EncryptionHint::Classic, 211 | None, 212 | ) 213 | .unwrap(); 214 | 215 | assert_eq!( 216 | structure 217 | .attributes() 218 | .filter(|a| a.dimension == "SEC") 219 | .collect::>(), 220 | vec![ 221 | QualifiedAttribute { 222 | dimension: "SEC".to_string(), 223 | name: "MID".to_string(), 224 | }, 225 | QualifiedAttribute { 226 | dimension: "SEC".to_string(), 227 | name: "TOP".to_string(), 228 | }, 229 | ] 230 | ); 231 | 232 | structure 233 | .add_attribute( 234 | QualifiedAttribute::new("SEC", "LOW"), 235 | EncryptionHint::Classic, 236 | None, 237 | ) 238 | .unwrap(); 239 | 240 | assert_eq!( 241 | structure 242 | .attributes() 243 | .filter(|a| a.dimension == "SEC") 244 | .collect::>(), 245 | vec![ 246 | QualifiedAttribute { 247 | dimension: "SEC".to_string(), 248 | name: "LOW".to_string(), 249 | }, 250 | QualifiedAttribute { 251 | dimension: "SEC".to_string(), 252 | name: "MID".to_string(), 253 | }, 254 | QualifiedAttribute { 255 | dimension: "SEC".to_string(), 256 | name: "TOP".to_string(), 257 | }, 258 | ] 259 | ); 260 | 261 | structure 262 | .del_attribute(&QualifiedAttribute::new("SEC", "MID")) 263 | .unwrap(); 264 | 265 | structure 266 | .add_attribute( 267 | QualifiedAttribute::new("SEC", "MID"), 268 | EncryptionHint::Classic, 269 | Some("LOW"), 270 | ) 271 | .unwrap(); 272 | 273 | assert_eq!( 274 | structure 275 | .attributes() 276 | .filter(|a| a.dimension == "SEC") 277 | .collect::>(), 278 | vec![ 279 | QualifiedAttribute { 280 | dimension: "SEC".to_string(), 281 | name: "LOW".to_string(), 282 | }, 283 | QualifiedAttribute { 284 | dimension: "SEC".to_string(), 285 | name: "MID".to_string(), 286 | }, 287 | QualifiedAttribute { 288 | dimension: "SEC".to_string(), 289 | name: "TOP".to_string(), 290 | }, 291 | ] 292 | ); 293 | 294 | //// Removing a hierarchical dimension is permitted 295 | structure.del_dimension("SEC").unwrap(); 296 | } 297 | -------------------------------------------------------------------------------- /src/ae.rs: -------------------------------------------------------------------------------- 1 | use cosmian_crypto_core::{ 2 | reexport::rand_core::CryptoRngCore, Aes256Gcm, Dem, FixedSizeCBytes, Instantiable, Nonce, 3 | RandomFixedSizeCBytes, SymmetricKey, 4 | }; 5 | use zeroize::Zeroizing; 6 | 7 | use crate::{traits::AE, Error}; 8 | 9 | impl AE<{ Self::KEY_LENGTH }> for Aes256Gcm { 10 | type Error = Error; 11 | 12 | fn encrypt( 13 | rng: &mut impl CryptoRngCore, 14 | key: &SymmetricKey<{ Self::KEY_LENGTH }>, 15 | ptx: &[u8], 16 | ) -> Result, Error> { 17 | let nonce = Nonce::<{ Self::NONCE_LENGTH }>::new(&mut *rng); 18 | let ciphertext = Self::new(key).encrypt(&nonce, ptx, None)?; 19 | Ok([nonce.as_bytes(), &ciphertext].concat()) 20 | } 21 | 22 | fn decrypt( 23 | key: &SymmetricKey<{ Self::KEY_LENGTH }>, 24 | ctx: &[u8], 25 | ) -> Result>, 
Error> { 26 | if ctx.len() < Self::NONCE_LENGTH { 27 | return Err(Error::CryptoCoreError( 28 | cosmian_crypto_core::CryptoCoreError::DecryptionError, 29 | )); 30 | } 31 | let nonce = Nonce::try_from_slice(&ctx[..Self::NONCE_LENGTH])?; 32 | Self::new(key) 33 | .decrypt(&nonce, &ctx[Self::NONCE_LENGTH..], None) 34 | .map_err(Error::CryptoCoreError) 35 | .map(Zeroizing::new) 36 | } 37 | } 38 | -------------------------------------------------------------------------------- /src/api.rs: -------------------------------------------------------------------------------- 1 | use std::sync::{Mutex, MutexGuard}; 2 | 3 | use cosmian_crypto_core::{reexport::rand_core::SeedableRng, CsRng, Secret, SymmetricKey}; 4 | use zeroize::Zeroizing; 5 | 6 | use super::{ 7 | core::primitives::{prune, update_msk, usk_keygen}, 8 | core::MIN_TRACING_LEVEL, 9 | traits::AE, 10 | }; 11 | use crate::{ 12 | core::{ 13 | primitives::{self, full_decaps, refresh, rekey, setup}, 14 | MasterPublicKey, MasterSecretKey, UserSecretKey, XEnc, SHARED_SECRET_LENGTH, 15 | }, 16 | traits::{KemAc, PkeAc}, 17 | AccessPolicy, Error, 18 | }; 19 | #[derive(Debug)] 20 | pub struct Covercrypt { 21 | rng: Mutex<CsRng>, 22 | } 23 | 24 | impl Default for Covercrypt { 25 | fn default() -> Self { 26 | Self { 27 | rng: Mutex::new(CsRng::from_entropy()), 28 | } 29 | } 30 | } 31 | 32 | impl Covercrypt { 33 | pub fn rng(&self) -> MutexGuard<CsRng> { 34 | self.rng.lock().expect("poisoned mutex") 35 | } 36 | 37 | /// Sets up the Covercrypt scheme. 38 | /// 39 | /// Generates a MSK and a MPK only holding broadcasting keys, and with a 40 | /// tracing level of [`MIN_TRACING_LEVEL`](core::MIN_TRACING_LEVEL). 41 | pub fn setup(&self) -> Result<(MasterSecretKey, MasterPublicKey), Error> { 42 | let mut rng = self.rng.lock().expect("Mutex lock failed!"); 43 | let mut msk = setup(MIN_TRACING_LEVEL, &mut *rng)?; 44 | let rights = msk.access_structure.omega()?; 45 | update_msk(&mut *rng, &mut msk, rights)?; 46 | let mpk = msk.mpk()?; 47 | Ok((msk, mpk)) 48 | } 49 | 50 | /// Updates the MSK according to its access structure. Returns the new 51 | /// version of the MPK. 52 | /// 53 | /// Sets the MSK rights to the ones defined by the access structure: 54 | /// 55 | /// - removes rights from the MSK that don't belong to the access structure 56 | /// along with their associated secrets; 57 | /// 58 | /// - adds the rights that don't belong yet to the MSK, generating new 59 | /// secrets. 60 | /// 61 | /// The new MPK holds the latest encryption key of each right of the access 62 | /// structure. 63 | // TODO: this function should be internalized and replaced by specialized 64 | // functions. 65 | pub fn update_msk(&self, msk: &mut MasterSecretKey) -> Result<MasterPublicKey, Error> { 66 | update_msk( 67 | &mut *self.rng.lock().expect("Mutex lock failed!"), 68 | msk, 69 | msk.access_structure.omega()?, 70 | )?; 71 | msk.mpk() 72 | } 73 | 74 | /// Generates new secrets for each right a USK associated to the given 75 | /// access policy would hold, updates the MSK and returns the new MPK. 76 | /// 77 | /// User keys need to be refreshed. 78 | // TODO document error cases. 79 | pub fn rekey( 80 | &self, 81 | msk: &mut MasterSecretKey, 82 | ap: &AccessPolicy, 83 | ) -> Result<MasterPublicKey, Error> { 84 | rekey( 85 | &mut *self.rng.lock().expect("Mutex lock failed!"), 86 | msk, 87 | msk.access_structure.ap_to_usk_rights(ap)?, 88 | )?; 89 | msk.mpk() 90 | } 91 | 92 | /// Removes from the master secret key all but the latest secret of each 93 | /// right a USK associated to the given access policy would hold.
Returns 94 | /// the new MPK. 95 | /// 96 | /// This action is *irreversible*, and all user keys need to be refreshed. 97 | // TODO document error cases. 98 | pub fn prune_master_secret_key( 99 | &self, 100 | msk: &mut MasterSecretKey, 101 | ap: &AccessPolicy, 102 | ) -> Result<MasterPublicKey, Error> { 103 | prune(msk, &msk.access_structure.ap_to_usk_rights(ap)?); 104 | msk.mpk() 105 | } 106 | 107 | /// Generates a USK associated to the given access policy. 108 | /// 109 | /// The new key is given the latest secret of each right in the 110 | /// complementary space of its access policy. 111 | // TODO document error cases. 112 | pub fn generate_user_secret_key( 113 | &self, 114 | msk: &mut MasterSecretKey, 115 | ap: &AccessPolicy, 116 | ) -> Result<UserSecretKey, Error> { 117 | usk_keygen( 118 | &mut *self.rng.lock().expect("Mutex lock failed!"), 119 | msk, 120 | msk.access_structure.ap_to_usk_rights(ap)?, 121 | ) 122 | } 123 | 124 | /// Refreshes the USK with respect to the given MSK. 125 | /// 126 | /// The USK is given all missing secrets since the first secret held by the 127 | /// USK, for each right in the complementary space of its access 128 | /// policy. Secrets held by the USK that have been removed from the MSK are 129 | /// removed. 130 | /// 131 | /// If `keep_old_secrets` is set to false, only the latest secret of each 132 | /// right is kept instead. 133 | /// 134 | /// Updates the tracing level to match the one of the MSK if needed. 135 | // TODO document error cases. 136 | pub fn refresh_usk( 137 | &self, 138 | msk: &mut MasterSecretKey, 139 | usk: &mut UserSecretKey, 140 | keep_old_secrets: bool, 141 | ) -> Result<(), Error> { 142 | refresh( 143 | &mut *self.rng.lock().expect("Mutex lock failed!"), 144 | msk, 145 | usk, 146 | keep_old_secrets, 147 | ) 148 | } 149 | 150 | /// Returns a new encapsulation with the same rights as the one given, along 151 | /// with a freshly generated shared secret. 152 | pub fn recaps( 153 | &self, 154 | msk: &MasterSecretKey, 155 | mpk: &MasterPublicKey, 156 | encapsulation: &XEnc, 157 | ) -> Result<(Secret<32>, XEnc), Error> { 158 | let (_ss, rights) = full_decaps(msk, encapsulation)?; 159 | primitives::encaps( 160 | &mut *self.rng.lock().expect("Mutex lock failed!"), 161 | mpk, 162 | &rights, 163 | ) 164 | } 165 | } 166 | 167 | impl KemAc<SHARED_SECRET_LENGTH> for Covercrypt { 168 | type EncapsulationKey = MasterPublicKey; 169 | type DecapsulationKey = UserSecretKey; 170 | type Encapsulation = XEnc; 171 | type Error = Error; 172 | 173 | fn encaps( 174 | &self, 175 | ek: &Self::EncapsulationKey, 176 | ap: &AccessPolicy, 177 | ) -> Result<(Secret<SHARED_SECRET_LENGTH>, Self::Encapsulation), Self::Error> { 178 | primitives::encaps( 179 | &mut *self.rng.lock().expect("Mutex lock failed!"), 180 | ek, 181 | &ek.access_structure.ap_to_enc_rights(ap)?, 182 | ) 183 | } 184 | 185 | fn decaps( 186 | &self, 187 | dk: &Self::DecapsulationKey, 188 | enc: &Self::Encapsulation, 189 | ) -> Result<Option<Secret<SHARED_SECRET_LENGTH>>, Error> { 190 | primitives::decaps(&mut *self.rng.lock().expect("Mutex lock failed!"), dk, enc) 191 | } 192 | } 193 | 194 | impl<const KEY_LENGTH: usize, E: AE<KEY_LENGTH, Error = Error>> PkeAc<KEY_LENGTH, E> 195 | for Covercrypt 196 | { 197 | type EncryptionKey = MasterPublicKey; 198 | type DecryptionKey = UserSecretKey; 199 | type Ciphertext = (XEnc, Vec<u8>); 200 | type Error = Error; 201 | 202 | fn encrypt( 203 | &self, 204 | mpk: &Self::EncryptionKey, 205 | ap: &AccessPolicy, 206 | ptx: &[u8], 207 | ) -> Result<Self::Ciphertext, Self::Error> { 208 | let (seed, enc) = self.encaps(mpk, ap)?; 209 | // Locking Covercrypt RNG must be performed after encapsulation since 210 | // this encapsulation also requires locking the RNG.
211 | let mut rng = self.rng.lock().expect("poisoned lock"); 212 | let key = SymmetricKey::derive(&seed, b"Covercrypt AE key")?; 213 | E::encrypt(&mut *rng, &key, ptx).map(|ctx| (enc, ctx)) 214 | } 215 | 216 | fn decrypt( 217 | &self, 218 | usk: &Self::DecryptionKey, 219 | ctx: &Self::Ciphertext, 220 | ) -> Result>>, Self::Error> { 221 | self.decaps(usk, &ctx.0)? 222 | .map(|seed| { 223 | let key = SymmetricKey::derive(&seed, b"Covercrypt AE key")?; 224 | E::decrypt(&key, &ctx.1) 225 | }) 226 | .transpose() 227 | } 228 | } 229 | -------------------------------------------------------------------------------- /src/core/kem.rs: -------------------------------------------------------------------------------- 1 | #[cfg(all(feature = "mlkem-512", feature = "mlkem-768"))] 2 | compile_error!("only one MLKEM version can be chosen at a time"); 3 | 4 | pub mod mlkem; 5 | 6 | #[cfg(feature = "mlkem-512")] 7 | pub use mlkem::MlKem512 as MlKem; 8 | 9 | #[cfg(feature = "mlkem-768")] 10 | pub use mlkem::MlKem768 as MlKem; 11 | -------------------------------------------------------------------------------- /src/core/kem/mlkem.rs: -------------------------------------------------------------------------------- 1 | use cosmian_crypto_core::bytes_ser_de::{Deserializer, Serializable, Serializer}; 2 | use cosmian_crypto_core::{reexport::rand_core::CryptoRngCore, Secret}; 3 | use ml_kem::{ 4 | array::Array, 5 | kem::{Decapsulate, Encapsulate}, 6 | EncodedSizeUser, KemCore, 7 | }; 8 | use zeroize::Zeroize; 9 | 10 | use crate::traits::Kem; 11 | use crate::{core::SHARED_SECRET_LENGTH, Error}; 12 | 13 | macro_rules! make_mlkem { 14 | ($base: ident, $ek: ident, $ek_len: literal, $dk: ident, $dk_len: literal, $enc: ident, $enc_len:literal) => { 15 | #[derive(Debug, PartialEq, Clone)] 16 | pub struct $ek(Box<::EncapsulationKey>); 17 | 18 | impl Serializable for $ek { 19 | type Error = Error; 20 | 21 | fn length(&self) -> usize { 22 | $ek_len 23 | } 24 | 25 | fn write(&self, ser: &mut Serializer) -> Result { 26 | let mut bytes = self.0.as_bytes(); 27 | let n = ser.write_array(&bytes)?; 28 | bytes.zeroize(); 29 | Ok(n) 30 | } 31 | 32 | fn read(de: &mut Deserializer) -> Result { 33 | let mut bytes = Array::from(de.read_array::<$ek_len>()?); 34 | let ek = <::EncapsulationKey>::from_bytes(&bytes); 35 | bytes.zeroize(); 36 | Ok(Self(Box::new(ek))) 37 | } 38 | } 39 | 40 | #[derive(Debug, Clone, PartialEq)] 41 | pub struct $dk(Box<::DecapsulationKey>); 42 | 43 | #[allow(dead_code)] 44 | impl $dk { 45 | pub fn ek(&self) -> $ek { 46 | $ek(Box::new(self.0.encapsulation_key().clone())) 47 | } 48 | } 49 | 50 | impl Serializable for $dk { 51 | type Error = Error; 52 | 53 | fn length(&self) -> usize { 54 | $dk_len 55 | } 56 | 57 | fn write(&self, ser: &mut Serializer) -> Result { 58 | let mut bytes = self.0.as_bytes(); 59 | let n = ser.write_array(&bytes)?; 60 | bytes.zeroize(); 61 | Ok(n) 62 | } 63 | 64 | fn read(de: &mut Deserializer) -> Result { 65 | let mut bytes = Array::from(de.read_array::<$dk_len>()?); 66 | let dk = <::DecapsulationKey>::from_bytes(&bytes); 67 | bytes.zeroize(); 68 | Ok(Self(Box::new(dk))) 69 | } 70 | } 71 | 72 | #[derive(Debug, PartialEq, Eq, Clone, Hash)] 73 | pub struct $enc(Box::CiphertextSize>>); 74 | 75 | impl Serializable for $enc { 76 | type Error = Error; 77 | 78 | fn length(&self) -> usize { 79 | $enc_len 80 | } 81 | 82 | fn write(&self, ser: &mut Serializer) -> Result { 83 | Ok(ser.write_array(&self.0)?) 
84 | } 85 | 86 | fn read(de: &mut Deserializer) -> Result { 87 | Ok(Self(Box::new(Array::< 88 | u8, 89 | ::CiphertextSize, 90 | >::from(de.read_array::<$enc_len>()?)))) 91 | } 92 | } 93 | 94 | pub struct $base; 95 | 96 | impl Kem for $base { 97 | type EncapsulationKey = $ek; 98 | type DecapsulationKey = $dk; 99 | type SessionKey = Secret; 100 | 101 | type Encapsulation = $enc; 102 | 103 | type Error = Error; 104 | 105 | fn keygen( 106 | rng: &mut impl CryptoRngCore, 107 | ) -> Result<(Self::DecapsulationKey, Self::EncapsulationKey), Self::Error> { 108 | let (dk, ek) = ::generate(rng); 109 | Ok(($dk(Box::new(dk)), $ek(Box::new(ek)))) 110 | } 111 | 112 | fn enc( 113 | ek: &Self::EncapsulationKey, 114 | rng: &mut impl CryptoRngCore, 115 | ) -> Result<(Self::SessionKey, Self::Encapsulation), Self::Error> { 116 | let (enc, mut ss) = 117 | ek.0.encapsulate(rng) 118 | .map_err(|e| Error::Kem(format!("{:?}", e)))?; 119 | let ss = Secret::from_unprotected_bytes(ss.as_mut()); 120 | Ok((ss, $enc(Box::new(enc)))) 121 | } 122 | 123 | fn dec( 124 | dk: &Self::DecapsulationKey, 125 | enc: &Self::Encapsulation, 126 | ) -> Result { 127 | let mut ss = 128 | dk.0.decapsulate(&enc.0) 129 | .map_err(|e| Self::Error::Kem(format!("{e:?}")))?; 130 | let ss = Secret::from_unprotected_bytes(ss.as_mut()); 131 | Ok(ss) 132 | } 133 | } 134 | }; 135 | } 136 | 137 | make_mlkem!( 138 | MlKem512, 139 | EncapsulationKey512, 140 | 800, 141 | DecapsulationKey512, 142 | 1632, 143 | Encapsulation512, 144 | 768 145 | ); 146 | 147 | make_mlkem!( 148 | MlKem768, 149 | EncapsulationKey768, 150 | 1184, 151 | DecapsulationKey768, 152 | 2400, 153 | Encapsulation768, 154 | 1088 155 | ); 156 | 157 | #[cfg(test)] 158 | mod tests { 159 | use cosmian_crypto_core::{ 160 | bytes_ser_de::test_serialization, reexport::rand_core::SeedableRng, CsRng, 161 | }; 162 | 163 | use super::*; 164 | 165 | macro_rules! 
test_mlkem { 166 | ($base:ident, $test_name:ident) => { 167 | #[test] 168 | fn $test_name() { 169 | let mut rng = CsRng::from_entropy(); 170 | let (dk, ek) = $base::keygen(&mut rng).unwrap(); 171 | test_serialization(&dk).unwrap(); 172 | test_serialization(&ek).unwrap(); 173 | let (ss1, enc) = $base::enc(&ek, &mut rng).unwrap(); 174 | test_serialization(&enc).unwrap(); 175 | let ss2 = $base::dec(&dk, &enc).unwrap(); 176 | assert_eq!(ss1, ss2); 177 | } 178 | }; 179 | } 180 | 181 | test_mlkem!(MlKem512, test_mlkem512); 182 | test_mlkem!(MlKem768, test_mlkem768); 183 | } 184 | -------------------------------------------------------------------------------- /src/core/nike.rs: -------------------------------------------------------------------------------- 1 | #[cfg(all(feature = "curve25519", feature = "p-256"))] 2 | compile_error!("only one elliptic curve can be chosen at a time"); 3 | 4 | #[cfg(feature = "curve25519")] 5 | mod r25519; 6 | 7 | #[cfg(feature = "curve25519")] 8 | pub use r25519::R25519 as ElGamal; 9 | 10 | #[cfg(feature = "p-256")] 11 | mod p256; 12 | 13 | #[cfg(feature = "p-256")] 14 | pub use p256::P256 as ElGamal; 15 | -------------------------------------------------------------------------------- /src/core/nike/p256.rs: -------------------------------------------------------------------------------- 1 | use std::hash::Hash; 2 | use std::iter::Sum; 3 | use std::ops::Add; 4 | use std::ops::AddAssign; 5 | use std::ops::Div; 6 | use std::ops::Mul; 7 | use std::ops::MulAssign; 8 | use std::ops::Sub; 9 | use std::ops::SubAssign; 10 | 11 | use cosmian_crypto_core::bytes_ser_de::Deserializer; 12 | use cosmian_crypto_core::bytes_ser_de::Serializable; 13 | use cosmian_crypto_core::bytes_ser_de::Serializer; 14 | use cosmian_crypto_core::CryptoCoreError; 15 | use elliptic_curve::group::GroupEncoding; 16 | use elliptic_curve::rand_core::CryptoRngCore; 17 | use elliptic_curve::Field; 18 | use elliptic_curve::PrimeField; 19 | use p256::{ProjectivePoint, Scalar}; 20 | use subtle::ConstantTimeEq; 21 | use tiny_keccak::Hasher; 22 | use tiny_keccak::Sha3; 23 | use zeroize::Zeroize; 24 | 25 | use crate::traits::Group; 26 | use crate::traits::KeyHomomorphicNike; 27 | use crate::traits::Nike; 28 | use crate::traits::One; 29 | use crate::traits::Ring; 30 | use crate::traits::Sampling; 31 | use crate::traits::Zero; 32 | use crate::Error; 33 | 34 | #[derive(Clone, Debug, PartialEq, Eq, Zeroize)] 35 | pub struct P256Point(ProjectivePoint); 36 | 37 | impl Zero for P256Point { 38 | fn zero() -> Self { 39 | Self(ProjectivePoint::IDENTITY) 40 | } 41 | 42 | fn is_zero(&self) -> bool { 43 | self.0.ct_eq(&ProjectivePoint::IDENTITY).into() 44 | } 45 | } 46 | 47 | impl Add for P256Point { 48 | type Output = Self; 49 | 50 | fn add(self, rhs: Self) -> Self::Output { 51 | &self + &rhs 52 | } 53 | } 54 | 55 | impl Add<&P256Point> for P256Point { 56 | type Output = Self; 57 | 58 | fn add(self, rhs: &P256Point) -> Self::Output { 59 | &self + rhs 60 | } 61 | } 62 | 63 | impl Add<&P256Point> for &P256Point { 64 | type Output = P256Point; 65 | 66 | fn add(self, rhs: &P256Point) -> Self::Output { 67 | P256Point(self.0 + rhs.0) 68 | } 69 | } 70 | 71 | impl AddAssign for P256Point { 72 | fn add_assign(&mut self, rhs: Self) { 73 | self.0 = self.0 + rhs.0; 74 | } 75 | } 76 | 77 | impl Sub for P256Point { 78 | type Output = Self; 79 | 80 | fn sub(self, rhs: Self) -> Self::Output { 81 | &self - &rhs 82 | } 83 | } 84 | 85 | impl SubAssign for P256Point { 86 | fn sub_assign(&mut self, rhs: Self) { 87 | self.0 = self.0 - 
rhs.0 88 | } 89 | } 90 | 91 | impl Sub<&P256Point> for P256Point { 92 | type Output = Self; 93 | 94 | fn sub(self, rhs: &P256Point) -> Self::Output { 95 | &self - rhs 96 | } 97 | } 98 | 99 | impl Sub<&P256Point> for &P256Point { 100 | type Output = P256Point; 101 | 102 | fn sub(self, rhs: &P256Point) -> Self::Output { 103 | P256Point(self.0 - rhs.0) 104 | } 105 | } 106 | 107 | impl Group for P256Point {} 108 | 109 | impl Serializable for P256Point { 110 | type Error = CryptoCoreError; 111 | 112 | fn length(&self) -> usize { 113 | 33 114 | } 115 | 116 | fn write(&self, ser: &mut Serializer) -> Result { 117 | ser.write_array(&self.0.to_bytes()) 118 | } 119 | 120 | fn read(de: &mut Deserializer) -> Result { 121 | let bytes = de.read_array::<33>()?; 122 | let point = ProjectivePoint::from_bytes(&bytes.into()) 123 | .into_option() 124 | .ok_or_else(|| { 125 | CryptoCoreError::GenericDeserializationError("cannot deserialize point".to_string()) 126 | })?; 127 | Ok(Self(point)) 128 | } 129 | } 130 | 131 | impl Sum for P256Point { 132 | fn sum>(iter: I) -> Self { 133 | iter.fold(Self::zero(), |a, p| a + p) 134 | } 135 | } 136 | 137 | #[derive(Clone, Debug, PartialEq, Eq, Zeroize)] 138 | pub struct P256Scalar(Scalar); 139 | 140 | impl Hash for P256Scalar { 141 | fn hash(&self, state: &mut H) { 142 | state.write(&self.0.to_bytes()); 143 | } 144 | } 145 | 146 | impl Zero for P256Scalar { 147 | fn zero() -> Self { 148 | Self(Scalar::ZERO) 149 | } 150 | 151 | fn is_zero(&self) -> bool { 152 | self.0.ct_eq(&Scalar::ZERO).into() 153 | } 154 | } 155 | 156 | impl One for P256Scalar { 157 | fn one() -> Self { 158 | Self(Scalar::ONE) 159 | } 160 | 161 | fn is_one(&self) -> bool { 162 | self.0.ct_eq(&Scalar::ONE).into() 163 | } 164 | } 165 | 166 | impl Add for P256Scalar { 167 | type Output = Self; 168 | 169 | fn add(self, rhs: Self) -> Self::Output { 170 | &self + &rhs 171 | } 172 | } 173 | 174 | impl AddAssign for P256Scalar { 175 | fn add_assign(&mut self, rhs: Self) { 176 | self.0 = self.0 + rhs.0; 177 | } 178 | } 179 | 180 | impl Add<&P256Scalar> for P256Scalar { 181 | type Output = Self; 182 | 183 | fn add(self, rhs: &P256Scalar) -> Self::Output { 184 | &self + rhs 185 | } 186 | } 187 | 188 | impl Add<&P256Scalar> for &P256Scalar { 189 | type Output = P256Scalar; 190 | 191 | fn add(self, rhs: &P256Scalar) -> Self::Output { 192 | P256Scalar(self.0 + rhs.0) 193 | } 194 | } 195 | 196 | impl Sub for P256Scalar { 197 | type Output = Self; 198 | 199 | fn sub(self, rhs: Self) -> Self::Output { 200 | &self - &rhs 201 | } 202 | } 203 | 204 | impl SubAssign for P256Scalar { 205 | fn sub_assign(&mut self, rhs: Self) { 206 | self.0 = self.0 - rhs.0 207 | } 208 | } 209 | 210 | impl Sub<&P256Scalar> for P256Scalar { 211 | type Output = Self; 212 | 213 | fn sub(self, rhs: &P256Scalar) -> Self::Output { 214 | &self - rhs 215 | } 216 | } 217 | 218 | impl Sub<&P256Scalar> for &P256Scalar { 219 | type Output = P256Scalar; 220 | 221 | fn sub(self, rhs: &P256Scalar) -> Self::Output { 222 | P256Scalar(self.0 - rhs.0) 223 | } 224 | } 225 | 226 | impl Mul for P256Scalar { 227 | type Output = Self; 228 | 229 | fn mul(self, rhs: Self) -> Self::Output { 230 | &self * &rhs 231 | } 232 | } 233 | 234 | impl MulAssign for P256Scalar { 235 | fn mul_assign(&mut self, rhs: Self) { 236 | self.0 = self.0 * rhs.0 237 | } 238 | } 239 | 240 | impl Mul<&P256Scalar> for P256Scalar { 241 | type Output = Self; 242 | 243 | fn mul(self, rhs: &P256Scalar) -> Self::Output { 244 | &self * rhs 245 | } 246 | } 247 | 248 | impl Mul<&P256Scalar> for 
&P256Scalar { 249 | type Output = P256Scalar; 250 | 251 | fn mul(self, rhs: &P256Scalar) -> Self::Output { 252 | P256Scalar(self.0 * rhs.0) 253 | } 254 | } 255 | 256 | impl Div for P256Scalar { 257 | type Output = Result; 258 | 259 | fn div(self, rhs: Self) -> Self::Output { 260 | &self / &rhs 261 | } 262 | } 263 | 264 | impl Div<&P256Scalar> for P256Scalar { 265 | type Output = Result; 266 | 267 | fn div(self, rhs: &P256Scalar) -> Self::Output { 268 | &self / rhs 269 | } 270 | } 271 | 272 | impl Div<&P256Scalar> for &P256Scalar { 273 | type Output = Result; 274 | 275 | fn div(self, rhs: &P256Scalar) -> Self::Output { 276 | rhs.0 277 | .invert() 278 | .map(|rhs| self.0 * rhs) 279 | .map(P256Scalar) 280 | .into_option() 281 | .ok_or_else(|| Error::OperationNotPermitted("Division by zero".to_string())) 282 | } 283 | } 284 | 285 | impl Sum for P256Scalar { 286 | fn sum>(iter: I) -> Self { 287 | iter.fold(Self::zero(), |a, s| a + s) 288 | } 289 | } 290 | 291 | impl Group for P256Scalar {} 292 | 293 | impl Ring for P256Scalar { 294 | type DivError = Error; 295 | } 296 | 297 | impl Serializable for P256Scalar { 298 | type Error = CryptoCoreError; 299 | 300 | fn length(&self) -> usize { 301 | 32 302 | } 303 | 304 | fn write(&self, ser: &mut Serializer) -> Result { 305 | ser.write_array(&self.0.to_bytes()) 306 | } 307 | 308 | fn read(de: &mut Deserializer) -> Result { 309 | let bytes = de.read_array::<32>()?; 310 | let scalar = Scalar::from_repr(bytes.into()) 311 | .into_option() 312 | .ok_or_else(|| { 313 | CryptoCoreError::GenericDeserializationError( 314 | "cannot deserialize scalar".to_string(), 315 | ) 316 | })?; 317 | Ok(Self(scalar)) 318 | } 319 | } 320 | 321 | impl Sampling for P256Scalar { 322 | fn random(rng: &mut impl CryptoRngCore) -> Self { 323 | Self(Scalar::random(rng)) 324 | } 325 | 326 | fn hash(seed: &[u8]) -> Self { 327 | let mut i = 0u32; 328 | loop { 329 | let mut hasher = Sha3::v256(); 330 | let mut bytes = [0; 32]; 331 | hasher.update(seed); 332 | hasher.update(&i.to_be_bytes()); 333 | hasher.finalize(&mut bytes); 334 | let s = Self::deserialize(&bytes); 335 | bytes.zeroize(); 336 | if let Ok(s) = s { 337 | return s; 338 | } else { 339 | i += 1; 340 | } 341 | } 342 | } 343 | } 344 | 345 | impl From<&P256Scalar> for P256Point { 346 | fn from(s: &P256Scalar) -> Self { 347 | P256Point(ProjectivePoint::GENERATOR * s.0) 348 | } 349 | } 350 | 351 | impl Mul for P256Point { 352 | type Output = Self; 353 | 354 | fn mul(self, rhs: P256Scalar) -> Self::Output { 355 | &self * &rhs 356 | } 357 | } 358 | 359 | impl MulAssign for P256Point { 360 | fn mul_assign(&mut self, rhs: P256Scalar) { 361 | self.0 = self.0 * rhs.0 362 | } 363 | } 364 | 365 | impl Mul<&P256Scalar> for P256Point { 366 | type Output = Self; 367 | 368 | fn mul(self, rhs: &P256Scalar) -> Self::Output { 369 | &self * rhs 370 | } 371 | } 372 | 373 | impl Mul<&P256Scalar> for &P256Point { 374 | type Output = P256Point; 375 | 376 | fn mul(self, rhs: &P256Scalar) -> Self::Output { 377 | P256Point(self.0 * rhs.0) 378 | } 379 | } 380 | 381 | pub struct P256; 382 | 383 | impl Nike for P256 { 384 | type SecretKey = P256Scalar; 385 | type PublicKey = P256Point; 386 | type SessionKey = P256Point; 387 | type Error = Error; 388 | 389 | fn keygen( 390 | rng: &mut impl CryptoRngCore, 391 | ) -> Result<(Self::SecretKey, Self::PublicKey), Self::Error> { 392 | let sk = Self::SecretKey::random(rng); 393 | let pk = Self::PublicKey::from(&sk); 394 | Ok((sk, pk)) 395 | } 396 | 397 | fn session_key( 398 | sk: &Self::SecretKey, 399 | pk: 
&Self::PublicKey, 400 | ) -> Result { 401 | Ok(pk * sk) 402 | } 403 | } 404 | 405 | impl KeyHomomorphicNike for P256 {} 406 | 407 | #[cfg(test)] 408 | mod tests { 409 | use cosmian_crypto_core::{ 410 | bytes_ser_de::test_serialization, reexport::rand_core::SeedableRng, CsRng, 411 | }; 412 | 413 | use super::*; 414 | 415 | #[test] 416 | fn test_p256() { 417 | let mut rng = CsRng::from_entropy(); 418 | let (sk1, pk1) = P256::keygen(&mut rng).unwrap(); 419 | let (sk2, pk2) = P256::keygen(&mut rng).unwrap(); 420 | test_serialization(&sk1).unwrap(); 421 | test_serialization(&pk1).unwrap(); 422 | test_serialization(&sk2).unwrap(); 423 | test_serialization(&pk2).unwrap(); 424 | let ss1 = P256::session_key(&sk1, &pk2).unwrap(); 425 | let ss2 = P256::session_key(&sk2, &pk1).unwrap(); 426 | assert_eq!(ss1, ss2); 427 | } 428 | } 429 | -------------------------------------------------------------------------------- /src/core/nike/r25519.rs: -------------------------------------------------------------------------------- 1 | use std::iter::Sum; 2 | use std::ops::Add; 3 | use std::ops::AddAssign; 4 | use std::ops::Deref; 5 | use std::ops::Div; 6 | use std::ops::Mul; 7 | use std::ops::MulAssign; 8 | use std::ops::Sub; 9 | use std::ops::SubAssign; 10 | 11 | use cosmian_crypto_core::bytes_ser_de::Deserializer; 12 | use cosmian_crypto_core::bytes_ser_de::Serializable; 13 | use cosmian_crypto_core::bytes_ser_de::Serializer; 14 | use cosmian_crypto_core::reexport::rand_core::CryptoRngCore; 15 | 16 | use cosmian_crypto_core::CryptoCoreError; 17 | pub use cosmian_crypto_core::R25519PrivateKey as Scalar; 18 | pub use cosmian_crypto_core::R25519PublicKey as EcPoint; 19 | use tiny_keccak::Hasher; 20 | use tiny_keccak::Sha3; 21 | use zeroize::Zeroize; 22 | 23 | use crate::traits::Group; 24 | use crate::traits::KeyHomomorphicNike; 25 | use crate::traits::Nike; 26 | use crate::traits::One; 27 | use crate::traits::Ring; 28 | use crate::traits::Sampling; 29 | use crate::traits::Zero; 30 | use crate::Error; 31 | 32 | #[derive(Clone, Debug, PartialEq, Eq, Zeroize)] 33 | pub struct R25519Point(EcPoint); 34 | 35 | impl Zero for R25519Point { 36 | fn zero() -> Self { 37 | Self(EcPoint::identity()) 38 | } 39 | 40 | fn is_zero(&self) -> bool { 41 | self == &Self::zero() 42 | } 43 | } 44 | 45 | impl Add for R25519Point { 46 | type Output = Self; 47 | 48 | fn add(self, rhs: Self) -> Self::Output { 49 | Self(self.0 + &rhs.0) 50 | } 51 | } 52 | 53 | impl Add<&R25519Point> for R25519Point { 54 | type Output = Self; 55 | 56 | fn add(self, rhs: &R25519Point) -> Self::Output { 57 | Self(self.0 + &rhs.0) 58 | } 59 | } 60 | 61 | impl Add<&R25519Point> for &R25519Point { 62 | type Output = R25519Point; 63 | 64 | fn add(self, rhs: &R25519Point) -> Self::Output { 65 | R25519Point(&self.0 + &rhs.0) 66 | } 67 | } 68 | 69 | impl AddAssign for R25519Point { 70 | fn add_assign(&mut self, rhs: Self) { 71 | self.0 = &self.0 + &rhs.0; 72 | } 73 | } 74 | 75 | impl Sub for R25519Point { 76 | type Output = Self; 77 | 78 | fn sub(self, rhs: Self) -> Self::Output { 79 | Self(&self.0 - &rhs.0) 80 | } 81 | } 82 | 83 | impl SubAssign for R25519Point { 84 | fn sub_assign(&mut self, rhs: Self) { 85 | self.0 = &self.0 - &rhs.0 86 | } 87 | } 88 | 89 | impl Sub<&R25519Point> for R25519Point { 90 | type Output = Self; 91 | 92 | fn sub(self, rhs: &R25519Point) -> Self::Output { 93 | Self(&self.0 - &rhs.0) 94 | } 95 | } 96 | 97 | impl Sub<&R25519Point> for &R25519Point { 98 | type Output = R25519Point; 99 | 100 | fn sub(self, rhs: &R25519Point) -> Self::Output { 
101 | R25519Point(&self.0 - &rhs.0) 102 | } 103 | } 104 | 105 | impl Group for R25519Point {} 106 | 107 | impl Serializable for R25519Point { 108 | type Error = Error; 109 | 110 | fn length(&self) -> usize { 111 | self.0.length() 112 | } 113 | 114 | fn write(&self, ser: &mut Serializer) -> Result { 115 | self.0.write(ser).map_err(Self::Error::from) 116 | } 117 | 118 | fn read(de: &mut Deserializer) -> Result { 119 | de.read().map(Self).map_err(Self::Error::from) 120 | } 121 | } 122 | 123 | impl Sum for R25519Point { 124 | fn sum>(iter: I) -> Self { 125 | iter.fold(Self::zero(), |a, p| a + p) 126 | } 127 | } 128 | 129 | #[derive(Clone, Debug, PartialEq, Eq, Hash)] 130 | pub struct R25519Scalar(Scalar); 131 | 132 | impl Deref for R25519Scalar { 133 | type Target = [u8]; 134 | 135 | fn deref(&self) -> &Self::Target { 136 | self.0.as_bytes() 137 | } 138 | } 139 | 140 | impl Zero for R25519Scalar { 141 | fn zero() -> Self { 142 | Self(Scalar::zero()) 143 | } 144 | 145 | fn is_zero(&self) -> bool { 146 | self == &Self::zero() 147 | } 148 | } 149 | 150 | impl One for R25519Scalar { 151 | fn one() -> Self { 152 | Self(Scalar::one()) 153 | } 154 | 155 | fn is_one(&self) -> bool { 156 | self == &Self::one() 157 | } 158 | } 159 | 160 | impl Add for R25519Scalar { 161 | type Output = Self; 162 | 163 | fn add(self, rhs: Self) -> Self::Output { 164 | Self(self.0 + rhs.0) 165 | } 166 | } 167 | 168 | impl AddAssign for R25519Scalar { 169 | fn add_assign(&mut self, rhs: Self) { 170 | self.0 = &self.0 + &rhs.0; 171 | } 172 | } 173 | 174 | impl Add<&R25519Scalar> for R25519Scalar { 175 | type Output = Self; 176 | 177 | fn add(self, rhs: &R25519Scalar) -> Self::Output { 178 | Self(&self.0 + &rhs.0) 179 | } 180 | } 181 | 182 | impl Add<&R25519Scalar> for &R25519Scalar { 183 | type Output = R25519Scalar; 184 | 185 | fn add(self, rhs: &R25519Scalar) -> Self::Output { 186 | R25519Scalar(&self.0 + &rhs.0) 187 | } 188 | } 189 | 190 | impl Sub for R25519Scalar { 191 | type Output = Self; 192 | 193 | fn sub(self, rhs: Self) -> Self::Output { 194 | Self(&self.0 - &rhs.0) 195 | } 196 | } 197 | 198 | impl SubAssign for R25519Scalar { 199 | fn sub_assign(&mut self, rhs: Self) { 200 | self.0 = &self.0 - &rhs.0 201 | } 202 | } 203 | 204 | impl Sub<&R25519Scalar> for R25519Scalar { 205 | type Output = Self; 206 | 207 | fn sub(self, rhs: &R25519Scalar) -> Self::Output { 208 | Self(&self.0 - &rhs.0) 209 | } 210 | } 211 | 212 | impl Sub<&R25519Scalar> for &R25519Scalar { 213 | type Output = R25519Scalar; 214 | 215 | fn sub(self, rhs: &R25519Scalar) -> Self::Output { 216 | R25519Scalar(&self.0 - &rhs.0) 217 | } 218 | } 219 | 220 | impl Mul for R25519Scalar { 221 | type Output = Self; 222 | 223 | fn mul(self, rhs: Self) -> Self::Output { 224 | Self(&self.0 * &rhs.0) 225 | } 226 | } 227 | 228 | impl MulAssign for R25519Scalar { 229 | fn mul_assign(&mut self, rhs: Self) { 230 | self.0 = &self.0 * &rhs.0 231 | } 232 | } 233 | 234 | impl Mul<&R25519Scalar> for R25519Scalar { 235 | type Output = Self; 236 | 237 | fn mul(self, rhs: &R25519Scalar) -> Self::Output { 238 | Self(&self.0 * &rhs.0) 239 | } 240 | } 241 | 242 | impl Mul<&R25519Scalar> for &R25519Scalar { 243 | type Output = R25519Scalar; 244 | 245 | fn mul(self, rhs: &R25519Scalar) -> Self::Output { 246 | R25519Scalar(&self.0 * &rhs.0) 247 | } 248 | } 249 | 250 | impl Div for R25519Scalar { 251 | type Output = Result; 252 | 253 | fn div(self, rhs: Self) -> Self::Output { 254 | &self / &rhs 255 | } 256 | } 257 | 258 | impl Div<&R25519Scalar> for R25519Scalar { 259 | type 
Output = Result; 260 | 261 | fn div(self, rhs: &R25519Scalar) -> Self::Output { 262 | &self / rhs 263 | } 264 | } 265 | 266 | impl Div<&R25519Scalar> for &R25519Scalar { 267 | type Output = Result; 268 | 269 | fn div(self, rhs: &R25519Scalar) -> Self::Output { 270 | (&self.0 / &rhs.0).map(R25519Scalar) 271 | } 272 | } 273 | 274 | impl Sum for R25519Scalar { 275 | fn sum>(iter: I) -> Self { 276 | iter.fold(Self::zero(), |a, s| a + s) 277 | } 278 | } 279 | 280 | impl Group for R25519Scalar {} 281 | 282 | impl Ring for R25519Scalar { 283 | type DivError = CryptoCoreError; 284 | } 285 | 286 | impl Serializable for R25519Scalar { 287 | type Error = Error; 288 | 289 | fn length(&self) -> usize { 290 | self.0.length() 291 | } 292 | 293 | fn write(&self, ser: &mut Serializer) -> Result { 294 | self.0.write(ser).map_err(Self::Error::from) 295 | } 296 | 297 | fn read(de: &mut Deserializer) -> Result { 298 | de.read().map(Self).map_err(Self::Error::from) 299 | } 300 | } 301 | 302 | impl Sampling for R25519Scalar { 303 | fn random(rng: &mut impl CryptoRngCore) -> Self { 304 | Self(Scalar::new(rng)) 305 | } 306 | 307 | fn hash(seed: &[u8]) -> Self { 308 | let mut hasher = Sha3::v512(); 309 | let mut bytes = [0; 512 / 8]; 310 | hasher.update(seed); 311 | hasher.finalize(&mut bytes); 312 | let s = Self(Scalar::from_raw_bytes(&bytes)); 313 | bytes.zeroize(); 314 | s 315 | } 316 | } 317 | 318 | impl From<&R25519Scalar> for R25519Point { 319 | fn from(s: &R25519Scalar) -> Self { 320 | Self(EcPoint::from(&s.0)) 321 | } 322 | } 323 | 324 | impl Mul for R25519Point { 325 | type Output = Self; 326 | 327 | fn mul(self, rhs: R25519Scalar) -> Self::Output { 328 | Self(&self.0 * &rhs.0) 329 | } 330 | } 331 | 332 | impl MulAssign for R25519Point { 333 | fn mul_assign(&mut self, rhs: R25519Scalar) { 334 | self.0 = &self.0 * &rhs.0 335 | } 336 | } 337 | 338 | impl Mul<&R25519Scalar> for R25519Point { 339 | type Output = Self; 340 | 341 | fn mul(self, rhs: &R25519Scalar) -> Self::Output { 342 | Self(&self.0 * &rhs.0) 343 | } 344 | } 345 | 346 | impl Mul<&R25519Scalar> for &R25519Point { 347 | type Output = R25519Point; 348 | 349 | fn mul(self, rhs: &R25519Scalar) -> Self::Output { 350 | R25519Point(&self.0 * &rhs.0) 351 | } 352 | } 353 | 354 | pub struct R25519; 355 | 356 | impl Nike for R25519 { 357 | type SecretKey = R25519Scalar; 358 | type PublicKey = R25519Point; 359 | type SessionKey = R25519Point; 360 | type Error = Error; 361 | 362 | fn keygen( 363 | rng: &mut impl CryptoRngCore, 364 | ) -> Result<(Self::SecretKey, Self::PublicKey), Self::Error> { 365 | let sk = Self::SecretKey::random(rng); 366 | let pk = Self::PublicKey::from(&sk); 367 | Ok((sk, pk)) 368 | } 369 | 370 | fn session_key( 371 | sk: &Self::SecretKey, 372 | pk: &Self::PublicKey, 373 | ) -> Result { 374 | Ok(pk * sk) 375 | } 376 | } 377 | 378 | impl KeyHomomorphicNike for R25519 {} 379 | 380 | #[cfg(test)] 381 | mod tests { 382 | use cosmian_crypto_core::{ 383 | bytes_ser_de::test_serialization, reexport::rand_core::SeedableRng, CsRng, 384 | }; 385 | 386 | use super::*; 387 | 388 | #[test] 389 | fn test_r25519() { 390 | let mut rng = CsRng::from_entropy(); 391 | let (sk1, pk1) = R25519::keygen(&mut rng).unwrap(); 392 | let (sk2, pk2) = R25519::keygen(&mut rng).unwrap(); 393 | test_serialization(&sk1).unwrap(); 394 | test_serialization(&pk1).unwrap(); 395 | test_serialization(&sk2).unwrap(); 396 | test_serialization(&pk2).unwrap(); 397 | let ss1 = R25519::session_key(&sk1, &pk2).unwrap(); 398 | let ss2 = R25519::session_key(&sk2, &pk1).unwrap(); 399 
| assert_eq!(ss1, ss2); 400 | } 401 | } 402 | -------------------------------------------------------------------------------- /src/core/tests.rs: -------------------------------------------------------------------------------- 1 | use std::collections::{HashMap, HashSet}; 2 | 3 | use cosmian_crypto_core::{reexport::rand_core::SeedableRng, Aes256Gcm, CsRng}; 4 | 5 | use crate::{ 6 | abe_policy::{AccessPolicy, AttributeStatus, EncryptionHint, Right}, 7 | api::Covercrypt, 8 | core::primitives::{decaps, encaps, refresh, rekey, update_msk}, 9 | test_utils::cc_keygen, 10 | traits::{KemAc, PkeAc}, 11 | }; 12 | 13 | use super::{ 14 | primitives::{setup, usk_keygen}, 15 | MIN_TRACING_LEVEL, 16 | }; 17 | 18 | /// This test asserts that it is possible to encapsulate a key for a given 19 | /// coordinate and that different users which key is associated with this 20 | /// coordinate can open the resulting encapsulation. 21 | #[test] 22 | fn test_encapsulation() { 23 | let mut rng = CsRng::from_entropy(); 24 | let other_coordinate = Right::random(&mut rng); 25 | let target_coordinate = Right::random(&mut rng); 26 | 27 | let mut msk = setup(MIN_TRACING_LEVEL, &mut rng).unwrap(); 28 | update_msk( 29 | &mut rng, 30 | &mut msk, 31 | HashMap::from_iter([ 32 | ( 33 | other_coordinate.clone(), 34 | (EncryptionHint::Classic, AttributeStatus::EncryptDecrypt), 35 | ), 36 | ( 37 | target_coordinate.clone(), 38 | (EncryptionHint::Classic, AttributeStatus::EncryptDecrypt), 39 | ), 40 | ]), 41 | ) 42 | .unwrap(); 43 | let mpk = msk.mpk().unwrap(); 44 | 45 | let (key, enc) = encaps( 46 | &mut rng, 47 | &mpk, 48 | &HashSet::from_iter([target_coordinate.clone()]), 49 | ) 50 | .unwrap(); 51 | assert_eq!(enc.count(), 1); 52 | 53 | for _ in 0..3 { 54 | let usk = usk_keygen( 55 | &mut rng, 56 | &mut msk, 57 | HashSet::from_iter([target_coordinate.clone()]), 58 | ) 59 | .unwrap(); 60 | assert_eq!(usk.secrets.len(), 1); 61 | assert_eq!(Some(&key), decaps(&mut rng, &usk, &enc).unwrap().as_ref()); 62 | } 63 | 64 | let usk = usk_keygen( 65 | &mut rng, 66 | &mut msk, 67 | HashSet::from_iter([other_coordinate.clone()]), 68 | ) 69 | .unwrap(); 70 | assert_eq!(usk.secrets.len(), 1); 71 | assert_eq!(None, decaps(&mut rng, &usk, &enc).unwrap().as_ref()); 72 | } 73 | 74 | /// This test verifies that the correct number of keys is added/removed upon 75 | /// updating the MSK. It also check that the correct number of coordinate keys 76 | /// are given to the MPK, and removed upon deprecation. 77 | #[test] 78 | fn test_update() { 79 | let mut rng = CsRng::from_entropy(); 80 | 81 | let mut msk = setup(MIN_TRACING_LEVEL, &mut rng).unwrap(); 82 | assert_eq!(msk.tsk.users.len(), 0); 83 | assert_eq!(msk.tsk.tracing_level(), MIN_TRACING_LEVEL); 84 | assert_eq!(msk.secrets.len(), 0); 85 | 86 | let mpk = msk.mpk().unwrap(); 87 | assert_eq!(mpk.tpk.tracing_level(), MIN_TRACING_LEVEL); 88 | assert_eq!(mpk.encryption_keys.len(), 0); 89 | 90 | // Add 30 new random coordinates and verifies the correct number of 91 | // coordinate keys is added to the MSK (and the MPK). 92 | let mut coordinates = (0..30) 93 | .map(|_| { 94 | ( 95 | Right::random(&mut rng), 96 | (EncryptionHint::Classic, AttributeStatus::EncryptDecrypt), 97 | ) 98 | }) 99 | .collect::>(); 100 | update_msk(&mut rng, &mut msk, coordinates.clone()).unwrap(); 101 | assert_eq!(msk.secrets.len(), 30); 102 | 103 | let mpk = msk.mpk().unwrap(); 104 | assert_eq!(mpk.encryption_keys.len(), 30); 105 | 106 | // Deprecate half coordinates. 
107 | // 108 | // Be careful to iterate on the original structure not to change the 109 | // iteration order. Otherwise the next test may fail. 110 | coordinates 111 | .iter_mut() 112 | .enumerate() 113 | .for_each(|(i, (_, (_, status)))| { 114 | if i % 2 == 0 { 115 | *status = AttributeStatus::DecryptOnly; 116 | } 117 | }); 118 | update_msk(&mut rng, &mut msk, coordinates.clone()).unwrap(); 119 | assert_eq!(msk.secrets.len(), 30); 120 | let mpk = msk.mpk().unwrap(); 121 | assert_eq!(mpk.encryption_keys.len(), 15); 122 | 123 | // Keep only 10 coordinates. 124 | let coordinates = coordinates.into_iter().take(10).collect::>(); 125 | update_msk(&mut rng, &mut msk, coordinates).unwrap(); 126 | assert_eq!(msk.secrets.len(), 10); 127 | let mpk = msk.mpk().unwrap(); 128 | assert_eq!(mpk.encryption_keys.len(), 5); 129 | } 130 | 131 | /// This test asserts that re-keyed coordinates allow creating encapsulations 132 | /// using the new keys: old USK cannot open the new ones and new USK cannot open 133 | /// the old ones. 134 | #[test] 135 | fn test_rekey() { 136 | let mut rng = CsRng::from_entropy(); 137 | let coordinate_1 = Right::random(&mut rng); 138 | let coordinate_2 = Right::random(&mut rng); 139 | let subspace_1 = HashSet::from_iter([coordinate_1.clone()]); 140 | let subspace_2 = HashSet::from_iter([coordinate_2.clone()]); 141 | let universe = HashSet::from_iter([coordinate_1.clone(), coordinate_2.clone()]); 142 | 143 | let mut msk = setup(MIN_TRACING_LEVEL, &mut rng).unwrap(); 144 | update_msk( 145 | &mut rng, 146 | &mut msk, 147 | HashMap::from_iter([ 148 | ( 149 | coordinate_1.clone(), 150 | (EncryptionHint::Classic, AttributeStatus::EncryptDecrypt), 151 | ), 152 | ( 153 | coordinate_2.clone(), 154 | (EncryptionHint::Classic, AttributeStatus::EncryptDecrypt), 155 | ), 156 | ]), 157 | ) 158 | .unwrap(); 159 | let mpk = msk.mpk().unwrap(); 160 | let mut usk_1 = usk_keygen(&mut rng, &mut msk, subspace_1.clone()).unwrap(); 161 | let mut usk_2 = usk_keygen(&mut rng, &mut msk, subspace_2.clone()).unwrap(); 162 | 163 | let (old_key_1, old_enc_1) = encaps(&mut rng, &mpk, &subspace_1).unwrap(); 164 | let (old_key_2, old_enc_2) = encaps(&mut rng, &mpk, &subspace_2).unwrap(); 165 | 166 | // Old USK can open encapsulations associated with their coordinate. 167 | assert_eq!( 168 | Some(&old_key_1), 169 | decaps(&mut rng, &usk_1, &old_enc_1).unwrap().as_ref() 170 | ); 171 | assert_eq!(None, decaps(&mut rng, &usk_1, &old_enc_2).unwrap()); 172 | assert_eq!( 173 | Some(old_key_2), 174 | decaps(&mut rng, &usk_2, &old_enc_2).unwrap() 175 | ); 176 | assert_eq!(None, decaps(&mut rng, &usk_2, &old_enc_1).unwrap()); 177 | 178 | // Re-key all space coordinates. 179 | rekey(&mut rng, &mut msk, universe).unwrap(); 180 | let mpk = msk.mpk().unwrap(); 181 | 182 | let (new_key_1, new_enc_1) = encaps(&mut rng, &mpk, &subspace_1).unwrap(); 183 | let (new_key_2, new_enc_2) = encaps(&mut rng, &mpk, &subspace_2).unwrap(); 184 | 185 | // Old USK cannot open new encapsulations. 186 | assert_eq!(None, decaps(&mut rng, &usk_1, &new_enc_1).unwrap()); 187 | assert_eq!(None, decaps(&mut rng, &usk_1, &new_enc_2).unwrap()); 188 | assert_eq!(None, decaps(&mut rng, &usk_2, &new_enc_2).unwrap()); 189 | assert_eq!(None, decaps(&mut rng, &usk_2, &new_enc_1).unwrap()); 190 | 191 | // Refresh USK. 192 | // Only the first one keeps its old rights. 193 | refresh(&mut rng, &mut msk, &mut usk_1, true).unwrap(); 194 | refresh(&mut rng, &mut msk, &mut usk_2, false).unwrap(); 195 | 196 | // Refreshed USK can open the new encapsulation. 
197 | assert_eq!( 198 | Some(new_key_1), 199 | decaps(&mut rng, &usk_1, &new_enc_1).unwrap() 200 | ); 201 | assert_eq!(None, decaps(&mut rng, &usk_1, &new_enc_2).unwrap()); 202 | assert_eq!( 203 | Some(new_key_2), 204 | decaps(&mut rng, &usk_2, &new_enc_2).unwrap() 205 | ); 206 | assert_eq!(None, decaps(&mut rng, &usk_2, &new_enc_1).unwrap()); 207 | 208 | // Only USK 1 can still open the old encapsulation. 209 | assert_eq!( 210 | Some(old_key_1), 211 | decaps(&mut rng, &usk_1, &old_enc_1).unwrap() 212 | ); 213 | assert_eq!(None, decaps(&mut rng, &usk_1, &old_enc_2).unwrap()); 214 | assert_eq!(None, decaps(&mut rng, &usk_2, &old_enc_2).unwrap()); 215 | assert_eq!(None, decaps(&mut rng, &usk_2, &old_enc_1).unwrap()); 216 | } 217 | 218 | /// This test asserts that forged USK cannot be refreshed. 219 | #[test] 220 | fn test_integrity_check() { 221 | let mut rng = CsRng::from_entropy(); 222 | let coordinate_1 = Right::random(&mut rng); 223 | let coordinate_2 = Right::random(&mut rng); 224 | let subspace_1 = HashSet::from_iter([coordinate_1.clone()]); 225 | let subspace_2 = HashSet::from_iter([coordinate_2.clone()]); 226 | 227 | let mut msk = setup(MIN_TRACING_LEVEL, &mut rng).unwrap(); 228 | update_msk( 229 | &mut rng, 230 | &mut msk, 231 | HashMap::from_iter([ 232 | ( 233 | coordinate_1.clone(), 234 | (EncryptionHint::Classic, AttributeStatus::EncryptDecrypt), 235 | ), 236 | ( 237 | coordinate_2.clone(), 238 | (EncryptionHint::Classic, AttributeStatus::EncryptDecrypt), 239 | ), 240 | ]), 241 | ) 242 | .unwrap(); 243 | let usk_1 = usk_keygen(&mut rng, &mut msk, subspace_1.clone()).unwrap(); 244 | let usk_2 = usk_keygen(&mut rng, &mut msk, subspace_2.clone()).unwrap(); 245 | 246 | // Here we are trying to get access to both USK1 and USK2 rights. 247 | let mut old_forged_usk = usk_1.clone(); 248 | for (key, chain) in usk_2.secrets.iter() { 249 | old_forged_usk 250 | .secrets 251 | .insert_new_chain(key.clone(), chain.clone()); 252 | } 253 | assert_eq!( 254 | old_forged_usk.secrets.count_elements(), 255 | usk_1.secrets.count_elements() + usk_2.secrets.count_elements() 256 | ); 257 | 258 | // The forged key refresh is rejected: no modification is performed on it. 
259 | let mut new_forged_usk = old_forged_usk.clone(); 260 | assert!(refresh(&mut rng, &mut msk, &mut new_forged_usk, true).is_err()); 261 | assert_eq!(new_forged_usk, old_forged_usk); 262 | } 263 | 264 | #[test] 265 | fn test_reencrypt_with_msk() { 266 | let ap = AccessPolicy::parse("DPT::FIN && SEC::TOP").unwrap(); 267 | let cc = Covercrypt::default(); 268 | 269 | let mut rng = CsRng::from_entropy(); 270 | 271 | let (mut msk, _) = cc_keygen(&cc, false).unwrap(); 272 | let mpk = cc.update_msk(&mut msk).expect("cannot update master keys"); 273 | let mut usk = cc 274 | .generate_user_secret_key(&mut msk, &ap) 275 | .expect("cannot generate usk"); 276 | 277 | let (old_key, old_enc) = cc.encaps(&mpk, &ap).unwrap(); 278 | assert_eq!( 279 | Some(&old_key), 280 | decaps(&mut rng, &usk, &old_enc).unwrap().as_ref() 281 | ); 282 | 283 | cc.rekey(&mut msk, &ap).unwrap(); 284 | let new_mpk = msk.mpk().unwrap(); 285 | let (new_key, new_enc) = cc.recaps(&msk, &new_mpk, &old_enc).unwrap(); 286 | cc.refresh_usk(&mut msk, &mut usk, true).unwrap(); 287 | assert_eq!(Some(new_key), decaps(&mut rng, &usk, &new_enc).unwrap()); 288 | assert_ne!(Some(old_key), decaps(&mut rng, &usk, &new_enc).unwrap()); 289 | } 290 | 291 | #[test] 292 | fn test_covercrypt_kem() { 293 | let ap = AccessPolicy::parse("DPT::FIN && SEC::TOP").unwrap(); 294 | let cc = Covercrypt::default(); 295 | let (mut msk, _mpk) = cc_keygen(&cc, false).unwrap(); 296 | let mpk = cc.update_msk(&mut msk).expect("cannot update master keys"); 297 | let usk = cc 298 | .generate_user_secret_key(&mut msk, &ap) 299 | .expect("cannot generate usk"); 300 | let (secret, enc) = cc.encaps(&mpk, &ap).unwrap(); 301 | let res = cc.decaps(&usk, &enc).unwrap(); 302 | assert_eq!(secret, res.unwrap()); 303 | } 304 | 305 | #[test] 306 | fn test_covercrypt_pke() { 307 | let ap = AccessPolicy::parse("DPT::FIN && SEC::TOP").unwrap(); 308 | let cc = Covercrypt::default(); 309 | let (mut msk, mpk) = cc_keygen(&cc, false).unwrap(); 310 | 311 | let ptx = "testing encryption/decryption".as_bytes(); 312 | 313 | let ctx = PkeAc::<{ Aes256Gcm::KEY_LENGTH }, Aes256Gcm>::encrypt(&cc, &mpk, &ap, ptx) 314 | .expect("cannot encrypt!"); 315 | let usk = cc 316 | .generate_user_secret_key(&mut msk, &ap) 317 | .expect("cannot generate usk"); 318 | let ptx1 = PkeAc::<{ Aes256Gcm::KEY_LENGTH }, Aes256Gcm>::decrypt(&cc, &usk, &ctx) 319 | .expect("cannot decrypt the ciphertext"); 320 | assert_eq!(ptx, &*ptx1.unwrap()); 321 | } 322 | -------------------------------------------------------------------------------- /src/data_struct/README.md: -------------------------------------------------------------------------------- 1 | # Data structures to store Covercrypt objects 2 | 3 | ## Overview 4 | 5 | ### Dictionary 6 | 7 | A `Dictionary` is a `HashMap` keeping insertion order inspired by Python dictionary. 8 | It is used to store ordered `Dimension` (also named axis) inside the `Policy` object. 9 | 10 | Pros: 11 | 12 | - the hierarchical order of the attributes is kept by design 13 | 14 | - same serialized size 15 | 16 | - accessing elements is almost as fast as an `HashMap` with one additional memory access 17 | 18 | - updating the key of an element (e.g. renaming an attribute) can be performed in constant time without modifying the order 19 | 20 | Cons: 21 | 22 | - more space in memory 23 | 24 | - removing an element is `O(n)` but ordered dimensions do not allow this modification 25 | 26 | ### RevisionMap 27 | 28 | A `RevisionMap` is a `HashMap` which keys are mapped to sequences of values. 
29 | Upon insertion for an existing key, the new value is prepended to the sequence of older values instead of replacing it. 30 | 31 | It is used to store master secret key where each coordinate is mapped to a list of keys. 32 | When a coordinate is rekeyed, a new key is generated and added to the front of the associated list inside the `RevisionMap`. 33 | 34 | Note: the master public key is a regular `HashMap` only storing the most recent key for any coordinate as one only wants to encrypt new data with the newest key. 35 | 36 | Pros: 37 | 38 | - constant time access to the most recent key for each coordinate 39 | 40 | - adding a new key to the front (rekeying) is performed in constant time 41 | 42 | - key history for any coordinate is preserved by design (useful to refresh user keys) 43 | 44 | Cons: 45 | 46 | - following linked list pointers can be slower than iterating a regular vector 47 | 48 | - serialization requires following each linked list 49 | 50 | ### RevisionVec 51 | 52 | A `RevisionVec` is a vector that stores pairs containing a key and a sequence of values. 53 | 54 | Inserting a new value in the sequence associated to an existing key prepends this value to the sequence. 55 | 56 | It is used to store user secret key where each coordinate is stored with a list of keys. 57 | When refreshing the user key with a given master secret key, each coordinate is updated by comparing the list of user subkeys with the master ones. 58 | 59 | Pros: 60 | 61 | - accessing the most recent keys is faster than older ones 62 | 63 | - updating the user key with a given master secret key is performed by only iterating each linked list once 64 | 65 | - key history for any coordinate is preserved by design (useful to refresh user keys) 66 | 67 | Cons: 68 | 69 | - no direct access to a given coordinate's keys (would be a nice to have but not really needed in practice) 70 | 71 | - multiple insertions with the same coordinate will result in multiple entries in the vector thus corrupting the structure 72 | 73 | - following linked list pointers can be slower than iterating a regular vector 74 | 75 | - serialization requires following each linked list 76 | -------------------------------------------------------------------------------- /src/data_struct/dictionary.rs: -------------------------------------------------------------------------------- 1 | use std::{ 2 | borrow::Borrow, 3 | collections::{hash_map::Entry, HashMap}, 4 | fmt::{self, Debug}, 5 | hash::Hash, 6 | marker::PhantomData, 7 | mem::swap, 8 | }; 9 | 10 | use serde::{ 11 | de::{MapAccess, Visitor}, 12 | ser::SerializeMap, 13 | Deserialize, Deserializer, Serialize, 14 | }; 15 | 16 | use super::error::Error; 17 | 18 | type Index = usize; 19 | /// `HashMap` keeping insertion order inspired by Python dictionary. 
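/// # Example
///
/// A minimal usage sketch (added for illustration, mirroring `tests::test_dict`
/// below; the concrete `Dict<String, String>` instantiation is only an example):
///
/// ```ignore
/// let mut d: Dict<String, String> = Dict::new();
/// d.insert("ID1".to_string(), "Foo".to_string());
/// d.insert("ID2".to_string(), "Bar".to_string());
/// d.insert("ID3".to_string(), "Baz".to_string());
/// // Iteration follows insertion order, not hash order.
/// assert_eq!(d.values().collect::<Vec<_>>(), vec!["Foo", "Bar", "Baz"]);
/// // Overwriting a value keeps the entry at its original position.
/// d.insert("ID1".to_string(), "Foox".to_string());
/// assert_eq!(d.values().collect::<Vec<_>>(), vec!["Foox", "Bar", "Baz"]);
/// ```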
20 | #[derive(Default, Clone, Eq, PartialEq, Debug)] 21 | pub struct Dict 22 | where 23 | K: Hash + PartialEq + Eq + Clone + Debug, 24 | { 25 | indices: HashMap, 26 | entries: Vec<(K, V)>, 27 | } 28 | 29 | impl Dict 30 | where 31 | K: Hash + PartialEq + Eq + Clone + Debug, 32 | { 33 | #[must_use] 34 | pub fn new() -> Self { 35 | Self { 36 | indices: HashMap::new(), 37 | entries: Vec::new(), 38 | } 39 | } 40 | 41 | #[must_use] 42 | pub fn with_capacity(capacity: usize) -> Self { 43 | Self { 44 | indices: HashMap::with_capacity(capacity), 45 | entries: Vec::with_capacity(capacity), 46 | } 47 | } 48 | 49 | pub fn len(&self) -> usize { 50 | self.indices.len() 51 | } 52 | 53 | pub fn is_empty(&self) -> bool { 54 | self.len() == 0 55 | } 56 | 57 | /// Inserts a new entry with a given key. 58 | /// If a given key already exists, the entry will be overwritten without 59 | /// changing the order. 60 | /// Otherwise, new entries are simply pushed at the end. 61 | pub fn insert(&mut self, key: K, value: V) -> Option { 62 | match self.indices.entry(key.clone()) { 63 | Entry::Occupied(e) => { 64 | // replace existing entry value in vector 65 | Some(std::mem::replace(&mut self.entries[*e.get()].1, value)) 66 | } 67 | Entry::Vacant(e) => { 68 | let new_index = self.entries.len(); 69 | self.entries.push((key, value)); 70 | e.insert(new_index); 71 | None 72 | } 73 | } 74 | } 75 | 76 | /// Removes the entry corresponding to the given key. 77 | /// To maintain order, all inserted entries after the removed one will be 78 | /// shifted by one and the indices map will be updated accordingly. 79 | /// Compared to a regular `HashMap`, this operation is O(n). 80 | pub fn remove(&mut self, key: &K) -> Option { 81 | let entry_index = self.indices.remove(key)?; 82 | 83 | self.indices 84 | .iter_mut() 85 | .filter(|(_, index)| **index > entry_index) 86 | .for_each(|(_, index)| *index -= 1); 87 | 88 | Some(self.entries.remove(entry_index).1) 89 | } 90 | 91 | /// Updates the key for a given entry while retaining the current order. 
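/// # Example
///
/// An illustrative sketch (based on `tests::test_dict` below):
///
/// ```ignore
/// let mut d: Dict<String, String> = Dict::new();
/// d.insert("ID1".to_string(), "Foo".to_string());
/// d.insert("ID2".to_string(), "Bar".to_string());
/// // Rename "ID2" to "ID2_bis"; the entry keeps its position and its value.
/// d.update_key(&"ID2".to_string(), "ID2_bis".to_string())?;
/// assert!(d.get("ID2").is_none());
/// assert_eq!(d.get("ID2_bis"), Some(&"Bar".to_string()));
/// ```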
92 | pub fn update_key(&mut self, old_key: &K, mut new_key: K) -> Result<(), Error> { 93 | // Get index from old_key 94 | let index_entry = *self 95 | .indices 96 | .get(old_key) 97 | .ok_or(Error::missing_entry(old_key))?; 98 | 99 | match self.indices.entry(new_key.clone()) { 100 | Entry::Occupied(e) => Err(Error::existing_entry(e.key())), 101 | Entry::Vacant(e) => { 102 | // Insert new key inside indices 103 | e.insert(index_entry); 104 | // Remove old key from indices 105 | let _ = self.indices.remove(old_key); 106 | // Replace old_key with new_key inside entries 107 | swap(&mut self.entries[index_entry].0, &mut new_key); 108 | Ok(()) 109 | } 110 | } 111 | } 112 | 113 | pub fn contains_key(&self, key: &Q) -> bool 114 | where 115 | K: Borrow, 116 | Q: Hash + Eq + ?Sized, 117 | { 118 | self.indices.contains_key(key) 119 | } 120 | 121 | pub fn get(&self, key: &Q) -> Option<&V> 122 | where 123 | K: Borrow, 124 | Q: Hash + Eq + ?Sized, 125 | { 126 | let entry_index = self.indices.get(key)?; 127 | self.entries.get(*entry_index).map(|(_, v)| v) 128 | } 129 | 130 | pub fn get_mut(&mut self, key: &Q) -> Option<&mut V> 131 | where 132 | K: Borrow, 133 | Q: Hash + Eq + ?Sized, 134 | { 135 | let entry_index = self.indices.get(key)?; 136 | self.entries.get_mut(*entry_index).map(|(_, v)| v) 137 | } 138 | 139 | pub fn get_key_value(&self, key: &Q) -> Option<(&K, &V)> 140 | where 141 | K: Borrow, 142 | Q: Hash + Eq + ?Sized, 143 | { 144 | let entry_index = self.indices.get(key)?; 145 | let (key, value) = self.entries.get(*entry_index)?; 146 | Some((key, value)) 147 | } 148 | 149 | /// Returns an iterator over keys and values in insertion order. 150 | pub fn iter(&self) -> impl Iterator { 151 | self.entries.iter().map(|(k, v)| (k, v)) 152 | } 153 | 154 | /// Returns an iterator over values in insertion order 155 | pub fn values(&self) -> impl Iterator { 156 | self.entries.iter().map(|(_, v)| v) 157 | } 158 | 159 | /// Returns an iterator over keys in insertion order. 160 | pub fn keys(&self) -> impl Iterator { 161 | self.entries.iter().map(|(k, _)| k) 162 | } 163 | } 164 | 165 | impl IntoIterator for Dict 166 | where 167 | K: Hash + PartialEq + Eq + Clone + Debug, 168 | { 169 | type IntoIter = std::vec::IntoIter<(K, V)>; 170 | type Item = (K, V); 171 | 172 | /// Returns an iterator over keys and values in insertion order. 
173 | fn into_iter(self) -> Self::IntoIter { 174 | self.entries.into_iter() 175 | } 176 | } 177 | 178 | impl FromIterator<(K, V)> for Dict 179 | where 180 | K: Hash + PartialEq + Eq + Clone + Debug, 181 | { 182 | fn from_iter>(iter: T) -> Self { 183 | let iterator = iter.into_iter(); 184 | let mut dict = Self::with_capacity(iterator.size_hint().0); 185 | for (key, value) in iterator { 186 | dict.insert(key, value); 187 | } 188 | dict 189 | } 190 | } 191 | 192 | impl Serialize for Dict 193 | where 194 | K: Hash + PartialEq + Eq + Clone + Debug + Serialize, 195 | V: Serialize, 196 | { 197 | fn serialize(&self, serializer: S) -> Result 198 | where 199 | S: serde::Serializer, 200 | { 201 | let mut map = serializer.serialize_map(Some(self.len()))?; 202 | for (k, v) in self.iter() { 203 | map.serialize_entry(k, v)?; 204 | } 205 | map.end() 206 | } 207 | } 208 | 209 | struct DictVisitor 210 | where 211 | K: Hash + PartialEq + Eq + Clone + Debug, 212 | { 213 | marker: PhantomData Dict>, 214 | } 215 | 216 | impl DictVisitor 217 | where 218 | K: Hash + PartialEq + Eq + Clone + Debug, 219 | { 220 | fn new() -> Self { 221 | Self { 222 | marker: PhantomData, 223 | } 224 | } 225 | } 226 | 227 | impl<'de, K, V> Visitor<'de> for DictVisitor 228 | where 229 | K: Hash + PartialEq + Eq + Clone + Debug + Deserialize<'de>, 230 | V: Deserialize<'de>, 231 | { 232 | type Value = Dict; 233 | 234 | fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { 235 | formatter.write_str("a Dict") 236 | } 237 | 238 | // Create a `Dict` from an abstract map provided by the Deserializer. 239 | // This abstract map should preserve the item's order during the 240 | // deserialization. 241 | fn visit_map(self, mut access: M) -> Result 242 | where 243 | M: MapAccess<'de>, 244 | { 245 | let mut map = Dict::with_capacity(access.size_hint().unwrap_or(0)); 246 | 247 | while let Some((key, value)) = access.next_entry()? 
{ 248 | map.insert(key, value); 249 | } 250 | 251 | Ok(map) 252 | } 253 | } 254 | 255 | impl<'de, K, V> Deserialize<'de> for Dict 256 | where 257 | K: Hash + PartialEq + Eq + Clone + Debug + Deserialize<'de>, 258 | V: Deserialize<'de>, 259 | { 260 | fn deserialize(deserializer: D) -> Result 261 | where 262 | D: Deserializer<'de>, 263 | { 264 | deserializer.deserialize_map(DictVisitor::new()) 265 | } 266 | } 267 | 268 | #[cfg(test)] 269 | mod tests { 270 | use super::*; 271 | 272 | #[test] 273 | fn test_dict() -> Result<(), Error> { 274 | let mut d: Dict = Dict::new(); 275 | assert!(d.is_empty()); 276 | 277 | // Insertions 278 | d.insert("ID1".to_string(), "Foo".to_string()); 279 | d.insert("ID2".to_string(), "Bar".to_string()); 280 | d.insert("ID3".to_string(), "Baz".to_string()); 281 | assert_eq!(d.len(), 3); 282 | 283 | // Get 284 | assert_eq!( 285 | d.get_key_value("ID2").unwrap(), 286 | (&"ID2".to_string(), &"Bar".to_string()) 287 | ); 288 | 289 | // Edit 290 | // Overwrite value without changing order 291 | d.insert("ID1".to_string(), "Foox".to_string()); 292 | 293 | // Update key without changing order 294 | d.update_key(&"ID2".to_string(), "ID2_bis".to_string())?; 295 | assert!(d.get_key_value(&String::from("ID2")).is_none()); 296 | assert_eq!( 297 | d.get_key_value(&"ID2_bis".to_string()).unwrap(), 298 | (&"ID2_bis".to_string(), &"Bar".to_string()) 299 | ); 300 | 301 | // Update key error cases 302 | // missing old key 303 | assert!(d.update_key(&"Bad".to_string(), "New".to_string()).is_err()); 304 | // existing new key 305 | assert!(d.update_key(&"ID1".to_string(), "ID3".to_string()).is_err()); 306 | 307 | // Iterators 308 | assert_eq!(d.values().collect::>(), vec!["Foox", "Bar", "Baz"]); 309 | 310 | assert_eq!( 311 | d.iter().collect::>(), 312 | vec![ 313 | (&String::from("ID1"), &String::from("Foox")), 314 | (&String::from("ID2_bis"), &String::from("Bar")), 315 | (&String::from("ID3"), &String::from("Baz")), 316 | ] 317 | ); 318 | 319 | // Remove 320 | assert!(d.remove(&String::from("Missing")).is_none()); 321 | assert_eq!(d.remove(&String::from("ID2_bis")), Some("Bar".to_string())); 322 | assert_eq!(d.len(), 2); 323 | 324 | // Check order is maintained 325 | assert_eq!(d.values().collect::>(), vec!["Foox", "Baz"]); 326 | 327 | // Insertion after remove 328 | d.insert(String::from("ID4"), String::from("Test")); 329 | assert_eq!(d.values().collect::>(), vec!["Foox", "Baz", "Test"]); 330 | 331 | Ok(()) 332 | } 333 | 334 | #[test] 335 | fn test_dict_serialization() { 336 | // Init dict 337 | let mut d: Dict = Dict::new(); 338 | d.insert("ID1".to_string(), "Foo".to_string()); 339 | d.insert("ID2".to_string(), "Bar".to_string()); 340 | d.insert("ID3".to_string(), "Baz".to_string()); 341 | d.remove(&"ID2".to_string()); 342 | d.insert("ID4".to_string(), "Bar2".to_string()); 343 | 344 | // serialize 345 | let data = serde_json::to_vec(&d).unwrap(); 346 | 347 | // can be read as a hashmap but this the order will be lost 348 | let map: HashMap = serde_json::from_slice(&data).unwrap(); 349 | assert_eq!(map.len(), d.len()); 350 | assert!(map.contains_key("ID1")); 351 | assert!(map.contains_key("ID3")); 352 | assert!(map.contains_key("ID4")); 353 | 354 | // deserialization as dict will keep the order 355 | let d2: Dict = serde_json::from_slice(&data).unwrap(); 356 | assert_eq!(d2.len(), d.len()); 357 | assert_eq!(d2.iter().collect::>(), d.iter().collect::>()); 358 | } 359 | } 360 | -------------------------------------------------------------------------------- /src/data_struct/error.rs: 
-------------------------------------------------------------------------------- 1 | use std::fmt::{Debug, Display}; 2 | 3 | type Key = String; 4 | 5 | #[derive(Debug)] 6 | pub enum Error { 7 | EntryNotFound(Key), 8 | ExistingEntry(Key), 9 | AlreadyHasChild(Key), 10 | } 11 | 12 | impl Display for Error { 13 | fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { 14 | match &self { 15 | Self::EntryNotFound(key) => write!(f, "Entry not found with key: {key}."), 16 | Self::ExistingEntry(key) => write!(f, "Already existing entry with key: {key}."), 17 | Self::AlreadyHasChild(key) => { 18 | write!(f, "Entry with key {key} already has a child.") 19 | } 20 | } 21 | } 22 | } 23 | 24 | impl Error { 25 | pub fn missing_entry(key: &T) -> Self 26 | where 27 | T: Debug, 28 | { 29 | Self::EntryNotFound(format!("{key:?}")) 30 | } 31 | 32 | pub fn existing_entry(key: &T) -> Self 33 | where 34 | T: Debug, 35 | { 36 | Self::ExistingEntry(format!("{key:?}")) 37 | } 38 | 39 | pub fn already_has_child(key: &T) -> Self 40 | where 41 | T: Debug, 42 | { 43 | Self::AlreadyHasChild(format!("{key:?}")) 44 | } 45 | } 46 | -------------------------------------------------------------------------------- /src/data_struct/mod.rs: -------------------------------------------------------------------------------- 1 | // Data-structures implemented here are voluntarily more exhaustive than they need to be for the 2 | // sake of Covercrypt. 3 | #![allow(dead_code)] 4 | 5 | mod dictionary; 6 | mod revision_map; 7 | mod revision_vec; 8 | 9 | pub mod error; 10 | pub use dictionary::Dict; 11 | pub use revision_map::RevisionMap; 12 | pub use revision_vec::RevisionVec; 13 | -------------------------------------------------------------------------------- /src/data_struct/revision_map.rs: -------------------------------------------------------------------------------- 1 | use std::{ 2 | borrow::Borrow, 3 | collections::{ 4 | hash_map::{Entry, OccupiedEntry, VacantEntry}, 5 | HashMap, LinkedList, 6 | }, 7 | fmt::Debug, 8 | hash::Hash, 9 | }; 10 | 11 | /// A `RevisionMap` is a `HashMap` which keys are mapped to sequences of values. 12 | /// Upon insertion for an existing key, the new value is prepended to the 13 | /// sequence of older values instead of replacing it. 14 | /// 15 | /// Map { 16 | /// key2: b 17 | /// key1: a" -> a' > a 18 | /// key3: c' -> c 19 | /// } 20 | /// 21 | /// Insertions are only allowed at the front of the linked list. 22 | /// Deletions can only happen at the end of the linked list. 23 | /// 24 | /// This guarantees that the entry versions are always ordered. 25 | #[derive(Debug, PartialEq, Eq)] 26 | pub struct RevisionMap 27 | where 28 | K: Debug + PartialEq + Eq + Hash, 29 | V: Debug, 30 | { 31 | pub(crate) map: HashMap>, 32 | } 33 | 34 | impl Default for RevisionMap 35 | where 36 | K: Hash + PartialEq + Eq + Clone + Debug, 37 | V: Clone + Debug, 38 | { 39 | fn default() -> Self { 40 | Self { 41 | map: HashMap::default(), 42 | } 43 | } 44 | } 45 | 46 | impl RevisionMap 47 | where 48 | K: Hash + PartialEq + Eq + Clone + Debug, 49 | V: Clone + Debug, 50 | { 51 | #[must_use] 52 | pub fn new() -> Self { 53 | Self { 54 | map: HashMap::new(), 55 | } 56 | } 57 | 58 | #[must_use] 59 | pub fn with_capacity(capacity: usize) -> Self { 60 | Self { 61 | map: HashMap::with_capacity(capacity), 62 | } 63 | } 64 | 65 | /// Returns the number of chains stored. 66 | #[must_use] 67 | pub fn len(&self) -> usize { 68 | self.map.len() 69 | } 70 | 71 | /// Returns the total number of elements stored. 
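/// # Example
///
/// An illustrative sketch (based on `tests::test_revision_map` below; the
/// `RevisionMap<String, String>` instantiation is only an example):
///
/// ```ignore
/// let mut map: RevisionMap<String, String> = RevisionMap::new();
/// map.insert("Part1".to_string(), "Part1V1".to_string());
/// map.insert("Part1".to_string(), "Part1V2".to_string()); // prepended to the same chain
/// assert_eq!(map.len(), 1);            // one chain
/// assert_eq!(map.count_elements(), 2); // two revisions in that chain
/// assert_eq!(map.get_latest("Part1"), Some(&"Part1V2".to_string()));
/// ```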
72 | pub fn count_elements(&self) -> usize { 73 | self.map.values().map(LinkedList::len).sum() 74 | } 75 | 76 | pub fn chain_length(&self, key: &K) -> usize { 77 | self.map 78 | .get(key) 79 | .map_or(0, std::collections::LinkedList::len) 80 | } 81 | 82 | #[must_use] 83 | pub fn is_empty(&self) -> bool { 84 | self.map.is_empty() 85 | } 86 | 87 | fn insert_new_chain(entry: VacantEntry>, value: V) { 88 | let mut new_chain = LinkedList::new(); 89 | new_chain.push_front(value); 90 | entry.insert(new_chain); 91 | } 92 | 93 | fn insert_in_chain(mut entry: OccupiedEntry>, value: V) { 94 | let chain = entry.get_mut(); 95 | chain.push_front(value); 96 | } 97 | 98 | /// Inserts value at the front of the chain for a given key 99 | pub fn insert(&mut self, key: K, value: V) { 100 | match self.map.entry(key) { 101 | Entry::Occupied(entry) => Self::insert_in_chain(entry, value), 102 | Entry::Vacant(entry) => Self::insert_new_chain(entry, value), 103 | } 104 | } 105 | 106 | /// Returns the last revised value for a given key. 107 | pub fn get_latest(&self, key: &Q) -> Option<&V> 108 | where 109 | K: Borrow, 110 | Q: Hash + Eq + ?Sized, 111 | { 112 | self.map.get(key).and_then(LinkedList::front) 113 | } 114 | 115 | /// Returns a mutable reference to the last revised value for a given key. 116 | pub fn get_latest_mut(&mut self, key: &Q) -> Option<&mut V> 117 | where 118 | K: Borrow, 119 | Q: Hash + Eq + ?Sized, 120 | { 121 | self.map.get_mut(key).and_then(LinkedList::front_mut) 122 | } 123 | 124 | /// Returns true if the given key is bound to some value. 125 | pub fn contains_key(&self, key: &K) -> bool { 126 | self.map.contains_key(key) 127 | } 128 | 129 | /// Iterates through all keys in arbitrary order. 130 | pub fn keys(&self) -> impl Iterator { 131 | self.map.keys() 132 | } 133 | 134 | /// Iterates through all key/value couples in arbitrary order. 135 | pub fn iter(&self) -> impl Iterator)> { 136 | self.map.iter() 137 | } 138 | 139 | /// Iterates through all revisions of a given key starting with the more 140 | /// recent one. 141 | pub fn get(&self, key: &Q) -> Option<&LinkedList> 142 | where 143 | K: Borrow, 144 | Q: Hash + Eq + ?Sized, 145 | { 146 | self.map.get(key) //.map(RevisionList::iter) 147 | } 148 | 149 | /// Removes and returns an iterator over all revisions from a given key. 150 | pub fn remove(&mut self, key: &Q) -> Option> 151 | where 152 | K: Borrow, 153 | Q: Hash + Eq + ?Sized, 154 | { 155 | self.map.remove(key).map(LinkedList::into_iter) 156 | } 157 | 158 | /// Keeps the n more recent values for a given key and returns an the list 159 | /// of removed values if the key was found. 160 | pub fn keep(&mut self, key: &Q, n: usize) -> Option> 161 | where 162 | K: Borrow, 163 | Q: Hash + Eq + ?Sized, 164 | { 165 | let chain = self.map.get_mut(key)?; 166 | if n <= chain.len() { 167 | Some(chain.split_off(n).into_iter()) 168 | } else { 169 | None 170 | } 171 | } 172 | 173 | /// Retains only the elements with a key validating the given predicate. 
174 | pub fn retain(&mut self, f: impl Fn(&K) -> bool) { 175 | self.map.retain(|key, _| f(key)); 176 | } 177 | } 178 | 179 | #[cfg(test)] 180 | mod tests { 181 | #![allow(clippy::unnecessary_to_owned)] 182 | use std::collections::HashSet; 183 | 184 | use super::*; 185 | 186 | #[test] 187 | fn test_revision_map() { 188 | let mut map: RevisionMap = RevisionMap::new(); 189 | assert!(map.is_empty()); 190 | 191 | // Insertions 192 | map.insert("Part1".to_string(), "Part1V1".to_string()); 193 | assert_eq!(map.count_elements(), 1); 194 | assert_eq!(map.len(), 1); 195 | map.insert("Part1".to_string(), "Part1V2".to_string()); 196 | assert_eq!(map.count_elements(), 2); 197 | // two elements in the same chain 198 | assert_eq!(map.len(), 1); 199 | 200 | map.insert("Part2".to_string(), "Part2V1".to_string()); 201 | map.insert("Part2".to_string(), "Part2V2".to_string()); 202 | map.insert("Part2".to_string(), "Part2V3".to_string()); 203 | assert_eq!(map.len(), 2); 204 | assert_eq!(map.count_elements(), 5); 205 | 206 | map.insert("Part3".to_string(), "Part3V1".to_string()); 207 | assert_eq!(map.count_elements(), 6); 208 | 209 | // Get 210 | assert_eq!(map.get_latest("Part1").unwrap(), "Part1V2"); 211 | assert_eq!(map.get_latest("Part2").unwrap(), "Part2V3"); 212 | assert!(map.get_latest("Missing").is_none()); 213 | 214 | // Iterators 215 | let vec: Vec<_> = map.get("Part1").unwrap().iter().collect(); 216 | assert_eq!(vec, vec!["Part1V2", "Part1V1"]); 217 | 218 | let keys_set = map.keys().collect::>(); 219 | assert!(keys_set.contains(&"Part1".to_string())); 220 | assert!(keys_set.contains(&"Part2".to_string())); 221 | 222 | // Remove values 223 | let vec: Vec<_> = map.remove("Part1").unwrap().collect(); 224 | assert_eq!(vec, vec!["Part1V2".to_string(), "Part1V1".to_string()]); 225 | assert_eq!(map.count_elements(), 4); 226 | assert_eq!(map.len(), 2); 227 | 228 | // Remove older values in a chain 229 | let vec: Vec<_> = map.keep("Part2", 1).unwrap().collect(); 230 | assert_eq!(vec, vec!["Part2V2".to_string(), "Part2V1".to_string()]); 231 | assert_eq!(map.count_elements(), 2); 232 | let vec: Vec<_> = map.remove("Part2").unwrap().collect(); 233 | assert_eq!(vec, vec!["Part2V3".to_string()]); 234 | // Empty pop tail 235 | assert!(map.keep("Part3", 1).unwrap().next().is_none()); 236 | 237 | // Retain 238 | map.retain(|_| true); 239 | assert_eq!(map.count_elements(), 1); 240 | map.retain(|_| false); 241 | assert!(map.is_empty()); 242 | } 243 | } 244 | -------------------------------------------------------------------------------- /src/data_struct/revision_vec.rs: -------------------------------------------------------------------------------- 1 | use std::collections::{ 2 | linked_list::{self, Iter}, 3 | LinkedList, VecDeque, 4 | }; 5 | 6 | /// A `RevisionVec` is a vector that stores pairs containing a key 7 | /// and a sequence of values. Inserting a new value in the sequence 8 | /// associated to an existing key prepends this value to the sequence. 9 | /// 10 | /// Vec [ 11 | /// 0: key -> a" -> a' -> a 12 | /// 1: key -> b 13 | /// 2: key -> c' -> c 14 | /// ] 15 | /// 16 | /// Insertions are only allowed at the front of the linked list. 17 | /// Deletions can only happen at the end of the linked list. 18 | /// 19 | /// This guarantees that the entry versions are always ordered. 
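/// # Example
///
/// A minimal usage sketch (added for illustration, mirroring
/// `tests::test_revision_vec` below; the `RevisionVec<u32, String>`
/// instantiation is only an example):
///
/// ```ignore
/// let mut rv: RevisionVec<u32, String> = RevisionVec::new();
/// rv.create_chain_with_single_value(1, "a".to_string());
/// rv.insert_new_chain(2, ["b'".to_string(), "b".to_string()].into_iter().collect());
/// assert_eq!(rv.len(), 2);            // two chains
/// assert_eq!(rv.count_elements(), 3); // three values overall
/// // Depth-first iteration yields every revision of chain 1, then chain 2.
/// assert_eq!(rv.flat_iter().count(), 3);
/// ```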
20 | #[derive(Clone, Debug, PartialEq, Eq)] 21 | pub struct RevisionVec { 22 | chains: Vec<(K, LinkedList)>, 23 | } 24 | 25 | pub struct RevisionIterator<'a, K, T> { 26 | ks: Vec<&'a K>, 27 | ls: Vec>, 28 | } 29 | 30 | impl<'a, K, T> Iterator for RevisionIterator<'a, K, T> { 31 | type Item = Vec<(&'a K, &'a T)>; 32 | 33 | fn next(&mut self) -> Option { 34 | self.ks 35 | .iter() 36 | .zip(self.ls.iter_mut()) 37 | .map(|(k, it)| it.next().map(|t| (*k, t))) 38 | .collect() 39 | } 40 | } 41 | 42 | impl Default for RevisionVec { 43 | fn default() -> Self { 44 | Self { 45 | chains: Default::default(), 46 | } 47 | } 48 | } 49 | 50 | impl RevisionVec { 51 | #[must_use] 52 | pub fn new() -> Self { 53 | Self { chains: Vec::new() } 54 | } 55 | 56 | #[must_use] 57 | pub fn with_capacity(capacity: usize) -> Self { 58 | Self { 59 | chains: Vec::with_capacity(capacity), 60 | } 61 | } 62 | 63 | /// Returns the number of chains stored. 64 | #[must_use] 65 | pub fn len(&self) -> usize { 66 | self.chains.len() 67 | } 68 | 69 | /// Returns the total number of elements stored. 70 | #[must_use] 71 | pub fn count_elements(&self) -> usize { 72 | self.chains.iter().map(|(_, chain)| chain.len()).sum() 73 | } 74 | 75 | #[must_use] 76 | pub fn is_empty(&self) -> bool { 77 | self.chains.is_empty() 78 | } 79 | 80 | /// Creates and insert a new chain with a single value. 81 | pub fn create_chain_with_single_value(&mut self, key: K, val: T) { 82 | // Be aware that inserting a value for a key that is already associated to a 83 | // chain breaks the CoverCrypt scheme as two chains will exist for the same key. 84 | 85 | let mut new_chain = LinkedList::new(); 86 | new_chain.push_front(val); 87 | self.chains.push((key, new_chain)); 88 | } 89 | 90 | /// Inserts a new chain with a corresponding key. 91 | pub fn insert_new_chain(&mut self, key: K, new_chain: LinkedList) { 92 | // Be aware that inserting a new chain for a key that is already associated to a 93 | // chain breaks the CoverCrypt scheme as two chains will exist for the same key. 94 | 95 | if !new_chain.is_empty() { 96 | self.chains.push((key, new_chain)); 97 | } 98 | } 99 | 100 | pub fn clear(&mut self) { 101 | self.chains.clear(); 102 | } 103 | 104 | /// Retains only the elements with a key validating the given predicate. 105 | pub fn retain(&mut self, f: impl Fn(&K) -> bool) { 106 | self.chains.retain(|(key, _)| f(key)); 107 | } 108 | 109 | /// Returns an iterator over each key-chains pair 110 | pub fn iter(&self) -> impl Iterator)> { 111 | self.chains.iter().map(|(key, chain)| (key, chain)) 112 | } 113 | 114 | /// Returns an iterator over each key-chains pair that allow modifying chain 115 | pub fn iter_mut(&mut self) -> impl Iterator)> { 116 | self.chains.iter_mut().map(|(ref key, chain)| (key, chain)) 117 | } 118 | 119 | /// Iterates through all versions of all entries in a depth-first manner. 120 | /// Returns the key and value for each entry. 121 | pub fn flat_iter(&self) -> impl Iterator { 122 | self.chains 123 | .iter() 124 | .flat_map(|(key, chain)| chain.iter().map(move |val| (key, val))) 125 | } 126 | 127 | pub fn revisions(&self) -> impl Iterator> { 128 | let (ks, ls) = self.chains.iter().map(|(k, l)| (k, l.iter())).unzip(); 129 | RevisionIterator { ks, ls } 130 | } 131 | 132 | /// Iterates through all versions of all entry in a breadth-first manner. 
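/// # Example
///
/// An illustrative sketch (see `tests::test_revision_vec` below): chain heads
/// are yielded first, then the next revision of each chain, and so on.
///
/// ```ignore
/// let mut rv: RevisionVec<u32, String> = RevisionVec::new();
/// rv.insert_new_chain(1, ["a'".to_string(), "a".to_string()].into_iter().collect());
/// rv.create_chain_with_single_value(2, "b".to_string());
/// let order: Vec<&String> = rv.bfs().collect();
/// assert_eq!(order, vec!["a'", "b", "a"]);
/// ```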
133 | #[must_use] 134 | pub fn bfs(&self) -> BfsQueue { 135 | BfsQueue::new(self) 136 | } 137 | 138 | pub fn into_keys(self) -> impl Iterator { 139 | self.chains.into_iter().map(|(k, _)| k) 140 | } 141 | } 142 | 143 | impl IntoIterator for RevisionVec { 144 | type IntoIter = std::vec::IntoIter<(K, LinkedList)>; 145 | type Item = (K, LinkedList); 146 | 147 | fn into_iter(self) -> Self::IntoIter { 148 | self.chains.into_iter() 149 | } 150 | } 151 | 152 | /// Breadth-first search iterator for `RevisionVec`. 153 | pub struct BfsQueue<'a, T> { 154 | queue: VecDeque>, 155 | } 156 | 157 | impl<'a, T> BfsQueue<'a, T> { 158 | pub fn new(revision_vec: &'a RevisionVec) -> Self { 159 | // add all chain heads to the iterator queue 160 | Self { 161 | queue: revision_vec 162 | .chains 163 | .iter() 164 | .map(|(_, chain)| chain.iter()) 165 | .collect(), 166 | } 167 | } 168 | } 169 | 170 | impl<'a, T> Iterator for BfsQueue<'a, T> { 171 | type Item = &'a T; 172 | 173 | fn next(&mut self) -> Option { 174 | // get first non-empty iterator in the queue 175 | while let Some(mut iterator) = self.queue.pop_front() { 176 | if let Some(element) = iterator.next() { 177 | // put back the iterator at the end of the queue 178 | self.queue.push_back(iterator); 179 | return Some(element); 180 | } 181 | } 182 | None 183 | } 184 | } 185 | 186 | impl FromIterator<(K, LinkedList)> for RevisionVec { 187 | fn from_iter)>>(iter: I) -> Self { 188 | Self { 189 | chains: iter.into_iter().collect(), 190 | } 191 | } 192 | } 193 | 194 | impl FromIterator<(K, T)> for RevisionVec { 195 | fn from_iter>(iter: I) -> Self { 196 | Self { 197 | chains: iter 198 | .into_iter() 199 | .map(|(k, v)| (k, LinkedList::from_iter([v]))) 200 | .collect(), 201 | } 202 | } 203 | } 204 | 205 | #[cfg(test)] 206 | mod tests { 207 | 208 | use super::*; 209 | 210 | #[test] 211 | fn test_revision_vec() { 212 | let mut revision_vec: RevisionVec = RevisionVec::new(); 213 | assert!(revision_vec.is_empty()); 214 | assert_eq!(revision_vec.len(), 0); 215 | 216 | // Insert 217 | revision_vec.insert_new_chain( 218 | 1, 219 | vec!["a\"".to_string(), "a'".to_string(), "a".to_string()] 220 | .into_iter() 221 | .collect(), 222 | ); 223 | revision_vec.create_chain_with_single_value(2, "b".to_string()); 224 | revision_vec.insert_new_chain( 225 | 3, 226 | vec!["c'".to_string(), "c".to_string()] 227 | .into_iter() 228 | .collect(), 229 | ); 230 | 231 | assert_eq!(revision_vec.count_elements(), 6); 232 | assert_eq!(revision_vec.len(), 3); 233 | 234 | // Iterators 235 | let depth_iter: Vec<_> = revision_vec.flat_iter().collect(); 236 | assert_eq!( 237 | depth_iter, 238 | vec![ 239 | (&1, &"a\"".to_string()), 240 | (&1, &"a'".to_string()), 241 | (&1, &"a".to_string()), 242 | (&2, &"b".to_string()), 243 | (&3, &"c'".to_string()), 244 | (&3, &"c".to_string()), 245 | ] 246 | ); 247 | 248 | let breadth_iter: Vec<_> = revision_vec.bfs().collect(); 249 | assert_eq!( 250 | breadth_iter, 251 | vec![ 252 | &"a\"".to_string(), 253 | &"b".to_string(), 254 | &"c'".to_string(), 255 | &"a'".to_string(), 256 | &"c".to_string(), 257 | &"a".to_string(), 258 | ] 259 | ); 260 | 261 | // Retain 262 | revision_vec.retain(|key| key == &1); 263 | assert_eq!(revision_vec.count_elements(), 3); 264 | assert_eq!(revision_vec.len(), 1); 265 | 266 | // Clear 267 | revision_vec.clear(); 268 | assert!(revision_vec.is_empty()); 269 | } 270 | } 271 | -------------------------------------------------------------------------------- /src/encrypted_header.rs: 
-------------------------------------------------------------------------------- 1 | use cosmian_crypto_core::{ 2 | kdf256, Aes256Gcm, CryptoCoreError, Dem, FixedSizeCBytes, Instantiable, Nonce, 3 | RandomFixedSizeCBytes, Secret, SymmetricKey, 4 | }; 5 | 6 | use crate::{ 7 | abe_policy::AccessPolicy, api::Covercrypt, core::SHARED_SECRET_LENGTH, traits::KemAc, Error, 8 | MasterPublicKey, UserSecretKey, XEnc, 9 | }; 10 | 11 | /// Encrypted header holding a `Covercrypt` encapsulation of a 256-byte secret, and metadata 12 | /// encrypted under the scheme AES256Gcm using a key derived from the encapsulated secret. 13 | #[derive(Debug, PartialEq)] 14 | pub struct EncryptedHeader { 15 | pub encapsulation: XEnc, 16 | pub encrypted_metadata: Option>, 17 | } 18 | 19 | impl EncryptedHeader { 20 | /// Generates a new encrypted header for a random secret and the given metadata. 21 | /// Returns the encrypted header along with the secret. 22 | /// 23 | /// - `cc` : `Covercrypt` object 24 | /// - `mpk` : `Covercrypt` public key 25 | /// - `ap` : access policy used for the encapsulation 26 | /// - `header_metadata` : additional data symmetrically encrypted in the header 27 | /// - `authentication_data` : authentication data used in the DEM encryption 28 | pub fn generate( 29 | cc: &Covercrypt, 30 | mpk: &MasterPublicKey, 31 | ap: &AccessPolicy, 32 | metadata: Option<&[u8]>, 33 | authentication_data: Option<&[u8]>, 34 | ) -> Result<(Secret, Self), Error> { 35 | let (seed, encapsulation) = cc.encaps(mpk, ap)?; 36 | 37 | let encrypted_metadata = metadata 38 | .map(|bytes| { 39 | let key = SymmetricKey::derive(&seed, &[0u8])?; 40 | let nonce = Nonce::new(&mut *cc.rng()); 41 | let ctx = Aes256Gcm::new(&key).encrypt(&nonce, bytes, authentication_data)?; 42 | Ok::<_, Error>([nonce.as_bytes(), &ctx].concat()) 43 | }) 44 | .transpose()?; 45 | 46 | let mut secret = Secret::default(); 47 | kdf256!(&mut *secret, &*seed, &[1u8]); 48 | 49 | Ok(( 50 | secret, 51 | Self { 52 | encapsulation, 53 | encrypted_metadata, 54 | }, 55 | )) 56 | } 57 | 58 | /// Decrypts the header with the given user secret key. 59 | /// 60 | /// - `cc` : `Covercrypt` object 61 | /// - `usk` : `Covercrypt` user secret key 62 | /// - `authentication_data` : authentication data used in the DEM encryption 63 | pub fn decrypt( 64 | &self, 65 | cc: &Covercrypt, 66 | usk: &UserSecretKey, 67 | authentication_data: Option<&[u8]>, 68 | ) -> Result, Error> { 69 | cc.decaps(usk, &self.encapsulation)? 70 | .map(|seed| { 71 | let metadata = self 72 | .encrypted_metadata 73 | .as_ref() 74 | .map(|ctx| { 75 | if ctx.len() < Aes256Gcm::NONCE_LENGTH { 76 | Err(CryptoCoreError::CiphertextTooSmallError { 77 | ciphertext_len: ctx.len(), 78 | min: Aes256Gcm::NONCE_LENGTH as u64, 79 | }) 80 | } else { 81 | let key = SymmetricKey::derive(&seed, &[0u8])?; 82 | Aes256Gcm::new(&key).decrypt( 83 | &Nonce::try_from_slice(&ctx[..Aes256Gcm::NONCE_LENGTH])?, 84 | &ctx[Aes256Gcm::NONCE_LENGTH..], 85 | authentication_data, 86 | ) 87 | } 88 | }) 89 | .transpose()?; 90 | 91 | let mut secret = Secret::::default(); 92 | kdf256!(&mut *secret, &*seed, &[1u8]); 93 | 94 | Ok(CleartextHeader { secret, metadata }) 95 | }) 96 | .transpose() 97 | } 98 | } 99 | 100 | /// Structure containing all data encrypted in an `EncryptedHeader`. 
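/// A `CleartextHeader` is recovered by decrypting an `EncryptedHeader` with a
/// matching user secret key. An illustrative round-trip sketch (mirroring the
/// serialization test below; `cc_keygen` comes from `test_utils` and the access
/// policy string is only an example):
///
/// ```ignore
/// let cc = Covercrypt::default();
/// let (mut msk, mpk) = cc_keygen(&cc, false)?;
/// let ap = AccessPolicy::parse("DPT::MKG && SEC::TOP")?;
/// let usk = cc.generate_user_secret_key(&mut msk, &ap)?;
/// let (secret, header) =
///     EncryptedHeader::generate(&cc, &mpk, &ap, Some("metadata".as_bytes()), None)?;
/// let cleartext = header.decrypt(&cc, &usk, None)?.expect("the usk matches the policy");
/// assert_eq!(secret, cleartext.secret);
/// assert_eq!(cleartext.metadata.as_deref(), Some("metadata".as_bytes()));
/// ```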
101 | #[derive(Debug, PartialEq, Eq)] 102 | pub struct CleartextHeader { 103 | pub secret: Secret, 104 | pub metadata: Option>, 105 | } 106 | 107 | mod serialization { 108 | 109 | use super::*; 110 | use cosmian_crypto_core::bytes_ser_de::{ 111 | to_leb128_len, Deserializer, Serializable, Serializer, 112 | }; 113 | 114 | impl Serializable for EncryptedHeader { 115 | type Error = Error; 116 | 117 | fn length(&self) -> usize { 118 | self.encapsulation.length() 119 | + if let Some(metadata) = &self.encrypted_metadata { 120 | to_leb128_len(metadata.len()) + metadata.len() 121 | } else { 122 | 1 123 | } 124 | } 125 | 126 | fn write(&self, ser: &mut Serializer) -> Result { 127 | let mut n = self.encapsulation.write(ser)?; 128 | match &self.encrypted_metadata { 129 | Some(bytes) => n += ser.write_vec(bytes)?, 130 | None => n += ser.write_vec(&[])?, 131 | } 132 | Ok(n) 133 | } 134 | 135 | fn read(de: &mut Deserializer) -> Result { 136 | let encapsulation = de.read::()?; 137 | let ciphertext = de.read_vec()?; 138 | let encrypted_metadata = if ciphertext.is_empty() { 139 | None 140 | } else { 141 | Some(ciphertext) 142 | }; 143 | Ok(Self { 144 | encapsulation, 145 | encrypted_metadata, 146 | }) 147 | } 148 | } 149 | 150 | impl Serializable for CleartextHeader { 151 | type Error = Error; 152 | 153 | fn length(&self) -> usize { 154 | SHARED_SECRET_LENGTH 155 | + to_leb128_len( 156 | self.metadata 157 | .as_ref() 158 | .map(std::vec::Vec::len) 159 | .unwrap_or_default(), 160 | ) 161 | + self 162 | .metadata 163 | .as_ref() 164 | .map(std::vec::Vec::len) 165 | .unwrap_or_default() 166 | } 167 | 168 | fn write(&self, ser: &mut Serializer) -> Result { 169 | let mut n = ser.write_array(&self.secret[..SHARED_SECRET_LENGTH])?; 170 | match &self.metadata { 171 | Some(bytes) => n += ser.write_vec(bytes)?, 172 | None => n += ser.write_vec(&[])?, 173 | } 174 | Ok(n) 175 | } 176 | 177 | fn read(de: &mut Deserializer) -> Result { 178 | let seed = 179 | Secret::from_unprotected_bytes(&mut de.read_array::()?); 180 | let metadata = de.read_vec()?; 181 | let metadata = if metadata.is_empty() { 182 | None 183 | } else { 184 | Some(metadata) 185 | }; 186 | Ok(Self { 187 | secret: seed, 188 | metadata, 189 | }) 190 | } 191 | } 192 | 193 | #[test] 194 | fn test_ser() { 195 | use crate::test_utils::cc_keygen; 196 | use cosmian_crypto_core::bytes_ser_de::test_serialization; 197 | 198 | let cc = Covercrypt::default(); 199 | let (mut msk, mpk) = cc_keygen(&cc, false).unwrap(); 200 | 201 | let ap = AccessPolicy::parse("(DPT::MKG || DPT::FIN) && SEC::TOP").unwrap(); 202 | let usk = cc.generate_user_secret_key(&mut msk, &ap).unwrap(); 203 | 204 | // 205 | // Simple ciphertext. 
206 | // 207 | 208 | let test_encrypted_header = |ap, metadata, authentication_data| { 209 | let (secret, encrypted_header) = 210 | EncryptedHeader::generate(&cc, &mpk, &ap, metadata, authentication_data).unwrap(); 211 | test_serialization(&encrypted_header) 212 | .expect("failed serialization test for the encrypted header"); 213 | let decrypted_header = encrypted_header 214 | .decrypt(&cc, &usk, authentication_data) 215 | .unwrap(); 216 | let decrypted_header = decrypted_header.unwrap(); 217 | test_serialization(&decrypted_header) 218 | .expect("failed serialization test for the cleartext header"); 219 | assert_eq!( 220 | secret, decrypted_header.secret, 221 | "failed secret equality test" 222 | ); 223 | assert_eq!( 224 | metadata, 225 | decrypted_header.metadata.as_deref(), 226 | "failed metadata equality test" 227 | ); 228 | }; 229 | 230 | test_encrypted_header(AccessPolicy::parse("DPT::MKG").unwrap(), None, None); 231 | test_encrypted_header( 232 | AccessPolicy::parse("DPT::MKG").unwrap(), 233 | Some("metadata".as_bytes()), 234 | None, 235 | ); 236 | test_encrypted_header( 237 | AccessPolicy::parse("DPT::MKG").unwrap(), 238 | Some("metadata".as_bytes()), 239 | Some("authentication data".as_bytes()), 240 | ); 241 | test_encrypted_header( 242 | AccessPolicy::parse("DPT::MKG").unwrap(), 243 | None, 244 | Some("authentication data".as_bytes()), 245 | ); 246 | } 247 | } 248 | -------------------------------------------------------------------------------- /src/error.rs: -------------------------------------------------------------------------------- 1 | //! Error type for the crate. 2 | 3 | use core::{fmt::Display, num::TryFromIntError}; 4 | 5 | use cosmian_crypto_core::CryptoCoreError; 6 | 7 | #[derive(Debug)] 8 | pub enum Error { 9 | Kem(String), 10 | CryptoCoreError(CryptoCoreError), 11 | KeyError(String), 12 | AttributeNotFound(String), 13 | ExistingDimension(String), 14 | OperationNotPermitted(String), 15 | InvalidBooleanExpression(String), 16 | InvalidAttribute(String), 17 | DimensionNotFound(String), 18 | ConversionFailed(String), 19 | Tracing(String), 20 | } 21 | 22 | impl Display for Error { 23 | fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { 24 | match self { 25 | Self::Kem(err) => write!(f, "Kyber error: {err}"), 26 | Self::CryptoCoreError(err) => write!(f, "CryptoCore error{err}"), 27 | Self::KeyError(err) => write!(f, "{err}"), 28 | Self::AttributeNotFound(err) => write!(f, "attribute not found: {err}"), 29 | Self::ExistingDimension(dimension) => { 30 | write!(f, "dimension {dimension} already exists") 31 | } 32 | Self::InvalidBooleanExpression(expr_str) => { 33 | write!(f, "invalid boolean expression: {expr_str}") 34 | } 35 | Self::InvalidAttribute(attr) => write!(f, "invalid attribute: {attr}"), 36 | Self::DimensionNotFound(dim_str) => write!(f, "cannot find dimension: {dim_str}"), 37 | Self::ConversionFailed(err) => write!(f, "Conversion failed: {err}"), 38 | Self::OperationNotPermitted(err) => write!(f, "Operation not permitted: {err}"), 39 | Self::Tracing(err) => write!(f, "tracing error: {err}"), 40 | } 41 | } 42 | } 43 | 44 | impl From for Error { 45 | fn from(e: TryFromIntError) -> Self { 46 | Self::ConversionFailed(e.to_string()) 47 | } 48 | } 49 | 50 | impl From for Error { 51 | fn from(e: CryptoCoreError) -> Self { 52 | Self::CryptoCoreError(e) 53 | } 54 | } 55 | 56 | impl std::error::Error for Error {} 57 | -------------------------------------------------------------------------------- /src/lib.rs: 
-------------------------------------------------------------------------------- 1 | //! This instantiation of Covercrypt is based on Curve25519 and Kyber512, and as such delivers 128 2 | //! bits of both pre- and post-quantum CCA security. 3 | //! 4 | //! The KEMAC defined in [1] is extended by a PKE interface using AES256-GCM as DEM in the KEM/DEM 5 | //! framework described in [2]. 6 | //! 7 | //! [1] "Covercrypt: an Efficient Early-Abort KEM for Hidden Access Policies with Traceability from 8 | //! the DDH and LWE", T. Brézot, P. de Perthuis and D. Pointcheval 2023. 9 | //! [2] "A Proposal for an ISO Standard for Public Key Encryption (version 2.1)", Shoup 2001. 10 | 11 | mod error; 12 | 13 | mod abe_policy; 14 | mod ae; 15 | mod core; 16 | mod data_struct; 17 | mod encrypted_header; 18 | 19 | pub mod api; 20 | pub mod traits; 21 | 22 | pub use abe_policy::{AccessStructure, EncryptionHint, QualifiedAttribute}; 23 | 24 | #[cfg(any(test, feature = "test-utils"))] 25 | pub mod test_utils; 26 | 27 | #[cfg(feature = "test-utils")] 28 | pub use abe_policy::gen_structure; 29 | 30 | #[cfg(feature = "test-utils")] 31 | pub use test_utils::cc_keygen; 32 | 33 | pub use self::core::{MasterPublicKey, MasterSecretKey, UserSecretKey, XEnc}; 34 | pub use abe_policy::AccessPolicy; 35 | pub use encrypted_header::{CleartextHeader, EncryptedHeader}; 36 | pub use error::Error; 37 | -------------------------------------------------------------------------------- /src/test_utils/mod.rs: -------------------------------------------------------------------------------- 1 | use crate::{abe_policy::gen_structure, api::Covercrypt, Error, MasterPublicKey, MasterSecretKey}; 2 | 3 | //pub mod non_regression; 4 | 5 | /// Creates the test access structure. 6 | pub fn cc_keygen( 7 | cc: &Covercrypt, 8 | complete: bool, 9 | ) -> Result<(MasterSecretKey, MasterPublicKey), Error> { 10 | let (mut msk, _) = cc.setup()?; 11 | gen_structure(&mut msk.access_structure, complete)?; 12 | let mpk = cc.update_msk(&mut msk)?; 13 | Ok((msk, mpk)) 14 | } 15 | 16 | #[cfg(test)] 17 | mod tests { 18 | 19 | use super::*; 20 | use crate::{ 21 | abe_policy::{AccessPolicy, EncryptionHint, QualifiedAttribute}, 22 | api::Covercrypt, 23 | traits::KemAc, 24 | EncryptedHeader, 25 | }; 26 | 27 | #[test] 28 | fn test_add_attribute() -> Result<(), Error> { 29 | let cc = Covercrypt::default(); 30 | let (mut msk, _mpk) = cc_keygen(&cc, false)?; 31 | 32 | let decryption_policy = AccessPolicy::parse("SEC::LOW")?; 33 | let mut low_secret_usk = cc.generate_user_secret_key(&mut msk, &decryption_policy)?; 34 | 35 | let _ = &mut msk.access_structure.add_attribute( 36 | QualifiedAttribute::new("DPT", "Sales"), 37 | EncryptionHint::Classic, 38 | None, 39 | )?; 40 | let mpk = cc.update_msk(&mut msk)?; 41 | 42 | let secret_sales_ap = AccessPolicy::parse("SEC::LOW && DPT::Sales")?; 43 | let (_, encrypted_header) = 44 | EncryptedHeader::generate(&cc, &mpk, &secret_sales_ap, None, None)?; 45 | 46 | // User cannot decrypt new message without refreshing its key 47 | assert!(encrypted_header 48 | .decrypt(&cc, &low_secret_usk, None) 49 | .unwrap() 50 | .is_none()); 51 | 52 | cc.refresh_usk(&mut msk, &mut low_secret_usk, false)?; 53 | 54 | assert!(encrypted_header 55 | .decrypt(&cc, &low_secret_usk, None) 56 | .unwrap() 57 | .is_none()); 58 | 59 | Ok(()) 60 | } 61 | 62 | #[test] 63 | fn test_delete_attribute() -> Result<(), Error> { 64 | let cc = Covercrypt::default(); 65 | let (mut msk, mpk) = cc_keygen(&cc, false)?; 66 | 67 | // New user secret key 68 | let 
decryption_policy = AccessPolicy::parse("SEC::TOP && (DPT::FIN || DPT::HR)")?; 69 | let mut top_secret_fin_usk = cc.generate_user_secret_key(&mut msk, &decryption_policy)?; 70 | 71 | // Encrypt 72 | let top_secret_ap = AccessPolicy::parse("SEC::TOP && DPT::FIN")?; 73 | let (_, encrypted_header) = 74 | EncryptedHeader::generate(&cc, &mpk, &top_secret_ap, None, None)?; 75 | 76 | // remove the FIN department 77 | msk.access_structure 78 | .del_attribute(&QualifiedAttribute::new("DPT", "FIN"))?; 79 | 80 | // update the master keys 81 | let _ = cc.update_msk(&mut msk)?; 82 | 83 | assert!(encrypted_header 84 | .decrypt(&cc, &top_secret_fin_usk, None) 85 | .unwrap() 86 | .is_some()); 87 | 88 | let _new_decryption_policy = AccessPolicy::parse("SEC::TOP && DPT::HR")?; 89 | 90 | // Refreshing the USK removes the keys associated to rights that do not exist anymore in 91 | // the MSK, even if it is asked to preserve the old secrets. 92 | cc.refresh_usk(&mut msk, &mut top_secret_fin_usk, true)?; 93 | assert!(encrypted_header 94 | .decrypt(&cc, &top_secret_fin_usk, None) 95 | .unwrap() 96 | .is_none()); 97 | 98 | Ok(()) 99 | } 100 | 101 | #[test] 102 | fn test_deactivate_attribute() -> Result<(), Error> { 103 | let cc = Covercrypt::default(); 104 | let (mut msk, mpk) = cc_keygen(&cc, false)?; 105 | 106 | // 107 | // New user secret key 108 | let decryption_policy = AccessPolicy::parse("SEC::TOP && (DPT::FIN || DPT::HR)")?; 109 | let mut top_secret_fin_usk = cc.generate_user_secret_key(&mut msk, &decryption_policy)?; 110 | 111 | // 112 | // Encrypt 113 | let top_secret_ap = AccessPolicy::parse("SEC::TOP && DPT::FIN")?; 114 | let (_, encrypted_header) = 115 | EncryptedHeader::generate(&cc, &mpk, &top_secret_ap, None, None)?; 116 | 117 | // remove the FIN department 118 | msk.access_structure 119 | .disable_attribute(&QualifiedAttribute::new("DPT", "FIN"))?; 120 | 121 | // update the master keys 122 | let mpk = cc.update_msk(&mut msk)?; 123 | 124 | assert!(encrypted_header 125 | .decrypt(&cc, &top_secret_fin_usk, None) 126 | .unwrap() 127 | .is_some()); 128 | 129 | // Can not encrypt using deactivated attribute 130 | let top_secret_ap = AccessPolicy::parse("SEC::TOP && DPT::FIN")?; 131 | 132 | assert!(EncryptedHeader::generate(&cc, &mpk, &top_secret_ap, None, None).is_err()); 133 | 134 | // refresh the user key and preserve old secrets 135 | cc.refresh_usk(&mut msk, &mut top_secret_fin_usk, true)?; 136 | assert!(encrypted_header 137 | .decrypt(&cc, &top_secret_fin_usk, None) 138 | .unwrap() 139 | .is_some()); 140 | 141 | // refresh the user key and remove old secrets 142 | cc.refresh_usk(&mut msk, &mut top_secret_fin_usk, false)?; 143 | assert!(encrypted_header 144 | .decrypt(&cc, &top_secret_fin_usk, None) 145 | .unwrap() 146 | .is_some()); 147 | 148 | Ok(()) 149 | } 150 | 151 | #[test] 152 | fn test_rename_attribute() -> Result<(), Error> { 153 | let cc = Covercrypt::default(); 154 | let (mut msk, mpk) = cc_keygen(&cc, false)?; 155 | 156 | // New user secret key 157 | let decryption_policy = AccessPolicy::parse("SEC::TOP && DPT::FIN")?; 158 | let mut top_secret_fin_usk = cc.generate_user_secret_key(&mut msk, &decryption_policy)?; 159 | 160 | // Encrypt 161 | let top_secret_ap = AccessPolicy::parse("SEC::TOP && DPT::FIN")?; 162 | let (_, encrypted_header) = 163 | EncryptedHeader::generate(&cc, &mpk, &top_secret_ap, None, None)?; 164 | 165 | // remove the FIN department 166 | msk.access_structure.rename_attribute( 167 | &QualifiedAttribute::new("DPT", "FIN"), 168 | "Finance".to_string(), 169 | )?; 170 | 
171 | // update the master keys 172 | let _ = cc.update_msk(&mut msk)?; 173 | 174 | assert!(encrypted_header 175 | .decrypt(&cc, &top_secret_fin_usk, None) 176 | .unwrap() 177 | .is_some()); 178 | 179 | // refresh the user key and preserve old secrets 180 | let _new_decryption_policy = AccessPolicy::parse("SEC::TOP && DPT::Finance")?; 181 | cc.refresh_usk(&mut msk, &mut top_secret_fin_usk, false)?; 182 | assert!(encrypted_header 183 | .decrypt(&cc, &top_secret_fin_usk, None) 184 | .unwrap() 185 | .is_some()); 186 | 187 | Ok(()) 188 | } 189 | 190 | #[test] 191 | fn encrypt_decrypt_sym_key() -> Result<(), Error> { 192 | let access_policy = AccessPolicy::parse("(DPT::MKG || DPT::FIN) && SEC::TOP").unwrap(); 193 | let cc = Covercrypt::default(); 194 | let (mut msk, mpk) = cc_keygen(&cc, false)?; 195 | let ap = AccessPolicy::parse("DPT::MKG && SEC::TOP")?; 196 | let (sym_key, encrypted_key) = cc.encaps(&mpk, &ap)?; 197 | let usk = cc.generate_user_secret_key(&mut msk, &access_policy)?; 198 | let recovered_key = cc.decaps(&usk, &encrypted_key)?; 199 | assert_eq!(Some(sym_key), recovered_key, "Wrong decryption of the key!"); 200 | Ok(()) 201 | } 202 | 203 | #[test] 204 | fn test_single_attribute_in_access_policy() -> Result<(), Error> { 205 | let cc = Covercrypt::default(); 206 | let (mut msk, _mpk) = cc_keygen(&cc, false)?; 207 | 208 | // 209 | // New user secret key 210 | let _user_key = cc.generate_user_secret_key(&mut msk, &AccessPolicy::parse("SEC::TOP")?)?; 211 | 212 | Ok(()) 213 | } 214 | 215 | #[test] 216 | fn test_rotate_then_encrypt() -> Result<(), Error> { 217 | let top_secret_ap = &AccessPolicy::parse("SEC::TOP")?; 218 | 219 | let cc = Covercrypt::default(); 220 | let (mut msk, mpk) = cc_keygen(&cc, false)?; 221 | 222 | // 223 | // New user secret key 224 | let mut top_secret_fin_usk = 225 | cc.generate_user_secret_key(&mut msk, &AccessPolicy::parse("SEC::TOP && DPT::FIN")?)?; 226 | 227 | // 228 | // Encrypt 229 | let (_, encrypted_header) = 230 | EncryptedHeader::generate(&cc, &mpk, &top_secret_ap.clone(), None, None)?; 231 | 232 | let _plaintext_header = encrypted_header.decrypt(&cc, &top_secret_fin_usk, None)?; 233 | 234 | assert!(_plaintext_header.is_some()); 235 | 236 | // 237 | // Rotate argument (must update master keys) 238 | let rekey_ap = AccessPolicy::Term(QualifiedAttribute::from(("SEC", "TOP"))); 239 | let mpk = cc.rekey(&mut msk, &rekey_ap)?; 240 | 241 | // 242 | // Encrypt with new attribute 243 | let (_, encrypted_header) = 244 | EncryptedHeader::generate(&cc, &mpk, &top_secret_ap.clone(), None, None)?; 245 | 246 | // Decryption fails without refreshing the user key 247 | assert!(encrypted_header 248 | .decrypt(&cc, &top_secret_fin_usk, None) 249 | .unwrap() 250 | .is_none()); 251 | 252 | cc.refresh_usk(&mut msk, &mut top_secret_fin_usk, false)?; 253 | 254 | // The refreshed key can decrypt the header 255 | assert!(encrypted_header 256 | .decrypt(&cc, &top_secret_fin_usk, None) 257 | .unwrap() 258 | .is_some()); 259 | 260 | Ok(()) 261 | } 262 | 263 | #[test] 264 | fn test_broadcast() { 265 | let cc = Covercrypt::default(); 266 | let ap = AccessPolicy::parse("*").unwrap(); 267 | let (mut msk, mpk) = cc.setup().unwrap(); 268 | let usk = cc.generate_user_secret_key(&mut msk, &ap).unwrap(); 269 | let (secret, bc) = cc.encaps(&mpk, &ap).unwrap(); 270 | let res = cc.decaps(&usk, &bc).unwrap(); 271 | assert_eq!(Some(secret), res); 272 | } 273 | } 274 | -------------------------------------------------------------------------------- /src/test_utils/non_regression.rs: 
-------------------------------------------------------------------------------- 1 | use base64::{ 2 | alphabet::STANDARD, 3 | engine::{GeneralPurpose, GeneralPurposeConfig}, 4 | Engine, 5 | }; 6 | use cosmian_crypto_core::bytes_ser_de::{Deserializer, Serializable}; 7 | 8 | use super::policy; 9 | use crate::{ 10 | abe_policy::AccessPolicy, 11 | core::{MasterPublicKey, MasterSecretKey, UserSecretKey}, 12 | Covercrypt, EncryptedHeader, Error, 13 | }; 14 | 15 | #[derive(Debug, serde::Serialize, serde::Deserialize)] 16 | pub struct EncryptionTestVector { 17 | encryption_policy: String, 18 | plaintext: String, 19 | ciphertext: String, 20 | header_metadata: String, 21 | authentication_data: String, 22 | } 23 | 24 | impl EncryptionTestVector { 25 | pub fn decrypt(&self, user_key: &str) -> Result<(), Error> { 26 | let config: GeneralPurposeConfig = GeneralPurposeConfig::default(); 27 | let transcoder: GeneralPurpose = GeneralPurpose::new(&STANDARD, config); 28 | 29 | let user_key = UserSecretKey::deserialize(&transcoder.decode(user_key).unwrap())?; 30 | 31 | let ciphertext = transcoder.decode(&self.ciphertext).unwrap(); 32 | let expected_plaintext = transcoder.decode(&self.plaintext).unwrap(); 33 | 34 | let header_metadata = if !self.header_metadata.is_empty() { 35 | Some(transcoder.decode(&self.header_metadata).unwrap()) 36 | } else { 37 | None 38 | }; 39 | 40 | let authentication_data = if !self.authentication_data.is_empty() { 41 | transcoder.decode(&self.authentication_data).unwrap() 42 | } else { 43 | vec![] 44 | }; 45 | let authentication_data = if authentication_data.is_empty() { 46 | None 47 | } else { 48 | Some(authentication_data.as_slice()) 49 | }; 50 | 51 | let mut de = Deserializer::new(ciphertext.as_slice()); 52 | let encrypted_header = EncryptedHeader::read(&mut de)?; 53 | let ciphertext = de.finalize(); 54 | let cover_crypt = Covercrypt::default(); 55 | 56 | let plaintext_header = encrypted_header 57 | .decrypt(&cover_crypt, &user_key, authentication_data)? 
58 |             .ok_or_else(|| {
59 |                 Error::OperationNotPermitted(
60 |                     "insufficient rights to open encapsulation".to_string(),
61 |                 )
62 |             })?;
63 |
64 |         assert_eq!(plaintext_header.metadata, header_metadata);
65 |         let plaintext = cover_crypt.dem_decrypt(
66 |             &plaintext_header.symmetric_key,
67 |             &ciphertext,
68 |             authentication_data,
69 |         )?;
70 |         assert_eq!(expected_plaintext, plaintext);
71 |
72 |         Ok(())
73 |     }
74 |
75 |     pub fn new(
76 |         mpk: &MasterPublicKey,
77 |         encryption_policy: &str,
78 |         plaintext: &str,
79 |         header_metadata: Option<&[u8]>,
80 |         authentication_data: Option<&[u8]>,
81 |     ) -> Result<Self, Error> {
82 |         let config: GeneralPurposeConfig = GeneralPurposeConfig::default();
83 |         let transcoder: GeneralPurpose = GeneralPurpose::new(&STANDARD, config);
84 |
85 |         let cover_crypt = Covercrypt::default();
86 |         let (symmetric_key, encrypted_header) = EncryptedHeader::generate(
87 |             &cover_crypt,
88 |             mpk,
89 |             &AccessPolicy::parse(encryption_policy)?,
90 |             header_metadata,
91 |             authentication_data,
92 |         )?;
93 |
94 |         let mut aes_ciphertext =
95 |             cover_crypt.dem_encrypt(&symmetric_key, plaintext.as_bytes(), authentication_data)?;
96 |         let mut encrypted_bytes = encrypted_header.serialize()?;
97 |         encrypted_bytes.append(&mut aes_ciphertext);
98 |         let header_metadata = match header_metadata {
99 |             Some(ad) => transcoder.encode(ad),
100 |             None => String::new(),
101 |         };
102 |         let authentication_data = match authentication_data {
103 |             Some(ad) => transcoder.encode(ad),
104 |             None => String::new(),
105 |         };
106 |         Ok(Self {
107 |             encryption_policy: encryption_policy.to_string(),
108 |             plaintext: transcoder.encode(plaintext),
109 |             ciphertext: transcoder.encode(encrypted_bytes),
110 |             header_metadata,
111 |             authentication_data,
112 |         })
113 |     }
114 | }
115 |
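Every binary field in the test vectors above (keys, ciphertexts, header metadata, authentication data) is stored as a base64 string, and an empty string means "no value", which is why `decrypt` maps empty fields back to `None`. For reference, a minimal sketch of the encode/decode round trip with the same `base64` engine API this module already imports; the `transcoder` helper name is ours, not part of the crate:

    use base64::{
        alphabet::STANDARD,
        engine::{GeneralPurpose, GeneralPurposeConfig},
        Engine,
    };

    fn transcoder() -> GeneralPurpose {
        // Same construction as in the functions above: standard alphabet, default config.
        GeneralPurpose::new(&STANDARD, GeneralPurposeConfig::default())
    }

    fn roundtrip(bytes: &[u8]) -> Vec<u8> {
        let engine = transcoder();
        let encoded = engine.encode(bytes); // e.g. [1, 2, 3] -> "AQID"
        engine.decode(encoded).expect("the string just produced is valid base64")
    }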
116 | #[derive(Debug, serde::Serialize, serde::Deserialize)]
117 | struct UserSecretKeyTestVector {
118 |     access_policy: String,
119 |     key: String,
120 | }
121 |
122 | impl UserSecretKeyTestVector {
123 |     pub fn new(msk: &MasterSecretKey, access_policy: &str) -> Result<Self, Error> {
124 |         let config: GeneralPurposeConfig = GeneralPurposeConfig::default();
125 |         let transcoder: GeneralPurpose = GeneralPurpose::new(&STANDARD, config);
126 |         Ok(Self {
127 |             key: transcoder.encode(
128 |                 Covercrypt::default()
129 |                     .generate_user_secret_key(msk, &AccessPolicy::parse(access_policy)?, policy)?
130 |                     .serialize()?,
131 |             ),
132 |             access_policy: access_policy.to_string(),
133 |         })
134 |     }
135 | }
136 |
137 | #[derive(Debug, serde::Serialize, serde::Deserialize)]
138 | pub struct NonRegressionTestVector {
139 |     public_key: String,
140 |     master_secret_key: String,
141 |     policy: String,
142 |     top_secret_mkg_fin_key: UserSecretKeyTestVector,
143 |     medium_secret_mkg_key: UserSecretKeyTestVector,
144 |     top_secret_fin_key: UserSecretKeyTestVector,
145 |     low_secret_mkg_test_vector: EncryptionTestVector,
146 |     top_secret_mkg_test_vector: EncryptionTestVector,
147 |     low_secret_fin_test_vector: EncryptionTestVector,
148 | }
149 |
150 | impl NonRegressionTestVector {
151 |     pub fn new() -> Result<Self, Error> {
152 |         let config: GeneralPurposeConfig = GeneralPurposeConfig::default();
153 |         let transcoder: GeneralPurpose = GeneralPurpose::new(&STANDARD, config);
154 |
155 |         //
156 |         // Policy settings
157 |         //
158 |         let policy = policy()?;
159 |
160 |         //
161 |         // Covercrypt setup
162 |         //
163 |         let cover_crypt = Covercrypt::default();
164 |         let (mut msk, _) = cover_crypt.setup()?;
165 |         let mpk = cover_crypt.update_master_keys(&policy, &mut msk)?;
166 |
167 |         //
168 |         // Encryption header metadata
169 |         let header_metadata = 1u32.to_be_bytes().to_vec();
170 |         let authentication_data = 2u32.to_be_bytes().to_vec();
171 |
172 |         let reg_vectors = Self {
173 |             public_key: transcoder.encode(mpk.serialize()?),
174 |             master_secret_key: transcoder.encode(msk.serialize()?),
175 |             //
176 |             // Create user decryption keys
177 |             top_secret_mkg_fin_key: UserSecretKeyTestVector::new(
178 |                 &mut msk,
179 |                 &policy,
180 |                 "(DPT::MKG || DPT:: FIN) && SEC::TOP",
181 |             )?,
182 |             medium_secret_mkg_key: UserSecretKeyTestVector::new(
183 |                 &mut msk,
184 |                 &policy,
185 |                 "SEC::Medium Secret && DPT::MKG",
186 |             )?,
187 |             top_secret_fin_key: UserSecretKeyTestVector::new(
188 |                 &mut msk,
189 |                 &policy,
190 |                 "SEC::TOP && DPT::FIN",
191 |             )?,
192 |             //
193 |             // Generate ciphertexts
194 |             top_secret_mkg_test_vector: EncryptionTestVector::new(
195 |                 &mpk,
196 |                 "DPT::MKG && SEC::TOP",
197 |                 "top_secret_mkg_plaintext",
198 |                 Some(&header_metadata),
199 |                 Some(&authentication_data),
200 |             )?,
201 |
202 |             low_secret_mkg_test_vector: EncryptionTestVector::new(
203 |                 &mpk,
204 |                 "DPT::MKG && SEC::LOW",
205 |                 "low_secret_mkg_plaintext",
206 |                 Some(&header_metadata),
207 |                 None,
208 |             )?,
209 |
210 |             low_secret_fin_test_vector: EncryptionTestVector::new(
211 |                 &mpk,
212 |                 "DPT::FIN && SEC::LOW",
213 |                 "low_secret_fin_plaintext",
214 |                 None,
215 |                 None,
216 |             )?,
217 |         };
218 |         Ok(reg_vectors)
219 |     }
220 |
221 |     pub fn verify(&self) {
222 |         // top_secret_fin_key
223 |         self.low_secret_fin_test_vector
224 |             .decrypt(&self.top_secret_fin_key.key)
225 |             .unwrap();
226 |         assert!(self
227 |             .low_secret_mkg_test_vector
228 |             .decrypt(&self.top_secret_fin_key.key)
229 |             .is_err());
230 |         assert!(self
231 |             .top_secret_mkg_test_vector
232 |             .decrypt(&self.top_secret_fin_key.key)
233 |             .is_err());
234 |
235 |         // top_secret_mkg_fin_key
236 |         self.low_secret_fin_test_vector
237 |             .decrypt(&self.top_secret_mkg_fin_key.key)
238 |             .unwrap();
239 |         self.low_secret_mkg_test_vector
240 |             .decrypt(&self.top_secret_mkg_fin_key.key)
241 |             .unwrap();
242 |         self.top_secret_mkg_test_vector
243 |             .decrypt(&self.top_secret_mkg_fin_key.key)
244 |             .unwrap();
245 |
246 |         assert!(self
247 |             .low_secret_fin_test_vector
248 |             .decrypt(&self.medium_secret_mkg_key.key)
249 |             .is_err());
250 |         self.low_secret_mkg_test_vector
251 |             .decrypt(&self.medium_secret_mkg_key.key)
252 |
.unwrap(); 253 | assert!(self 254 | .top_secret_mkg_test_vector 255 | .decrypt(&self.medium_secret_mkg_key.key) 256 | .is_err()); 257 | } 258 | } 259 | 260 | #[cfg(test)] 261 | mod tests { 262 | use super::*; 263 | 264 | #[test] 265 | fn test_generate_non_regression_vector() -> Result<(), Error> { 266 | let _reg_vector = NonRegressionTestVector::new()?; 267 | std::fs::write( 268 | "target/non_regression_vector.json", 269 | serde_json::to_string(&_reg_vector).unwrap(), 270 | ) 271 | .unwrap(); 272 | 273 | let reg_vector: NonRegressionTestVector = 274 | serde_json::from_str(include_str!("../../target/non_regression_vector.json")).unwrap(); 275 | reg_vector.verify(); 276 | 277 | Ok(()) 278 | } 279 | 280 | #[test] 281 | fn test_non_regression() { 282 | let reg_vector: NonRegressionTestVector = 283 | serde_json::from_str(include_str!("./tests_data/non_regression_vector.json")).unwrap(); 284 | reg_vector.verify(); 285 | } 286 | } 287 | -------------------------------------------------------------------------------- /src/test_utils/tests_data/legacy_policy.json: -------------------------------------------------------------------------------- 1 | {"last_attribute_value":7,"max_attribute_creations":100,"axes":{"Department":[["R&D","HR","MKG","FIN"],false],"Security Level":[["Protected","Confidential","Top Secret"],true]},"attributes":{"Department::MKG":[6],"Department::FIN":[7],"Department::HR":[5],"Security Level::Top Secret":[3],"Security Level::Protected":[1],"Security Level::Confidential":[2],"Department::R&D":[4]}} 2 | -------------------------------------------------------------------------------- /src/test_utils/tests_data/policy_v1.json: -------------------------------------------------------------------------------- 1 | {"version":"V1","last_attribute_value":9,"max_attribute_creations":100,"axes":{"Security Level":{"attribute_names":["Protected","Low Secret","Medium Secret","High Secret","Top Secret"],"is_hierarchical":true},"Department":{"attribute_names":["R&D","HR","MKG","FIN"],"is_hierarchical":false}},"attributes":{"Department::MKG":{"values":[8],"encryption_hint":"Classic"},"Department::FIN":{"values":[9],"encryption_hint":"Classic"},"Security Level::Protected":{"values":[1],"encryption_hint":"Classic"},"Department::HR":{"values":[7],"encryption_hint":"Classic"},"Security Level::High Secret":{"values":[4],"encryption_hint":"Classic"},"Department::R&D":{"values":[6],"encryption_hint":"Classic"},"Security Level::Low Secret":{"values":[2],"encryption_hint":"Classic"},"Security Level::Medium Secret":{"values":[3],"encryption_hint":"Classic"},"Security Level::Top Secret":{"values":[5],"encryption_hint":"Hybridized"}}} -------------------------------------------------------------------------------- /src/test_utils/tests_data/policy_v2.json: -------------------------------------------------------------------------------- 1 | {"version":"V2","last_attribute_value":9,"dimensions":{"Department":{"Unordered":{"FIN":{"id":9,"encryption_hint":"Classic","write_status":"EncryptDecrypt"},"R&D":{"id":6,"encryption_hint":"Classic","write_status":"EncryptDecrypt"},"HR":{"id":7,"encryption_hint":"Classic","write_status":"EncryptDecrypt"},"MKG":{"id":8,"encryption_hint":"Classic","write_status":"EncryptDecrypt"}}},"Security Level":{"Ordered":{"Protected":{"id":1,"encryption_hint":"Classic","write_status":"EncryptDecrypt"},"Low Secret":{"id":2,"encryption_hint":"Classic","write_status":"EncryptDecrypt"},"Medium Secret":{"id":3,"encryption_hint":"Classic","write_status":"EncryptDecrypt"},"High 
Secret":{"id":4,"encryption_hint":"Classic","write_status":"EncryptDecrypt"},"Top Secret":{"id":5,"encryption_hint":"Hybridized","write_status":"EncryptDecrypt"}}}}} -------------------------------------------------------------------------------- /src/traits.rs: -------------------------------------------------------------------------------- 1 | use cosmian_crypto_core::{reexport::rand_core::CryptoRngCore, Secret, SymmetricKey}; 2 | use std::ops::Add; 3 | use std::ops::AddAssign; 4 | use std::ops::Div; 5 | use std::ops::Mul; 6 | use std::ops::MulAssign; 7 | use std::ops::Sub; 8 | use std::ops::SubAssign; 9 | use zeroize::Zeroizing; 10 | 11 | use crate::AccessPolicy; 12 | 13 | pub trait KemAc { 14 | type EncapsulationKey; 15 | type DecapsulationKey; 16 | type Encapsulation; 17 | type Error: std::error::Error; 18 | 19 | /// Generates a new encapsulation for the given access policy. 20 | /// 21 | /// # Error 22 | /// 23 | /// Returns an error if the access policy is not valid. 24 | fn encaps( 25 | &self, 26 | ek: &Self::EncapsulationKey, 27 | ap: &AccessPolicy, 28 | ) -> Result<(Secret, Self::Encapsulation), Self::Error>; 29 | 30 | /// Attempts opening the given encapsulation with the given key. Returns the encapsulated 31 | /// secret upon success or `None` if this key was not authorized to open this encapsulation. 32 | fn decaps( 33 | &self, 34 | dk: &Self::DecapsulationKey, 35 | enc: &Self::Encapsulation, 36 | ) -> Result>, Self::Error>; 37 | } 38 | 39 | pub trait AE { 40 | type Error: std::error::Error; 41 | 42 | /// Encrypts the given plaintext using the given key. 43 | fn encrypt( 44 | rng: &mut impl CryptoRngCore, 45 | key: &SymmetricKey, 46 | ptx: &[u8], 47 | ) -> Result, Self::Error>; 48 | 49 | /// Decrypts the given ciphertext using the given key. 50 | /// 51 | /// # Error 52 | /// 53 | /// Returns an error if the integrity of the ciphertext could not be verified. 54 | fn decrypt( 55 | key: &SymmetricKey, 56 | ctx: &[u8], 57 | ) -> Result>, Self::Error>; 58 | } 59 | 60 | pub trait PkeAc> { 61 | type EncryptionKey; 62 | type DecryptionKey; 63 | type Ciphertext; 64 | type Error: std::error::Error; 65 | 66 | /// Encrypts the given plaintext under the given access policy. 67 | /// 68 | /// # Error 69 | /// 70 | /// Returns an error if the access policy is not valid. 71 | fn encrypt( 72 | &self, 73 | ek: &Self::EncryptionKey, 74 | ap: &AccessPolicy, 75 | ptx: &[u8], 76 | ) -> Result; 77 | 78 | /// Attempts decrypting the given ciphertext with the given key. Returns the 79 | /// plaintext upon success, or `None` if this key was not authorized to 80 | /// decrypt this ciphertext. 81 | fn decrypt( 82 | &self, 83 | dk: &Self::DecryptionKey, 84 | ctx: &Self::Ciphertext, 85 | ) -> Result>>, Self::Error>; 86 | } 87 | 88 | pub trait Kem { 89 | type EncapsulationKey; 90 | type DecapsulationKey; 91 | type SessionKey; 92 | type Encapsulation; 93 | type Error: std::error::Error; 94 | 95 | /// Generates a new random keypair. 96 | fn keygen( 97 | rng: &mut impl CryptoRngCore, 98 | ) -> Result<(Self::DecapsulationKey, Self::EncapsulationKey), Self::Error>; 99 | 100 | /// Generates an encapsulation of a random session key, and returns both the 101 | /// key and its encapsulation. 102 | fn enc( 103 | ek: &Self::EncapsulationKey, 104 | rng: &mut impl CryptoRngCore, 105 | ) -> Result<(Self::SessionKey, Self::Encapsulation), Self::Error>; 106 | 107 | /// Attempts opening the given encapsulation. Upon failure to decapsulate, 108 | /// returns a random session key. 
39 | pub trait AE<const KEY_LENGTH: usize> {
40 |     type Error: std::error::Error;
41 |
42 |     /// Encrypts the given plaintext using the given key.
43 |     fn encrypt(
44 |         rng: &mut impl CryptoRngCore,
45 |         key: &SymmetricKey<KEY_LENGTH>,
46 |         ptx: &[u8],
47 |     ) -> Result<Vec<u8>, Self::Error>;
48 |
49 |     /// Decrypts the given ciphertext using the given key.
50 |     ///
51 |     /// # Error
52 |     ///
53 |     /// Returns an error if the integrity of the ciphertext could not be verified.
54 |     fn decrypt(
55 |         key: &SymmetricKey<KEY_LENGTH>,
56 |         ctx: &[u8],
57 |     ) -> Result<Zeroizing<Vec<u8>>, Self::Error>;
58 | }
59 |
60 | pub trait PkeAc<const KEY_LENGTH: usize, E: AE<KEY_LENGTH>> {
61 |     type EncryptionKey;
62 |     type DecryptionKey;
63 |     type Ciphertext;
64 |     type Error: std::error::Error;
65 |
66 |     /// Encrypts the given plaintext under the given access policy.
67 |     ///
68 |     /// # Error
69 |     ///
70 |     /// Returns an error if the access policy is not valid.
71 |     fn encrypt(
72 |         &self,
73 |         ek: &Self::EncryptionKey,
74 |         ap: &AccessPolicy,
75 |         ptx: &[u8],
76 |     ) -> Result<Self::Ciphertext, Self::Error>;
77 |
78 |     /// Attempts decrypting the given ciphertext with the given key. Returns the
79 |     /// plaintext upon success, or `None` if this key was not authorized to
80 |     /// decrypt this ciphertext.
81 |     fn decrypt(
82 |         &self,
83 |         dk: &Self::DecryptionKey,
84 |         ctx: &Self::Ciphertext,
85 |     ) -> Result<Option<Zeroizing<Vec<u8>>>, Self::Error>;
86 | }
87 |
88 | pub trait Kem {
89 |     type EncapsulationKey;
90 |     type DecapsulationKey;
91 |     type SessionKey;
92 |     type Encapsulation;
93 |     type Error: std::error::Error;
94 |
95 |     /// Generates a new random keypair.
96 |     fn keygen(
97 |         rng: &mut impl CryptoRngCore,
98 |     ) -> Result<(Self::DecapsulationKey, Self::EncapsulationKey), Self::Error>;
99 |
100 |     /// Generates an encapsulation of a random session key, and returns both the
101 |     /// key and its encapsulation.
102 |     fn enc(
103 |         ek: &Self::EncapsulationKey,
104 |         rng: &mut impl CryptoRngCore,
105 |     ) -> Result<(Self::SessionKey, Self::Encapsulation), Self::Error>;
106 |
107 |     /// Attempts opening the given encapsulation. Upon failure to decapsulate,
108 |     /// returns a random session key.
109 |     fn dec(
110 |         dk: &Self::DecapsulationKey,
111 |         enc: &Self::Encapsulation,
112 |     ) -> Result<Self::SessionKey, Self::Error>;
113 | }
114 |
115 | pub trait Nike {
116 |     type SecretKey: Sampling;
117 |     type PublicKey: for<'a> From<&'a Self::SecretKey>;
118 |     type SessionKey;
119 |     type Error: std::error::Error;
120 |
121 |     /// Generates a new random keypair.
122 |     fn keygen(
123 |         rng: &mut impl CryptoRngCore,
124 |     ) -> Result<(Self::SecretKey, Self::PublicKey), Self::Error>;
125 |
126 |     /// Generates the session key associated to the given keypair.
127 |     fn session_key(
128 |         sk: &Self::SecretKey,
129 |         pk: &Self::PublicKey,
130 |     ) -> Result<Self::SessionKey, Self::Error>;
131 | }
132 |
133 | pub trait Sampling {
134 |     fn random(rng: &mut impl CryptoRngCore) -> Self;
135 |     fn hash(seed: &[u8]) -> Self;
136 | }
137 |
138 | pub trait Zero {
139 |     fn zero() -> Self;
140 |     fn is_zero(&self) -> bool;
141 | }
142 |
143 | pub trait One {
144 |     fn one() -> Self;
145 |     fn is_one(&self) -> bool;
146 | }
147 |
148 | pub trait Group:
149 |     Sized
150 |     + Zero
151 |     + Add<Output = Self>
152 |     + AddAssign
153 |     + Sub<Output = Self>
154 |     + SubAssign
155 |     + for<'a> Add<&'a Self, Output = Self>
156 |     + for<'a> Sub<&'a Self, Output = Self>
157 | where
158 |     for<'a, 'b> &'a Self: Add<&'b Self, Output = Self>,
159 |     for<'a, 'b> &'a Self: Sub<&'b Self, Output = Self>,
160 | {
161 | }
162 |
163 | pub trait Ring:
164 |     Group
165 |     + Zero
166 |     + Mul<Output = Self>
167 |     + MulAssign
168 |     + Div<Output = Result<Self, Self::DivError>>
169 |     + for<'a> Mul<&'a Self, Output = Self>
170 |     + for<'a> Div<&'a Self, Output = Result<Self, Self::DivError>>
171 | where
172 |     for<'a, 'b> &'a Self: Add<&'b Self, Output = Self>,
173 |     for<'a, 'b> &'a Self: Sub<&'b Self, Output = Self>,
174 |     for<'a, 'b> &'a Self: Mul<&'b Self, Output = Self>,
175 |     for<'a, 'b> &'a Self: Div<&'b Self, Output = Result<Self, Self::DivError>>,
176 | {
177 |     type DivError;
178 | }
179 |
180 | pub trait KeyHomomorphicNike: Nike
181 | where
182 |     Self::PublicKey: Group,
183 |     Self::SecretKey: Ring,
184 |     Self::PublicKey: Mul<Self::SecretKey, Output = Self::PublicKey>,
185 |     for<'a> Self::PublicKey: Mul<&'a Self::SecretKey, Output = Self::PublicKey>,
186 |     for<'a, 'b> &'a Self::PublicKey: Add<&'b Self::PublicKey, Output = Self::PublicKey>,
187 |     for<'a, 'b> &'a Self::PublicKey: Sub<&'b Self::PublicKey, Output = Self::PublicKey>,
188 |     for<'a, 'b> &'a Self::SecretKey: Add<&'b Self::SecretKey, Output = Self::SecretKey>,
189 |     for<'a, 'b> &'a Self::SecretKey: Sub<&'b Self::SecretKey, Output = Self::SecretKey>,
190 |     for<'a, 'b> &'a Self::SecretKey: Mul<&'b Self::SecretKey, Output = Self::SecretKey>,
191 |     for<'a, 'b> &'a Self::SecretKey: Div<
192 |         &'b Self::SecretKey,
193 |         Output = Result<Self::SecretKey, <Self::SecretKey as Ring>::DivError>,
194 |     >,
195 | {
196 | }
197 |
--------------------------------------------------------------------------------
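To make the algebraic contracts above concrete, here is a small illustrative implementation of `Zero`, `One` and `Sampling` for a toy scalar type. It is not part of the crate: the real arithmetic lives in the NIKE backends (`src/core/nike/r25519.rs`, `src/core/nike/p256.rs`), and the modulus, the biased sampling and the ad-hoc `hash` below are simplifications chosen for brevity (neither uniform nor constant-time).

    use cosmian_crypto_core::reexport::rand_core::CryptoRngCore;

    use crate::traits::{One, Sampling, Zero}; // path assumed; adjust to wherever this sketch lives

    /// A toy scalar modulo a small Mersenne prime, for illustration only.
    const Q: u64 = 2_305_843_009_213_693_951; // 2^61 - 1

    #[derive(Clone, Copy, Debug, PartialEq, Eq)]
    struct ToyScalar(u64);

    impl Zero for ToyScalar {
        fn zero() -> Self {
            Self(0)
        }
        fn is_zero(&self) -> bool {
            self.0 == 0
        }
    }

    impl One for ToyScalar {
        fn one() -> Self {
            Self(1)
        }
        fn is_one(&self) -> bool {
            self.0 == 1
        }
    }

    impl Sampling for ToyScalar {
        fn random(rng: &mut impl CryptoRngCore) -> Self {
            // Biased reduction; a real backend samples uniformly in [0, Q).
            Self(rng.next_u64() % Q)
        }

        fn hash(seed: &[u8]) -> Self {
            // Naive byte folding; a real backend uses a proper hash-to-field construction.
            Self(seed.iter().fold(0u64, |acc, b| {
                ((u128::from(acc) * 257 + u128::from(*b)) % u128::from(Q)) as u64
            }))
        }
    }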