├── .editorconfig ├── .github └── workflows │ ├── archive.yml │ ├── ghpages.yml │ ├── lint-python.yml │ ├── lint.yml │ ├── make-with-lints.sh │ ├── publish.yml │ ├── test.yml │ └── update.yml ├── .gitignore ├── .note.xml ├── CODEOWNERS ├── CONTRIBUTING.md ├── LICENSE.md ├── Makefile ├── README.md ├── draft-irtf-cfrg-vdaf.md ├── misc ├── prime-hunt.md ├── snippets.md └── special-syntax.md ├── package.json ├── poc ├── .mypy.ini ├── README.md ├── example_flp_shamir.py ├── gen_test_vec.py ├── plot_prio3_multiproof_robustness.py ├── pyproject.toml ├── tests │ ├── __init__.py │ ├── test_daf.py │ ├── test_field.py │ ├── test_flp.py │ ├── test_flp_bbcggi19.py │ ├── test_idpf_bbcggi21.py │ ├── test_vdaf_ping_pong.py │ ├── test_vdaf_poplar1.py │ ├── test_vdaf_prio3.py │ └── test_xof.py └── vdaf_poc │ ├── __init__.py │ ├── common.py │ ├── daf.py │ ├── field.py │ ├── flp.py │ ├── flp_bbcggi19.py │ ├── idpf.py │ ├── idpf_bbcggi21.py │ ├── py.typed │ ├── test_utils.py │ ├── vdaf.py │ ├── vdaf_ping_pong.py │ ├── vdaf_poplar1.py │ ├── vdaf_prio3.py │ └── xof.py ├── rejected_dictionary └── test_vec ├── IdpfBBCGGI21_0.json ├── XofFixedKeyAes128.json ├── XofTurboShake128.json └── vdaf ├── Poplar1_0.json ├── Poplar1_1.json ├── Poplar1_2.json ├── Poplar1_3.json ├── Poplar1_4.json ├── Poplar1_5.json ├── Poplar1_bad_corr_inner.json ├── Prio3Count_0.json ├── Prio3Count_1.json ├── Prio3Count_2.json ├── Prio3Count_bad_gadget_poly.json ├── Prio3Count_bad_helper_seed.json ├── Prio3Count_bad_meas_share.json ├── Prio3Count_bad_wire_seed.json ├── Prio3Histogram_0.json ├── Prio3Histogram_1.json ├── Prio3Histogram_2.json ├── Prio3Histogram_bad_helper_jr_blind.json ├── Prio3Histogram_bad_leader_jr_blind.json ├── Prio3Histogram_bad_prep_msg.json ├── Prio3Histogram_bad_public_share.json ├── Prio3MultihotCountVec_0.json ├── Prio3MultihotCountVec_1.json ├── Prio3MultihotCountVec_2.json ├── Prio3SumVecWithMultiproof_0.json ├── Prio3SumVecWithMultiproof_1.json ├── Prio3SumVec_0.json ├── 
Prio3SumVec_1.json ├── Prio3Sum_0.json ├── Prio3Sum_1.json └── Prio3Sum_2.json /.editorconfig: -------------------------------------------------------------------------------- 1 | # See http://editorconfig.org 2 | 3 | root = true 4 | 5 | [*.{md,xml,org}] 6 | charset = utf-8 7 | insert_final_newline = true 8 | trim_trailing_whitespace = true 9 | -------------------------------------------------------------------------------- /.github/workflows/archive.yml: -------------------------------------------------------------------------------- 1 | name: "Archive Issues and Pull Requests" 2 | 3 | on: 4 | schedule: 5 | - cron: '0 0 * * 0,2,4' 6 | repository_dispatch: 7 | types: [archive] 8 | workflow_dispatch: 9 | inputs: 10 | archive_full: 11 | description: 'Recreate the archive from scratch' 12 | default: false 13 | type: boolean 14 | 15 | jobs: 16 | build: 17 | name: "Archive Issues and Pull Requests" 18 | runs-on: ubuntu-latest 19 | steps: 20 | - name: "Checkout" 21 | uses: actions/checkout@v4 22 | 23 | # Note: No caching for this build! 
24 | 25 | - name: "Update Archive" 26 | uses: martinthomson/i-d-template@v1 27 | env: 28 | ARCHIVE_FULL: ${{ inputs.archive_full }} 29 | with: 30 | make: archive 31 | token: ${{ github.token }} 32 | 33 | - name: "Update GitHub Pages" 34 | uses: martinthomson/i-d-template@v1 35 | with: 36 | make: gh-archive 37 | token: ${{ github.token }} 38 | 39 | - name: "Save Archive" 40 | uses: actions/upload-artifact@v4 41 | with: 42 | path: archive.json 43 | -------------------------------------------------------------------------------- /.github/workflows/ghpages.yml: -------------------------------------------------------------------------------- 1 | name: "Update Editor's Copy" 2 | 3 | on: 4 | push: 5 | paths-ignore: 6 | - README.md 7 | - CONTRIBUTING.md 8 | - LICENSE.md 9 | - .gitignore 10 | pull_request: 11 | paths-ignore: 12 | - README.md 13 | - CONTRIBUTING.md 14 | - LICENSE.md 15 | - .gitignore 16 | 17 | jobs: 18 | build: 19 | name: "Update Editor's Copy" 20 | runs-on: ubuntu-latest 21 | steps: 22 | - name: "Checkout" 23 | uses: actions/checkout@v4 24 | 25 | - name: "Setup" 26 | id: setup 27 | run: date -u "+date=%FT%T" >>"$GITHUB_OUTPUT" 28 | 29 | - name: "Caching" 30 | uses: actions/cache@v4 31 | with: 32 | path: | 33 | .refcache 34 | .venv 35 | .gems 36 | node_modules 37 | .targets.mk 38 | key: i-d-${{ steps.setup.outputs.date }} 39 | restore-keys: i-d- 40 | 41 | - name: "Build Drafts" 42 | uses: martinthomson/i-d-template@v1 43 | with: 44 | token: ${{ github.token }} 45 | 46 | - name: "Update GitHub Pages" 47 | uses: martinthomson/i-d-template@v1 48 | if: ${{ github.event_name == 'push' }} 49 | with: 50 | make: gh-pages 51 | token: ${{ github.token }} 52 | 53 | - name: "Archive Built Drafts" 54 | uses: actions/upload-artifact@v4 55 | with: 56 | path: | 57 | draft-*.html 58 | draft-*.txt 59 | -------------------------------------------------------------------------------- /.github/workflows/lint-python.yml: 
-------------------------------------------------------------------------------- 1 | name: Lint Python 2 | 3 | on: 4 | push: 5 | branches: 6 | - main 7 | paths: 8 | - '.github/workflows/lint-python.yml' 9 | - 'poc/**' 10 | pull_request: 11 | paths: 12 | - '.github/workflows/lint-python.yml' 13 | - 'poc/**' 14 | 15 | jobs: 16 | test: 17 | name: "Check formatting" 18 | runs-on: ubuntu-latest 19 | steps: 20 | - name: Checkout 21 | uses: actions/checkout@v4 22 | 23 | - name: Setup Python 24 | uses: actions/setup-python@v5 25 | with: 26 | python-version: '3.12' 27 | 28 | - name: Install tools 29 | run: pip install pyflakes autopep8 isort pylint 30 | 31 | - name: Run pyflakes 32 | working-directory: poc 33 | run: pyflakes *.py vdaf_poc/*.py tests/*.py 34 | 35 | - name: Run autopep8 36 | working-directory: poc 37 | run: autopep8 --diff --exit-code *.py vdaf_poc/*.py tests/*.py 38 | 39 | - name: Run isort 40 | working-directory: poc 41 | run: isort --check . 42 | 43 | - name: Run pylint 44 | working-directory: poc 45 | run: pylint --disable=all --enable=redefined-outer-name *.py vdaf_poc/*.py tests/*.py 46 | -------------------------------------------------------------------------------- /.github/workflows/lint.yml: -------------------------------------------------------------------------------- 1 | name: "Lint Document" 2 | 3 | on: 4 | push: 5 | pull_request: 6 | 7 | jobs: 8 | build: 9 | name: "Lint document" 10 | runs-on: ubuntu-latest 11 | container: 12 | image: docker://ghcr.io/martinthomson/i-d-template-action:latest 13 | steps: 14 | - name: "Checkout" 15 | uses: actions/checkout@v4 16 | 17 | - name: "Check for typos" 18 | run: "bash -c '! 
grep -r --file=rejected_dictionary draft-irtf-cfrg-vdaf.md poc'" 19 | 20 | - name: "Check for warnings emitted by xml2rfc" 21 | run: .github/workflows/make-with-lints.sh 22 | -------------------------------------------------------------------------------- /.github/workflows/make-with-lints.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -e 4 | 5 | make |& (! grep -E "Warning|Error") 6 | -------------------------------------------------------------------------------- /.github/workflows/publish.yml: -------------------------------------------------------------------------------- 1 | name: "Publish New Draft Version" 2 | 3 | on: 4 | push: 5 | tags: 6 | - "draft-*" 7 | workflow_dispatch: 8 | inputs: 9 | email: 10 | description: "Submitter email" 11 | default: "" 12 | type: string 13 | 14 | jobs: 15 | build: 16 | name: "Publish New Draft Version" 17 | runs-on: ubuntu-latest 18 | steps: 19 | - name: "Checkout" 20 | uses: actions/checkout@v4 21 | 22 | # See https://github.com/actions/checkout/issues/290 23 | - name: "Get Tag Annotations" 24 | run: git fetch -f origin ${{ github.ref }}:${{ github.ref }} 25 | 26 | - name: "Setup" 27 | id: setup 28 | run: date -u "+date=%FT%T" >>"$GITHUB_OUTPUT" 29 | 30 | - name: "Caching" 31 | uses: actions/cache@v4 32 | with: 33 | path: | 34 | .refcache 35 | .venv 36 | .gems 37 | node_modules 38 | .targets.mk 39 | key: i-d-${{ steps.setup.outputs.date }} 40 | restore-keys: i-d- 41 | 42 | - name: "Build Drafts" 43 | uses: martinthomson/i-d-template@v1 44 | with: 45 | token: ${{ github.token }} 46 | 47 | - name: "Upload to Datatracker" 48 | uses: martinthomson/i-d-template@v1 49 | with: 50 | make: upload 51 | env: 52 | UPLOAD_EMAIL: ${{ inputs.email }} 53 | 54 | - name: "Archive Submitted Drafts" 55 | uses: actions/upload-artifact@v4 56 | with: 57 | path: "versioned/draft-*-[0-9][0-9].*" 58 | -------------------------------------------------------------------------------- 
/.github/workflows/test.yml: -------------------------------------------------------------------------------- 1 | name: Test 2 | 3 | on: 4 | push: 5 | branches: 6 | - main 7 | paths: 8 | - '.github/workflows/test.yml' 9 | - 'poc/**' 10 | pull_request: 11 | paths: 12 | - '.github/workflows/test.yml' 13 | - 'poc/**' 14 | 15 | jobs: 16 | test: 17 | name: "Run unit tests for reference code" 18 | runs-on: ubuntu-latest 19 | steps: 20 | - name: Checkout 21 | uses: actions/checkout@v4 22 | 23 | - name: Set up Python 24 | uses: actions/setup-python@v5 25 | with: 26 | python-version: '3.12' 27 | cache: pip 28 | 29 | - name: Install dependencies 30 | run: python -m pip install pycryptodomex mypy 31 | 32 | - name: Run tests 33 | working-directory: poc 34 | run: python -m unittest 35 | 36 | - name: Regenerate test vectors 37 | working-directory: poc 38 | run: python gen_test_vec.py 39 | 40 | - name: Enforce type hints 41 | working-directory: poc 42 | run: python -m mypy *.py vdaf_poc/*.py tests/*.py 43 | -------------------------------------------------------------------------------- /.github/workflows/update.yml: -------------------------------------------------------------------------------- 1 | name: "Update Generated Files" 2 | # This rule is not run automatically. 3 | # It can be run manually to update all of the files that are part 4 | # of the template, specifically: 5 | # - README.md 6 | # - CONTRIBUTING.md 7 | # - .note.xml 8 | # - .github/CODEOWNERS 9 | # - Makefile 10 | # 11 | # 12 | # This might be useful if you have: 13 | # - added, removed, or renamed drafts (including after adoption) 14 | # - added, removed, or changed draft editors 15 | # - changed the title of drafts 16 | # 17 | # Note that this removes any customizations you have made to 18 | # the affected files. 
19 | on: workflow_dispatch 20 | 21 | jobs: 22 | build: 23 | name: "Update Files" 24 | runs-on: ubuntu-latest 25 | steps: 26 | - name: "Checkout" 27 | uses: actions/checkout@v4 28 | 29 | - name: "Update Generated Files" 30 | uses: martinthomson/i-d-template@v1 31 | with: 32 | make: update-files 33 | token: ${{ github.token }} 34 | 35 | - name: "Push Update" 36 | run: git push 37 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | *~ 2 | /*-[0-9][0-9].xml 3 | archive.json 4 | *.bak 5 | draft-irtf-cfrg-vdaf.xml 6 | *.egg-info 7 | Gemfile.lock 8 | /.gems/ 9 | *.html 10 | /lib 11 | lib 12 | /node_modules/ 13 | package-lock.json 14 | *.pdf 15 | *.pyc 16 | __pycache__ 17 | *.pyo 18 | *.redxml 19 | .refcache 20 | /.refcache 21 | report.xml 22 | *.swp 23 | .tags 24 | .targets.mk 25 | /.targets.mk 26 | *.txt 27 | *.upload 28 | /.venv/ 29 | venv/ 30 | /versioned/ 31 | /.vscode/ 32 | !requirements.txt 33 | -------------------------------------------------------------------------------- /.note.xml: -------------------------------------------------------------------------------- 1 | 2 | Discussion of this document takes place on the 3 | Crypto Forum Research Group mailing list (cfrg@ietf.org), 4 | which is archived at . 5 | Source for this draft and an issue tracker can be found at 6 | . 7 | 8 | -------------------------------------------------------------------------------- /CODEOWNERS: -------------------------------------------------------------------------------- 1 | * @bifurcation @cjpatton @divergentdave @schoppmp 2 | -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # Contributing 2 | 3 | This repository relates to activities in the Internet Engineering Task Force 4 | ([IETF](https://www.ietf.org/)). 
All material in this repository is considered 5 | Contributions to the IETF Standards Process, as defined in the intellectual 6 | property policies of IETF currently designated as 7 | [BCP 78](https://www.rfc-editor.org/info/bcp78), 8 | [BCP 79](https://www.rfc-editor.org/info/bcp79) and the 9 | [IETF Trust Legal Provisions (TLP) Relating to IETF Documents](http://trustee.ietf.org/trust-legal-provisions.html). 10 | 11 | Any edit, commit, pull request, issue, comment or other change made to this 12 | repository constitutes Contributions to the IETF Standards Process 13 | (https://www.ietf.org/). 14 | 15 | You agree to comply with all applicable IETF policies and procedures, including, 16 | BCP 78, 79, the TLP, and the TLP rules regarding code components (e.g. being 17 | subject to a Simplified BSD License) in Contributions. 18 | 19 | ## Working Group Information 20 | 21 | Discussion of this work occurs on the [Crypto Forum 22 | Research Group mailing list](mailto:cfrg@ietf.org) 23 | ([archive](https://mailarchive.ietf.org/arch/browse/cfrg), 24 | [subscribe](https://mailman.irtf.org/mailman/listinfo/cfrg)). 25 | In addition to contributions in GitHub, you are encouraged to participate in 26 | discussions there. 27 | 28 | **Note**: Some working groups adopt a policy whereby substantive discussion of 29 | technical issues needs to occur on the mailing list. 30 | 31 | You might also like to familiarize yourself with other 32 | [Research Group documents](https://datatracker.ietf.org/rg/cfrg/documents/). 33 | -------------------------------------------------------------------------------- /LICENSE.md: -------------------------------------------------------------------------------- 1 | # License 2 | 3 | See the 4 | [guidelines for contributions](https://github.com/cfrg/draft-irtf-cfrg-vdaf/blob/main/CONTRIBUTING.md). 
5 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | LIBDIR := lib 2 | include $(LIBDIR)/main.mk 3 | 4 | $(LIBDIR)/main.mk: 5 | ifneq (,$(shell grep "path *= *$(LIBDIR)" .gitmodules 2>/dev/null)) 6 | git submodule sync 7 | git submodule update $(CLONE_ARGS) --init 8 | else 9 | git clone -q --depth 10 $(CLONE_ARGS) \ 10 | -b main https://github.com/martinthomson/i-d-template $(LIBDIR) 11 | endif 12 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Verifiable Distributed Aggregation Functions 2 | 3 | This is the working area for the individual Internet-Draft, "Verifiable Distributed Aggregation Functions". 4 | 5 | * [Editor's Copy](https://cfrg.github.io/draft-irtf-cfrg-vdaf/#go.draft-irtf-cfrg-vdaf.html) 6 | * [Datatracker Page](https://datatracker.ietf.org/doc/draft-irtf-cfrg-vdaf) 7 | * [Individual Draft](https://datatracker.ietf.org/doc/html/draft-irtf-cfrg-vdaf) 8 | * [Compare Editor's Copy to Individual Draft](https://cfrg.github.io/draft-irtf-cfrg-vdaf/#go.draft-irtf-cfrg-vdaf.diff) 9 | 10 | 11 | ## Contributing 12 | 13 | See the 14 | [guidelines for contributions](https://github.com/cfrg/draft-irtf-cfrg-vdaf/blob/main/CONTRIBUTING.md). 15 | 16 | Contributions can be made by creating pull requests. 17 | The GitHub interface supports creating pull requests using the Edit (✏) button. 18 | 19 | 20 | ## Command Line Usage 21 | 22 | Formatted text and HTML versions of the draft can be built using `make`. 23 | 24 | ```sh 25 | $ make 26 | ``` 27 | 28 | Command line usage requires that you have the necessary software installed. See 29 | [the instructions](https://github.com/martinthomson/i-d-template/blob/main/doc/SETUP.md). 
30 | 31 | -------------------------------------------------------------------------------- /misc/prime-hunt.md: -------------------------------------------------------------------------------- 1 | We're looking for primes `p` for which `(p-1)` has a factor of `2^k` for `k` of 2 | at least `20`. 3 | 4 | Here's a sage script for finding them: 5 | 6 | ``` 7 | def search(k, n, probes): 8 | q_max = 2^(n-k) 9 | for i in range(probes): 10 | q = q_max - i 11 | p = 2^k * q + 1 12 | if is_prime(p): 13 | score = float(2^n - p) / 2^n 14 | print(round(score, 6), p, factor(p-1)) 15 | ``` 16 | 17 | Here are some reasonable choices: 18 | 19 | ``` 20 | sage: search(64, 128, 100) 21 | 0.0 340282366920938462946865773367900766209 2^66 * 3 * 3491 * 440340496364689 22 | 0.0 340282366920938462024528569682423185409 2^65 * 31 * 10037 * 29643133428427 23 | sage: search(64, 126, 200) 24 | 0.0 85070591730234613320192969686023929857 2^65 * 138143 * 16691710830181 25 | 0.0 85070591730234613043491808580380655617 2^64 * 4611686018427387751 26 | 0.0 85070591730234612969704832285542449153 2^64 * 3^2 * 239 * 36353 * 58976509349 27 | sage: search(32, 80, 200) 28 | 0.0 1208925819614152433336321 2^32 * 5 * 56294995342109 29 | 0.0 1208925819614122368565249 2^33 * 3^2 * 13 * 2411 * 2591 * 192557 30 | 0.0 1208925819614032174252033 2^32 * 3 * 29 * 787 * 4110984193 31 | 0.0 1208925819613993519546369 2^34 * 3 * 7 * 13697 * 244644271 32 | 0.0 1208925819613903325233153 2^32 * 3 * 7 * 53 * 71 * 33757 * 105517 33 | 0.0 1208925819613791656083457 2^32 * 97 * 491 * 5909987543 34 | sage: search(32, 64, 20) 35 | 0.0 18446744069414584321 2^32 * 3 * 5 * 17 * 257 * 65537 36 | 0.0 18446744056529682433 2^34 * 3^2 * 7 * 11 * 31 * 151 * 331 37 | ``` 38 | -------------------------------------------------------------------------------- /misc/snippets.md: -------------------------------------------------------------------------------- 1 | 2 | # Distributed Aggregation Functions {#daf} 3 | 4 | ~~~~ 5 | client 6 | | input 7 | v 
8 | +-----------------------------------------------------------+ 9 | | daf_input() | 10 | +-----------------------------------------------------------+ 11 | | input_shares[1] | input_shares[2] ... | input_shares[SHARES] 12 | v v v 13 | +---------------+ +---------------+ +---------------+ 14 | | daf_output() | | daf_output() | | daf_output() | 15 | +---------------+ +---------------+ +---------------+ 16 | | output_shares[1] | output_shares[2] ... | output_shares[SHARES] 17 | v v v 18 | aggregator 1 aggregator 2 aggregator SHARES 19 | ~~~~ 20 | {: #daf-flow title="Execution of a DAF."} 21 | 22 | A DAF is a multi-party protocol for executing an aggregation function over a set 23 | of user inputs. By distributing the input across multiple aggregators, the 24 | protocol ensures that individual inputs are never seen in the clear. 25 | Syntactically, a DAF is made up of two algorithms: 26 | 27 | * `daf_input(input) -> input_shares: Vec[bytes]` is the randomized 28 | input-distribution algorithm. It is run by the client in order to split its 29 | input into `SHARES` input shares (i.e., `len(input_shares) == SHARES`). Each 30 | input share is sent to one of the aggregators. 31 | 32 | * `daf_output(param, input_share) -> output_share` is the deterministic 33 | output-recovery algorithm. It is run be each aggregator in order to map an 34 | input share to an output share. This mapping has a parameter `param`, which 35 | can be used to "query" the input share multiple times with multiple 36 | parameters, getting a different output share each time. `param` is called the 37 | aggregation parameter. 38 | 39 | Execution of a DAF is illustrated in {{daf-flow}}. The client runs the 40 | input-distribution algorithm and sends an input share to each one of the 41 | aggregators. Next, the aggregators select an aggregation parameter for querying 42 | the input shares, and each runs the output-recover algorithm to obtain their 43 | share of the output. 
DAF schemes are designed to ensure that no proper subset of 44 | the aggregators can discern any information about the input or output given 45 | their view of the protocol. (See {{security-considerations}}.) 46 | 47 | Associated constants: 48 | 49 | * `SHARES: Unsigned` is the number of aggregators for which the DAF is defined. 50 | 51 | ## Aggregability 52 | 53 | 66 | 67 | Let `G(agg_param)` denote the support of the output-recovery algorithm for a 68 | given aggregation parameter `agg_param`. That is, set `G(agg_param)` contains 69 | the set of all possible outputs of the output-recovery algorithm when the first 70 | input is `agg_param` and the second is any input share. 71 | 72 | Correctness requires that, for every `agg_param`, the set `G(agg_param)` forms 73 | an additive group. This allows the aggregation function to be computed by having 74 | each aggregator sum up its output shares locally, then collectively computing 75 | the output by summing up their aggregated output shares. In particular, the 76 | aggregation function is computed by the following algorithm. (Let 77 | `Zero(agg_param)` denote the identity element of `G(agg_param)`): 78 | 79 | ~~~ 80 | def run_daf(agg_param, inputs: Set[bytes]): 81 | output_shares = [ Zero(agg_param) for _ in range(SHARES) ] 82 | 83 | for input in inputs: 84 | # Each client runs the input-distribution algorithm. 85 | input_shares = daf_input(input) 86 | 87 | # Each aggregator runs the output-recovery algorithm. 88 | for j in range(SHARES): 89 | output_shares[j] += daf_output(agg_param, input_shares[j]) 90 | 91 | # Aggregators compute the final output.
92 | return sum(output_shares) 93 | ~~~ 94 | {: #run-daf title="Definition of the aggregation function computed by a DAF."} 95 | -------------------------------------------------------------------------------- /misc/special-syntax.md: -------------------------------------------------------------------------------- 1 | One way of tightening the syntax in a way that fits both Prio and Heavy Hitters 2 | is the following. 3 | 4 | First, observe that validating the output requires a different number of rounds 5 | for each protocol: Prio requires 1 and Heavy Hitters requires 2. What they both 6 | have in common is that the computation of an aggregator's next message is a 7 | function of the aggregator's (private) state and the *sum* of the (public) 8 | messages from the previous round. Codifying this leads to a somewhat simpler 9 | syntax. 10 | 11 | As before, evaluation proceeds in rounds, where the messages output by each 12 | aggregator in one round are used as input for the next round. What we do instead 13 | is restrict the message space so that each aggregator outputs a vector of group 14 | elements, and we say that the input for the next round is the *sum* of the 15 | vectors. 16 | 17 | In the new syntax, the VDAF now specifies an additive group `G` and a sequence 18 | of "output lengths" `m` that determines the length of the output at each round. 19 | 20 | * `eval_setup() -> (pk, sk)`. Same as the old setup algorithm. Returns a 21 | public key used by the clients and the corresponding secret keys of the 22 | aggregators (`len(sk) == s`, where `s` is the number of aggregators). 23 | 24 | * `eval_input(pk, m) -> x`. Same as the old input-distribution algorithm. Takes 25 | in the input `m` and the public key and outputs a sequence of input shares 26 | (`len(x) == s`). 27 | 28 | * `eval_next(sk, n, p, x, y, i) -> out`. Subsumes the old verify-start, 29 | verify-next, and verify-finish algorithms of the current syntax. Inputs are: 30 | 31 | 1. 
`k` The aggregator's secret key 32 | 2. `n` The nonce 33 | 3. `p` The aggregation parameter 34 | 4. `x` The aggregator's input share 35 | 5. `y` The *sum* of the previous round of messages, a vector of elements 36 | of G 37 | 6. `i` The current round 38 | 39 | The output is a sequence of elements of `G` of length `m[i-1]`. 40 | 41 | Evaluation of the VDAF on aggregation parameter `p`, nonces `nonces`, and inputs 42 | `inputs` would look something like this (using Python as pseudocode): 43 | 44 | ``` 45 | def run(p, nonces, inputs): 46 | (pk, sk) = eval_setup() 47 | agg = [ 0 for j in range(s) ] # Aggregators' aggregate shares 48 | 49 | for (n, m) in zip(nonces, inputs): 50 | x = eval_input(pk, m) 51 | 52 | # Evaluate VDAF on input shares. 53 | y = 0 54 | out = [ 0 for j in range(s) ] # Aggregators' output shares 55 | for i in range(r+1): 56 | for j in range(s): 57 | out[j] = eval_next(sk[j], n, p, x[j], y, i) 58 | y = sum(out) # Input of next round is sum of this round. 59 | 60 | # Aggregate output shares. 61 | for j in range(s): 62 | agg[j] += out[j] 63 | 64 | # Recover aggregate. 
65 | return sum(agg) 66 | ``` 67 | 68 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "dependencies": { 3 | "aasvg": "^0.4.2" 4 | } 5 | } 6 | -------------------------------------------------------------------------------- /poc/.mypy.ini: -------------------------------------------------------------------------------- 1 | [mypy] 2 | check_untyped_defs = True 3 | disallow_untyped_defs = True 4 | 5 | [mypy-matplotlib.*] 6 | ignore_missing_imports = True 7 | -------------------------------------------------------------------------------- /poc/README.md: -------------------------------------------------------------------------------- 1 | # VDAF reference implementations 2 | 3 | This directory contains a reference implementation of the VDAFs specified in 4 | [draft-irtf-cfrg-vdaf](https://datatracker.ietf.org/doc/draft-irtf-cfrg-vdaf/). 5 | It is not intended for production use: the code is not optimized for 6 | performance or resistance to side-channel attacks. Its primary purpose is to 7 | generate test vectors. 8 | 9 | ## Installation 10 | 11 | This code requires Python 3.12 or later to run. To install: 12 | 13 | ``` 14 | python -m pip install git+https://github.com/cfrg/draft-irtf-cfrg-vdaf@draft-irtf-cfrg-vdaf-11#subdirectory=poc 15 | ``` 16 | 17 | where draft-irtf-cfrg-vdaf-11 is the desired tag. 
The installed package is called `vdaf_poc`: 18 | 19 | ``` 20 | python -c "from vdaf_poc.field import Field64; print(Field64.MODULUS)" 21 | ``` 22 | 23 | ## Development 24 | 25 | To run unit tests, you'll first need to install 26 | [PyCryptodome](https://pycryptodome.readthedocs.io/en/latest/index.html) >= 27 | 3.20.0: 28 | 29 | ``` 30 | python -m pip install pycryptodomex 31 | ``` 32 | 33 | Now you should be able to run the unit tests: 34 | 35 | ``` 36 | python -m unittest 37 | ``` 38 | 39 | ## Generating test vectors 40 | 41 | To generate test vectors, run: 42 | 43 | ``` 44 | python gen_test_vec.py 45 | ``` 46 | 47 | Users can also specify a custom path to generate the test vectors in 48 | environment variable `TEST_VECTOR_PATH`: 49 | 50 | ``` 51 | TEST_VECTOR_PATH=path/to/test_vec python gen_test_vec.py 52 | ``` 53 | -------------------------------------------------------------------------------- /poc/example_flp_shamir.py: -------------------------------------------------------------------------------- 1 | ''' 2 | Demonstration of the composition of FLP with Shamir's secret sharing. This 3 | could be the basis of a Prio3 variant that tolerates drop out of an Aggregator. 4 | 5 | Imagine a DAP setup [1] where a Leader wants to be able to run the protocol with 6 | one of two Helpers so that if the first Helper goes offline, it can continue 7 | the computation with the other. 8 | 9 | For this to work, the Client still needs to know the identities of the two 10 | Helpers prior to generating its report. 
11 | 12 | [1] https://github.com/ietf-wg-ppm/draft-ietf-ppm-dap/issues/22 13 | ''' 14 | 15 | from typing import TypeVar 16 | from unittest import TestCase 17 | 18 | from vdaf_poc.common import vec_add 19 | from vdaf_poc.field import Field, Field64, Field128, poly_interp 20 | from vdaf_poc.flp_bbcggi19 import Count, FlpBBCGGI19, Sum, SumVec 21 | 22 | F = TypeVar("F", bound=Field) 23 | 24 | 25 | def shamir_shard(field: type[F], 26 | meas: list[F], 27 | unshard_threshold: int, 28 | num_shares: int) -> list[list[F]]: 29 | ''' 30 | Compute Shamir's threshold secret sharing of `meas`. `num_shares` specifies 31 | the total of number of shares; `unshard_threshold` specifies the number of 32 | shares needed to unshard. 33 | ''' 34 | # NOTE It would be more efficient to do "packed" Shamir by letting f(x) = 35 | # meas + rand_1 * x + rand_2 * x^2 + ..., and so on and letting the shares 36 | # be f(0), f(1), ..., and so on. That is: 37 | # 38 | # coeffs = [meas] 39 | # for _ in range(unshard_threshold): 40 | # coeffs.append(field.rand_vec(len(meas))) 41 | # 42 | # meas_shares = [] 43 | # for x in range(num_shares): 44 | # f = field.zeros(len(meas)) 45 | # for (i, c) in enumerate(coeffs): 46 | # for j in range(len(meas)): 47 | # f[j] += c[j] * field(x)**i 48 | # meas_shares.append(f) 49 | # 50 | # However we would need a version of `poly_interp()` that operates on 51 | # vectors over the field. 52 | meas_shares = [field.zeros(len(meas)) for _ in range(num_shares)] 53 | for i in range(len(meas)): 54 | coeffs = [meas[i]] + field.rand_vec(unshard_threshold-1) 55 | for j in range(num_shares): 56 | f = field(0) 57 | for (k, c) in enumerate(coeffs): 58 | x = field(j+1) 59 | f += c * x ** k 60 | meas_shares[j][i] = f 61 | return meas_shares 62 | 63 | 64 | def shamir_unshard(field: type[F], 65 | index: list[int], 66 | meas_shares: list[list[F]]) -> list[F]: 67 | ''' 68 | Combine Shamir secret shares `meas_share` into the underlying secret. 
69 | `index` is indicates the index of each secret share. That is, `index[I]` is 70 | equal to the index of `meas_share[I]` in the output of `shamir_shard()`. 71 | ''' 72 | meas_len = len(meas_shares[0]) 73 | meas = [] 74 | for i in range(meas_len): 75 | xs = [] 76 | fs = [] 77 | for (j, share) in zip(index, meas_shares): 78 | x = field(j+1) 79 | f = share[i] 80 | xs.append(x) 81 | fs.append(f) 82 | coeffs = poly_interp(field, xs, fs) 83 | meas.append(coeffs[0]) 84 | return meas 85 | 86 | 87 | class TestShamir(TestCase): 88 | def test_shard_unshard(self) -> None: 89 | '''Test the basic functionality.''' 90 | meas = [Field64(13), Field64(37)] 91 | meas_shares = shamir_shard(Field64, meas, 2, 3) 92 | 93 | got = shamir_unshard(Field64, [0, 1], [meas_shares[0], meas_shares[1]]) 94 | self.assertEqual(got, meas) 95 | got = shamir_unshard(Field64, [0, 2], [meas_shares[0], meas_shares[2]]) 96 | self.assertEqual(got, meas) 97 | got = shamir_unshard(Field64, [1, 2], [meas_shares[1], meas_shares[2]]) 98 | self.assertEqual(got, meas) 99 | 100 | def test_aggregate(self) -> None: 101 | '''Show that Shamir secret sharing works with aggregation as well.''' 102 | unshard_threshold = 2 103 | num_shares = 3 104 | max_measurement = 100 105 | v = Sum(Field64, max_measurement) 106 | 107 | agg_shares = [v.field.zeros(v.OUTPUT_LEN)] * num_shares 108 | for x in range(max_measurement): 109 | meas = v.encode(x) 110 | meas_shares = shamir_shard(v.field, 111 | meas, 112 | unshard_threshold, 113 | num_shares) 114 | for j in range(num_shares): 115 | agg_shares[j] = vec_add( 116 | agg_shares[j], v.truncate(meas_shares[j])) 117 | 118 | agg = shamir_unshard(v.field, [0, 1], [agg_shares[0], agg_shares[1]]) 119 | agg_result = v.decode(agg, max_measurement) 120 | self.assertEqual(agg_result, max_measurement * (max_measurement-1) / 2) 121 | 122 | def test_flp_count(self) -> None: 123 | flp = FlpBBCGGI19(Count(Field64)) 124 | prove_rand = flp.field.rand_vec(flp.PROVE_RAND_LEN) 125 | query_rand = 
flp.field.rand_vec(flp.QUERY_RAND_LEN) 126 | 127 | meas = flp.encode(False) 128 | meas_shares = shamir_shard(flp.field, meas, 2, 3) 129 | 130 | proof = flp.prove(meas, prove_rand, []) 131 | proof_shares = shamir_shard(flp.field, proof, 2, 3) 132 | 133 | verifier = shamir_unshard( 134 | flp.field, 135 | [0, 2], 136 | [ 137 | flp.query(meas_shares[0], proof_shares[0], query_rand, [], 1), 138 | flp.query(meas_shares[2], proof_shares[2], query_rand, [], 1), 139 | ], 140 | ) 141 | self.assertTrue(flp.decide(verifier)) 142 | 143 | def test_flp_sum(self) -> None: 144 | flp = FlpBBCGGI19(Sum(Field64, 23)) 145 | prove_rand = flp.field.rand_vec(flp.PROVE_RAND_LEN) 146 | query_rand = flp.field.rand_vec(flp.QUERY_RAND_LEN) 147 | 148 | meas = flp.encode(22) 149 | meas_shares = shamir_shard(flp.field, meas, 2, 3) 150 | 151 | proof = flp.prove(meas, prove_rand, []) 152 | proof_shares = shamir_shard(flp.field, proof, 2, 3) 153 | 154 | verifier = shamir_unshard( 155 | flp.field, 156 | [0, 2], 157 | [ 158 | flp.query(meas_shares[0], proof_shares[0], query_rand, [], 1), 159 | flp.query(meas_shares[2], proof_shares[2], query_rand, [], 1), 160 | ], 161 | ) 162 | self.assertTrue(flp.decide(verifier)) 163 | 164 | def test_flp_sum_vec(self) -> None: 165 | flp = FlpBBCGGI19(SumVec(Field128, 100, 2, 10)) 166 | prove_rand = flp.field.rand_vec(flp.PROVE_RAND_LEN) 167 | query_rand = flp.field.rand_vec(flp.QUERY_RAND_LEN) 168 | joint_rand = flp.field.rand_vec(flp.JOINT_RAND_LEN) 169 | 170 | meas = flp.encode([1] * 100) 171 | meas_shares = shamir_shard(flp.field, meas, 2, 3) 172 | 173 | proof = flp.prove(meas, prove_rand, joint_rand) 174 | proof_shares = shamir_shard(flp.field, proof, 2, 3) 175 | 176 | verifier = shamir_unshard( 177 | flp.field, 178 | [0, 2], 179 | [ 180 | flp.query(meas_shares[0], proof_shares[0], 181 | query_rand, joint_rand, 1), 182 | flp.query(meas_shares[2], proof_shares[2], 183 | query_rand, joint_rand, 1), 184 | ], 185 | ) 186 | self.assertTrue(flp.decide(verifier)) 187 | 
-------------------------------------------------------------------------------- /poc/plot_prio3_multiproof_robustness.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # Plot robustness bounds for various parameters. 3 | # 4 | # python plot_prio3_multiproof_robustness.py 5 | import math 6 | from typing import TypeVar 7 | 8 | import matplotlib.pyplot as plt 9 | 10 | from vdaf_poc.field import Field64, Field128, NttField 11 | from vdaf_poc.flp_bbcggi19 import FlpBBCGGI19, SumVec 12 | 13 | Measurement = TypeVar("Measurement") 14 | AggResult = TypeVar("AggResult") 15 | F = TypeVar("F", bound=NttField) 16 | 17 | 18 | def base_soundness(flp: FlpBBCGGI19[Measurement, AggResult, F]) -> float: 19 | ''' 20 | ia.cr/2019/188, Theorem 4.3 21 | ''' 22 | return sum((g_calls * g.DEGREE) / (flp.field.MODULUS - g_calls) 23 | for (g, g_calls) in zip(flp.valid.GADGETS, flp.valid.GADGET_CALLS)) 24 | 25 | 26 | def robustness( 27 | soundness: float, 28 | ro_queries: int, 29 | prep_queries: int, 30 | num_proofs: int) -> float: 31 | ''' 32 | ia.cr/2023/130, Theorem 1, assuming the bound can be modified by raising 33 | `epsilon` to the power of the number of FLPs. We're also assuming the first 34 | term dominates, i.e., we're ignoring the seed size. 35 | 36 | soundness - soundness of the FLP 37 | 38 | ro_queries - random oracle queries, a proxy for the amount of 39 | precomputation done by the adversary 40 | 41 | prep_queries - number of online attempts, a proxy for the batch size 42 | 43 | num_proofs - number of FLPs 44 | ''' 45 | return (ro_queries + prep_queries) * soundness**num_proofs 46 | 47 | 48 | def sum_vec(field: type[NttField], num_proofs: int, length: int) -> float: 49 | ''' 50 | Maximum probability of at least 1 in 1 billion attacks on Prio3SumVec 51 | robustness succeeding after doing 2^80 random oracle queries. 
52 | ''' 53 | bits = 1 54 | chunk_length = max(1, length**(1/2)) 55 | flp = FlpBBCGGI19(SumVec(field, length, bits, chunk_length)) 56 | 57 | # Assuming we adopt the improvement from 58 | # https://github.com/cfrg/draft-irtf-cfrg-vdaf/issues/427 59 | soundness = chunk_length / field.MODULUS + base_soundness(flp) 60 | 61 | return robustness( 62 | soundness, 63 | 2**80, # ro queries 64 | 1_000_000_000, # prep queries 65 | num_proofs, 66 | ) 67 | 68 | 69 | if __name__ == '__main__': 70 | print(-math.log2(sum_vec(Field128, 1, 100_000))) 71 | print(-math.log2(sum_vec(Field64, 3, 100_000))) 72 | print(-math.log2(sum_vec(Field64, 2, 100_000))) 73 | print(-math.log2(sum_vec(Field64, 1, 100_000))) 74 | 75 | lengths = range(100, 10**6, 100) 76 | plt.plot( 77 | lengths, 78 | [sum_vec(Field128, 1, length) for length in lengths], 79 | label='Field128/1', 80 | ) 81 | plt.plot( 82 | lengths, 83 | [sum_vec(Field64, 3, length) for length in lengths], 84 | label='Field64/3', 85 | ) 86 | plt.plot( 87 | lengths, 88 | [sum_vec(Field64, 2, length) for length in lengths], 89 | label='Field64/2', 90 | ) 91 | # plt.plot( 92 | # lengths, 93 | # [sum_vec(Field64, 1, length) for length in lengths], 94 | # label='Field64/1', 95 | # ) 96 | 97 | plt.xscale('log', base=10) 98 | plt.yscale('log', base=2) 99 | plt.xlabel('Length') 100 | plt.ylabel('Prob') 101 | plt.title('Prio3SumvVec (field/number of proofs)') 102 | plt.legend() 103 | plt.grid() 104 | plt.show() 105 | -------------------------------------------------------------------------------- /poc/pyproject.toml: -------------------------------------------------------------------------------- 1 | requires = ["setuptools >= 61.0.0"] 2 | build-backend = "setuptools.build_meta" 3 | 4 | [project] 5 | name = "vdaf_poc" 6 | description = "Reference implementation of draft-irtf-cfrg-vdaf" 7 | readme = "README.md" 8 | version = "0.0.1" 9 | dependencies = [ 10 | "pycryptodomex >= 3.20.0", 11 | ] 12 | 
-------------------------------------------------------------------------------- /poc/tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cfrg/draft-irtf-cfrg-vdaf/6054556f3dfe4d0dfe65f1c0e09fbf65ca675323/poc/tests/__init__.py -------------------------------------------------------------------------------- /poc/tests/test_daf.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | from functools import reduce 3 | from typing import TypeVar 4 | 5 | from vdaf_poc.daf import Daf, run_daf 6 | from vdaf_poc.field import Field128 7 | from vdaf_poc.xof import XofTurboShake128 8 | 9 | 10 | class TestDaf( 11 | Daf[ 12 | int, # Measurement 13 | None, # AggParam 14 | None, # PublicShare 15 | Field128, # InputShare 16 | Field128, # OutShare 17 | Field128, # AggShare 18 | int, # AggResult 19 | ]): 20 | """A simple DAF used for testing.""" 21 | 22 | ID = 0xFFFFFFFF 23 | SHARES = 2 24 | NONCE_SIZE = 0 25 | RAND_SIZE = 32 26 | 27 | def shard( 28 | self, 29 | _ctx: bytes, 30 | measurement: int, 31 | nonce: bytes, 32 | rand: bytes) -> tuple[None, list[Field128]]: 33 | if len(nonce) != self.NONCE_SIZE: 34 | raise ValueError("incorrect nonce size") 35 | if len(rand) != self.RAND_SIZE: 36 | raise ValueError("incorrect size of random bytes argument") 37 | 38 | helper_shares = XofTurboShake128.expand_into_vec( 39 | Field128, 40 | rand, 41 | b'', 42 | b'', 43 | self.SHARES - 1, 44 | ) 45 | leader_share = Field128(measurement) 46 | for helper_share in helper_shares: 47 | leader_share -= helper_share 48 | input_shares = [leader_share] + helper_shares 49 | return (None, input_shares) 50 | 51 | def is_valid( 52 | self, 53 | _agg_param: None, 54 | _previous_agg_params: list[None]) -> bool: 55 | return True 56 | 57 | def prep( 58 | self, 59 | _ctx: bytes, 60 | _agg_id: int, 61 | _agg_param: None, 62 | nonce: bytes, 63 | _public_share: None, 64 | input_share: 
Field128) -> Field128: 65 | if len(nonce) != self.NONCE_SIZE: 66 | raise ValueError("incorrect nonce size") 67 | 68 | # For this simple test DAF, the output share is the same as the input 69 | # share. 70 | return input_share 71 | 72 | def agg_init(self, _agg_param: None) -> Field128: 73 | return Field128(0) 74 | 75 | def agg_update(self, 76 | _agg_param: None, 77 | agg_share: Field128, 78 | out_share: Field128) -> Field128: 79 | return agg_share + out_share 80 | 81 | def merge(self, 82 | _agg_param: None, 83 | agg_shares: list[Field128]) -> Field128: 84 | return reduce(lambda x, y: x + y, agg_shares) 85 | 86 | def unshard( 87 | self, 88 | _agg_param: None, 89 | agg_shares: list[Field128], 90 | _num_measurements: int) -> int: 91 | return reduce(lambda x, y: x + y, agg_shares).int() 92 | 93 | 94 | Measurement = TypeVar("Measurement") 95 | AggParam = TypeVar("AggParam") 96 | PublicShare = TypeVar("PublicShare") 97 | InputShare = TypeVar("InputShare") 98 | OutShare = TypeVar("OutShare") 99 | AggShare = TypeVar("AggShare") 100 | AggResult = TypeVar("AggResult") 101 | 102 | 103 | class TestDafCase(unittest.TestCase): 104 | def run_daf_test( 105 | self, 106 | daf: Daf[ 107 | Measurement, 108 | AggParam, 109 | PublicShare, 110 | InputShare, 111 | OutShare, 112 | AggShare, 113 | AggResult 114 | ], 115 | agg_param: AggParam, 116 | measurements: list[Measurement], 117 | expected_agg_result: AggResult) -> None: 118 | # Test that the algorithm identifier is in the correct range. 119 | self.assertTrue(0 <= daf.ID and daf.ID < 2 ** 32) 120 | 121 | # Run the DAF on the set of measurements. 
122 | agg_result = run_daf(daf, 123 | b'some application', 124 | agg_param, 125 | measurements) 126 | self.assertEqual(agg_result, expected_agg_result) 127 | 128 | def test_test_daf(self) -> None: 129 | self.run_daf_test(TestDaf(), None, [1, 2, 3, 4], 10) 130 | -------------------------------------------------------------------------------- /poc/tests/test_field.py: -------------------------------------------------------------------------------- 1 | import random 2 | import unittest 3 | 4 | from vdaf_poc.field import (Field, Field64, Field96, Field128, Field255, 5 | NttField, poly_eval, poly_interp) 6 | 7 | 8 | class TestFields(unittest.TestCase): 9 | def run_field_test(self, cls: type[Field]) -> None: 10 | # Test constructing a field element from an integer. 11 | self.assertTrue(cls(1337).val == 1337) 12 | 13 | # Test generating a zero-vector. 14 | vec = cls.zeros(23) 15 | self.assertTrue(len(vec) == 23) 16 | for x in vec: 17 | self.assertTrue(x.val == 0) 18 | 19 | # Test generating a random vector. 20 | vec = cls.rand_vec(23) 21 | self.assertTrue(len(vec) == 23) 22 | 23 | # Test arithmetic. 24 | x = cls(random.randrange(0, cls.MODULUS)) 25 | y = cls(random.randrange(0, cls.MODULUS)) 26 | self.assertEqual(x + y, cls((x.val + y.val) % cls.MODULUS)) 27 | self.assertEqual(x - y, cls((x.val - y.val) % cls.MODULUS)) 28 | self.assertEqual(-x, cls((-x.val) % cls.MODULUS)) 29 | self.assertEqual(x * y, cls((x.val * y.val) % cls.MODULUS)) 30 | self.assertEqual(x.inv() * x, cls(1)) 31 | 32 | # Test serialization. 33 | want = cls.rand_vec(10) 34 | got = cls.decode_vec(cls.encode_vec(want)) 35 | self.assertTrue(got == want) 36 | 37 | # Test encoding integer as bit vector. 
38 | vals = [i for i in range(15)] 39 | bits = 4 40 | for val in vals: 41 | encoded = cls.encode_into_bit_vec(val, bits) 42 | self.assertTrue(cls.decode_from_bit_vec( 43 | encoded).int() == val) 44 | 45 | def run_ntt_field_test(self, cls: type[NttField]) -> None: 46 | self.run_field_test(cls) 47 | 48 | # Test generator. 49 | self.assertTrue(cls.gen()**cls.GEN_ORDER == cls(1)) 50 | 51 | def test_field64(self) -> None: 52 | self.run_ntt_field_test(Field64) 53 | 54 | def test_field96(self) -> None: 55 | self.run_ntt_field_test(Field96) 56 | 57 | def test_field128(self) -> None: 58 | self.run_ntt_field_test(Field128) 59 | 60 | def test_field255(self) -> None: 61 | self.run_field_test(Field255) 62 | 63 | def test_interp(self) -> None: 64 | # Test polynomial interpolation. 65 | cls = Field64 66 | p = cls.rand_vec(10) 67 | xs = [cls(x) for x in range(10)] 68 | ys = [poly_eval(cls, p, x) for x in xs] 69 | q = poly_interp(cls, xs, ys) 70 | for x in xs: 71 | a = poly_eval(cls, p, x) 72 | b = poly_eval(cls, q, x) 73 | self.assertEqual(a, b) 74 | -------------------------------------------------------------------------------- /poc/tests/test_flp.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | from copy import deepcopy 3 | from typing import TypeVar 4 | 5 | from vdaf_poc.field import Field, Field128 6 | from vdaf_poc.flp import Flp, run_flp 7 | 8 | F = TypeVar("F", bound=Field) 9 | 10 | 11 | class FlpTest(Flp[int, int, F]): 12 | """An insecure FLP used only for testing.""" 13 | PROVE_RAND_LEN = 2 14 | QUERY_RAND_LEN = 3 15 | MEAS_LEN = 2 16 | OUTPUT_LEN = 1 17 | PROOF_LEN = 2 18 | VERIFIER_LEN = 2 19 | 20 | meas_range = range(5) 21 | 22 | def __init__(self, field: type[F], joint_rand_len: int): 23 | self.field = field 24 | self.JOINT_RAND_LEN = joint_rand_len 25 | 26 | def encode(self, measurement: int) -> list[F]: 27 | return [self.field(measurement)] * 2 28 | 29 | def prove(self, meas: list[F], _prove_rand: list[F], 
_joint_rand: list[F]) -> list[F]: 30 | # The proof is the measurement itself for this trivially insecure FLP. 31 | return deepcopy(meas) 32 | 33 | def query( 34 | self, 35 | meas: list[F], 36 | proof: list[F], 37 | query_rand: list[F], 38 | joint_rand: list[F], 39 | _num_shares: int) -> list[F]: 40 | return deepcopy(proof) 41 | 42 | def decide(self, verifier: list[F]) -> bool: 43 | """Decide if a verifier message was generated from a valid 44 | measurement.""" 45 | if len(verifier) != 2 or \ 46 | verifier[0] != verifier[1] or \ 47 | verifier[0].int() not in self.meas_range: 48 | return False 49 | return True 50 | 51 | def truncate(self, meas: list[F]) -> list[F]: 52 | return [meas[0]] 53 | 54 | def decode(self, output: list[F], _num_measurements: int) -> int: 55 | return output[0].int() 56 | 57 | 58 | class TestFlp(unittest.TestCase): 59 | def test_flp(self) -> None: 60 | flp = FlpTest(Field128, 1) 61 | self.assertTrue(run_flp(flp, flp.encode(0), 3)) 62 | self.assertTrue(run_flp(flp, flp.encode(4), 3)) 63 | self.assertFalse(run_flp(flp, [Field128(1337)], 3)) 64 | -------------------------------------------------------------------------------- /poc/tests/test_flp_bbcggi19.py: -------------------------------------------------------------------------------- 1 | from typing import TypeVar 2 | 3 | from vdaf_poc.field import Field64, Field96, Field128, NttField 4 | from vdaf_poc.flp_bbcggi19 import (Count, FlpBBCGGI19, Histogram, Mul, 5 | MultihotCountVec, PolyEval, Sum, SumVec, 6 | Valid) 7 | from vdaf_poc.test_utils import TestFlpBBCGGI19 8 | 9 | Measurement = TypeVar("Measurement") 10 | AggResult = TypeVar("AggResult") 11 | F = TypeVar("F", bound=NttField) 12 | 13 | 14 | class MultiGadget(Valid[int, int, Field64]): 15 | # Associated parameters 16 | field = Field64 17 | GADGETS = [Mul(), Mul()] 18 | GADGET_CALLS = [1, 2] 19 | MEAS_LEN = 1 20 | JOINT_RAND_LEN = 0 21 | OUTPUT_LEN = 1 22 | EVAL_OUTPUT_LEN = 1 23 | 24 | def eval( 25 | self, 26 | meas: list[Field64], 27 | 
joint_rand: list[Field64], 28 | _num_shares: int) -> list[Field64]: 29 | self.check_valid_eval(meas, joint_rand) 30 | # Not a very useful circuit, obviously. We just want to do something. 31 | x = self.GADGETS[0].eval(self.field, [meas[0], meas[0]]) 32 | y = self.GADGETS[1].eval(self.field, [meas[0], x]) 33 | z = self.GADGETS[1].eval(self.field, [x, y]) 34 | return [z] 35 | 36 | def encode(self, measurement: int) -> list[Field64]: 37 | return [self.field(measurement)] 38 | 39 | def truncate(self, meas: list[Field64]) -> list[Field64]: 40 | return meas 41 | 42 | def decode(self, output: list[Field64], _num_measurements: int) -> int: 43 | return output[0].int() 44 | 45 | 46 | class TestAverage(Sum): 47 | """ 48 | Flp subclass that calculates the average of integers. The result is rounded 49 | down. 50 | """ 51 | 52 | def decode(self, output: list[Field64], num_measurements: int) -> int: 53 | total = super().decode(output, num_measurements) 54 | return total // num_measurements 55 | 56 | 57 | class TestCount(TestFlpBBCGGI19): 58 | def test(self) -> None: 59 | flp = FlpBBCGGI19(Count(Field64)) 60 | self.run_flp_test(flp, [ 61 | (flp.encode(0), True), 62 | (flp.encode(1), True), 63 | ([flp.field(1337)], False), 64 | ]) 65 | 66 | 67 | class TestSum(TestFlpBBCGGI19): 68 | def test(self) -> None: 69 | flp = FlpBBCGGI19(Sum(Field64, 10_000)) 70 | self.run_flp_test(flp, [ 71 | (flp.encode(0), True), 72 | (flp.encode(1337), True), 73 | (flp.encode(9999), True), 74 | (flp.encode(10000), True), 75 | (flp.field.zeros(flp.MEAS_LEN), False), 76 | ]) 77 | 78 | 79 | class TestHistogram(TestFlpBBCGGI19): 80 | def test(self) -> None: 81 | flp = FlpBBCGGI19(Histogram(Field128, 4, 2)) 82 | self.run_flp_test(flp, [ 83 | (flp.encode(0), True), 84 | (flp.encode(1), True), 85 | (flp.encode(2), True), 86 | (flp.encode(3), True), 87 | ([flp.field(0)] * 4, False), 88 | ([flp.field(1)] * 4, False), 89 | (flp.field.rand_vec(4), False), 90 | ]) 91 | 92 | 93 | class 
TestMultihotCountVec(TestFlpBBCGGI19): 94 | def test(self) -> None: 95 | valid = MultihotCountVec(Field128, 4, 2, 2) 96 | flp = FlpBBCGGI19(valid) 97 | 98 | # Successful cases: 99 | cases = [ 100 | (flp.encode([False, False, False, False]), True), 101 | (flp.encode([False, True, False, False]), True), 102 | (flp.encode([False, True, True, False]), True), 103 | (flp.encode([True, True, False, False]), True), 104 | ] 105 | # Failure cases: too many number of 1s, should fail weight check. 106 | cases += [ 107 | ( 108 | [flp.field(1)] * i 109 | + [flp.field(0)] * (valid.length - i) 110 | # Try to lie about the offset weight. 111 | + [flp.field(0)] * valid.bits_for_weight, 112 | False 113 | ) 114 | for i in range(valid.max_weight + 1, valid.length + 1) 115 | ] 116 | # Failure case: pass count check but fail bit check. 117 | cases += [ 118 | ( 119 | [ 120 | flp.field(flp.field.MODULUS - 1), 121 | flp.field(1), 122 | flp.field(0), 123 | flp.field(0), 124 | ] 125 | + [flp.field(0)] * valid.bits_for_weight, 126 | False 127 | ) 128 | ] 129 | self.run_flp_test(flp, cases) 130 | 131 | def test_small(self) -> None: 132 | flp = FlpBBCGGI19(MultihotCountVec(Field128, 1, 1, 1)) 133 | 134 | self.run_flp_test(flp, [ 135 | (flp.encode([False]), True), 136 | (flp.encode([True]), True), 137 | ([flp.field(0), flp.field(1337)], False), 138 | ([flp.field(1), flp.field(0)], False), 139 | ]) 140 | 141 | 142 | class TestSumVec(TestFlpBBCGGI19): 143 | def run_encode_truncate_decode_with_ntt_fields_test( 144 | self, 145 | measurements: list[list[int]], 146 | length: int, 147 | bits: int, 148 | chunk_length: int) -> None: 149 | for field in [Field64, Field96, Field128]: 150 | sumvec = SumVec[NttField](field, length, bits, chunk_length) 151 | self.assertEqual(sumvec.field, field) 152 | self.assertTrue(isinstance(sumvec, SumVec)) 153 | self.run_encode_truncate_decode_test( 154 | FlpBBCGGI19(sumvec), measurements) 155 | 156 | def test(self) -> None: 157 | # SumVec with length 2, bits 4, chunk len 
1. 158 | self.run_encode_truncate_decode_with_ntt_fields_test( 159 | [[1, 2], [3, 4], [5, 6], [7, 8]], 160 | 2, 161 | 4, 162 | 1, 163 | ) 164 | 165 | 166 | class TestMultiGadget(TestFlpBBCGGI19): 167 | def test(self) -> None: 168 | flp = FlpBBCGGI19(MultiGadget()) 169 | self.run_flp_test(flp, [ 170 | (flp.encode(0), True), 171 | ]) 172 | 173 | 174 | class TestGadgets(TestFlpBBCGGI19): 175 | def test_poly_eval_range2(self) -> None: 176 | self.run_gadget_test(PolyEval([0, -1, 1]), Field128, 10) 177 | 178 | def test_poly_eval(self) -> None: 179 | self.run_gadget_test( 180 | PolyEval([0, -23, 1, 3]), 181 | Field128, 182 | 10, 183 | ) 184 | -------------------------------------------------------------------------------- /poc/tests/test_idpf_bbcggi21.py: -------------------------------------------------------------------------------- 1 | import itertools 2 | import unittest 3 | from functools import reduce 4 | from typing import Sequence, cast 5 | 6 | from vdaf_poc.common import gen_rand, vec_add 7 | from vdaf_poc.field import Field 8 | from vdaf_poc.idpf import Idpf 9 | from vdaf_poc.idpf_bbcggi21 import CorrectionWord, IdpfBBCGGI21 10 | 11 | 12 | class TestIdpfBBCGGI21(unittest.TestCase): 13 | def run_idpf_test(self, idpf: Idpf, alpha: tuple[bool, ...], level: int, prefixes: Sequence[tuple[bool, ...]]) -> None: 14 | """ 15 | Generate a set of IDPF keys and evaluate them on the given set of prefix. 16 | """ 17 | ctx = b'some context' 18 | beta_inner = [[idpf.field_inner(1)] * idpf.VALUE_LEN] * (idpf.BITS - 1) 19 | beta_leaf = [idpf.field_leaf(1)] * idpf.VALUE_LEN 20 | 21 | # Generate the IDPF keys. 
22 | rand = gen_rand(idpf.RAND_SIZE) 23 | nonce = gen_rand(idpf.NONCE_SIZE) 24 | (public_share, keys) = idpf.gen( 25 | alpha, beta_inner, beta_leaf, ctx, nonce, rand) 26 | 27 | out = [idpf.current_field(level).zeros(idpf.VALUE_LEN)] * len(prefixes) 28 | for agg_id in range(idpf.SHARES): 29 | out_share = idpf.eval( 30 | agg_id, public_share, keys[agg_id], level, prefixes, ctx, nonce) 31 | for i in range(len(prefixes)): 32 | out[i] = vec_add(out[i], out_share[i]) 33 | 34 | for (got, prefix) in zip(out, prefixes): 35 | if idpf.is_prefix(prefix, alpha, level): 36 | if level < idpf.BITS - 1: 37 | want = beta_inner[level] 38 | else: 39 | want = beta_leaf 40 | else: 41 | want = idpf.current_field(level).zeros(idpf.VALUE_LEN) 42 | 43 | self.assertEqual(got, want) 44 | 45 | def run_idpf_exhaustive_test(self, idpf: Idpf, alpha: tuple[bool, ...]) -> None: 46 | """Generate a set of IDPF keys and test every possible output.""" 47 | 48 | # Generate random outputs with which to program the IDPF. 49 | beta_inner = [] 50 | for _ in range(idpf.BITS - 1): 51 | beta_inner.append(idpf.field_inner.rand_vec(idpf.VALUE_LEN)) 52 | beta_leaf = idpf.field_leaf.rand_vec(idpf.VALUE_LEN) 53 | 54 | # Generate the IDPF keys. 55 | rand = gen_rand(idpf.RAND_SIZE) 56 | ctx = b'some context' 57 | nonce = gen_rand(idpf.NONCE_SIZE) 58 | (public_share, keys) = idpf.gen( 59 | alpha, beta_inner, beta_leaf, ctx, nonce, rand) 60 | 61 | # Evaluate the IDPF at every node of the tree. 62 | for level in range(idpf.BITS): 63 | prefixes = tuple(itertools.product( 64 | *[(False, True)] * (level + 1) 65 | )) 66 | 67 | out_shares = [] 68 | for agg_id in range(idpf.SHARES): 69 | out_shares.append( 70 | idpf.eval(agg_id, public_share, 71 | keys[agg_id], level, prefixes, ctx, nonce)) 72 | 73 | # Check that each set of output shares for each prefix sums up to the 74 | # correct value. 
75 | for i, prefix in enumerate(prefixes): 76 | got = reduce(lambda x, y: vec_add(x, y), 77 | map(lambda x: x[i], out_shares)) 78 | 79 | if idpf.is_prefix(prefix, alpha, level): 80 | if level < idpf.BITS - 1: 81 | want = beta_inner[level] 82 | else: 83 | want = beta_leaf 84 | else: 85 | want = idpf.current_field(level).zeros(idpf.VALUE_LEN) 86 | 87 | self.assertEqual(got, want) 88 | 89 | def test(self) -> None: 90 | self.run_idpf_test( 91 | IdpfBBCGGI21(2, 16), 92 | int_to_bit_string(0b1111000011110000, 16), 93 | 15, 94 | (int_to_bit_string(0b1111000011110000, 16),), 95 | ) 96 | self.run_idpf_test( 97 | IdpfBBCGGI21(2, 16), 98 | int_to_bit_string(0b1111000011110000, 16), 99 | 14, 100 | (int_to_bit_string(0b111100001111000, 15),), 101 | ) 102 | self.run_idpf_test( 103 | IdpfBBCGGI21(2, 16), 104 | int_to_bit_string(0b1111000011110000, 16), 105 | 13, 106 | (int_to_bit_string(0b11110000111100, 14),), 107 | ) 108 | self.run_idpf_test( 109 | IdpfBBCGGI21(2, 16), 110 | int_to_bit_string(0b1111000011110000, 16), 111 | 12, 112 | (int_to_bit_string(0b1111000011110, 13),), 113 | ) 114 | self.run_idpf_test( 115 | IdpfBBCGGI21(2, 16), 116 | int_to_bit_string(0b1111000011110000, 16), 117 | 11, 118 | (int_to_bit_string(0b111100001111, 12),), 119 | ) 120 | self.run_idpf_test( 121 | IdpfBBCGGI21(2, 16), 122 | int_to_bit_string(0b1111000011110000, 16), 123 | 10, 124 | (int_to_bit_string(0b11110000111, 11),), 125 | ) 126 | self.run_idpf_test( 127 | IdpfBBCGGI21(2, 16), 128 | int_to_bit_string(0b1111000011110000, 16), 129 | 5, 130 | (int_to_bit_string(0b111100, 6),), 131 | ) 132 | self.run_idpf_test( 133 | IdpfBBCGGI21(2, 16), 134 | int_to_bit_string(0b1111000011110000, 16), 135 | 4, 136 | (int_to_bit_string(0b11110, 5),), 137 | ) 138 | self.run_idpf_test( 139 | IdpfBBCGGI21(2, 16), 140 | int_to_bit_string(0b1111000011110000, 16), 141 | 3, 142 | (int_to_bit_string(0b1111, 4),), 143 | ) 144 | self.run_idpf_test( 145 | IdpfBBCGGI21(2, 16), 146 | int_to_bit_string(0b1111000011110000, 
16), 147 | 2, 148 | (int_to_bit_string(0b111, 3),), 149 | ) 150 | self.run_idpf_test( 151 | IdpfBBCGGI21(2, 16), 152 | int_to_bit_string(0b1111000011110000, 16), 153 | 1, 154 | (int_to_bit_string(0b11, 2),), 155 | ) 156 | self.run_idpf_test( 157 | IdpfBBCGGI21(2, 16), 158 | int_to_bit_string(0b1111000011110000, 16), 159 | 0, 160 | (int_to_bit_string(0b1, 1),), 161 | ) 162 | self.run_idpf_test( 163 | IdpfBBCGGI21(2, 1000), 164 | tuple([False] * 1000), 165 | 999, 166 | (tuple([False] * 1000),), 167 | ) 168 | self.run_idpf_exhaustive_test( 169 | IdpfBBCGGI21(2, 1), 170 | (False,), 171 | ) 172 | self.run_idpf_exhaustive_test( 173 | IdpfBBCGGI21(2, 1), 174 | (True,), 175 | ) 176 | self.run_idpf_exhaustive_test( 177 | IdpfBBCGGI21(2, 8), 178 | int_to_bit_string(91, 8), 179 | ) 180 | 181 | def test_index_encoding(self) -> None: 182 | """ 183 | Ensure that the IDPF index is encoded in big-endian byte order. 184 | """ 185 | idpf = IdpfBBCGGI21(1, 32) 186 | ctx = b'some context' 187 | nonce = gen_rand(idpf.NONCE_SIZE) 188 | 189 | def shard(s: bytes) -> tuple[list[CorrectionWord], list[bytes]]: 190 | alpha = bytes_to_bit_string(s) 191 | beta_inner = [[idpf.field_inner(1)]] * (idpf.BITS - 1) 192 | beta_leaf = [idpf.field_leaf(1)] 193 | rand = gen_rand(idpf.RAND_SIZE) 194 | return idpf.gen(alpha, beta_inner, beta_leaf, ctx, nonce, rand) 195 | 196 | for (alpha_str, prefix, level) in [ 197 | ( 198 | b"\x01\x02\x03\x04", 199 | int_to_bit_string(0x010203, 24), 200 | 23, 201 | ), 202 | ( 203 | b"abcd", 204 | int_to_bit_string(0x61626364, 32), 205 | 31, 206 | ) 207 | ]: 208 | (public_share, keys) = shard(alpha_str) 209 | out_share_0 = cast(list[list[Field]], idpf.eval( 210 | 0, public_share, keys[0], level, (prefix,), ctx, nonce)) 211 | out_share_1 = cast(list[list[Field]], idpf.eval( 212 | 1, public_share, keys[1], level, (prefix,), ctx, nonce)) 213 | out = vec_add(out_share_0[0], out_share_1[0])[0] 214 | self.assertEqual(out.int(), 1) 215 | 216 | def test_is_prefix(self) -> None: 
217 | idpf = IdpfBBCGGI21(1, 8) 218 | self.assertTrue(idpf.is_prefix( 219 | int_to_bit_string(0b1, 1), 220 | int_to_bit_string(0b11000001, 8), 221 | 0, 222 | )) 223 | self.assertTrue(idpf.is_prefix( 224 | int_to_bit_string(0b11, 2), 225 | int_to_bit_string(0b11000001, 8), 226 | 1, 227 | )) 228 | self.assertTrue(idpf.is_prefix( 229 | int_to_bit_string(0b110, 3), 230 | int_to_bit_string(0b11000001, 8), 231 | 2, 232 | )) 233 | self.assertTrue(idpf.is_prefix( 234 | int_to_bit_string(0b1100, 4), 235 | int_to_bit_string(0b11000001, 8), 236 | 3, 237 | )) 238 | self.assertFalse(idpf.is_prefix( 239 | int_to_bit_string(0b111, 3), 240 | int_to_bit_string(0b11000001, 8), 241 | 2, 242 | )) 243 | self.assertFalse(idpf.is_prefix( 244 | int_to_bit_string(0b1101, 4), 245 | int_to_bit_string(0b11000001, 8), 246 | 3, 247 | )) 248 | 249 | def test_public_share_roundtrip(self) -> None: 250 | idpf = IdpfBBCGGI21(1, 32) 251 | alpha = bytes_to_bit_string(b"cool") 252 | beta_inner = [[idpf.field_inner(23)]] * (idpf.BITS - 1) 253 | beta_leaf = [idpf.field_leaf(97)] 254 | ctx = b'some context' 255 | nonce = gen_rand(idpf.NONCE_SIZE) 256 | rand = gen_rand(idpf.RAND_SIZE) 257 | (public_share, _keys) = idpf.gen( 258 | alpha, beta_inner, beta_leaf, ctx, nonce, rand) 259 | self.assertEqual( 260 | idpf.decode_public_share(idpf.encode_public_share(public_share)), 261 | public_share, 262 | ) 263 | 264 | 265 | def bytes_to_bit_string(s: bytes) -> tuple[bool, ...]: 266 | return tuple(itertools.chain.from_iterable( 267 | ( 268 | byte & 0x80 != 0, 269 | byte & 0x40 != 0, 270 | byte & 0x20 != 0, 271 | byte & 0x10 != 0, 272 | byte & 0x08 != 0, 273 | byte & 0x04 != 0, 274 | byte & 0x02 != 0, 275 | byte & 0x01 != 0, 276 | ) for byte in s 277 | )) 278 | 279 | 280 | def int_to_bit_string(value: int, length: int) -> tuple[bool, ...]: 281 | return tuple( 282 | (value >> (length - 1 - i)) & 1 != 0 for i in range(length) 283 | ) 284 | 
-------------------------------------------------------------------------------- /poc/tests/test_vdaf_ping_pong.py: -------------------------------------------------------------------------------- 1 | import math 2 | import unittest 3 | from typing import Union, cast 4 | 5 | from vdaf_poc.common import from_be_bytes, to_be_bytes 6 | from vdaf_poc.test_utils import TestVdaf 7 | from vdaf_poc.vdaf import Vdaf 8 | from vdaf_poc.vdaf_ping_pong import (Continued, Finished, FinishedWithOutbound, 9 | PingPong, State) 10 | 11 | 12 | class PingPongTester( 13 | PingPong[ 14 | int, # Measurement 15 | int, # AggParam, 16 | str, # PublicShare 17 | int, # InputShare 18 | int, # OutShare 19 | int, # AggShare 20 | int, # AggResult 21 | tuple[int, int], # PrepState 22 | str, # PrepShare 23 | str, # PrepMessage 24 | ]): 25 | """ 26 | Computes the aggregation function f(agg_param, m[1], ..., m[N]) = agg_param 27 | * (m[1] + ... + m[N]). This VDAF is not secure, but is sufficient to 28 | exercise the code paths relevant to the ping pong topology. 
29 | """ 30 | 31 | ID: int = 0xFFFFFFFF 32 | VERIFY_KEY_SIZE: int = 0 33 | NONCE_SIZE: int = 0 34 | RAND_SIZE: int = 0 35 | SHARES: int = 2 36 | ROUNDS: int 37 | 38 | def __init__(self, num_rounds: int) -> None: 39 | self.ROUNDS = num_rounds 40 | 41 | # `Vdaf` 42 | 43 | def shard(self, 44 | _ctx: bytes, 45 | measurement: int, 46 | _nonce: bytes, 47 | _rand: bytes) -> tuple[str, list[int]]: 48 | return ('public share', [measurement, measurement]) 49 | 50 | def is_valid(self, 51 | agg_param: int, 52 | previous_agg_params: list[int]) -> bool: 53 | return len(previous_agg_params) == 0 54 | 55 | def prep_init(self, 56 | _verify_key: bytes, 57 | _ctx: bytes, 58 | _agg_id: int, 59 | _agg_param: int, 60 | _nonce: bytes, 61 | public_share: str, 62 | input_share: int) -> tuple[tuple[int, int], str]: 63 | if public_share != 'public share': 64 | raise ValueError('unexpected public share') 65 | current_round = 0 66 | return ( 67 | (current_round, input_share), 68 | 'prep round {}'.format(current_round), 69 | ) 70 | 71 | def prep_shares_to_prep(self, 72 | _ctx: bytes, 73 | _agg_param: int, 74 | prep_shares: list[str]) -> str: 75 | for prep_share in prep_shares[1:]: 76 | if prep_share != prep_shares[0]: 77 | raise ValueError('unexpected prep share') 78 | return prep_shares[0] 79 | 80 | def prep_next(self, 81 | _ctx: bytes, 82 | prep_state: tuple[int, int], 83 | prep_msg: str) -> Union[tuple[tuple[int, int], str], int]: 84 | (current_round, out_share) = prep_state 85 | if prep_msg != "prep round {}".format(current_round): 86 | raise ValueError(f"unexpected prep message {prep_msg}") 87 | if current_round+1 == self.ROUNDS: 88 | return out_share 89 | return ( 90 | (current_round+1, out_share), 91 | "prep round {}".format(current_round+1), 92 | ) 93 | 94 | def agg_init(self, _agg_param: int) -> int: 95 | return 0 96 | 97 | def agg_update(self, 98 | _agg_param: int, 99 | agg_share: int, 100 | agg_delta: int) -> int: 101 | return agg_share + agg_delta 102 | 103 | def merge(self, 104 | 
_agg_param: int, 105 | _agg_shares: list[int]) -> int: 106 | raise NotImplementedError("not needed by tests") 107 | 108 | def unshard(self, 109 | agg_param: int, 110 | agg_shares: list[int], 111 | _num_measurements: int) -> int: 112 | return agg_param * sum(agg_shares) // self.SHARES 113 | 114 | def test_vec_encode_input_share(self, input_share: int) -> bytes: 115 | return to_be_bytes(input_share, 8) 116 | 117 | def test_vec_encode_public_share(self, public_share: str) -> bytes: 118 | return public_share.encode('utf-8') 119 | 120 | def test_vec_encode_agg_share(self, agg_share: int) -> bytes: 121 | return to_be_bytes(agg_share, 8) 122 | 123 | def test_vec_encode_prep_share(self, prep_share: str) -> bytes: 124 | return self.encode_prep_share(prep_share) 125 | 126 | def test_vec_encode_prep_msg(self, prep_msg: str) -> bytes: 127 | return self.encode_prep_msg(prep_msg) 128 | 129 | # `PingPong` 130 | 131 | def decode_public_share(self, encoded: bytes) -> str: 132 | return encoded.decode('utf-8') 133 | 134 | def decode_input_share(self, _agg_id: int, encoded: bytes) -> int: 135 | return from_be_bytes(encoded) 136 | 137 | def encode_prep_share(self, prep_share: str) -> bytes: 138 | return prep_share.encode('utf-8') 139 | 140 | def decode_prep_share(self, 141 | _prep_state: tuple[int, int], 142 | encoded: bytes) -> str: 143 | return encoded.decode('utf-8') 144 | 145 | def encode_prep_msg(self, prep_msg: str) -> bytes: 146 | return prep_msg.encode('utf-8') 147 | 148 | def decode_prep_msg(self, 149 | _prep_state: tuple[int, int], 150 | encoded: bytes) -> str: 151 | return encoded.decode('utf-8') 152 | 153 | def decode_agg_param(self, encoded: bytes) -> int: 154 | return from_be_bytes(encoded) 155 | 156 | def encode_agg_param(self, agg_param: int) -> bytes: 157 | return to_be_bytes(agg_param, 8) 158 | 159 | 160 | class TestPingPongTester(TestVdaf): 161 | def test(self) -> None: 162 | """Ensure `PingPongTester` correctly implements the `Vdaf` API.""" 163 | self.run_vdaf_test( 
class TestPingPong(unittest.TestCase):
    """Exercise the ping-pong topology wrappers of the VDAF API."""

    def test_one_round(self) -> None:
        """Test the ping pong flow with a 1-round VDAF."""
        vdaf = PingPongTester(1)
        verify_key = b''

        measurement = 1337
        ctx = b'some context'
        nonce = b''
        rand = b''
        # Client: split the measurement into a public share and one
        # input share per Aggregator.
        (public_share, input_shares) = vdaf.shard(
            ctx,
            measurement,
            nonce,
            rand,
        )

        agg_param = 23
        leader_init_state = vdaf.ping_pong_leader_init(
            verify_key,
            ctx,
            vdaf.encode_agg_param(agg_param),
            nonce,
            vdaf.test_vec_encode_public_share(public_share),
            vdaf.test_vec_encode_input_share(input_shares[0]),
        )
        assert isinstance(leader_init_state, Continued)
        self.assertEqual(leader_init_state.prep_round, 0)

        # The helper consumes the leader's first message; with a single
        # round it finishes immediately and emits its final message.
        helper_state = vdaf.ping_pong_helper_init(
            verify_key,
            ctx,
            vdaf.encode_agg_param(agg_param),
            nonce,
            vdaf.test_vec_encode_public_share(public_share),
            vdaf.test_vec_encode_input_share(input_shares[1]),
            leader_init_state.outbound,
        )
        assert isinstance(helper_state, FinishedWithOutbound)

        # NOTE(review): arguments here are ordered (ctx, agg_param, ...)
        # but test_multi_round below passes (agg_param, ctx, ...) to the
        # same method. Both are `bytes`, so a swap is silent; one of the
        # two orderings cannot match the method's signature -- confirm
        # against vdaf_ping_pong.ping_pong_leader_continued.
        leader_state = vdaf.ping_pong_leader_continued(
            ctx,
            vdaf.encode_agg_param(agg_param),
            leader_init_state,
            helper_state.outbound,
        )
        self.assertTrue(isinstance(leader_state, Finished))

    def test_multi_round(self) -> None:
        """Test the ping pong flow with multiple rounds."""
        verify_key = b''
        measurement = 1337
        ctx = b'some application'
        nonce = b''
        rand = b''
        agg_param = 23

        for num_rounds in range(1, 10):
            # Presumably each loop step below advances up to two
            # protocol rounds (one helper and one leader transition),
            # hence ceil((num_rounds+1)/2) steps -- TODO confirm.
            num_steps = math.ceil((num_rounds+1) / 2)

            vdaf = PingPongTester(num_rounds)

            (public_share, input_shares) = vdaf.shard(
                ctx,
                measurement,
                nonce,
                rand,
            )

            leader_state: State = vdaf.ping_pong_leader_init(
                verify_key,
                ctx,
                vdaf.encode_agg_param(agg_param),
                nonce,
                vdaf.test_vec_encode_public_share(public_share),
                vdaf.test_vec_encode_input_share(input_shares[0]),
            )
            assert isinstance(leader_state, Continued)
            self.assertEqual(leader_state.prep_round, 0)

            for step in range(num_steps):
                if step == 0:
                    assert isinstance(leader_state, Continued)
                    helper_state: State = vdaf.ping_pong_helper_init(
                        verify_key,
                        ctx,
                        vdaf.encode_agg_param(agg_param),
                        nonce,
                        vdaf.test_vec_encode_public_share(public_share),
                        vdaf.test_vec_encode_input_share(input_shares[1]),
                        leader_state.outbound,
                    )
                else:
                    assert isinstance(leader_state, Continued) or \
                        isinstance(leader_state, FinishedWithOutbound)
                    # NOTE(review): (agg_param, ctx) ordering here is the
                    # reverse of the init calls above and of
                    # test_one_round -- verify against the
                    # ping_pong_helper_continued signature.
                    helper_state = vdaf.ping_pong_helper_continued(
                        vdaf.encode_agg_param(agg_param),
                        ctx,
                        cast(Continued, helper_state),
                        leader_state.outbound,
                    )

                if isinstance(leader_state, Continued):
                    assert isinstance(helper_state, Continued) or \
                        isinstance(helper_state, FinishedWithOutbound)
                    # NOTE(review): same (agg_param, ctx) ordering caveat
                    # as above.
                    leader_state = vdaf.ping_pong_leader_continued(
                        vdaf.encode_agg_param(agg_param),
                        ctx,
                        leader_state,
                        helper_state.outbound,
                    )

            # Asserted expectation: with an odd number of rounds the
            # leader ends in Finished and the helper in
            # FinishedWithOutbound; with an even number the roles of the
            # two terminal states are reversed.
            if num_rounds & 1 == 1:
                self.assertTrue(isinstance(leader_state, Finished))
                self.assertTrue(isinstance(helper_state, FinishedWithOutbound))
            else:
                self.assertTrue(isinstance(leader_state, FinishedWithOutbound))
                self.assertTrue(isinstance(helper_state, Finished))
class Prio3Average(Prio3):
    """
    A Prio3 instantiation to test use of num_measurements in the Valid
    class's decode() method.
    """

    xof = XofTurboShake128
    # NOTE 0xFFFFFFFF is reserved for testing. If we decide to standardize this
    # Prio3 variant, then we'll need to pick a real codepoint for it.
    ID = 0xFFFFFFFF
    VERIFY_KEY_SIZE = xof.SEED_SIZE

    def __init__(self, shares: int, bits: int):
        """
        Construct an averaging Prio3 with `shares` Aggregators over
        `bits`-bit measurements, using the TestAverage circuit over
        Field128.
        """
        flp = FlpBBCGGI19(TestAverage(Field128, bits))
        # Third argument is presumably the number of FLP proofs --
        # confirm against Prio3.__init__.
        super().__init__(shares, flp, 1)
class TestPrio3SumVec(TestVdaf):
    def test(self) -> None:
        """Run Prio3SumVec end to end with two Aggregators."""
        # Presumably (shares, length, bits, chunk_length) -- confirm
        # against Prio3SumVec.__init__. Values fit in 8 bits (<= 255).
        prio3 = Prio3SumVec(2, 10, 8, 9)
        self.assertEqual(prio3.ID, 0x00000003)
        self.run_vdaf_test(
            prio3,
            None,
            [[1, 61, 86, 61, 23, 0, 255, 3, 2, 1]],
            [1, 61, 86, 61, 23, 0, 255, 3, 2, 1]
        )
        # Elementwise sum: position i totals i + 1 + 255 = 256 + i.
        self.run_vdaf_test(
            prio3,
            None,
            [
                list(range(10)),
                [1] * 10,
                [255] * 10
            ],
            list(range(256, 266)),
        )

    def test_3_shares(self) -> None:
        """Run Prio3SumVec with three Aggregators."""
        prio3 = Prio3SumVec(3, 3, 16, 7)
        # Expected aggregate is the elementwise sum:
        # [10000+19342+15986, 32000+19615+24671, 9+3061+23910].
        self.run_vdaf_test(
            prio3,
            None,
            [
                [10000, 32000, 9],
                [19342, 19615, 3061],
                [15986, 24671, 23910]
            ],
            [45328, 76286, 26980],
        )
class TestPrio3MultihotCountVec(TestVdaf):
    def test(self) -> None:
        """
        Run Prio3MultihotCountVec with two Aggregators; every
        measurement below has weight (number of True entries) at most
        max_weight = 2, and the aggregate is the elementwise count of
        True values.
        """
        # Prio3MultihotCountVec with length = 4, max_weight = 2,
        # chunk_length = 2.
        prio3 = Prio3MultihotCountVec(2, 4, 2, 2)
        self.assertEqual(prio3.ID, 0x00000005)
        self.run_vdaf_test(
            prio3,
            None,
            [[False, False, False, False]],
            [0, 0, 0, 0],
        )
        self.run_vdaf_test(
            prio3,
            None,
            [[False, True, False, False]],
            [0, 1, 0, 0],
        )
        self.run_vdaf_test(
            prio3,
            None,
            [[False, True, True, False]],
            [0, 1, 1, 0],
        )
        self.run_vdaf_test(
            prio3,
            None,
            [[False, True, True, False], [False, True, False, True]],
            [0, 2, 1, 1],
        )

    def test_3_shares(self) -> None:
        """Run Prio3MultihotCountVec with three Aggregators."""
        # Prio3MultihotCountVec with length = 11, max_weight = 5,
        # chunk_length = 3.
        prio3 = Prio3MultihotCountVec(3, 11, 5, 3)
        # Single measurement of weight 5 (== max_weight, the boundary).
        self.run_vdaf_test(
            prio3,
            None,
            [[True] * 5 + [False] * 6],
            [1] * 5 + [0] * 6,
        )
class TestPrio3SumVecWithMultiproof(TestVdaf):
    def test(self) -> None:
        """
        Run Prio3SumVecWithMultiproof for each supported number of
        proofs with two Aggregators.
        """
        for num_proofs in range(2, 5):
            # Arguments: (shares, field, num_proofs, length, bits,
            # chunk_length), mirroring the class under test.
            multiproof = Prio3SumVecWithMultiproof(
                2, Field64, num_proofs, 10, 8, 9)

            self.assertEqual(multiproof.ID, 0xFFFFFFFF)
            self.assertEqual(multiproof.PROOFS, num_proofs)

            self.run_vdaf_test(
                multiproof,
                None,
                [[1, 61, 86, 61, 23, 0, 255, 3, 2, 1]],
                [1, 61, 86, 61, 23, 0, 255, 3, 2, 1]
            )
            # Elementwise sum: position i totals i + 1 + 255 = 256 + i.
            self.run_vdaf_test(
                multiproof,
                None,
                [
                    list(range(10)),
                    [1] * 10,
                    [255] * 10
                ],
                list(range(256, 266)),
            )

    def test_3_shares(self) -> None:
        """Run Prio3SumVecWithMultiproof with three Aggregators."""
        # BUG FIX: this case previously instantiated plain Prio3SumVec
        # (duplicating TestPrio3SumVec.test_3_shares), so the
        # three-Aggregator configuration never exercised the multiproof
        # variant. Instantiate the multiproof class instead, mirroring
        # the sibling test class's structure.
        prio3 = Prio3SumVecWithMultiproof(3, Field64, 2, 3, 16, 7)
        # Expected aggregate is the elementwise sum:
        # [10000+19342+15986, 32000+19615+24671, 9+3061+23910].
        self.run_vdaf_test(
            prio3,
            None,
            [
                [10000, 32000, 9],
                [19342, 19615, 3061],
                [15986, 24671, 23910]
            ],
            [45328, 76286, 26980],
        )
class TestXof(unittest.TestCase):
    def test_rejection_sampling(self) -> None:
        """
        Pin the rejection-sampling behavior of expand_into_vec: with
        this specific 32-byte seed, the 13883rd Field64 element is a
        known value that exercises the resample path.
        """
        # This test case was found through brute-force search using this tool:
        # https://github.com/divergentdave/vdaf-rejection-sampling-search
        expanded_vec = XofTurboShake128.expand_into_vec(
            Field64,
            bytes([0x44, 0x34, 0x1d, 0xc5, 0x2d, 0x71, 0xa2, 0xff, 0x2e, 0x4c,
                   0x30, 0x5e, 0x93, 0x35, 0xda, 0x9b, 0x19, 0xaf, 0xc6, 0x8e,
                   0x10, 0xb8, 0xb5, 0x43, 0x69, 0x0d, 0xad, 0x9d, 0x3b, 0xbb,
                   0x46, 0xba]),
            b'',  # domain separation tag
            b'',  # binder
            13883,
        )
        assert expanded_vec[-1] == Field64(4857131209231097247)

    def test_turboshake128(self) -> None:
        """Run the generic XOF checks against XofTurboShake128."""
        test_xof(XofTurboShake128, Field128, 23)

    def test_fixedkeyaes128(self) -> None:
        """Run the generic XOF checks against XofFixedKeyAes128."""
        test_xof(XofFixedKeyAes128, Field128, 23)
def next_power_of_2(n: int) -> int:
    """Return the smallest power of 2 that is larger than or equal to n."""
    assert n > 0
    # Double a running power until it reaches or exceeds n.
    power = 1
    while power < n:
        power <<= 1
    return power
60 | """ 61 | if len(left) != len(right): 62 | raise ValueError("mismatched vector sizes") 63 | return list(map(lambda x: x[0] - x[1], zip(left, right))) 64 | 65 | 66 | def vec_add(left: list[F], right: list[F]) -> list[F]: 67 | """Add the right operand to the left and return the result.""" 68 | if len(left) != len(right): 69 | raise ValueError("mismatched vector sizes") 70 | return list(map(lambda x: x[0] + x[1], zip(left, right))) 71 | 72 | 73 | def vec_neg(vec: list[F]) -> list[F]: 74 | """Negate the input vector.""" 75 | return list(map(lambda x: -x, vec)) 76 | 77 | 78 | def to_le_bytes(val: int, length: int) -> bytes: 79 | """ 80 | Convert unsigned integer `val` in the range `[0, 2 ^ (8 * length))` to a 81 | little-endian byte string. 82 | """ 83 | val = int(val) 84 | if val < 0 or val >= (1 << (8 * length)): 85 | raise ValueError( 86 | 'bad to_le_bytes call: val=%d length=%d' % (val, length)) 87 | return val.to_bytes(length, byteorder='little') 88 | 89 | 90 | def from_le_bytes(encoded: bytes) -> int: 91 | """Parse an unsigned integer from a little-endian byte string.""" 92 | return int.from_bytes(encoded, byteorder='little') 93 | 94 | 95 | def to_be_bytes(val: int, length: int) -> bytes: 96 | """ 97 | Convert unsigned integer `val` in the range `[0, 2 ^ (8 * length))` to a 98 | big-endian byte string. 
99 | """ 100 | val = int(val) 101 | if val < 0 or val >= (1 << (8 * length)): 102 | raise ValueError( 103 | 'bad to_be_bytes call: val=%d length=%d' % (val, length)) 104 | return val.to_bytes(length, byteorder='big') 105 | 106 | 107 | def from_be_bytes(encoded: bytes) -> int: 108 | """Parse an unsigned integer from a big-endian byte string.""" 109 | return int.from_bytes(encoded, byteorder='big') 110 | 111 | 112 | def concat(parts: list[bytes]) -> bytes: 113 | """Return the concatenated byte strings.""" 114 | return b''.join(parts) 115 | 116 | 117 | T = TypeVar("T") 118 | 119 | 120 | @overload 121 | def front(length: int, vec: bytes) -> tuple[bytes, bytes]: 122 | ... 123 | 124 | 125 | @overload 126 | def front(length: int, vec: list[T]) -> tuple[list[T], list[T]]: 127 | ... 128 | 129 | 130 | def front( 131 | length: int, 132 | vec: bytes | list[T]) -> tuple[ 133 | bytes | list[T], 134 | bytes | list[T]]: 135 | """ 136 | Split list `vec` in two and return the front and remainder as a tuple. The 137 | length of the front is `length`. 138 | """ 139 | return (vec[:length], vec[length:]) 140 | 141 | 142 | # NOTE: This function is excerpted in the document. Its width should 143 | # be limited to 69 columns, to avoid warnings from xml2rfc. 144 | # =================================================================== 145 | def format_dst(algo_class: int, 146 | algo: int, 147 | usage: int) -> bytes: 148 | """ 149 | Format XOF domain separation tag. 
def print_wrapped_line(line: str, tab: int) -> None:
    """
    Print `line` wrapped to 72 columns, indenting each printed row by
    `tab` spaces.
    """
    width = 72
    chunk_len = width - tab
    indent = ' ' * tab
    # Slice the input into fixed-size chunks and emit one row each.
    chunks = [
        line[i:i + chunk_len]
        for i in range(0, len(line), chunk_len)
    ]
    for chunk in chunks:
        print(indent + chunk)
25 | """ 26 | 27 | @abstractmethod 28 | def shard(self, 29 | ctx: bytes, 30 | measurement: Measurement, 31 | nonce: bytes, 32 | rand: bytes, 33 | ) -> tuple[PublicShare, list[InputShare]]: 34 | pass 35 | 36 | @abstractmethod 37 | def is_valid(self, 38 | agg_param: AggParam, 39 | previous_agg_params: list[AggParam]) -> bool: 40 | pass 41 | 42 | @abstractmethod 43 | def agg_init(self, agg_param: AggParam) -> AggShare: 44 | pass 45 | 46 | @abstractmethod 47 | def agg_update(self, 48 | agg_param: AggParam, 49 | agg_share: AggShare, 50 | out_share: OutShare) -> AggShare: 51 | pass 52 | 53 | @abstractmethod 54 | def merge(self, 55 | agg_param: AggParam, 56 | agg_shares: list[AggShare]) -> AggShare: 57 | pass 58 | 59 | @abstractmethod 60 | def unshard(self, 61 | agg_param: AggParam, 62 | agg_shares: list[AggShare], 63 | num_measurements: int) -> AggResult: 64 | pass 65 | 66 | 67 | class Daf( 68 | Generic[ 69 | Measurement, AggParam, PublicShare, InputShare, OutShare, AggShare, 70 | AggResult 71 | ], 72 | DistributedAggregation[ 73 | Measurement, AggParam, PublicShare, InputShare, OutShare, AggShare, 74 | AggResult 75 | ]): 76 | """ 77 | A Distributed Aggregation Function (DAF). 78 | 79 | Generic type parameters: 80 | Measurement -- the measurement type 81 | AggParam -- the aggregation parameter type 82 | PublicShare -- the public share type 83 | InputShare -- the input share type 84 | OutShare -- the output share type 85 | AggShare -- the aggregate share type 86 | AggResult -- the aggregate result type 87 | 88 | Attributes: 89 | ID -- algorithm identifier, a 32-bit integer 90 | SHARES -- the number of Aggregators 91 | NONCE_SIZE -- length of the nonce 92 | RAND_SIZE -- number of random bytes consumed by `shard()` 93 | """ 94 | 95 | # Algorithm identifier for this DAF, a 32-bit integer. 96 | ID: int 97 | 98 | # The number of Aggregators. 99 | SHARES: int 100 | 101 | # Length of the nonce. 102 | NONCE_SIZE: int 103 | 104 | # Number of random bytes consumed by `shard()`. 
105 | RAND_SIZE: int 106 | 107 | @override 108 | @abstractmethod 109 | def shard( 110 | self, 111 | ctx: bytes, 112 | measurement: Measurement, 113 | nonce: bytes, 114 | rand: bytes) -> tuple[PublicShare, list[InputShare]]: 115 | """ 116 | Shard a measurement into a public share and a sequence of input 117 | shares, one for each Aggregator. This method is run by the Client. 118 | 119 | Pre-conditions: 120 | 121 | - `len(nonce) == Daf.NONCE_SIZE` 122 | - `len(rand) == Daf.RAND_SIZE` 123 | """ 124 | pass 125 | 126 | @override 127 | @abstractmethod 128 | def is_valid( 129 | self, 130 | agg_param: AggParam, 131 | previous_agg_params: list[AggParam]) -> bool: 132 | """ 133 | Check if `agg_param` is valid for use with an input share that has 134 | previously been used with all `previous_agg_params`. 135 | """ 136 | pass 137 | 138 | @abstractmethod 139 | def prep( 140 | self, 141 | ctx: bytes, 142 | agg_id: int, 143 | agg_param: AggParam, 144 | nonce: bytes, 145 | public_share: PublicShare, 146 | input_share: InputShare) -> OutShare: 147 | """ 148 | Prepare an input share for aggregation. This algorithm takes in the 149 | public share and one of the input shares generated by the Client. It 150 | also takes in the application context, the Aggregator's ID (a unique 151 | integer in the range `[0, SHARES)` corresponding to the index of 152 | `input_share` in the Client's output), and an aggregation parameter and 153 | returns the corresponding output share. 154 | 155 | Pre-conditions: 156 | 157 | - `agg_id` in the range `[0, daf.SHARES)` 158 | - `len(nonce) == daf.NONCE_SIZE` 159 | """ 160 | pass 161 | 162 | @override 163 | @abstractmethod 164 | def agg_init(self, 165 | agg_param: AggParam) -> AggShare: 166 | """ 167 | Return an empty aggregate share. 
168 | """ 169 | pass 170 | 171 | @override 172 | @abstractmethod 173 | def agg_update(self, 174 | agg_param: AggParam, 175 | agg_share: AggShare, 176 | out_share: OutShare) -> AggShare: 177 | """ 178 | Accumulate an output share into an aggregate share and return the 179 | updated aggregate share. 180 | """ 181 | pass 182 | 183 | @override 184 | @abstractmethod 185 | def merge(self, 186 | agg_param: AggParam, 187 | agg_shares: list[AggShare]) -> AggShare: 188 | """ 189 | Merge a sequence of aggregate shares into a single aggregate share. 190 | """ 191 | pass 192 | 193 | @override 194 | @abstractmethod 195 | def unshard( 196 | self, 197 | agg_param: AggParam, 198 | agg_shares: list[AggShare], 199 | num_measurements: int) -> AggResult: 200 | """ 201 | Unshard the aggregate shares (encoded as byte strings) and compute the 202 | aggregate result. This is called by the Collector. 203 | """ 204 | pass 205 | 206 | 207 | # NOTE: This function is excerpted in the document, as the figure 208 | # {{run-daf}}. Its width should be limited to 69 columns to avoid 209 | # warnings from xml2rfc. 
def run_daf(
        daf: Daf[
            Measurement,
            AggParam,
            PublicShare,
            InputShare,
            OutShare,
            AggShare,
            AggResult,
        ],
        ctx: bytes,
        agg_param: AggParam,
        measurements: list[Measurement]) -> AggResult:
    """
    Run the DAF end to end: shard, prepare, and aggregate each
    measurement, then unshard the combined aggregate shares.
    """
    agg_shares: list[AggShare]
    agg_shares = [daf.agg_init(agg_param)
                  for _ in range(daf.SHARES)]
    for measurement in measurements:
        # Sharding
        nonce = gen_rand(daf.NONCE_SIZE)
        rand = gen_rand(daf.RAND_SIZE)
        (public_share, input_shares) = \
            daf.shard(ctx, measurement, nonce, rand)

        # Preparation, aggregation
        for j in range(daf.SHARES):
            out_share = daf.prep(ctx, j, agg_param, nonce,
                                 public_share, input_shares[j])
            agg_shares[j] = daf.agg_update(agg_param,
                                           agg_shares[j],
                                           out_share)

    # Unsharding
    num_measurements = len(measurements)
    agg_result = daf.unshard(agg_param, agg_shares,
                             num_measurements)
    return agg_result
24 | PROVE_RAND_LEN: int 25 | 26 | # Length of the randomness consumed by the verifier. 27 | QUERY_RAND_LEN: int 28 | 29 | # Length of the encoded measurement. 30 | MEAS_LEN: int 31 | 32 | # Length of aggregatable output. 33 | OUTPUT_LEN: int 34 | 35 | # Length of the proof. 36 | PROOF_LEN: int 37 | 38 | # Length of the verifier message. 39 | VERIFIER_LEN: int 40 | 41 | @abstractmethod 42 | def __init__(self) -> None: 43 | pass 44 | 45 | @abstractmethod 46 | def encode(self, measurement: Measurement) -> list[F]: 47 | """Encode a measurement.""" 48 | pass 49 | 50 | @abstractmethod 51 | def prove(self, 52 | meas: list[F], 53 | prove_rand: list[F], 54 | joint_rand: list[F]) -> list[F]: 55 | """ 56 | Generate a proof of a measurement's validity. 57 | 58 | Pre-conditions: 59 | 60 | - `len(meas) == self.MEAS_LEN` 61 | - `len(prove_rand) == self.PROVE_RAND_LEN` 62 | - `len(joint_rand) == self.JOINT_RAND_LEN` 63 | """ 64 | pass 65 | 66 | @abstractmethod 67 | def query(self, 68 | meas: list[F], 69 | proof: list[F], 70 | query_rand: list[F], 71 | joint_rand: list[F], 72 | num_shares: int) -> list[F]: 73 | """ 74 | Generate a verifier message for a measurement and proof. 75 | 76 | Pre-conditions: 77 | 78 | - `len(meas) == self.MEAS_LEN` 79 | - `len(proof) == self.PROOF_LEN` 80 | - `len(query_rand) == self.QUERY_RAND_LEN` 81 | - `len(joint_rand) == self.JOINT_RAND_LEN` 82 | - `num_shares >= 1` 83 | """ 84 | pass 85 | 86 | @abstractmethod 87 | def decide(self, verifier: list[F]) -> bool: 88 | """ 89 | Decide if a verifier message was generated from a valid measurement. 90 | 91 | Pre-conditions: 92 | 93 | - `len(verifier) == self.VERIFIER_LEN` 94 | """ 95 | pass 96 | 97 | @abstractmethod 98 | def truncate(self, meas: list[F]) -> list[F]: 99 | """ 100 | Map an encoded measurement to an aggregatable output. 
def additive_secret_share(
        vec: list[F],
        num_shares: int,
        field: type[F]) -> list[list[F]]:
    """
    Split `vec` into `num_shares` vectors that sum to `vec`: all but
    the last share are drawn via `field.rand_vec()`, and the last share
    is `vec` minus the sum of the others.
    """
    random_shares = [
        field.rand_vec(len(vec))
        for _ in range(num_shares - 1)
    ]
    final_share = vec
    for random_share in random_shares:
        final_share = vec_sub(final_share, random_share)
    return random_shares + [final_share]
161 | meas_shares = additive_secret_share( 162 | meas, 163 | num_shares, 164 | flp.field, 165 | ) 166 | proof_shares = additive_secret_share( 167 | proof, 168 | num_shares, 169 | flp.field, 170 | ) 171 | 172 | # Verifier queries the meas shares and proof shares. 173 | verifier_shares = [ 174 | flp.query( 175 | meas_share, 176 | proof_share, 177 | query_rand, 178 | joint_rand, 179 | num_shares, 180 | ) 181 | for meas_share, proof_share in zip(meas_shares, proof_shares) 182 | ] 183 | 184 | # Combine the verifier shares into the verifier. 185 | verifier = flp.field.zeros(len(verifier_shares[0])) 186 | for verifier_share in verifier_shares: 187 | verifier = vec_add(verifier, verifier_share) 188 | 189 | # Verifier decides if the measurement is valid. 190 | return flp.decide(verifier) 191 | -------------------------------------------------------------------------------- /poc/vdaf_poc/idpf.py: -------------------------------------------------------------------------------- 1 | """Definition of IDPFs.""" 2 | 3 | from abc import ABCMeta, abstractmethod 4 | from typing import Generic, Sequence, TypeAlias, TypeVar 5 | 6 | from vdaf_poc.field import Field 7 | 8 | FieldInner = TypeVar("FieldInner", bound=Field) 9 | FieldLeaf = TypeVar("FieldLeaf", bound=Field) 10 | PublicShare = TypeVar("PublicShare") 11 | 12 | # Type alias for the output of `eval()`. 13 | Output: TypeAlias = list[list[FieldInner]] | list[list[FieldLeaf]] 14 | # Type alias for a vector over the inner or leaf field. 15 | FieldVec: TypeAlias = list[FieldInner] | list[FieldLeaf] 16 | 17 | 18 | class Idpf(Generic[FieldInner, FieldLeaf, PublicShare], metaclass=ABCMeta): 19 | """ 20 | An Incremental Distributed Point Function (IDPF). 21 | 22 | Generic type parameters: 23 | FieldInner -- The finite field used to represent the inner nodes of the 24 | IDPF tree. 25 | FieldLeaf -- The finite field used to represent the leaf nodes of the IDPF 26 | tree. 
27 | 28 | Attributes: 29 | SHARES -- Number of keys generated by the IDPF-key generation algorithm. 30 | BITS -- Bit length of valid input values (i.e., the length of `alpha`). 31 | VALUE_LEN -- The length of each output vector (i.e., the length of 32 | `beta_leaf` and each element of `beta_inner`). 33 | KEY_SIZE -- Size in bytes of each IDPF key share. 34 | RAND_SIZE -- Number of random bytes consumed by the `gen()` algorithm. 35 | field_inner -- Class object for the field used in inner nodes. 36 | field_leaf -- Class object for the field used in leaf nodes. 37 | """ 38 | 39 | # Number of keys generated by the IDPF-key generation algorithm. 40 | SHARES: int 41 | 42 | # Bit length of valid input values (i.e., the length of `alpha`). 43 | BITS: int 44 | 45 | # The length of each output vector (i.e., the length of `beta_leaf` and 46 | # each element of `beta_inner`). 47 | VALUE_LEN: int 48 | 49 | # Size in bytes of each IDPF key share. 50 | KEY_SIZE: int 51 | 52 | # Number of random bytes consumed by the `gen()` algorithm. 53 | RAND_SIZE: int 54 | 55 | # Number of random bytes in the nonce generated by the client. 56 | NONCE_SIZE: int 57 | 58 | # Class object for the field used in inner nodes. 59 | field_inner: type[FieldInner] 60 | 61 | # Class object for the field used in leaf nodes. 62 | field_leaf: type[FieldLeaf] 63 | 64 | # Name of the IDPF, for use in test vector filenames. 65 | test_vec_name: str 66 | 67 | @abstractmethod 68 | def gen(self, 69 | alpha: tuple[bool, ...], 70 | beta_inner: list[list[FieldInner]], 71 | beta_leaf: list[FieldLeaf], 72 | ctx: bytes, 73 | nonce: bytes, 74 | rand: bytes) -> tuple[PublicShare, list[bytes]]: 75 | """ 76 | Generates an IDPF public share and sequence of IDPF-keys of length 77 | `SHARES`. Input `alpha` is the index to encode. Inputs `beta_inner` and 78 | `beta_leaf` are assigned to the values of the nodes on the non-zero 79 | path of the IDPF tree. 
It takes two inputs from the higher-level 80 | application, a context string `ctx`, and a nonce string `nonce`. 81 | 82 | `alpha` is a tuple of booleans, and not a list, because IDPF indices 83 | need to be immutable and hashable in order to check the uniqueness of 84 | candidate prefixes efficiently. 85 | 86 | Pre-conditions: 87 | 88 | - `len(alpha) == self.BITS` 89 | - `len(beta_inner) == self.BITS - 1` 90 | - `len(beta_inner[level]) == self.VALUE_LEN` for each `level` in 91 | `[0, self.BITS - 1)` 92 | - `len(beta_leaf) == self.VALUE_LEN` 93 | - `len(rand) == self.RAND_SIZE` 94 | """ 95 | pass 96 | 97 | @abstractmethod 98 | def eval(self, 99 | agg_id: int, 100 | public_share: PublicShare, 101 | key: bytes, 102 | level: int, 103 | prefixes: Sequence[tuple[bool, ...]], 104 | ctx: bytes, 105 | nonce: bytes) -> Output: 106 | """ 107 | Evaluate an IDPF key share public share at a given level of the tree 108 | and with the given sequence of prefixes. The output is a vector where 109 | each element is a vector of length `VALUE_LEN`. The output field is 110 | `FieldLeaf` if `level == BITS` and `FieldInner` otherwise. `ctx` and 111 | `nonce` must match the context and nonce strings passed by the Client 112 | to `gen`. 113 | 114 | Each element of `prefixes` is a bit string of length `level + 1`. For 115 | each element of `prefixes` that is the length-`level + 1` prefix of 116 | the input encoded by the IDPF-key generation algorithm 117 | (i.e., `alpha`), the sum of the corresponding output shares will be 118 | equal to one of the programmed output vectors (i.e., an element of 119 | `beta_inner + [beta_leaf]`). For all other elements of `prefixes`, the 120 | corresponding output shares will sum up to the 0-vector. 
121 | 122 | Pre-conditions: 123 | 124 | - `agg_id` in the range `[0, self.SHARES)` 125 | - `level` in the range `[0, self.BITS)` 126 | - `len(prefix) == level + 1` for each `prefix` in `prefixes` 127 | """ 128 | pass 129 | 130 | # NOTE: This method is excerpted in the document, de-indented. Its 131 | # width should be limited to 69 columns after de-indenting, or 73 132 | # columns before de-indenting, to avoid warnings from xml2rfc. 133 | # =================================================================== 134 | def current_field( 135 | self, 136 | level: int) -> type[FieldInner] | type[FieldLeaf]: 137 | if level < self.BITS - 1: 138 | return self.field_inner 139 | return self.field_leaf 140 | 141 | def is_prefix(self, x: tuple[bool, ...], y: tuple[bool, ...], level: int) -> bool: 142 | """ 143 | Returns `True` iff `x` is the prefix of `y` at level `level`. 144 | 145 | Pre-conditions: 146 | 147 | - `level` in the range `[0, self.BITS)` 148 | """ 149 | return x == y[:level + 1] 150 | 151 | @abstractmethod 152 | def test_vec_encode_public_share(self, public_share: PublicShare) -> bytes: 153 | pass 154 | -------------------------------------------------------------------------------- /poc/vdaf_poc/py.typed: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cfrg/draft-irtf-cfrg-vdaf/6054556f3dfe4d0dfe65f1c0e09fbf65ca675323/poc/vdaf_poc/py.typed -------------------------------------------------------------------------------- /poc/vdaf_poc/xof.py: -------------------------------------------------------------------------------- 1 | """Extendable output functions (XOFs).""" 2 | 3 | from abc import ABCMeta, abstractmethod 4 | from typing import TypeVar 5 | 6 | from Cryptodome.Cipher import AES 7 | from Cryptodome.Hash import TurboSHAKE128 8 | 9 | from vdaf_poc.common import (concat, from_le_bytes, next_power_of_2, 10 | to_le_bytes, xor) 11 | from vdaf_poc.field import Field 12 | 13 | F = TypeVar("F", 
bound=Field) 14 | 15 | 16 | class Xof(metaclass=ABCMeta): 17 | """The base class for XOFs.""" 18 | 19 | # Size of the seed. 20 | SEED_SIZE: int 21 | 22 | # Name of the XOF, for use in test vector filenames. 23 | test_vec_name: str 24 | 25 | @abstractmethod 26 | def __init__(self, seed: bytes, dst: bytes, binder: bytes): 27 | """ 28 | Construct a new instance of this XOF from the given seed, domain 29 | separation tag, and binder string. 30 | 31 | Pre-conditions: 32 | 33 | - `len(seed) == self.SEED_SIZE` 34 | """ 35 | pass 36 | 37 | @abstractmethod 38 | def next(self, length: int) -> bytes: 39 | """ 40 | Output the next `length` bytes of the XOF stream. 41 | 42 | Pre-conditions: 43 | 44 | - `length > 0` 45 | """ 46 | pass 47 | 48 | # NOTE: The methods derive_seed(), next_vec(), and expand_into_vec() 49 | # are excerpted in the document, de-indented, as the figure 50 | # {{xof-derived-methods}}. Their width should be limited to 69 51 | # columns after de-indenting, or 73 columns before de-indenting, to 52 | # avoid warnings from xml2rfc. 53 | # =================================================================== 54 | @classmethod 55 | def derive_seed(cls, 56 | seed: bytes, 57 | dst: bytes, 58 | binder: bytes) -> bytes: 59 | """ 60 | Derive a new seed. 61 | 62 | Pre-conditions: 63 | 64 | - `len(seed) == cls.SEED_SIZE` 65 | """ 66 | xof = cls(seed, dst, binder) 67 | return xof.next(cls.SEED_SIZE) 68 | 69 | def next_vec(self, field: type[F], length: int) -> list[F]: 70 | """ 71 | Output the next `length` field elements. 
72 | 73 | Pre-conditions: 74 | 75 | - `field` is sub-class of `Field` 76 | - `length > 0` 77 | """ 78 | m = next_power_of_2(field.MODULUS) - 1 79 | vec: list[F] = [] 80 | while len(vec) < length: 81 | x = from_le_bytes(self.next(field.ENCODED_SIZE)) 82 | x &= m 83 | if x < field.MODULUS: 84 | vec.append(field(x)) 85 | return vec 86 | 87 | @classmethod 88 | def expand_into_vec(cls, 89 | field: type[F], 90 | seed: bytes, 91 | dst: bytes, 92 | binder: bytes, 93 | length: int) -> list[F]: 94 | """ 95 | Expand the input `seed` into a vector of `length` field elements. 96 | 97 | Pre-conditions: 98 | 99 | - `field` is sub-class of `Field` 100 | - `len(seed) == cls.SEED_SIZE` 101 | - `length > 0` 102 | """ 103 | xof = cls(seed, dst, binder) 104 | return xof.next_vec(field, length) 105 | 106 | 107 | # NOTE: A simplified implementation of this class is excerpted in the 108 | # document. The contents of the docstrings of methods are used in 109 | # lieu of their actual bodies, because they provide a simpler (though 110 | # inefficient) implementation defined in terms of the 111 | # `TurboSHAKE128(M, D, L)` function, and not a sponge/XOF API. The 112 | # width of the relevant portions of the class should be limited to 69 113 | # columns, to avoid warnings from xml2rfc. 114 | # =================================================================== 115 | class XofTurboShake128(Xof): 116 | """XOF wrapper for TurboSHAKE128.""" 117 | 118 | # Associated parameters 119 | SEED_SIZE = 32 120 | 121 | # Name of the XOF, for use in test vector filenames. 
122 | test_vec_name = 'XofTurboShake128' 123 | 124 | def __init__(self, seed: bytes, dst: bytes, binder: bytes): 125 | ''' 126 | self.l = 0 127 | self.m = \ 128 | to_le_bytes(len(dst), 2) + dst + \ 129 | to_le_bytes(len(seed), 1) + seed + \ 130 | binder 131 | ''' 132 | self.length_consumed = 0 133 | self.h = TurboSHAKE128.new(domain=1) 134 | self.h.update(to_le_bytes(len(dst), 2)) 135 | self.h.update(dst) 136 | self.h.update(to_le_bytes(len(seed), 1)) 137 | self.h.update(seed) 138 | self.h.update(binder) 139 | 140 | def next(self, length: int) -> bytes: 141 | ''' 142 | self.l += length 143 | 144 | # Function `TurboSHAKE128(M, D, L)` is as defined in 145 | # Section 2.2 of [TurboSHAKE]. 146 | # 147 | # Implementation note: rather than re-generate the output 148 | # stream each time `next()` is invoked, most implementations 149 | # of TurboSHAKE128 will expose an "absorb-then-squeeze" API 150 | # that allows stateful handling of the stream. 151 | stream = TurboSHAKE128(self.m, 1, self.l) 152 | return stream[-length:] 153 | ''' 154 | return self.h.read(length) 155 | 156 | 157 | # NOTE: A simplified implementation of this class is excerpted in the 158 | # document. The code in the docstrings of some methods is used in 159 | # lieu of their actual bodies, because they provide a simpler 160 | # implementation defined in terms of abstract `TurboSHAKE128(M, D, 161 | # L)` and `AES128(key, plaintext)` functions, and not real 162 | # cryptographic APIs. The width of the relevant portions of the class 163 | # should be limited to 69 columns, to avoid warnings from xml2rfc. 164 | # =================================================================== 165 | class XofFixedKeyAes128(Xof): 166 | """ 167 | XOF based on a circular collision-resistant hash function from 168 | fixed-key AES. 169 | """ 170 | 171 | # Associated parameters 172 | SEED_SIZE = 16 173 | 174 | # Name of the XOF, for use in test vector filenames. 
175 | test_vec_name = 'XofFixedKeyAes128' 176 | 177 | def __init__(self, seed: bytes, dst: bytes, binder: bytes): 178 | """ 179 | if len(seed) != self.SEED_SIZE: 180 | raise ValueError("incorrect seed size") 181 | 182 | self.length_consumed = 0 183 | 184 | # Use TurboSHAKE128 to derive a key from the binder string 185 | # and domain separation tag. Note that the AES key does not 186 | # need to be kept secret from any party. However, when used 187 | # with an IDPF, we require the binder to be a random nonce. 188 | # 189 | # Implementation note: this step can be cached across XOF 190 | # evaluations with many different seeds. 191 | dst_length = to_le_bytes(len(dst), 2) 192 | self.fixed_key = TurboSHAKE128( 193 | dst_length + dst + binder, 194 | 2, 195 | 16, 196 | ) 197 | self.seed = seed 198 | """ 199 | if len(seed) != self.SEED_SIZE: 200 | raise ValueError("incorrect seed size") 201 | 202 | self.length_consumed = 0 203 | 204 | # Use TurboSHAKE128 to derive a key from the binder string 205 | # and domain separation tag. Note that the AES key does not 206 | # need to be kept secret from any party. However, when used 207 | # with an IDPF, we require the binder to be a random nonce. 208 | # 209 | # Implementation note: this step can be cached across XOF 210 | # evaluations with many different seeds. 211 | h = TurboSHAKE128.new(domain=2) 212 | h.update(to_le_bytes(len(dst), 2)) 213 | h.update(dst) 214 | h.update(binder) 215 | fixed_key = h.read(16) 216 | self.cipher = AES.new(fixed_key, AES.MODE_ECB) 217 | # Save seed to be used in `next`. 
218 | self.seed = seed 219 | 220 | def next(self, length: int) -> bytes: 221 | offset = self.length_consumed % 16 222 | new_length = self.length_consumed + length 223 | block_range = range( 224 | self.length_consumed // 16, 225 | new_length // 16 + 1 226 | ) 227 | self.length_consumed = new_length 228 | 229 | hashed_blocks = [ 230 | self.hash_block(xor(self.seed, to_le_bytes(i, 16))) 231 | for i in block_range 232 | ] 233 | return concat(hashed_blocks)[offset:offset+length] 234 | 235 | def hash_block(self, block: bytes) -> bytes: 236 | """ 237 | The multi-instance tweakable circular correlation-robust hash 238 | function of [GKWWY20] (Section 4.2). The tweak here is the 239 | key that stays constant for all XOF evaluations of the same 240 | Client, but differs between Clients. 241 | 242 | Function `AES128(key, block)` is the AES-128 blockcipher. 243 | 244 | --- 245 | 246 | lo, hi = block[:8], block[8:] 247 | sigma_block = concat([hi, xor(hi, lo)]) 248 | return xor(AES128(self.fixed_key, sigma_block), sigma_block) 249 | """ 250 | lo, hi = block[:8], block[8:] 251 | sigma_block = concat([hi, xor(hi, lo)]) 252 | return xor(self.cipher.encrypt(sigma_block), sigma_block) 253 | -------------------------------------------------------------------------------- /rejected_dictionary: -------------------------------------------------------------------------------- 1 | IPDF 2 | -------------------------------------------------------------------------------- /test_vec/IdpfBBCGGI21_0.json: -------------------------------------------------------------------------------- 1 | { 2 | "alpha": [ 3 | false, 4 | false, 5 | false, 6 | false, 7 | false, 8 | false, 9 | false, 10 | false, 11 | false, 12 | false 13 | ], 14 | "beta_inner": [ 15 | [ 16 | "0", 17 | "0" 18 | ], 19 | [ 20 | "1", 21 | "1" 22 | ], 23 | [ 24 | "2", 25 | "2" 26 | ], 27 | [ 28 | "3", 29 | "3" 30 | ], 31 | [ 32 | "4", 33 | "4" 34 | ], 35 | [ 36 | "5", 37 | "5" 38 | ], 39 | [ 40 | "6", 41 | "6" 42 | ], 43 | [ 44 | "7", 
45 | "7" 46 | ], 47 | [ 48 | "8", 49 | "8" 50 | ] 51 | ], 52 | "beta_leaf": [ 53 | "9", 54 | "9" 55 | ], 56 | "bits": 10, 57 | "ctx": "736f6d65206170706c69636174696f6e", 58 | "keys": [ 59 | "000102030405060708090a0b0c0d0e0f", 60 | "101112131415161718191a1b1c1d1e1f" 61 | ], 62 | "nonce": "000102030405060708090a0b0c0d0e0f", 63 | "public_share": "7db601e4a750964ed1ae57542b1ebe0b0a08fe58d02497b9db91673fb102da7748e7e81aac58088bf47f335bd5f6159fd20170100e16a2674cebd72d9e97e429effdca443283c74886657fb376b6fa364bd9de1ca1903db51d82c91ad35bee9c41c474365afb96a24199c7d503ffdd6844673b4c8d48e8dca480bc2964b52f85d8d484dcac4fe6991e2f854cb49a5df34e27aac828957384199a8abdaf52a4e55e62c7ecfcc77e47eba098eb9c85c67f28978cb827a7963d196e3f2b9b12ebf9a2dc77dc65f218f1b9c98b27860255425b6035f6ff3d74b4ea0f89877656159eef6b6392bdb12d80c75853daeffa5105888cbb45932d9de21906089192149d57a03ab414fb695446b442feaba55ca5146c2084a107e16ac42071154b69cc67e57632cb2cc586e7e6bd554b3d3363bc51396a21c565132591a592328d3a3cbaf94339eb96ce623c1f37588537647cbde8b52a0464379b0f9f2a6d69b3781833f6ccb01b844e15291270ecc8cbd36ddaaf8e7c3d" 64 | } 65 | -------------------------------------------------------------------------------- /test_vec/XofFixedKeyAes128.json: -------------------------------------------------------------------------------- 1 | { 2 | "binder": "62696e64657220737472696e67", 3 | "derived_seed": "ca97b6736483188fbf6d52a9063ab3e2", 4 | "dst": "646f6d61696e2073657061726174696f6e20746167", 5 | "expanded_vec_field128": 
"ca97b6736483188fbf6d52a9063ab3e2cd3d2a390443defd69ef29b85a905d166f01b109411930cb0379474684ec70fa412fdb6e05686eeb4351b5970cfcc497ecea23f53c257fe0b5909ca8502ab3dc701c10343fc1ba7fad7691272f741ab0258097d957ae571114e8059f5cfb901c75107e6394c2920f34a52548770d9b6253f22790b66e3a0a4229f332007276723755c5e3111549bba1d7586fd274feadf7e4aca60357db9b12ccb6d56265a20265c13167135d56d56c97bc344cf6f2634c614e63c490e0761739dcfb9075e99aea74c52bacd200944e882ec3f90e7c41e0c3098151afd2b94f9d31ca0e53a080a24933cadae8a28afad638e78bd24e1502b5cc68fb3b01f6a9877c70cbebcc6051e5d1e534f751dda147584740ba1d5de08b5b32a8d77b50887715c0bcf7547be0943a9cfffe6020ce01d752a49493a9be34903dc3a296b267a896b9f3ea1fa00241c334e7627d1f3bbc83164875aa7c3459cd1eac02573835e30538124a1e34b6ae4782c8df2cf1f2c82ccd5325d91919773b148280d64c8823d7bf4866c1a46b5cc709fa74059c4af4aec7c5d16eaedc13405c54a94834f8ad4475b6478ebf3572fc74201e7e4d51e978b7ddc3db1ded8852f3383259b3f8ea982288c0bd34b7f8b4b8a2f6ab599caf49d6648bf294eb7d5cafd2c5e201da5bd13a344cfb0d8fdf03b986ceaaeb80a05d50dd639aaa66a2d3bb7874033062620c96df2608900eed014ee00019cce9b3f54bd0217502c18777508957e8b052875ec0b96502123ca5925099be25c58ce70b712ac00baac08052db362b955ffb1467b73711eabbd3ddf00518d19c8081583613f09523d5a5144ac0aeefe78e5cac16d3fcabfd370029c76f471fffad24fe384c23dd315a", 6 | "length": 40, 7 | "seed": "000102030405060708090a0b0c0d0e0f" 8 | } 9 | -------------------------------------------------------------------------------- /test_vec/XofTurboShake128.json: -------------------------------------------------------------------------------- 1 | { 2 | "binder": "62696e64657220737472696e67", 3 | "derived_seed": "b62ef0a2778190792d4d42d8c167ba20e0c37a3f319ba79645829c427d70eea5", 4 | "dst": "646f6d61696e2073657061726174696f6e20746167", 5 | "expanded_vec_field128": 
"b62ef0a2778190792d4d42d8c167ba20e0c37a3f319ba79645829c427d70eea56d92fef65a07b17a0438e25452cdc7f15f63caf0e8a2a3c72ac69524729e64e03c7ec3a3afa7eb21cb6da6abf0a3b73630a91a5fa05dcbd33da3476f1c8bf4fd66fe5a80dd04bc95f0f02cb8fc9035b9e16e3a5c373bf81c820af513e0c832ca672c66dcc3b302fb8b27a48f09e0a843f5caf87b3c6c1c4b6765b5923e1d152c2e695601abbc69dbe810860fb30e871a2549861110672335ce50ee632264cbb21f62595298d1d1ad9303b84664801e52112989dfce93f0f9dc833ee0f4a0672ffe47cb0a13f80f9e9b7a2e267db7f7890ccd064bf2458cd4e27162d13e60dbc1b6cae71635a3e603f2c15d1c64a12d4e8235f7bf0a8efe6bb15f3e2adae06f7dd1c4d63ac6cecae731250f73c063c6e59aef630a7b6f4bc64bddadc8b8a479bfdb64ea31e6ab44540a52a8d3399d64a060d50190d66cac1335d739e5f8d508201058a2e0cc1ed90e23cfdc7a253f24710ef3b1604688606386dcc4c90e8476be9daac3271ac51301868df71a719a5f32184145d70fec31b4693d527f53875a2ea013523d3daa33bc49c22f66c9ae70dd7f06f227f50515ea9ab1fbe0162cf761e596d8999809cf672cd2d82b22231f841c36e494d7e44f7c6f07e29758b1b73ce95b202325116f04067a54aa767eab2e00ce55b3abcd5c3d5b05040a6bf5d4ba8a61267004bb82b12d43b08fed8f9aced3e833a58fc01004526631b0f4192d8754a397e1096511ee23761a29fcd6511ad2e5020683ca877e4075fe6bc5907f7df1ed4b6c97e0f897d781e850da0cf584842c124cdb6011fe8660e3b8c354cd659985332ea158ffcba83253d39803fa7efb9fcd02cea38089f239f68404814973", 6 | "length": 40, 7 | "seed": "000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f" 8 | } 9 | -------------------------------------------------------------------------------- /test_vec/vdaf/Poplar1_0.json: -------------------------------------------------------------------------------- 1 | { 2 | "agg_param": "0000000000020080", 3 | "agg_result": [ 4 | 0, 5 | 1 6 | ], 7 | "agg_shares": [ 8 | "f8145f10fdc8bca62df57afdc4066a8c", 9 | "09eba0ef01374359d50a85023af99573" 10 | ], 11 | "bits": 4, 12 | "ctx": "736f6d65206170706c69636174696f6e", 13 | "operations": [ 14 | { 15 | "operation": "shard", 16 | "report_index": 0, 17 | "success": true 18 | }, 19 | { 20 | "aggregator_id": 0, 21 | "operation": 
"prep_init", 22 | "report_index": 0, 23 | "success": true 24 | }, 25 | { 26 | "aggregator_id": 1, 27 | "operation": "prep_init", 28 | "report_index": 0, 29 | "success": true 30 | }, 31 | { 32 | "operation": "prep_shares_to_prep", 33 | "report_index": 0, 34 | "round": 0, 35 | "success": true 36 | }, 37 | { 38 | "aggregator_id": 0, 39 | "operation": "prep_next", 40 | "report_index": 0, 41 | "round": 1, 42 | "success": true 43 | }, 44 | { 45 | "aggregator_id": 1, 46 | "operation": "prep_next", 47 | "report_index": 0, 48 | "round": 1, 49 | "success": true 50 | }, 51 | { 52 | "operation": "prep_shares_to_prep", 53 | "report_index": 0, 54 | "round": 1, 55 | "success": true 56 | }, 57 | { 58 | "aggregator_id": 0, 59 | "operation": "prep_next", 60 | "report_index": 0, 61 | "round": 2, 62 | "success": true 63 | }, 64 | { 65 | "aggregator_id": 1, 66 | "operation": "prep_next", 67 | "report_index": 0, 68 | "round": 2, 69 | "success": true 70 | }, 71 | { 72 | "aggregator_id": 0, 73 | "operation": "aggregate", 74 | "success": true 75 | }, 76 | { 77 | "aggregator_id": 1, 78 | "operation": "aggregate", 79 | "success": true 80 | }, 81 | { 82 | "operation": "unshard", 83 | "success": true 84 | } 85 | ], 86 | "prep": [ 87 | { 88 | "input_shares": [ 89 | "000102030405060708090a0b0c0d0e0f202122232425262728292a2b2c2d2e2f303132333435363738393a3b3c3d3e3fda98fdbc162bff6a3d8ae1be166aa452ffc1a31a1869f47c52fe72a43ce9cecc47717f655044d6a42d6ab3625ecee4dfc389bbebbf3ed7c12458aaef564f5bb1e491d79bba4359041f11561164a8396784bf71ed7bd8e16ac4f11f1eaf6dd55b6b8c538f54ebead6920cb8509ee2211f", 90 | "101112131415161718191a1b1c1d1e1f404142434445464748494a4b4c4d4e4f505152535455565758595a5b5c5d5e5f6752ae15a1277011caf754974e5f9c7b51bbb6f537f446f4b99414f0cfd31eb4568278b83ac975508853807b1b9d27cef5e2ee3d4d8436ad7f3c82bd56101979a4ab82454a0d0692cd62a7dc020b297feb9bc83164430e9d6db65e54d1f01d3102ffaa1e6542b45ab78ca492593b4660" 91 | ], 92 | "measurement": [ 93 | true, 94 | true, 95 | false, 96 | true 97 | ], 98 | 
"nonce": "000102030405060708090a0b0c0d0e0f", 99 | "out_shares": [ 100 | [ 101 | "f8145f10fdc8bca6", 102 | "2df57afdc4066a8c" 103 | ], 104 | [ 105 | "09eba0ef01374359", 106 | "d50a85023af99573" 107 | ] 108 | ], 109 | "prep_messages": [ 110 | "1be0415318fa71a0025509fdb4559fced849a418e0819d4c", 111 | "" 112 | ], 113 | "prep_shares": [ 114 | [ 115 | "0666e598602128e425ea5ac5440b241198c1253251d0773e", 116 | "167a5cbab6d849bcdd6aae37704a7bbd40887ee68eb1250e" 117 | ], 118 | [ 119 | "8dddb537d3b587de", 120 | "74224ac82b4a7821" 121 | ] 122 | ], 123 | "public_share": "ea706203dc060e8f96eea7a90a09011bf9d6ec84927d83a71ac6f1110e0d11e7dbec8d93e59d350995244ae87c17a5ce6cbe110ec9197dcda1433d652b904cb6aa90979f92f30320c2bbb1743b1d4f3578f54a5a9b0b242bf1841d6b39f5e70e1a75963d080ed76a295e89489ff47eb835d8920e9a95b4cd4812b71f114a07773a20bfe4362e0648fce76b8cb3dcf95e659379e3e151b7a62059020b7ae4c8081cda96253fa780a8b0ab6cdf22f92c136f", 124 | "rand": "000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f303132333435363738393a3b3c3d3e3f404142434445464748494a4b4c4d4e4f505152535455565758595a5b5c5d5e5f606162636465666768696a6b6c6d6e6f707172737475767778797a7b7c7d7e7f" 125 | } 126 | ], 127 | "shares": 2, 128 | "verify_key": "000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f" 129 | } 130 | -------------------------------------------------------------------------------- /test_vec/vdaf/Poplar1_1.json: -------------------------------------------------------------------------------- 1 | { 2 | "agg_param": "000100000004004080c0", 3 | "agg_result": [ 4 | 0, 5 | 0, 6 | 0, 7 | 1 8 | ], 9 | "agg_shares": [ 10 | "6a175fdf8f26c6aeb0ab3fb9b95c288e686221f9b21a43f975aecba0241a1916", 11 | "97e8a0206fd939515154c04645a3d771999dde064ce5bc068d51345fdae5e6e9" 12 | ], 13 | "bits": 4, 14 | "ctx": "736f6d65206170706c69636174696f6e", 15 | "operations": [ 16 | { 17 | "operation": "shard", 18 | "report_index": 0, 19 | "success": true 20 | }, 21 | { 22 | "aggregator_id": 
0, 23 | "operation": "prep_init", 24 | "report_index": 0, 25 | "success": true 26 | }, 27 | { 28 | "aggregator_id": 1, 29 | "operation": "prep_init", 30 | "report_index": 0, 31 | "success": true 32 | }, 33 | { 34 | "operation": "prep_shares_to_prep", 35 | "report_index": 0, 36 | "round": 0, 37 | "success": true 38 | }, 39 | { 40 | "aggregator_id": 0, 41 | "operation": "prep_next", 42 | "report_index": 0, 43 | "round": 1, 44 | "success": true 45 | }, 46 | { 47 | "aggregator_id": 1, 48 | "operation": "prep_next", 49 | "report_index": 0, 50 | "round": 1, 51 | "success": true 52 | }, 53 | { 54 | "operation": "prep_shares_to_prep", 55 | "report_index": 0, 56 | "round": 1, 57 | "success": true 58 | }, 59 | { 60 | "aggregator_id": 0, 61 | "operation": "prep_next", 62 | "report_index": 0, 63 | "round": 2, 64 | "success": true 65 | }, 66 | { 67 | "aggregator_id": 1, 68 | "operation": "prep_next", 69 | "report_index": 0, 70 | "round": 2, 71 | "success": true 72 | }, 73 | { 74 | "aggregator_id": 0, 75 | "operation": "aggregate", 76 | "success": true 77 | }, 78 | { 79 | "aggregator_id": 1, 80 | "operation": "aggregate", 81 | "success": true 82 | }, 83 | { 84 | "operation": "unshard", 85 | "success": true 86 | } 87 | ], 88 | "prep": [ 89 | { 90 | "input_shares": [ 91 | "000102030405060708090a0b0c0d0e0f202122232425262728292a2b2c2d2e2f303132333435363738393a3b3c3d3e3fda98fdbc162bff6a3d8ae1be166aa452ffc1a31a1869f47c52fe72a43ce9cecc47717f655044d6a42d6ab3625ecee4dfc389bbebbf3ed7c12458aaef564f5bb1e491d79bba4359041f11561164a8396784bf71ed7bd8e16ac4f11f1eaf6dd55b6b8c538f54ebead6920cb8509ee2211f", 92 | "101112131415161718191a1b1c1d1e1f404142434445464748494a4b4c4d4e4f505152535455565758595a5b5c5d5e5f6752ae15a1277011caf754974e5f9c7b51bbb6f537f446f4b99414f0cfd31eb4568278b83ac975508853807b1b9d27cef5e2ee3d4d8436ad7f3c82bd56101979a4ab82454a0d0692cd62a7dc020b297feb9bc83164430e9d6db65e54d1f01d3102ffaa1e6542b45ab78ca492593b4660" 93 | ], 94 | "measurement": [ 95 | true, 96 | true, 97 | false, 98 | 
true 99 | ], 100 | "nonce": "000102030405060708090a0b0c0d0e0f", 101 | "out_shares": [ 102 | [ 103 | "6a175fdf8f26c6ae", 104 | "b0ab3fb9b95c288e", 105 | "686221f9b21a43f9", 106 | "75aecba0241a1916" 107 | ], 108 | [ 109 | "97e8a0206fd93951", 110 | "5154c04645a3d771", 111 | "999dde064ce5bc06", 112 | "8d51345fdae5e6e9" 113 | ] 114 | ], 115 | "prep_messages": [ 116 | "9962c3798c6cefd8c356ed0a50585c5c44c02307e0b8cedb", 117 | "" 118 | ], 119 | "prep_shares": [ 120 | [ 121 | "5b9d20684913a2696307dc3a03f8935ede135e8e653c5a6c", 122 | "3ec5a21143594d6f614f11d04b60c8fd66acc5787a7c746f" 123 | ], 124 | [ 125 | "ede48501afe2f08a", 126 | "141b7afe4f1d0f75" 127 | ] 128 | ], 129 | "public_share": "ea706203dc060e8f96eea7a90a09011bf9d6ec84927d83a71ac6f1110e0d11e7dbec8d93e59d350995244ae87c17a5ce6cbe110ec9197dcda1433d652b904cb6aa90979f92f30320c2bbb1743b1d4f3578f54a5a9b0b242bf1841d6b39f5e70e1a75963d080ed76a295e89489ff47eb835d8920e9a95b4cd4812b71f114a07773a20bfe4362e0648fce76b8cb3dcf95e659379e3e151b7a62059020b7ae4c8081cda96253fa780a8b0ab6cdf22f92c136f", 130 | "rand": "000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f303132333435363738393a3b3c3d3e3f404142434445464748494a4b4c4d4e4f505152535455565758595a5b5c5d5e5f606162636465666768696a6b6c6d6e6f707172737475767778797a7b7c7d7e7f" 131 | } 132 | ], 133 | "shares": 2, 134 | "verify_key": "000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f" 135 | } 136 | -------------------------------------------------------------------------------- /test_vec/vdaf/Poplar1_2.json: -------------------------------------------------------------------------------- 1 | { 2 | "agg_param": "000200000004004080c0", 3 | "agg_result": [ 4 | 0, 5 | 0, 6 | 0, 7 | 1 8 | ], 9 | "agg_shares": [ 10 | "9f264e896625e15df3e34b825d37c92aa8dd47f5f1fd55cec5c8bd00c5eb2640", 11 | "62d9b17698da1ea20e1cb47da1c836d55922b80a0d02aa313d3742ff3914d9bf" 12 | ], 13 | "bits": 4, 14 | "ctx": "736f6d65206170706c69636174696f6e", 15 | 
"operations": [ 16 | { 17 | "operation": "shard", 18 | "report_index": 0, 19 | "success": true 20 | }, 21 | { 22 | "aggregator_id": 0, 23 | "operation": "prep_init", 24 | "report_index": 0, 25 | "success": true 26 | }, 27 | { 28 | "aggregator_id": 1, 29 | "operation": "prep_init", 30 | "report_index": 0, 31 | "success": true 32 | }, 33 | { 34 | "operation": "prep_shares_to_prep", 35 | "report_index": 0, 36 | "round": 0, 37 | "success": true 38 | }, 39 | { 40 | "aggregator_id": 0, 41 | "operation": "prep_next", 42 | "report_index": 0, 43 | "round": 1, 44 | "success": true 45 | }, 46 | { 47 | "aggregator_id": 1, 48 | "operation": "prep_next", 49 | "report_index": 0, 50 | "round": 1, 51 | "success": true 52 | }, 53 | { 54 | "operation": "prep_shares_to_prep", 55 | "report_index": 0, 56 | "round": 1, 57 | "success": true 58 | }, 59 | { 60 | "aggregator_id": 0, 61 | "operation": "prep_next", 62 | "report_index": 0, 63 | "round": 2, 64 | "success": true 65 | }, 66 | { 67 | "aggregator_id": 1, 68 | "operation": "prep_next", 69 | "report_index": 0, 70 | "round": 2, 71 | "success": true 72 | }, 73 | { 74 | "aggregator_id": 0, 75 | "operation": "aggregate", 76 | "success": true 77 | }, 78 | { 79 | "aggregator_id": 1, 80 | "operation": "aggregate", 81 | "success": true 82 | }, 83 | { 84 | "operation": "unshard", 85 | "success": true 86 | } 87 | ], 88 | "prep": [ 89 | { 90 | "input_shares": [ 91 | "000102030405060708090a0b0c0d0e0f202122232425262728292a2b2c2d2e2f303132333435363738393a3b3c3d3e3fda98fdbc162bff6a3d8ae1be166aa452ffc1a31a1869f47c52fe72a43ce9cecc47717f655044d6a42d6ab3625ecee4dfc389bbebbf3ed7c12458aaef564f5bb1e491d79bba4359041f11561164a8396784bf71ed7bd8e16ac4f11f1eaf6dd55b6b8c538f54ebead6920cb8509ee2211f", 92 | 
"101112131415161718191a1b1c1d1e1f404142434445464748494a4b4c4d4e4f505152535455565758595a5b5c5d5e5f6752ae15a1277011caf754974e5f9c7b51bbb6f537f446f4b99414f0cfd31eb4568278b83ac975508853807b1b9d27cef5e2ee3d4d8436ad7f3c82bd56101979a4ab82454a0d0692cd62a7dc020b297feb9bc83164430e9d6db65e54d1f01d3102ffaa1e6542b45ab78ca492593b4660" 93 | ], 94 | "measurement": [ 95 | true, 96 | true, 97 | false, 98 | true 99 | ], 100 | "nonce": "000102030405060708090a0b0c0d0e0f", 101 | "out_shares": [ 102 | [ 103 | "9f264e896625e15d", 104 | "f3e34b825d37c92a", 105 | "a8dd47f5f1fd55ce", 106 | "c5c8bd00c5eb2640" 107 | ], 108 | [ 109 | "62d9b17698da1ea2", 110 | "0e1cb47da1c836d5", 111 | "5922b80a0d02aa31", 112 | "3d3742ff3914d9bf" 113 | ] 114 | ], 115 | "prep_messages": [ 116 | "c2575a57cd9c93a22b1b6cd869226045d99a016b369b8834", 117 | "" 118 | ], 119 | "prep_shares": [ 120 | [ 121 | "466257086581b4a7222bef9446e67759b305e8e5b9718292", 122 | "7df5024f671bdffa0af07c43223ce8eb279519857b2906a2" 123 | ], 124 | [ 125 | "9b223444167bf1bf", 126 | "66ddcbbbe8840e40" 127 | ] 128 | ], 129 | "public_share": "ea706203dc060e8f96eea7a90a09011bf9d6ec84927d83a71ac6f1110e0d11e7dbec8d93e59d350995244ae87c17a5ce6cbe110ec9197dcda1433d652b904cb6aa90979f92f30320c2bbb1743b1d4f3578f54a5a9b0b242bf1841d6b39f5e70e1a75963d080ed76a295e89489ff47eb835d8920e9a95b4cd4812b71f114a07773a20bfe4362e0648fce76b8cb3dcf95e659379e3e151b7a62059020b7ae4c8081cda96253fa780a8b0ab6cdf22f92c136f", 130 | "rand": "000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f303132333435363738393a3b3c3d3e3f404142434445464748494a4b4c4d4e4f505152535455565758595a5b5c5d5e5f606162636465666768696a6b6c6d6e6f707172737475767778797a7b7c7d7e7f" 131 | } 132 | ], 133 | "shares": 2, 134 | "verify_key": "000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f" 135 | } 136 | -------------------------------------------------------------------------------- /test_vec/vdaf/Poplar1_3.json: 
-------------------------------------------------------------------------------- 1 | { 2 | "agg_param": "0003000000071030507090d0f0", 3 | "agg_result": [ 4 | 0, 5 | 0, 6 | 0, 7 | 0, 8 | 0, 9 | 1, 10 | 0 11 | ], 12 | "agg_shares": [ 13 | "2d33a6c6914134e86b28db019499a40e624d0e35a954566426ef278575a80075bb1c9cdd3183915c65ea2350dda1bfedd8be8831092d581922bb2abf9229aa415bc6dd4000b6d160bb6f9112ca88562d1891397df208deed0c2060929d893f35d29cdfc356faa320545df78e279875e871d19884b25ec2e3a4bbf96a57250766d7de114e2f2ff7bb1ab428d8470bdd13dd27d7514859584b603340546ec19835b8f6a799f07c3640ff0bc47fea239219e46132c191c8a5954501248b5ab7817b605af7ed5491cfcae08e88a756320cdc99189428952bc9859508293392cc3c7f", 14 | "c0cc59396ebecb1794d724fe6b665bf19db2f1ca56aba99bd910d87a8a57ff0a32e36322ce7c6ea39a15dcaf225e4012274177cef6d2a7e6dd44d5406dd6553e923922bfff492e9f44906eed3577a9d2e76ec6820df72112f3df9f6d6276c04a1b63203ca9055cdfaba20871d8678a178e2e677b4da13d1c5b440695a8daf8191621eeb1d0d00844e54bd727b8f422ec22d828aeb7a6a7b49fccbfab913e674a360958660f83c9bf00f43b8015dc6de61b9ecd3e6e375a6abafedb74a5487e048da50812ab6e30351f717758a9cdf32366e76bd76ad4367a6af7d6cc6d33c300" 15 | ], 16 | "bits": 4, 17 | "ctx": "736f6d65206170706c69636174696f6e", 18 | "operations": [ 19 | { 20 | "operation": "shard", 21 | "report_index": 0, 22 | "success": true 23 | }, 24 | { 25 | "aggregator_id": 0, 26 | "operation": "prep_init", 27 | "report_index": 0, 28 | "success": true 29 | }, 30 | { 31 | "aggregator_id": 1, 32 | "operation": "prep_init", 33 | "report_index": 0, 34 | "success": true 35 | }, 36 | { 37 | "operation": "prep_shares_to_prep", 38 | "report_index": 0, 39 | "round": 0, 40 | "success": true 41 | }, 42 | { 43 | "aggregator_id": 0, 44 | "operation": "prep_next", 45 | "report_index": 0, 46 | "round": 1, 47 | "success": true 48 | }, 49 | { 50 | "aggregator_id": 1, 51 | "operation": "prep_next", 52 | "report_index": 0, 53 | "round": 1, 54 | "success": true 55 | }, 56 | { 57 | "operation": "prep_shares_to_prep", 58 | 
"report_index": 0, 59 | "round": 1, 60 | "success": true 61 | }, 62 | { 63 | "aggregator_id": 0, 64 | "operation": "prep_next", 65 | "report_index": 0, 66 | "round": 2, 67 | "success": true 68 | }, 69 | { 70 | "aggregator_id": 1, 71 | "operation": "prep_next", 72 | "report_index": 0, 73 | "round": 2, 74 | "success": true 75 | }, 76 | { 77 | "aggregator_id": 0, 78 | "operation": "aggregate", 79 | "success": true 80 | }, 81 | { 82 | "aggregator_id": 1, 83 | "operation": "aggregate", 84 | "success": true 85 | }, 86 | { 87 | "operation": "unshard", 88 | "success": true 89 | } 90 | ], 91 | "prep": [ 92 | { 93 | "input_shares": [ 94 | "000102030405060708090a0b0c0d0e0f202122232425262728292a2b2c2d2e2f303132333435363738393a3b3c3d3e3fda98fdbc162bff6a3d8ae1be166aa452ffc1a31a1869f47c52fe72a43ce9cecc47717f655044d6a42d6ab3625ecee4dfc389bbebbf3ed7c12458aaef564f5bb1e491d79bba4359041f11561164a8396784bf71ed7bd8e16ac4f11f1eaf6dd55b6b8c538f54ebead6920cb8509ee2211f", 95 | "101112131415161718191a1b1c1d1e1f404142434445464748494a4b4c4d4e4f505152535455565758595a5b5c5d5e5f6752ae15a1277011caf754974e5f9c7b51bbb6f537f446f4b99414f0cfd31eb4568278b83ac975508853807b1b9d27cef5e2ee3d4d8436ad7f3c82bd56101979a4ab82454a0d0692cd62a7dc020b297feb9bc83164430e9d6db65e54d1f01d3102ffaa1e6542b45ab78ca492593b4660" 96 | ], 97 | "measurement": [ 98 | true, 99 | true, 100 | false, 101 | true 102 | ], 103 | "nonce": "000102030405060708090a0b0c0d0e0f", 104 | "out_shares": [ 105 | [ 106 | "2d33a6c6914134e86b28db019499a40e624d0e35a954566426ef278575a80075", 107 | "bb1c9cdd3183915c65ea2350dda1bfedd8be8831092d581922bb2abf9229aa41", 108 | "5bc6dd4000b6d160bb6f9112ca88562d1891397df208deed0c2060929d893f35", 109 | "d29cdfc356faa320545df78e279875e871d19884b25ec2e3a4bbf96a57250766", 110 | "d7de114e2f2ff7bb1ab428d8470bdd13dd27d7514859584b603340546ec19835", 111 | "b8f6a799f07c3640ff0bc47fea239219e46132c191c8a5954501248b5ab7817b", 112 | "605af7ed5491cfcae08e88a756320cdc99189428952bc9859508293392cc3c7f" 113 | ], 114 | [ 115 | 
"c0cc59396ebecb1794d724fe6b665bf19db2f1ca56aba99bd910d87a8a57ff0a", 116 | "32e36322ce7c6ea39a15dcaf225e4012274177cef6d2a7e6dd44d5406dd6553e", 117 | "923922bfff492e9f44906eed3577a9d2e76ec6820df72112f3df9f6d6276c04a", 118 | "1b63203ca9055cdfaba20871d8678a178e2e677b4da13d1c5b440695a8daf819", 119 | "1621eeb1d0d00844e54bd727b8f422ec22d828aeb7a6a7b49fccbfab913e674a", 120 | "360958660f83c9bf00f43b8015dc6de61b9ecd3e6e375a6abafedb74a5487e04", 121 | "8da50812ab6e30351f717758a9cdf32366e76bd76ad4367a6af7d6cc6d33c300" 122 | ] 123 | ], 124 | "prep_messages": [ 125 | "614ed49602ef5034d6572fa5c3b40890eb8d0623d0c5e25e309ea8fb2f555f4b4f587251492daa3239a212928b2bd33d0b04ad744ee57b44ecb05acd6e916e29c3584921ef1ccbeffd215f93f799c68b1552621bcaa93d35ada1c495bb2a7d0e", 126 | "" 127 | ], 128 | "prep_shares": [ 129 | [ 130 | "500b8f3cdc4173c21b56e19673c5daaf40517e5bc3ee3cea6f1d7667a1c8ef615189a704fe4738b5757a52beefb4daeda272856fb41e283bc7bd51fb472eda295fecb60db98a60afff3d7ffb764c9481f39a9ca2ebf6ecf6dfeff9b4aa700160", 131 | "fe42455a26addd71ba014e0e50ef2de0aa3c88c70cd7a574c08032948e8c6f69ebceca4c4be5717dc327c0d39b76f84f689127059ac6530925f308d22663947f516c921336926a40fee3df97804d320a22b7c578deb2503ecdb1cae010ba7b2e" 132 | ], 133 | [ 134 | "974633f332f3f33384f9535d1f282b8fd10892105b73452116e34012681c5b01", 135 | "56b9cc0ccd0c0ccc7b06aca2e0d7d4702ef76defa48cbadee91cbfed97e3a47e" 136 | ] 137 | ], 138 | "public_share": "ea706203dc060e8f96eea7a90a09011bf9d6ec84927d83a71ac6f1110e0d11e7dbec8d93e59d350995244ae87c17a5ce6cbe110ec9197dcda1433d652b904cb6aa90979f92f30320c2bbb1743b1d4f3578f54a5a9b0b242bf1841d6b39f5e70e1a75963d080ed76a295e89489ff47eb835d8920e9a95b4cd4812b71f114a07773a20bfe4362e0648fce76b8cb3dcf95e659379e3e151b7a62059020b7ae4c8081cda96253fa780a8b0ab6cdf22f92c136f", 139 | "rand": 
"000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f303132333435363738393a3b3c3d3e3f404142434445464748494a4b4c4d4e4f505152535455565758595a5b5c5d5e5f606162636465666768696a6b6c6d6e6f707172737475767778797a7b7c7d7e7f" 140 | } 141 | ], 142 | "shares": 2, 143 | "verify_key": "000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f" 144 | } 145 | -------------------------------------------------------------------------------- /test_vec/vdaf/Poplar1_4.json: -------------------------------------------------------------------------------- 1 | { 2 | "agg_param": "0000000000020080", 3 | "agg_result": [ 4 | 0, 5 | 1 6 | ], 7 | "agg_shares": [ 8 | "f8145f10fdc8bca62df57afdc4066a8c", 9 | "09eba0ef01374359d50a85023af99573" 10 | ], 11 | "bits": 11, 12 | "ctx": "736f6d65206170706c69636174696f6e", 13 | "operations": [ 14 | { 15 | "operation": "shard", 16 | "report_index": 0, 17 | "success": true 18 | }, 19 | { 20 | "aggregator_id": 0, 21 | "operation": "prep_init", 22 | "report_index": 0, 23 | "success": true 24 | }, 25 | { 26 | "aggregator_id": 1, 27 | "operation": "prep_init", 28 | "report_index": 0, 29 | "success": true 30 | }, 31 | { 32 | "operation": "prep_shares_to_prep", 33 | "report_index": 0, 34 | "round": 0, 35 | "success": true 36 | }, 37 | { 38 | "aggregator_id": 0, 39 | "operation": "prep_next", 40 | "report_index": 0, 41 | "round": 1, 42 | "success": true 43 | }, 44 | { 45 | "aggregator_id": 1, 46 | "operation": "prep_next", 47 | "report_index": 0, 48 | "round": 1, 49 | "success": true 50 | }, 51 | { 52 | "operation": "prep_shares_to_prep", 53 | "report_index": 0, 54 | "round": 1, 55 | "success": true 56 | }, 57 | { 58 | "aggregator_id": 0, 59 | "operation": "prep_next", 60 | "report_index": 0, 61 | "round": 2, 62 | "success": true 63 | }, 64 | { 65 | "aggregator_id": 1, 66 | "operation": "prep_next", 67 | "report_index": 0, 68 | "round": 2, 69 | "success": true 70 | }, 71 | { 72 | "aggregator_id": 0, 73 | 
"operation": "aggregate", 74 | "success": true 75 | }, 76 | { 77 | "aggregator_id": 1, 78 | "operation": "aggregate", 79 | "success": true 80 | }, 81 | { 82 | "operation": "unshard", 83 | "success": true 84 | } 85 | ], 86 | "prep": [ 87 | { 88 | "input_shares": [ 89 | "000102030405060708090a0b0c0d0e0f202122232425262728292a2b2c2d2e2f303132333435363738393a3b3c3d3e3fc2ae29156142560363d6b3101bbc3a3c831ab3334d5212f220f7be62a879dfe3303d99c9b91c2ec4b2be88bf152958538910d1b290d4132fef827a3497d8b6afd07a6d42b5dd7418fe90b98a2fe671e6b09afa685444f5765969e1a411c6180b72e62f67b7d8338edff68762d0d0a59baa4793638727a6162003f84c2596acb467c4e928d904546058cd260b0042b3bb0294e677645e2078d76c33ea1a53929de1aef060ec73d23fffc162594b5be6c43ca72c91cd9814b72ecd11894674a56fe8bd2713309a0b1f19794338f30477cc3e36e5d8c889529db1d245507bc99900", 90 | "101112131415161718191a1b1c1d1e1f404142434445464748494a4b4c4d4e4f505152535455565758595a5b5c5d5e5f7f3c82bd56101979a4ab82454a0d0692cd62a7dc020b297feb9bc83164430e9d6db65e54d1f01d3102ffaa1e6542b45ab78ca492593b46e06e76d0d8d758ba8c3b15c2f12607b151228c095e1958aa23da07e702e6350ad9f4a63bfa33cfe2f7257bf42fd73f5fa2dc92a4988f14f40934886a69f8df372cebc3468df580d5c07e05112170e8d81b30e346eccec758a1061c0c7aa27020e5ffb2c3fb90d089144fdb1011273eecfa42ccca7e8b5af4c70f7cc82df8c8e0fc9a0cb8a10aa026736f3961d47e3f4a275c11311a7ef12849beb141a53a937378fa80413eda1e2f6f" 91 | ], 92 | "measurement": [ 93 | true, 94 | true, 95 | false, 96 | false, 97 | true, 98 | false, 99 | false, 100 | false, 101 | false, 102 | false, 103 | true 104 | ], 105 | "nonce": "000102030405060708090a0b0c0d0e0f", 106 | "out_shares": [ 107 | [ 108 | "f8145f10fdc8bca6", 109 | "2df57afdc4066a8c" 110 | ], 111 | [ 112 | "09eba0ef01374359", 113 | "d50a85023af99573" 114 | ] 115 | ], 116 | "prep_messages": [ 117 | "1be0415318fa71a0025509fdb4559fced849a418e0819d4c", 118 | "" 119 | ], 120 | "prep_shares": [ 121 | [ 122 | "0666e598602128e425ea5ac5440b241198c1253251d0773e", 123 | 
"167a5cbab6d849bcdd6aae37704a7bbd40887ee68eb1250e" 124 | ], 125 | [ 126 | "4a5f1a8007a83201", 127 | "b7a0e57ff757cdfe" 128 | ] 129 | ], 130 | "public_share": "6a1329706203dc060e8f96eea7a90a09011bf9d6ec84927d83a71ac6f1110e0d11e7dbec8d93e59d350995244ae87c17a5ce6cfaff8ce533763568b692904206d7e470ece45c4fda5556d5b5b62c3d7be2dd649830ea94d67042a055fc38b0d4c307aeccd53526cb04f05ac3237e54ab58554582c00c3837f9ea51cf494b9ef71d413dccde24f39378eee9347774958295313fda51d3b1be516df9084ee46cad89fe554246acfd40d4c424c8356132d053344e90979f92f30320c2bbb1743b1d4f3578f54a5a9b0b242bf1841d6b39f5e70e1a75963d080ed76a295e89489ff47eb83541a49c02299250357f3bd66755a20ff515520f0712a459a651881d31f6bc0f2838fa1094e019c0d0625982399d50d9332948f9b83c59a1d039e07f9a1766b56f5818a51ed19930e9db0426c0a047d74e962240b3e06100c502868e30d62d547aca59df848918ef02e02a8683ec541237ae33b2bbfe7de5eb4b01991c8c0866a4949009c100fac98f8956d9bc82ffbf7eba76a7774146d2018dd5b97f331ddf546dde718780bb97364b321f7f9d524d21", 131 | "rand": "000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f303132333435363738393a3b3c3d3e3f404142434445464748494a4b4c4d4e4f505152535455565758595a5b5c5d5e5f606162636465666768696a6b6c6d6e6f707172737475767778797a7b7c7d7e7f" 132 | } 133 | ], 134 | "shares": 2, 135 | "verify_key": "000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f" 136 | } 137 | -------------------------------------------------------------------------------- /test_vec/vdaf/Poplar1_5.json: -------------------------------------------------------------------------------- 1 | { 2 | "agg_param": "000a000000040000c800c820ffe0", 3 | "agg_result": [ 4 | 0, 5 | 0, 6 | 1, 7 | 0 8 | ], 9 | "agg_shares": [ 10 | "d2cd4b7f24280e5a405b5b5cd4ae2d5190e6a18cd895929de2b7613739347f1f7ec642e913685f1b2be02add94a1000201bed9d86ed118f624d13423ce10ff0128a3d7193a01b7f10727e695de56ddb14df1cb8612697284bef73f0b8ad35b0cbce0dfca542e1e9a6b91f9db47d6bb68efcdafd7c0f58c76ab8f96ec77b03828", 11 | 
"1b32b480dbd7f1a5bfa4a4a32b51d2ae6f195e73276a6d621d489ec8c6cb80606f39bd16ec97a0e4d41fd5226b5efffdfe412627912ee709db2ecbdc31ef007ec65c28e6c5fe480ef8d8196a21a9224eb20e3479ed968d7b4108c0f4752ca473311f2035abd1e165946e0624b8294497103250283f0a738954706913884fc757" 12 | ], 13 | "bits": 11, 14 | "ctx": "736f6d65206170706c69636174696f6e", 15 | "operations": [ 16 | { 17 | "operation": "shard", 18 | "report_index": 0, 19 | "success": true 20 | }, 21 | { 22 | "aggregator_id": 0, 23 | "operation": "prep_init", 24 | "report_index": 0, 25 | "success": true 26 | }, 27 | { 28 | "aggregator_id": 1, 29 | "operation": "prep_init", 30 | "report_index": 0, 31 | "success": true 32 | }, 33 | { 34 | "operation": "prep_shares_to_prep", 35 | "report_index": 0, 36 | "round": 0, 37 | "success": true 38 | }, 39 | { 40 | "aggregator_id": 0, 41 | "operation": "prep_next", 42 | "report_index": 0, 43 | "round": 1, 44 | "success": true 45 | }, 46 | { 47 | "aggregator_id": 1, 48 | "operation": "prep_next", 49 | "report_index": 0, 50 | "round": 1, 51 | "success": true 52 | }, 53 | { 54 | "operation": "prep_shares_to_prep", 55 | "report_index": 0, 56 | "round": 1, 57 | "success": true 58 | }, 59 | { 60 | "aggregator_id": 0, 61 | "operation": "prep_next", 62 | "report_index": 0, 63 | "round": 2, 64 | "success": true 65 | }, 66 | { 67 | "aggregator_id": 1, 68 | "operation": "prep_next", 69 | "report_index": 0, 70 | "round": 2, 71 | "success": true 72 | }, 73 | { 74 | "aggregator_id": 0, 75 | "operation": "aggregate", 76 | "success": true 77 | }, 78 | { 79 | "aggregator_id": 1, 80 | "operation": "aggregate", 81 | "success": true 82 | }, 83 | { 84 | "operation": "unshard", 85 | "success": true 86 | } 87 | ], 88 | "prep": [ 89 | { 90 | "input_shares": [ 91 | 
"000102030405060708090a0b0c0d0e0f202122232425262728292a2b2c2d2e2f303132333435363738393a3b3c3d3e3fc2ae29156142560363d6b3101bbc3a3c831ab3334d5212f220f7be62a879dfe3303d99c9b91c2ec4b2be88bf152958538910d1b290d4132fef827a3497d8b6afd07a6d42b5dd7418fe90b98a2fe671e6b09afa685444f5765969e1a411c6180b72e62f67b7d8338edff68762d0d0a59baa4793638727a6162003f84c2596acb467c4e928d904546058cd260b0042b3bb0294e677645e2078d76c33ea1a53929de1aef060ec73d23fffc162594b5be6c43ca72c91cd9814b72ecd11894674a56fe8bd2713309a0b1f19794338f30477cc3e36e5d8c889529db1d245507bc99900", 92 | "101112131415161718191a1b1c1d1e1f404142434445464748494a4b4c4d4e4f505152535455565758595a5b5c5d5e5f7f3c82bd56101979a4ab82454a0d0692cd62a7dc020b297feb9bc83164430e9d6db65e54d1f01d3102ffaa1e6542b45ab78ca492593b46e06e76d0d8d758ba8c3b15c2f12607b151228c095e1958aa23da07e702e6350ad9f4a63bfa33cfe2f7257bf42fd73f5fa2dc92a4988f14f40934886a69f8df372cebc3468df580d5c07e05112170e8d81b30e346eccec758a1061c0c7aa27020e5ffb2c3fb90d089144fdb1011273eecfa42ccca7e8b5af4c70f7cc82df8c8e0fc9a0cb8a10aa026736f3961d47e3f4a275c11311a7ef12849beb141a53a937378fa80413eda1e2f6f" 93 | ], 94 | "measurement": [ 95 | true, 96 | true, 97 | false, 98 | false, 99 | true, 100 | false, 101 | false, 102 | false, 103 | false, 104 | false, 105 | true 106 | ], 107 | "nonce": "000102030405060708090a0b0c0d0e0f", 108 | "out_shares": [ 109 | [ 110 | "d2cd4b7f24280e5a405b5b5cd4ae2d5190e6a18cd895929de2b7613739347f1f", 111 | "7ec642e913685f1b2be02add94a1000201bed9d86ed118f624d13423ce10ff01", 112 | "28a3d7193a01b7f10727e695de56ddb14df1cb8612697284bef73f0b8ad35b0c", 113 | "bce0dfca542e1e9a6b91f9db47d6bb68efcdafd7c0f58c76ab8f96ec77b03828" 114 | ], 115 | [ 116 | "1b32b480dbd7f1a5bfa4a4a32b51d2ae6f195e73276a6d621d489ec8c6cb8060", 117 | "6f39bd16ec97a0e4d41fd5226b5efffdfe412627912ee709db2ecbdc31ef007e", 118 | "c65c28e6c5fe480ef8d8196a21a9224eb20e3479ed968d7b4108c0f4752ca473", 119 | "311f2035abd1e165946e0624b8294497103250283f0a738954706913884fc757" 120 | ] 121 | ], 122 | "prep_messages": 
[ 123 | "d3c3fc12914cd2a31c7a41b62a77273b5275154cdb30a0fec68912f9ccc66b0b6b15ce285c181e4f16a65fbcc188b3f1d4daaa14b6823f69127b41ec7e1f30516ff551aea1448229641258ba9b3fb64b4b8b4b166010c803d107541d5045425e", 124 | "" 125 | ], 126 | "prep_shares": [ 127 | [ 128 | "d4c8460a5f62404f0ab779db098a078a8643f294f2ead87fef399a85adca9b2447ea034a67ec05dca8afc0a41c9ebc8e445ff7aca391a80e6ba24be308435f65daca191dddf03952c12402cc424509236c429d30c430944e8ad88424e35ddb0c", 129 | "ecfab50832ea915412c3c7da20ed1fb1cb3123b7e845c77ed74f78731ffccf66112bcadef42b18736df69e17a5eaf662907bb36712f1965aa7d8f50876dcd06b952a3891c45348d7a2ed55ee58faac28df48aee59bdf33b5462fcff86ce76651" 130 | ], 131 | [ 132 | "1a4aa3de4303c3d7d525a4c391558ea83bd9eec7db79e19415795cbb718eb571", 133 | "d3b55c21bcfc3c282ada5b3c6eaa7157c426113824861e6bea86a3448e714a0e" 134 | ] 135 | ], 136 | "public_share": "6a1329706203dc060e8f96eea7a90a09011bf9d6ec84927d83a71ac6f1110e0d11e7dbec8d93e59d350995244ae87c17a5ce6cfaff8ce533763568b692904206d7e470ece45c4fda5556d5b5b62c3d7be2dd649830ea94d67042a055fc38b0d4c307aeccd53526cb04f05ac3237e54ab58554582c00c3837f9ea51cf494b9ef71d413dccde24f39378eee9347774958295313fda51d3b1be516df9084ee46cad89fe554246acfd40d4c424c8356132d053344e90979f92f30320c2bbb1743b1d4f3578f54a5a9b0b242bf1841d6b39f5e70e1a75963d080ed76a295e89489ff47eb83541a49c02299250357f3bd66755a20ff515520f0712a459a651881d31f6bc0f2838fa1094e019c0d0625982399d50d9332948f9b83c59a1d039e07f9a1766b56f5818a51ed19930e9db0426c0a047d74e962240b3e06100c502868e30d62d547aca59df848918ef02e02a8683ec541237ae33b2bbfe7de5eb4b01991c8c0866a4949009c100fac98f8956d9bc82ffbf7eba76a7774146d2018dd5b97f331ddf546dde718780bb97364b321f7f9d524d21", 137 | "rand": "000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f303132333435363738393a3b3c3d3e3f404142434445464748494a4b4c4d4e4f505152535455565758595a5b5c5d5e5f606162636465666768696a6b6c6d6e6f707172737475767778797a7b7c7d7e7f" 138 | } 139 | ], 140 | "shares": 2, 141 | "verify_key": 
"000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f" 142 | } 143 | -------------------------------------------------------------------------------- /test_vec/vdaf/Poplar1_bad_corr_inner.json: -------------------------------------------------------------------------------- 1 | { 2 | "agg_param": "0000000000020080", 3 | "agg_result": null, 4 | "agg_shares": [], 5 | "bits": 2, 6 | "ctx": "736f6d65206170706c69636174696f6e", 7 | "operations": [ 8 | { 9 | "aggregator_id": 0, 10 | "operation": "prep_init", 11 | "report_index": 0, 12 | "success": true 13 | }, 14 | { 15 | "aggregator_id": 1, 16 | "operation": "prep_init", 17 | "report_index": 0, 18 | "success": true 19 | }, 20 | { 21 | "operation": "prep_shares_to_prep", 22 | "report_index": 0, 23 | "round": 0, 24 | "success": true 25 | }, 26 | { 27 | "aggregator_id": 0, 28 | "operation": "prep_next", 29 | "report_index": 0, 30 | "round": 1, 31 | "success": true 32 | }, 33 | { 34 | "aggregator_id": 1, 35 | "operation": "prep_next", 36 | "report_index": 0, 37 | "round": 1, 38 | "success": true 39 | }, 40 | { 41 | "operation": "prep_shares_to_prep", 42 | "report_index": 0, 43 | "round": 1, 44 | "success": false 45 | } 46 | ], 47 | "prep": [ 48 | { 49 | "input_shares": [ 50 | "000102030405060708090a0b0c0d0e0f202122232425262728292a2b2c2d2e2f303132333435363738393a3b3c3d3e3f7f7dc6345cc6ddcbef04145502a6731d843077536603fd1d809d8364bc6fdddfb38b7bf53bb5f4d9d2ea3d8a452f866f63ef5cecb829d29a7b0a06e0c9536cf5d40dc59674ee759be1a70f2a4bec854b", 51 | "101112131415161718191a1b1c1d1e1f404142434445464748494a4b4c4d4e4f505152535455565758595a5b5c5d5e5fc46de59d5a8c91b0187d22016323cdb06752ae15a1277011caf754974e5f9c7b51bbb6f537f446f4b99414f0cfd31e34568278b83ac975508853807b1b9d27cef5e2ee3d4d8436ad7f3c82bd56101979" 52 | ], 53 | "measurement": null, 54 | "nonce": "000102030405060708090a0b0c0d0e0f", 55 | "out_shares": [], 56 | "prep_messages": [ 57 | "ccbdb1ceb7889a61ad01079573109f5e2039c58a0b824be3" 58 | ], 59 | "prep_shares": [ 60 | [ 61 
| "8bc6b288a2b65d77339a9eac14ae20c0fbe1171bc494b8f6", 62 | "42f7fe4514d23cea7b6768e85d627e9e2657ad6f46ed92ec" 63 | ], 64 | [ 65 | "b0f2002275ecc7e4", 66 | "1dcbb0ac419cd27c" 67 | ] 68 | ], 69 | "public_share": "01e4a750964ed1ae57542b1ebe0b0a08fefa73ea0dcb3423d084fd14e0bc3c1cffedfcc77e47eba098eabc56eb019dd6d5dbb10b924c32b54262a597d36a3df95d3b2f5031d7cabee88418350ceb0c0f7a2b19cbebe6a50bdb83fd5f9c94f469b23eb38c3469f3c72ebd725e92a7011670", 70 | "rand": "000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f303132333435363738393a3b3c3d3e3f404142434445464748494a4b4c4d4e4f505152535455565758595a5b5c5d5e5f606162636465666768696a6b6c6d6e6f707172737475767778797a7b7c7d7e7f" 71 | } 72 | ], 73 | "shares": 2, 74 | "verify_key": "000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f" 75 | } 76 | -------------------------------------------------------------------------------- /test_vec/vdaf/Prio3Count_0.json: -------------------------------------------------------------------------------- 1 | { 2 | "agg_param": "", 3 | "agg_result": 1, 4 | "agg_shares": [ 5 | "e369056891a9fd95", 6 | "1f96fa976d56026a" 7 | ], 8 | "ctx": "736f6d65206170706c69636174696f6e", 9 | "operations": [ 10 | { 11 | "operation": "shard", 12 | "report_index": 0, 13 | "success": true 14 | }, 15 | { 16 | "aggregator_id": 0, 17 | "operation": "prep_init", 18 | "report_index": 0, 19 | "success": true 20 | }, 21 | { 22 | "aggregator_id": 1, 23 | "operation": "prep_init", 24 | "report_index": 0, 25 | "success": true 26 | }, 27 | { 28 | "operation": "prep_shares_to_prep", 29 | "report_index": 0, 30 | "round": 0, 31 | "success": true 32 | }, 33 | { 34 | "aggregator_id": 0, 35 | "operation": "prep_next", 36 | "report_index": 0, 37 | "round": 1, 38 | "success": true 39 | }, 40 | { 41 | "aggregator_id": 1, 42 | "operation": "prep_next", 43 | "report_index": 0, 44 | "round": 1, 45 | "success": true 46 | }, 47 | { 48 | "aggregator_id": 0, 49 | "operation": "aggregate", 50 | 
"success": true 51 | }, 52 | { 53 | "aggregator_id": 1, 54 | "operation": "aggregate", 55 | "success": true 56 | }, 57 | { 58 | "operation": "unshard", 59 | "success": true 60 | } 61 | ], 62 | "prep": [ 63 | { 64 | "input_shares": [ 65 | "e369056891a9fd95d44e6fadb3b75e6774b666d312bcc59b57694d189321ffe06f46b37d26db61d056b17152e3726a2e", 66 | "000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f" 67 | ], 68 | "measurement": 1, 69 | "nonce": "000102030405060708090a0b0c0d0e0f", 70 | "out_shares": [ 71 | [ 72 | "e369056891a9fd95" 73 | ], 74 | [ 75 | "1f96fa976d56026a" 76 | ] 77 | ], 78 | "prep_messages": [ 79 | "" 80 | ], 81 | "prep_shares": [ 82 | [ 83 | "5c6a0685bd0f0aa9b19b8c1c4431ec49eca02338e5e05da8fc91575311627200", 84 | "a595f97a41f0f5565abc07086ef01d5b3cb2629a3c534bd4dbd8f5187ad75aad" 85 | ] 86 | ], 87 | "public_share": "", 88 | "rand": "000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f303132333435363738393a3b3c3d3e3f" 89 | } 90 | ], 91 | "shares": 2, 92 | "verify_key": "000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f" 93 | } 94 | -------------------------------------------------------------------------------- /test_vec/vdaf/Prio3Count_1.json: -------------------------------------------------------------------------------- 1 | { 2 | "agg_param": "", 3 | "agg_result": 1, 4 | "agg_shares": [ 5 | "afccf0c22c8901be", 6 | "1f96fa976d56026a", 7 | "359d14a56320fcd7" 8 | ], 9 | "ctx": "736f6d65206170706c69636174696f6e", 10 | "operations": [ 11 | { 12 | "operation": "shard", 13 | "report_index": 0, 14 | "success": true 15 | }, 16 | { 17 | "aggregator_id": 0, 18 | "operation": "prep_init", 19 | "report_index": 0, 20 | "success": true 21 | }, 22 | { 23 | "aggregator_id": 1, 24 | "operation": "prep_init", 25 | "report_index": 0, 26 | "success": true 27 | }, 28 | { 29 | "aggregator_id": 2, 30 | "operation": "prep_init", 31 | "report_index": 0, 32 | "success": true 33 | }, 34 | { 35 | "operation": 
"prep_shares_to_prep", 36 | "report_index": 0, 37 | "round": 0, 38 | "success": true 39 | }, 40 | { 41 | "aggregator_id": 0, 42 | "operation": "prep_next", 43 | "report_index": 0, 44 | "round": 1, 45 | "success": true 46 | }, 47 | { 48 | "aggregator_id": 1, 49 | "operation": "prep_next", 50 | "report_index": 0, 51 | "round": 1, 52 | "success": true 53 | }, 54 | { 55 | "aggregator_id": 2, 56 | "operation": "prep_next", 57 | "report_index": 0, 58 | "round": 1, 59 | "success": true 60 | }, 61 | { 62 | "aggregator_id": 0, 63 | "operation": "aggregate", 64 | "success": true 65 | }, 66 | { 67 | "aggregator_id": 1, 68 | "operation": "aggregate", 69 | "success": true 70 | }, 71 | { 72 | "aggregator_id": 2, 73 | "operation": "aggregate", 74 | "success": true 75 | }, 76 | { 77 | "operation": "unshard", 78 | "success": true 79 | } 80 | ], 81 | "prep": [ 82 | { 83 | "input_shares": [ 84 | "afccf0c22c8901be040bc9d44987fd40085bf595185a34267ec8e975e8fdfa331abaaecc9d7104785e186102448192ad", 85 | "000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f", 86 | "202122232425262728292a2b2c2d2e2f303132333435363738393a3b3c3d3e3f" 87 | ], 88 | "measurement": 1, 89 | "nonce": "000102030405060708090a0b0c0d0e0f", 90 | "out_shares": [ 91 | [ 92 | "afccf0c22c8901be" 93 | ], 94 | [ 95 | "1f96fa976d56026a" 96 | ], 97 | [ 98 | "359d14a56320fcd7" 99 | ] 100 | ], 101 | "prep_messages": [ 102 | "" 103 | ], 104 | "prep_shares": [ 105 | [ 106 | "145aabe8608487ab66ff1e11a196662b0d268a8dbd2b01481daf7d190bf1b0b6", 107 | "a595f97a41f0f5565abc07086ef01d5b3cb2629a3c534bd4dbd8f5187ad75aad", 108 | "49105b9c5b8b82fd30916e72506a0c5679b07c05a3255a85103f30cfd5ab310e" 109 | ] 110 | ], 111 | "public_share": "", 112 | "rand": "000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f303132333435363738393a3b3c3d3e3f404142434445464748494a4b4c4d4e4f505152535455565758595a5b5c5d5e5f" 113 | } 114 | ], 115 | "shares": 3, 116 | "verify_key": 
"000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f" 117 | } 118 | -------------------------------------------------------------------------------- /test_vec/vdaf/Prio3Count_bad_gadget_poly.json: -------------------------------------------------------------------------------- 1 | { 2 | "agg_param": "", 3 | "agg_result": null, 4 | "agg_shares": [], 5 | "ctx": "736f6d65206170706c69636174696f6e", 6 | "operations": [ 7 | { 8 | "aggregator_id": 0, 9 | "operation": "prep_init", 10 | "report_index": 0, 11 | "success": true 12 | }, 13 | { 14 | "aggregator_id": 1, 15 | "operation": "prep_init", 16 | "report_index": 0, 17 | "success": true 18 | }, 19 | { 20 | "operation": "prep_shares_to_prep", 21 | "report_index": 0, 22 | "round": 0, 23 | "success": false 24 | } 25 | ], 26 | "prep": [ 27 | { 28 | "input_shares": [ 29 | "e369056891a9fd95d44e6fadb3b75e6774b666d312bcc59b57694d189321ffe06f46b37d26db61d057b17152e3726a2e", 30 | "000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f" 31 | ], 32 | "measurement": null, 33 | "nonce": "000102030405060708090a0b0c0d0e0f", 34 | "out_shares": [], 35 | "prep_messages": [], 36 | "prep_shares": [ 37 | [ 38 | "5d6a0685bd0f0aa9b19b8c1c4431ec49eca02338e5e05da8cfd95bffb57f21f0", 39 | "a595f97a41f0f5565abc07086ef01d5b3cb2629a3c534bd4dbd8f5187ad75aad" 40 | ] 41 | ], 42 | "public_share": "", 43 | "rand": "000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f303132333435363738393a3b3c3d3e3f" 44 | } 45 | ], 46 | "shares": 2, 47 | "verify_key": "000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f" 48 | } 49 | -------------------------------------------------------------------------------- /test_vec/vdaf/Prio3Count_bad_helper_seed.json: -------------------------------------------------------------------------------- 1 | { 2 | "agg_param": "", 3 | "agg_result": null, 4 | "agg_shares": [], 5 | "ctx": "736f6d65206170706c69636174696f6e", 6 | "operations": [ 7 | { 8 | 
"aggregator_id": 0, 9 | "operation": "prep_init", 10 | "report_index": 0, 11 | "success": true 12 | }, 13 | { 14 | "aggregator_id": 1, 15 | "operation": "prep_init", 16 | "report_index": 0, 17 | "success": true 18 | }, 19 | { 20 | "operation": "prep_shares_to_prep", 21 | "report_index": 0, 22 | "round": 0, 23 | "success": false 24 | } 25 | ], 26 | "prep": [ 27 | { 28 | "input_shares": [ 29 | "e369056891a9fd95d44e6fadb3b75e6774b666d312bcc59b57694d189321ffe06f46b37d26db61d056b17152e3726a2e", 30 | "010102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f" 31 | ], 32 | "measurement": null, 33 | "nonce": "000102030405060708090a0b0c0d0e0f", 34 | "out_shares": [], 35 | "prep_messages": [], 36 | "prep_shares": [ 37 | [ 38 | "5c6a0685bd0f0aa9b19b8c1c4431ec49eca02338e5e05da8fc91575311627200", 39 | "1e3f9484477e86edb9b7c743b75f9b7c81f5ab0d9c1812eac73918b7633ab8ef" 40 | ] 41 | ], 42 | "public_share": "", 43 | "rand": "000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f303132333435363738393a3b3c3d3e3f" 44 | } 45 | ], 46 | "shares": 2, 47 | "verify_key": "000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f" 48 | } 49 | -------------------------------------------------------------------------------- /test_vec/vdaf/Prio3Count_bad_meas_share.json: -------------------------------------------------------------------------------- 1 | { 2 | "agg_param": "", 3 | "agg_result": null, 4 | "agg_shares": [], 5 | "ctx": "736f6d65206170706c69636174696f6e", 6 | "operations": [ 7 | { 8 | "aggregator_id": 0, 9 | "operation": "prep_init", 10 | "report_index": 0, 11 | "success": true 12 | }, 13 | { 14 | "aggregator_id": 1, 15 | "operation": "prep_init", 16 | "report_index": 0, 17 | "success": true 18 | }, 19 | { 20 | "operation": "prep_shares_to_prep", 21 | "report_index": 0, 22 | "round": 0, 23 | "success": false 24 | } 25 | ], 26 | "prep": [ 27 | { 28 | "input_shares": [ 29 | 
"e469056891a9fd95d44e6fadb3b75e6774b666d312bcc59b57694d189321ffe06f46b37d26db61d056b17152e3726a2e", 30 | "000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f" 31 | ], 32 | "measurement": null, 33 | "nonce": "000102030405060708090a0b0c0d0e0f", 34 | "out_shares": [], 35 | "prep_messages": [], 36 | "prep_shares": [ 37 | [ 38 | "5b6a0685bd0f0aa9d77d58a46740003f1283efbf08f0719dfc91575311627200", 39 | "a595f97a41f0f5565abc07086ef01d5b3cb2629a3c534bd4dbd8f5187ad75aad" 40 | ] 41 | ], 42 | "public_share": "", 43 | "rand": "000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f303132333435363738393a3b3c3d3e3f" 44 | } 45 | ], 46 | "shares": 2, 47 | "verify_key": "000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f" 48 | } 49 | -------------------------------------------------------------------------------- /test_vec/vdaf/Prio3Count_bad_wire_seed.json: -------------------------------------------------------------------------------- 1 | { 2 | "agg_param": "", 3 | "agg_result": null, 4 | "agg_shares": [], 5 | "ctx": "736f6d65206170706c69636174696f6e", 6 | "operations": [ 7 | { 8 | "aggregator_id": 0, 9 | "operation": "prep_init", 10 | "report_index": 0, 11 | "success": true 12 | }, 13 | { 14 | "aggregator_id": 1, 15 | "operation": "prep_init", 16 | "report_index": 0, 17 | "success": true 18 | }, 19 | { 20 | "operation": "prep_shares_to_prep", 21 | "report_index": 0, 22 | "round": 0, 23 | "success": false 24 | } 25 | ], 26 | "prep": [ 27 | { 28 | "input_shares": [ 29 | "e369056891a9fd95d54e6fadb3b75e6774b666d312bcc59b57694d189321ffe06f46b37d26db61d056b17152e3726a2e", 30 | "000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f" 31 | ], 32 | "measurement": null, 33 | "nonce": "000102030405060708090a0b0c0d0e0f", 34 | "out_shares": [], 35 | "prep_messages": [], 36 | "prep_shares": [ 37 | [ 38 | "5c6a0685bd0f0aa98cb9c0942022d854eca02338e5e05da8fc91575311627200", 39 | 
"a595f97a41f0f5565abc07086ef01d5b3cb2629a3c534bd4dbd8f5187ad75aad" 40 | ] 41 | ], 42 | "public_share": "", 43 | "rand": "000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f303132333435363738393a3b3c3d3e3f" 44 | } 45 | ], 46 | "shares": 2, 47 | "verify_key": "000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f" 48 | } 49 | -------------------------------------------------------------------------------- /test_vec/vdaf/Prio3Histogram_0.json: -------------------------------------------------------------------------------- 1 | { 2 | "agg_param": "", 3 | "agg_result": [ 4 | 0, 5 | 0, 6 | 1, 7 | 0 8 | ], 9 | "agg_shares": [ 10 | "e720f2d625ee3cabce61d583c8c4054e82e8487025764348020264792b3d100f0d8c76f65caf04ee0e22373be0d61ac5fa6e2f2866078b31b90537e24416fad1", 11 | "1adf0d29da11c354159e2a7c373bfab17f17b78fda89bcb7e1fd9b86d4c2eff0f5738909a350fb11d5ddc8c41f29e53a0791d0d799f874ce2afac81dbbe9052e" 12 | ], 13 | "chunk_length": 2, 14 | "ctx": "736f6d65206170706c69636174696f6e", 15 | "length": 4, 16 | "operations": [ 17 | { 18 | "operation": "shard", 19 | "report_index": 0, 20 | "success": true 21 | }, 22 | { 23 | "aggregator_id": 0, 24 | "operation": "prep_init", 25 | "report_index": 0, 26 | "success": true 27 | }, 28 | { 29 | "aggregator_id": 1, 30 | "operation": "prep_init", 31 | "report_index": 0, 32 | "success": true 33 | }, 34 | { 35 | "operation": "prep_shares_to_prep", 36 | "report_index": 0, 37 | "round": 0, 38 | "success": true 39 | }, 40 | { 41 | "aggregator_id": 0, 42 | "operation": "prep_next", 43 | "report_index": 0, 44 | "round": 1, 45 | "success": true 46 | }, 47 | { 48 | "aggregator_id": 1, 49 | "operation": "prep_next", 50 | "report_index": 0, 51 | "round": 1, 52 | "success": true 53 | }, 54 | { 55 | "aggregator_id": 0, 56 | "operation": "aggregate", 57 | "success": true 58 | }, 59 | { 60 | "aggregator_id": 1, 61 | "operation": "aggregate", 62 | "success": true 63 | }, 64 | { 65 | "operation": "unshard", 
66 | "success": true 67 | } 68 | ], 69 | "prep": [ 70 | { 71 | "input_shares": [ 72 | "e720f2d625ee3cabce61d583c8c4054e82e8487025764348020264792b3d100f0d8c76f65caf04ee0e22373be0d61ac5fa6e2f2866078b31b90537e24416fad1d878d8b7b93954e80a0a008ae08698e74409c646c5089bf508d7b32589a4442c84c94b77dd83e10d4cbdcb0a8e9084ba58812ef6c40e078587f3c82140facd7e6a4d6942338c87ab5336de984fca87d6315bd0cd55be021a5e8c95cb2f58d403209e06a16b0e535a34828e56812b7a3f995ffd32815553c38fc0ce8a7963e9758fc2a45b02b69ec05eb19de856357e3a3fd3063b2501f1c7e5791e3350f60a5e870c1d8d239707d61f61f6f3f8413185404142434445464748494a4b4c4d4e4f505152535455565758595a5b5c5d5e5f", 73 | "000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f303132333435363738393a3b3c3d3e3f" 74 | ], 75 | "measurement": 2, 76 | "nonce": "000102030405060708090a0b0c0d0e0f", 77 | "out_shares": [ 78 | [ 79 | "e720f2d625ee3cabce61d583c8c4054e", 80 | "82e8487025764348020264792b3d100f", 81 | "0d8c76f65caf04ee0e22373be0d61ac5", 82 | "fa6e2f2866078b31b90537e24416fad1" 83 | ], 84 | [ 85 | "1adf0d29da11c354159e2a7c373bfab1", 86 | "7f17b78fda89bcb7e1fd9b86d4c2eff0", 87 | "f5738909a350fb11d5ddc8c41f29e53a", 88 | "0791d0d799f874ce2afac81dbbe9052e" 89 | ] 90 | ], 91 | "prep_messages": [ 92 | "915cca74703d352a8e12e7441aa3896efbd633bf8eacd9a101ab922f6481404b" 93 | ], 94 | "prep_shares": [ 95 | [ 96 | "d94d1f36d8d2f9867208928069335f0957497c6df0fe6568d4304b27f51456acf8093e765dfe404541c27935c03bf8677569234022700212e1112ad5b4a474126b01b5b1be78d897a9e616bcb21c5644fad4b87f09bc65f0c46ec41e29b19ffd064ddc2301a2ff2176338dd52a09fdadd442cddcbe5d10dafe0d92551d81eac7", 97 | "28b2e0c9272d067971f76d7f96cca0f6bef7d43878716b3fcee1d230515956acdd7ced66a61a897483b4b233e4fe32b48babf43d070fc14b1c7fc4e1a386fd223f58eec1544973c9f39e871c48543e979da23642e48ae655de8c7e6b008ab43bc27dda399f3c8341c7476370573e51b6ebe601061807bb886fc55c2161ce436e" 98 | ] 99 | ], 100 | "public_share": 
"064ddc2301a2ff2176338dd52a09fdadd442cddcbe5d10dafe0d92551d81eac7c27dda399f3c8341c7476370573e51b6ebe601061807bb886fc55c2161ce436e", 101 | "rand": "000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f303132333435363738393a3b3c3d3e3f404142434445464748494a4b4c4d4e4f505152535455565758595a5b5c5d5e5f606162636465666768696a6b6c6d6e6f707172737475767778797a7b7c7d7e7f" 102 | } 103 | ], 104 | "shares": 2, 105 | "verify_key": "000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f" 106 | } 107 | -------------------------------------------------------------------------------- /test_vec/vdaf/Prio3Histogram_1.json: -------------------------------------------------------------------------------- 1 | { 2 | "agg_param": "", 3 | "agg_result": [ 4 | 0, 5 | 0, 6 | 1, 7 | 0, 8 | 0, 9 | 0, 10 | 0, 11 | 0, 12 | 0, 13 | 0, 14 | 0 15 | ], 16 | "agg_shares": [ 17 | "7c81185c4933c0c53be9489a06fe6f271c677e2d2f3469491cd4a5df164da7898ab986810546eb5f44d266bab7912eec02afbee3faa2fc691c60e08cb5b4fc441550907d4b1c16f374bda4acec4f4285934b3867956e39a671054936d1d35a7f4e834925c9aab4102a4d4869bdd3adf4c87230df479eadc9d8fd5113f230d1535754c64ae50e7acf66d0d49d6668f99d862a872d07d7c5e5bba254be6eaa867af95b6a6fe60b4cc1011e0fc2acd1c239", 18 | "1adf0d29da11c354159e2a7c373bfab17f17b78fda89bcb7e1fd9b86d4c2eff0f5738909a350fb11d5ddc8c41f29e53a0791d0d799f874ce2afac81dbbe9052ed75bb48b448e3dab58ea08704f275cf79253d1b055657dfd4862c648d1385a5b7b61459c9aeb8d5c9565370551c67bdea48ce598f7e3c93be4d48ecf19ee20a146a80033c3bbc8feb23771e908b528c13b5d8363939cd7df29d312b4ff9e453e7e3bce6b34722b6d70a389c1ab4724ef", 19 | "6b9fd97adcba7ce592788ce9c1c695266781ca42f641dafec92dbe9914f0688584d2ef745769198eae4fd0802845ecd8f8bf70446b648ec79ca556558f61fd8c1654bbf66f55ac61fa5752e3c3886183dc60f6e7142c495c2998f0805df34a25391b713e9c69bd92084d8091f165d62c9500ea87c07d88fa262d1f1df4e00d0b650339825735bd31aef7b97890e2dda04078f56e658c623afe89988d91b633478b68c724e58188d1553e677ca7e618d7" 20 | ], 21 | 
"chunk_length": 3, 22 | "ctx": "736f6d65206170706c69636174696f6e", 23 | "length": 11, 24 | "operations": [ 25 | { 26 | "operation": "shard", 27 | "report_index": 0, 28 | "success": true 29 | }, 30 | { 31 | "aggregator_id": 0, 32 | "operation": "prep_init", 33 | "report_index": 0, 34 | "success": true 35 | }, 36 | { 37 | "aggregator_id": 1, 38 | "operation": "prep_init", 39 | "report_index": 0, 40 | "success": true 41 | }, 42 | { 43 | "aggregator_id": 2, 44 | "operation": "prep_init", 45 | "report_index": 0, 46 | "success": true 47 | }, 48 | { 49 | "operation": "prep_shares_to_prep", 50 | "report_index": 0, 51 | "round": 0, 52 | "success": true 53 | }, 54 | { 55 | "aggregator_id": 0, 56 | "operation": "prep_next", 57 | "report_index": 0, 58 | "round": 1, 59 | "success": true 60 | }, 61 | { 62 | "aggregator_id": 1, 63 | "operation": "prep_next", 64 | "report_index": 0, 65 | "round": 1, 66 | "success": true 67 | }, 68 | { 69 | "aggregator_id": 2, 70 | "operation": "prep_next", 71 | "report_index": 0, 72 | "round": 1, 73 | "success": true 74 | }, 75 | { 76 | "aggregator_id": 0, 77 | "operation": "aggregate", 78 | "success": true 79 | }, 80 | { 81 | "aggregator_id": 1, 82 | "operation": "aggregate", 83 | "success": true 84 | }, 85 | { 86 | "aggregator_id": 2, 87 | "operation": "aggregate", 88 | "success": true 89 | }, 90 | { 91 | "operation": "unshard", 92 | "success": true 93 | } 94 | ], 95 | "prep": [ 96 | { 97 | "input_shares": [ 98 | 
"7c81185c4933c0c53be9489a06fe6f271c677e2d2f3469491cd4a5df164da7898ab986810546eb5f44d266bab7912eec02afbee3faa2fc691c60e08cb5b4fc441550907d4b1c16f374bda4acec4f4285934b3867956e39a671054936d1d35a7f4e834925c9aab4102a4d4869bdd3adf4c87230df479eadc9d8fd5113f230d1535754c64ae50e7acf66d0d49d6668f99d862a872d07d7c5e5bba254be6eaa867af95b6a6fe60b4cc1011e0fc2acd1c239bbc08eaa2f65a321aab41077ee2ee67cf916f12d74bd3e0db0b7f053c6000e42e127a7a9d766608fcfefe7741ad8098f7e4180cc5a7af54b4e1a2f6385bc3205c60cd615037620637b96a87b6277db02df81cfa9389ae42515dc02f26bd54d4f6d22ce1c37ba89287248bdde8c71cba5e08cf30d48fab7a600fb27d441ec60788419a3c2fe4f162404a45cbc6a759d71bb3537e109dd50d2810cd3bee8e3ed828646009b0d11310b6dbf5d768d4da864950b09ec34d39e9bde8d9a4496031a33a6452188885035ee869560aee14cd02c484588f5399d364dacfc2e63dbfa2844418722107a38f8af774f4b334ddafd699d3ff3aeb1653f19c0cba384337b114a290ea69463793a5f9986c22bea8ef77b51358adb52e8a61c9f8046504a6156ff82fe86b3868cb262b8db8fdfc07444132c7c42f3dce18d99f6b221319e54a908e7d458925900d23c120489a930b802aa808182838485868788898a8b8c8d8e8f909192939495969798999a9b9c9d9e9f", 99 | "000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f303132333435363738393a3b3c3d3e3f", 100 | "404142434445464748494a4b4c4d4e4f505152535455565758595a5b5c5d5e5f606162636465666768696a6b6c6d6e6f707172737475767778797a7b7c7d7e7f" 101 | ], 102 | "measurement": 2, 103 | "nonce": "000102030405060708090a0b0c0d0e0f", 104 | "out_shares": [ 105 | [ 106 | "7c81185c4933c0c53be9489a06fe6f27", 107 | "1c677e2d2f3469491cd4a5df164da789", 108 | "8ab986810546eb5f44d266bab7912eec", 109 | "02afbee3faa2fc691c60e08cb5b4fc44", 110 | "1550907d4b1c16f374bda4acec4f4285", 111 | "934b3867956e39a671054936d1d35a7f", 112 | "4e834925c9aab4102a4d4869bdd3adf4", 113 | "c87230df479eadc9d8fd5113f230d153", 114 | "5754c64ae50e7acf66d0d49d6668f99d", 115 | "862a872d07d7c5e5bba254be6eaa867a", 116 | "f95b6a6fe60b4cc1011e0fc2acd1c239" 117 | ], 118 | [ 119 | "1adf0d29da11c354159e2a7c373bfab1", 
120 | "7f17b78fda89bcb7e1fd9b86d4c2eff0", 121 | "f5738909a350fb11d5ddc8c41f29e53a", 122 | "0791d0d799f874ce2afac81dbbe9052e", 123 | "d75bb48b448e3dab58ea08704f275cf7", 124 | "9253d1b055657dfd4862c648d1385a5b", 125 | "7b61459c9aeb8d5c9565370551c67bde", 126 | "a48ce598f7e3c93be4d48ecf19ee20a1", 127 | "46a80033c3bbc8feb23771e908b528c1", 128 | "3b5d8363939cd7df29d312b4ff9e453e", 129 | "7e3bce6b34722b6d70a389c1ab4724ef" 130 | ], 131 | [ 132 | "6b9fd97adcba7ce592788ce9c1c69526", 133 | "6781ca42f641dafec92dbe9914f06885", 134 | "84d2ef745769198eae4fd0802845ecd8", 135 | "f8bf70446b648ec79ca556558f61fd8c", 136 | "1654bbf66f55ac61fa5752e3c3886183", 137 | "dc60f6e7142c495c2998f0805df34a25", 138 | "391b713e9c69bd92084d8091f165d62c", 139 | "9500ea87c07d88fa262d1f1df4e00d0b", 140 | "650339825735bd31aef7b97890e2dda0", 141 | "4078f56e658c623afe89988d91b63347", 142 | "8b68c724e58188d1553e677ca7e618d7" 143 | ] 144 | ], 145 | "prep_messages": [ 146 | "8794e3c89b479fd38914037c778563366e9c90f61a47f52e02e51003048af9e1" 147 | ], 148 | "prep_shares": [ 149 | [ 150 | "9c992f824ce7e69496d40dfa1da358109a3472a03ed58dc33d5426b0115bc7834b72373b9e78170ed075be80bd7842796b25594ce5825c4166bffa9165a98e40a56d6d17e0ca74af36cb5dbdb3a2f3c7d246b8f5bd78acc9f6c26ffde64d4817cdd9b28b742924c79516ca016098492df7580a49dd091f4f6e81fcf9d9fc148365b3e30205531b610d546de28a6b7c7cc058087324206afad931cea724e2285c", 151 | "5db683a2145d865bdcf7b710ffb7e5ebb0bd9384c53299aae2cfe91888654d713fb6f1f7c85c5cfeaad11f1231a6ad8af800be32effc0abc03e33cd8160323e080e98fdc84db0024002bf3aec21c8a9ba3d69a20bbf18d9ff7234d6b03e993f5ef0c2dad87518e4f16634da8f1d947a518540775525c97a3d8d4323fa0f488b782c591e1ce996072e1f096b5991133f1d6ab883b98ec6a028b1de85eefed6989", 152 | 
"08b04cdb9ebb920f71333af5e2a4c1037e6f5e776923d04de57b5c3ae70c15ad22f5380db9898c43134eb63198f64d67b47df6087bf19d0f85c2f729bb3235bc92bf9dc3a0f5ded0191ae397b5a2553a7ce4a518a7928893a51dc7dbfbdcb3b0d59aaa9ee4849b9d94faab5b691dc86fca19867ae82f97927ff5eece600cd002ed585479d34bf381eb8c5a1655338d24d04951ef27510d46ac2d45d22b5fc6c2" 153 | ] 154 | ], 155 | "public_share": "65b3e30205531b610d546de28a6b7c7cc058087324206afad931cea724e2285c82c591e1ce996072e1f096b5991133f1d6ab883b98ec6a028b1de85eefed6989ed585479d34bf381eb8c5a1655338d24d04951ef27510d46ac2d45d22b5fc6c2", 156 | "rand": "000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f303132333435363738393a3b3c3d3e3f404142434445464748494a4b4c4d4e4f505152535455565758595a5b5c5d5e5f606162636465666768696a6b6c6d6e6f707172737475767778797a7b7c7d7e7f808182838485868788898a8b8c8d8e8f909192939495969798999a9b9c9d9e9fa0a1a2a3a4a5a6a7a8a9aaabacadaeafb0b1b2b3b4b5b6b7b8b9babbbcbdbebf" 157 | } 158 | ], 159 | "shares": 3, 160 | "verify_key": "000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f" 161 | } 162 | -------------------------------------------------------------------------------- /test_vec/vdaf/Prio3Histogram_bad_helper_jr_blind.json: -------------------------------------------------------------------------------- 1 | { 2 | "agg_param": "", 3 | "agg_result": null, 4 | "agg_shares": [], 5 | "chunk_length": 2, 6 | "ctx": "736f6d65206170706c69636174696f6e", 7 | "length": 5, 8 | "operations": [ 9 | { 10 | "aggregator_id": 0, 11 | "operation": "prep_init", 12 | "report_index": 0, 13 | "success": true 14 | }, 15 | { 16 | "aggregator_id": 1, 17 | "operation": "prep_init", 18 | "report_index": 0, 19 | "success": true 20 | }, 21 | { 22 | "operation": "prep_shares_to_prep", 23 | "report_index": 0, 24 | "round": 0, 25 | "success": false 26 | } 27 | ], 28 | "prep": [ 29 | { 30 | "input_shares": [ 31 | 
"e720f2d625ee3cabce61d583c8c4054e82e8487025764348020264792b3d100f0c8c76f65caf04ee0e22373be0d61ac5fb6e2f2866078b31b90537e24416fad12aa44b74bb71c2548b15f78fb0d8a308d878d8b7b93954e80a0a008ae08698e74409c646c5089bf508d7b32589a4442c84c94b77dd83e10d4cbdcb0a8e9084ba58812ef6c40e078587f3c82140facd7edd1d134c8fe77e054cbbe3b201fe287e73f802d264126d1cd1e62d26dc8210f3f4c294a90eeccd07a4649b3d86dd24f4995ffd32815553c38fc0ce8a7963e9751cf2fa51a65aa766662c98cea401dd92fe35d43616ad86c5561f86d8a3cbce6eb4e78e8480b98c28947ee90cf48f86d0404142434445464748494a4b4c4d4e4f505152535455565758595a5b5c5d5e5f", 32 | "000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f212122232425262728292a2b2c2d2e2f303132333435363738393a3b3c3d3e3f" 33 | ], 34 | "measurement": null, 35 | "nonce": "000102030405060708090a0b0c0d0e0f", 36 | "out_shares": [], 37 | "prep_messages": [], 38 | "prep_shares": [ 39 | [ 40 | "3fd4a2beffc4c823c1acf8f9a39773bdaec27f14225d18fe7de98864b3e88d9d55712f54e38d387cc86b316d537b5915ad15d319f7c9a462ca89babe10efc0ca8ee86d85f34038cd4ea89ba1678651b5c1ee2d3352368b9f5970d970d0516dc718d1fa6a203f562af224e6bb7cf1b465db06678b71c764bc793755fd5d1f4bcd", 41 | "c22b5d41003b37dc225307065c688c42d9ac64500286b6c07d959e5b538d02a6f72dbfccac57b6520b59c9c41e290353f9fb383862ac7f3ed5f7172720af40241d58c2c93fd0f48cda4ea3561ef0f7809da23642e48ae655de8c7e6b008ab43b1207614dc8f0ea32fced4985e6635b5cbf4415fc1a52a72198a041faaec04e8e" 42 | ] 43 | ], 44 | "public_share": "18d1fa6a203f562af224e6bb7cf1b465db06678b71c764bc793755fd5d1f4bcd01a98d76e07e02ee3f6c9055b3cbb73cdfd67c4dabf30d9323b93e327122e337", 45 | "rand": "000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f303132333435363738393a3b3c3d3e3f404142434445464748494a4b4c4d4e4f505152535455565758595a5b5c5d5e5f606162636465666768696a6b6c6d6e6f707172737475767778797a7b7c7d7e7f" 46 | } 47 | ], 48 | "shares": 2, 49 | "verify_key": "000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f" 50 | } 51 | 
-------------------------------------------------------------------------------- /test_vec/vdaf/Prio3Histogram_bad_leader_jr_blind.json: -------------------------------------------------------------------------------- 1 | { 2 | "agg_param": "", 3 | "agg_result": null, 4 | "agg_shares": [], 5 | "chunk_length": 2, 6 | "ctx": "736f6d65206170706c69636174696f6e", 7 | "length": 5, 8 | "operations": [ 9 | { 10 | "aggregator_id": 0, 11 | "operation": "prep_init", 12 | "report_index": 0, 13 | "success": true 14 | }, 15 | { 16 | "aggregator_id": 1, 17 | "operation": "prep_init", 18 | "report_index": 0, 19 | "success": true 20 | }, 21 | { 22 | "operation": "prep_shares_to_prep", 23 | "report_index": 0, 24 | "round": 0, 25 | "success": false 26 | } 27 | ], 28 | "prep": [ 29 | { 30 | "input_shares": [ 31 | "e720f2d625ee3cabce61d583c8c4054e82e8487025764348020264792b3d100f0c8c76f65caf04ee0e22373be0d61ac5fb6e2f2866078b31b90537e24416fad12aa44b74bb71c2548b15f78fb0d8a308d878d8b7b93954e80a0a008ae08698e74409c646c5089bf508d7b32589a4442c84c94b77dd83e10d4cbdcb0a8e9084ba58812ef6c40e078587f3c82140facd7edd1d134c8fe77e054cbbe3b201fe287e73f802d264126d1cd1e62d26dc8210f3f4c294a90eeccd07a4649b3d86dd24f4995ffd32815553c38fc0ce8a7963e9751cf2fa51a65aa766662c98cea401dd92fe35d43616ad86c5561f86d8a3cbce6eb4e78e8480b98c28947ee90cf48f86d0414142434445464748494a4b4c4d4e4f505152535455565758595a5b5c5d5e5f", 32 | "000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f303132333435363738393a3b3c3d3e3f" 33 | ], 34 | "measurement": null, 35 | "nonce": "000102030405060708090a0b0c0d0e0f", 36 | "out_shares": [], 37 | "prep_messages": [], 38 | "prep_shares": [ 39 | [ 40 | "3fd4a2beffc4c823c1acf8f9a39773bdb6fe15e8ca56227629d36ae852b46bbd55712f54e38d387cc86b316d537b591585d2c9264ae9a8f8e9fc597ad27260548ee86d85f34038cd4ea89ba1678651b5c1ee2d3352368b9f5970d970d0516dc71ecf2e9a6ebf3c59383c3a00db5547122c589eb77981ca940c715ba7dcd1b302", 41 | 
"c22b5d41003b37dc225307065c688c428f7aa7727ac876d478bb84fe406cb567f72dbfccac57b6520b59c9c41e2903530e961aa15a8181f92cfd667cf115ae831d58c2c93fd0f48cda4ea3561ef0f7809da23642e48ae655de8c7e6b008ab43b01a98d76e07e02ee3f6c9055b3cbb73cdfd67c4dabf30d9323b93e327122e337" 42 | ] 43 | ], 44 | "public_share": "18d1fa6a203f562af224e6bb7cf1b465db06678b71c764bc793755fd5d1f4bcd01a98d76e07e02ee3f6c9055b3cbb73cdfd67c4dabf30d9323b93e327122e337", 45 | "rand": "000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f303132333435363738393a3b3c3d3e3f404142434445464748494a4b4c4d4e4f505152535455565758595a5b5c5d5e5f606162636465666768696a6b6c6d6e6f707172737475767778797a7b7c7d7e7f" 46 | } 47 | ], 48 | "shares": 2, 49 | "verify_key": "000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f" 50 | } 51 | -------------------------------------------------------------------------------- /test_vec/vdaf/Prio3Histogram_bad_prep_msg.json: -------------------------------------------------------------------------------- 1 | { 2 | "agg_param": "", 3 | "agg_result": null, 4 | "agg_shares": [], 5 | "chunk_length": 2, 6 | "ctx": "736f6d65206170706c69636174696f6e", 7 | "length": 5, 8 | "operations": [ 9 | { 10 | "aggregator_id": 0, 11 | "operation": "prep_init", 12 | "report_index": 0, 13 | "success": true 14 | }, 15 | { 16 | "aggregator_id": 0, 17 | "operation": "prep_next", 18 | "report_index": 0, 19 | "round": 1, 20 | "success": false 21 | } 22 | ], 23 | "prep": [ 24 | { 25 | "input_shares": [ 26 | 
"e720f2d625ee3cabce61d583c8c4054e82e8487025764348020264792b3d100f0c8c76f65caf04ee0e22373be0d61ac5fb6e2f2866078b31b90537e24416fad12aa44b74bb71c2548b15f78fb0d8a308d878d8b7b93954e80a0a008ae08698e74409c646c5089bf508d7b32589a4442c84c94b77dd83e10d4cbdcb0a8e9084ba58812ef6c40e078587f3c82140facd7edd1d134c8fe77e054cbbe3b201fe287e73f802d264126d1cd1e62d26dc8210f3f4c294a90eeccd07a4649b3d86dd24f4995ffd32815553c38fc0ce8a7963e9751cf2fa51a65aa766662c98cea401dd92fe35d43616ad86c5561f86d8a3cbce6eb4e78e8480b98c28947ee90cf48f86d0404142434445464748494a4b4c4d4e4f505152535455565758595a5b5c5d5e5f", 27 | "000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f303132333435363738393a3b3c3d3e3f" 28 | ], 29 | "measurement": null, 30 | "nonce": "000102030405060708090a0b0c0d0e0f", 31 | "out_shares": [], 32 | "prep_messages": [ 33 | "0000000000000000000000000000000000000000000000000000000000000000" 34 | ], 35 | "prep_shares": [ 36 | [ 37 | "3fd4a2beffc4c823c1acf8f9a39773bdaec27f14225d18fe7de98864b3e88d9d55712f54e38d387cc86b316d537b5915ad15d319f7c9a462ca89babe10efc0ca8ee86d85f34038cd4ea89ba1678651b5c1ee2d3352368b9f5970d970d0516dc718d1fa6a203f562af224e6bb7cf1b465db06678b71c764bc793755fd5d1f4bcd" 38 | ] 39 | ], 40 | "public_share": "18d1fa6a203f562af224e6bb7cf1b465db06678b71c764bc793755fd5d1f4bcd01a98d76e07e02ee3f6c9055b3cbb73cdfd67c4dabf30d9323b93e327122e337", 41 | "rand": "000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f303132333435363738393a3b3c3d3e3f404142434445464748494a4b4c4d4e4f505152535455565758595a5b5c5d5e5f606162636465666768696a6b6c6d6e6f707172737475767778797a7b7c7d7e7f" 42 | } 43 | ], 44 | "shares": 2, 45 | "verify_key": "000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f" 46 | } 47 | -------------------------------------------------------------------------------- /test_vec/vdaf/Prio3Histogram_bad_public_share.json: 
-------------------------------------------------------------------------------- 1 | { 2 | "agg_param": "", 3 | "agg_result": null, 4 | "agg_shares": [], 5 | "chunk_length": 2, 6 | "ctx": "736f6d65206170706c69636174696f6e", 7 | "length": 5, 8 | "operations": [ 9 | { 10 | "aggregator_id": 0, 11 | "operation": "prep_init", 12 | "report_index": 0, 13 | "success": true 14 | }, 15 | { 16 | "aggregator_id": 1, 17 | "operation": "prep_init", 18 | "report_index": 0, 19 | "success": true 20 | }, 21 | { 22 | "operation": "prep_shares_to_prep", 23 | "report_index": 0, 24 | "round": 0, 25 | "success": false 26 | } 27 | ], 28 | "prep": [ 29 | { 30 | "input_shares": [ 31 | "e720f2d625ee3cabce61d583c8c4054e82e8487025764348020264792b3d100f0c8c76f65caf04ee0e22373be0d61ac5fb6e2f2866078b31b90537e24416fad12aa44b74bb71c2548b15f78fb0d8a308d878d8b7b93954e80a0a008ae08698e74409c646c5089bf508d7b32589a4442c84c94b77dd83e10d4cbdcb0a8e9084ba58812ef6c40e078587f3c82140facd7edd1d134c8fe77e054cbbe3b201fe287e73f802d264126d1cd1e62d26dc8210f3f4c294a90eeccd07a4649b3d86dd24f4995ffd32815553c38fc0ce8a7963e9751cf2fa51a65aa766662c98cea401dd92fe35d43616ad86c5561f86d8a3cbce6eb4e78e8480b98c28947ee90cf48f86d0404142434445464748494a4b4c4d4e4f505152535455565758595a5b5c5d5e5f", 32 | "000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f303132333435363738393a3b3c3d3e3f" 33 | ], 34 | "measurement": null, 35 | "nonce": "000102030405060708090a0b0c0d0e0f", 36 | "out_shares": [], 37 | "prep_messages": [], 38 | "prep_shares": [ 39 | [ 40 | "3fd4a2beffc4c823c1acf8f9a39773bdaec27f14225d18fe7de98864b3e88d9d55712f54e38d387cc86b316d537b5915ad15d319f7c9a462ca89babe10efc0ca8ee86d85f34038cd4ea89ba1678651b5c1ee2d3352368b9f5970d970d0516dc718d1fa6a203f562af224e6bb7cf1b465db06678b71c764bc793755fd5d1f4bcd", 41 | 
"c22b5d41003b37dc225307065c688c4254584ecbc8bb6b3be66dcf3c58ab4c11f72dbfccac57b6520b59c9c41e290353cebfb8098b1b94489e50eeae2ead5f6d1d58c2c93fd0f48cda4ea3561ef0f7809da23642e48ae655de8c7e6b008ab43b01a98d76e07e02ee3f6c9055b3cbb73cdfd67c4dabf30d9323b93e327122e337" 42 | ] 43 | ], 44 | "public_share": "19d1fa6a203f562af224e6bb7cf1b465db06678b71c764bc793755fd5d1f4bcd01a98d76e07e02ee3f6c9055b3cbb73cdfd67c4dabf30d9323b93e327122e337", 45 | "rand": "000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f303132333435363738393a3b3c3d3e3f404142434445464748494a4b4c4d4e4f505152535455565758595a5b5c5d5e5f606162636465666768696a6b6c6d6e6f707172737475767778797a7b7c7d7e7f" 46 | } 47 | ], 48 | "shares": 2, 49 | "verify_key": "000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f" 50 | } 51 | -------------------------------------------------------------------------------- /test_vec/vdaf/Prio3MultihotCountVec_0.json: -------------------------------------------------------------------------------- 1 | { 2 | "agg_param": "", 3 | "agg_result": [ 4 | 0, 5 | 1, 6 | 1, 7 | 0 8 | ], 9 | "agg_shares": [ 10 | "e2bb7419a8ac96a1c6def1bed78dcb074557af87236442363aa49be4fd351b11b64bede210be55b253caa20a0f75703e295aa329cd650b825d3d4492d46d2fb3", 11 | "1f448be65753695e1d210e41287234f8bda85078dc9bbdc9a95b641b02cae4ee4cb4121def41aa4d90355df5f08a8fc1d8a55cd6329af47d86c2bb6d2b92d04c" 12 | ], 13 | "chunk_length": 2, 14 | "ctx": "736f6d65206170706c69636174696f6e", 15 | "length": 4, 16 | "max_weight": 2, 17 | "operations": [ 18 | { 19 | "operation": "shard", 20 | "report_index": 0, 21 | "success": true 22 | }, 23 | { 24 | "aggregator_id": 0, 25 | "operation": "prep_init", 26 | "report_index": 0, 27 | "success": true 28 | }, 29 | { 30 | "aggregator_id": 1, 31 | "operation": "prep_init", 32 | "report_index": 0, 33 | "success": true 34 | }, 35 | { 36 | "operation": "prep_shares_to_prep", 37 | "report_index": 0, 38 | "round": 0, 39 | "success": true 40 | }, 41 | { 42 
| "aggregator_id": 0, 43 | "operation": "prep_next", 44 | "report_index": 0, 45 | "round": 1, 46 | "success": true 47 | }, 48 | { 49 | "aggregator_id": 1, 50 | "operation": "prep_next", 51 | "report_index": 0, 52 | "round": 1, 53 | "success": true 54 | }, 55 | { 56 | "aggregator_id": 0, 57 | "operation": "aggregate", 58 | "success": true 59 | }, 60 | { 61 | "aggregator_id": 1, 62 | "operation": "aggregate", 63 | "success": true 64 | }, 65 | { 66 | "operation": "unshard", 67 | "success": true 68 | } 69 | ], 70 | "prep": [ 71 | { 72 | "input_shares": [ 73 | "e2bb7419a8ac96a1c6def1bed78dcb074557af87236442363aa49be4fd351b11b64bede210be55b253caa20a0f75703e295aa329cd650b825d3d4492d46d2fb362d5c7a84cb3dc77420ae9fb319a83b91a5ed94b3fee03d209e78f1b2561041a66cf7dff4a43e15165bcaed22955b547461b80bb28663b502ad8b1536aa6d3cffdd56552a27dab37d3921be5ad85e990d6182143dd4b359053d6b41b58a260a36444c50596526e58032cf8d75335a776d8e856f751d5b9972dcb6d5e9df41e1a13612efca2bf4dc3a35d88e64bde741e93d680d9a3e0261d17a21ccdee91bde85440901071218a3ab71661cef25d4275b17fdfedfdd8a9e6e48a0ebcb73f8fec09f6945689cb0a789221792d689e2ab2404142434445464748494a4b4c4d4e4f505152535455565758595a5b5c5d5e5f", 74 | "000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f303132333435363738393a3b3c3d3e3f" 75 | ], 76 | "measurement": [ 77 | false, 78 | true, 79 | true, 80 | false 81 | ], 82 | "nonce": "000102030405060708090a0b0c0d0e0f", 83 | "out_shares": [ 84 | [ 85 | "e2bb7419a8ac96a1c6def1bed78dcb07", 86 | "4557af87236442363aa49be4fd351b11", 87 | "b64bede210be55b253caa20a0f75703e", 88 | "295aa329cd650b825d3d4492d46d2fb3" 89 | ], 90 | [ 91 | "1f448be65753695e1d210e41287234f8", 92 | "bda85078dc9bbdc9a95b641b02cae4ee", 93 | "4cb4121def41aa4d90355df5f08a8fc1", 94 | "d8a55cd6329af47d86c2bb6d2b92d04c" 95 | ] 96 | ], 97 | "prep_messages": [ 98 | "407dd84e19e24c5dd58e2a735bade1c3feaa768f055f553e0767e8f8066ddc8b" 99 | ], 100 | "prep_shares": [ 101 | [ 102 | 
"0c82e0cc5f690b9caafa428dd6d85ec0cb91386cb2e980e5475e84dc83a874724d0ec28fc8583fcd3d2049ed82fe4cb49b774302bf9e3f22840fdcae9eda08edf34af1e39cf607549e72d9603c9064bd1ce6bcc5c3cff0cd2404afaa25d39fbef626f32389f3abd8f1007f22683246938f85d46fdcf067e79c9b6b088a0c70ba", 103 | "f57d1f33a096f4633905bd722927a13f27c3d26278454e1a5238c31d07b79cfb65698f64c871fc3ae4f3ec5c948d0436d2d857e9751199e9364d6f9097174bf6f551d066f30ae33ed61f9b4bed3609d6fd20479675bf4583e46fa4df4e93182f5728b4e324470bb29993bf18e20714421c8e06f4c0622e3641eb50f24fed1117" 104 | ] 105 | ], 106 | "public_share": "f626f32389f3abd8f1007f22683246938f85d46fdcf067e79c9b6b088a0c70ba5728b4e324470bb29993bf18e20714421c8e06f4c0622e3641eb50f24fed1117", 107 | "rand": "000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f303132333435363738393a3b3c3d3e3f404142434445464748494a4b4c4d4e4f505152535455565758595a5b5c5d5e5f606162636465666768696a6b6c6d6e6f707172737475767778797a7b7c7d7e7f" 108 | } 109 | ], 110 | "shares": 2, 111 | "verify_key": "000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f" 112 | } 113 | -------------------------------------------------------------------------------- /test_vec/vdaf/Prio3Sum_0.json: -------------------------------------------------------------------------------- 1 | { 2 | "agg_param": "", 3 | "agg_result": 100, 4 | "agg_shares": [ 5 | "21d79fba22fcea2e", 6 | "44296045dc0315d1" 7 | ], 8 | "ctx": "736f6d65206170706c69636174696f6e", 9 | "max_measurement": 255, 10 | "operations": [ 11 | { 12 | "operation": "shard", 13 | "report_index": 0, 14 | "success": true 15 | }, 16 | { 17 | "aggregator_id": 0, 18 | "operation": "prep_init", 19 | "report_index": 0, 20 | "success": true 21 | }, 22 | { 23 | "aggregator_id": 1, 24 | "operation": "prep_init", 25 | "report_index": 0, 26 | "success": true 27 | }, 28 | { 29 | "operation": "prep_shares_to_prep", 30 | "report_index": 0, 31 | "round": 0, 32 | "success": true 33 | }, 34 | { 35 | "aggregator_id": 0, 36 | 
"operation": "prep_next", 37 | "report_index": 0, 38 | "round": 1, 39 | "success": true 40 | }, 41 | { 42 | "aggregator_id": 1, 43 | "operation": "prep_next", 44 | "report_index": 0, 45 | "round": 1, 46 | "success": true 47 | }, 48 | { 49 | "aggregator_id": 0, 50 | "operation": "aggregate", 51 | "success": true 52 | }, 53 | { 54 | "aggregator_id": 1, 55 | "operation": "aggregate", 56 | "success": true 57 | }, 58 | { 59 | "operation": "unshard", 60 | "success": true 61 | } 62 | ], 63 | "prep": [ 64 | { 65 | "input_shares": [ 66 | "43126178225dca30c56c892f4cef3ca6f627cae360f77d4b2994ee7fbe779a5892f97897b3c5ff28eb1156127a87cb71feca3aa67f3f66472914077ef4bb37488245444045a2293f2c51f6d5d497646aa2ab3bdc62c1db973fabd7f72169d6d5dbe73437837c1c0662bb24addf7e07e6ae56db956b47ccc755b7c35912f67fb1b50659e637f1034ee3214ff4916ea9d1298a119d0e44c922bd17cc333224c31e67dc3282b61b9aab42ca71062aea44000cf3d13d667e8223477604725e654230e2a7fe703f610869e9f65d5a00203af41da5120d8850de22994a3f5de88910257bf11730c7721c63d712f309ff1a16961a6870876f89b7aece0a7aae9726c957a3bbde29775ad0e4159e2cdfaa741f3f8562468e879f0f4e50829d16ade458e32e321f2b7ab80348c4fcd23a2aeb3921b4b6c297d0e0e10ba6ac3839d9be19018012dfcce38bfb12d0caefc81b0bf2901638317c0a708b0d73f45b43aaf7cc1f7eb7ea6bd7f8e1248ef49224f1735c8112e1f740fc7020d3185ed3c588611aa6aed0f49ac90d908e28213b754fc64e330998b132579ea55c4d5be49e026fcb6e962377d3d540fa2d45116efa46182b7175e29f95499362aa5b631a4b504c7289a7cbf673f2f9d0f0e224f37a862ae822b5359c93de1a40e52e142897dcf25ee5bfaf4132019c1ad28f128186fd48b49eeca0ba9c31e25a025e2d5999e2654a02b6d1bd07d2a2095045cbd367a00a2d34976dc10effb80228afb1c3a7f05943053d65bd900b7a4bfd3833a93f1e0c3c0ecc469d10fbc05ac7881c68f0e3229efdc0e7d15d08a63d9c6de25377f4e01bdc64748dcfc1381977f323cce3c0d453c6a97df87718f54f386228fa70b978c331be3c4af998645e68b00d1f95bb7fedd7", 67 | "000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f" 68 | ], 69 | "measurement": 100, 70 | "nonce": "000102030405060708090a0b0c0d0e0f", 71 | "out_shares": [ 72 | 
[ 73 | "21d79fba22fcea2e" 74 | ], 75 | [ 76 | "44296045dc0315d1" 77 | ] 78 | ], 79 | "prep_messages": [ 80 | "" 81 | ], 82 | "prep_shares": [ 83 | [ 84 | "3930d34b74e6676f0158033969b58f5f113b356a799287a5", 85 | "c8cf2cb48a199890ca5a3a856a34c034919f038dd5483648" 86 | ] 87 | ], 88 | "public_share": "", 89 | "rand": "000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f303132333435363738393a3b3c3d3e3f" 90 | } 91 | ], 92 | "shares": 2, 93 | "verify_key": "000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f" 94 | } 95 | -------------------------------------------------------------------------------- /test_vec/vdaf/Prio3Sum_1.json: -------------------------------------------------------------------------------- 1 | { 2 | "agg_param": "", 3 | "agg_result": 100, 4 | "agg_shares": [ 5 | "0838d7ed77cd6687", 6 | "44296045dc0315d1", 7 | "1a9fc8cca92e84a7" 8 | ], 9 | "ctx": "736f6d65206170706c69636174696f6e", 10 | "max_measurement": 255, 11 | "operations": [ 12 | { 13 | "operation": "shard", 14 | "report_index": 0, 15 | "success": true 16 | }, 17 | { 18 | "aggregator_id": 0, 19 | "operation": "prep_init", 20 | "report_index": 0, 21 | "success": true 22 | }, 23 | { 24 | "aggregator_id": 1, 25 | "operation": "prep_init", 26 | "report_index": 0, 27 | "success": true 28 | }, 29 | { 30 | "aggregator_id": 2, 31 | "operation": "prep_init", 32 | "report_index": 0, 33 | "success": true 34 | }, 35 | { 36 | "operation": "prep_shares_to_prep", 37 | "report_index": 0, 38 | "round": 0, 39 | "success": true 40 | }, 41 | { 42 | "aggregator_id": 0, 43 | "operation": "prep_next", 44 | "report_index": 0, 45 | "round": 1, 46 | "success": true 47 | }, 48 | { 49 | "aggregator_id": 1, 50 | "operation": "prep_next", 51 | "report_index": 0, 52 | "round": 1, 53 | "success": true 54 | }, 55 | { 56 | "aggregator_id": 2, 57 | "operation": "prep_next", 58 | "report_index": 0, 59 | "round": 1, 60 | "success": true 61 | }, 62 | { 63 | "aggregator_id": 0, 
64 | "operation": "aggregate", 65 | "success": true 66 | }, 67 | { 68 | "aggregator_id": 1, 69 | "operation": "aggregate", 70 | "success": true 71 | }, 72 | { 73 | "aggregator_id": 2, 74 | "operation": "aggregate", 75 | "success": true 76 | }, 77 | { 78 | "operation": "unshard", 79 | "success": true 80 | } 81 | ], 82 | "prep": [ 83 | { 84 | "input_shares": [ 85 | "c7fb0bdef5bfe20176af571fbc49515389eaadd4df90c3529f06e993f24e7938e710c770d2dbd06f9b2523f00213f7baaa65475354cb73e7a5d9f889974b2e776e8b996484aeb62b04576947b5a96b0a72189f03c37c2a8309fee95bc15681c9115ee8147e871e4f1edb0ec1e56b68ad083182c17baa213f17a8d8624f21213c55a404746658fb100e1cb0a086315bbcba60c6e3ed3018ed8e7e837f2033bb8284a440d0822e5aaf2cc0dc43c6fc7f78a9a9b2471a4db243f8de37a30b6bcc6c8369f1c6d0c80cc096d25fff0aee3a6e2f9d6da165b01cfe2d7141caa00439e110b68fdd94556d1506ef1499cd969438d98b641f9c132ed547da7eab49dff9c0545f57abbf971f52ccd2e993781cd0cd2c8788b5279e5f70339731ac86ed9195cd3c60836b6dd9698fd1e4ad7050a698df611eee9c5ba98f10e6b64485d7c82bf48968aacfabf982c4b8d98196e25c276dd5e31ee61144266c66d2d2889047cf0b509d0e447e378c179bbd18386eb389c10cfc815ce724b01e47aafef1bb8e52753548f77626a0af40b75036ad6518e293284f82ccb28b4baa512c45276c2986908c87cc69c53f9ac7e0d6850ee756cd430745239b545552c8776d6bd3e2573415e89d96f6b4064456d4cdeb577f87eff828e008e6b1ec0b5943abef717959c0816e12f771fe4feac12ebf5eebb6a268182d25e08027a8dc01a81fbfff43b5604d034cb8d0ba775899a9b6e02bb5542e7ccbeaedf230cedfadf50a541b51281b4d85f67b6c8cfbe1b5e77f95b70053553d7686ee34f5bc6462a0121af4d61057bdea117ebc86fb37f8ac45955a6b73d7dfa2a193033b4524a106de810ac96c754ffc1e46b09c6310564f3a58cfcc123f9fc6d9a530bd16fcbfe45a0dd91b929f", 86 | "000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f", 87 | "202122232425262728292a2b2c2d2e2f303132333435363738393a3b3c3d3e3f" 88 | ], 89 | "measurement": 100, 90 | "nonce": "000102030405060708090a0b0c0d0e0f", 91 | "out_shares": [ 92 | [ 93 | "0838d7ed77cd6687" 94 | ], 95 | [ 96 | "44296045dc0315d1" 97 | ], 98 | [ 99 | 
"1a9fc8cca92e84a7" 100 | ] 101 | ], 102 | "prep_messages": [ 103 | "" 104 | ], 105 | "prep_shares": [ 106 | [ 107 | "bc2b0bbf09338766f8abb2e591e5b779587a4647599932a2", 108 | "c8cf2cb48a199890ca5a3a856a34c034919f038dd5483648", 109 | "7d04c88c6ab3e008f1ee26ce020902b52a880fe76f72d29a" 110 | ] 111 | ], 112 | "public_share": "", 113 | "rand": "000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f303132333435363738393a3b3c3d3e3f404142434445464748494a4b4c4d4e4f505152535455565758595a5b5c5d5e5f" 114 | } 115 | ], 116 | "shares": 3, 117 | "verify_key": "000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f" 118 | } 119 | --------------------------------------------------------------------------------