├── .gitignore ├── secsidh-rs ├── measuring ├── archived-results │ ├── .gitignore │ ├── upload.sh │ ├── README.md │ └── download.sh ├── .gitignore ├── scripts │ ├── setinitcwd.sh │ ├── create-experimental-setup.sh │ ├── setup_ns.sh │ ├── measure-handshake-size.sh │ ├── count-hs-bytes.py │ ├── process.py │ └── experiment.py ├── README.md ├── run_experiment.sh └── LICENSE.md ├── .dockerignore ├── update_submodules.sh ├── LICENSE.md ├── update_algorithms.sh ├── .gitmodules ├── CITATION.bib ├── Dockerfile └── README.md /.gitignore: -------------------------------------------------------------------------------- 1 | .vscode/ -------------------------------------------------------------------------------- /secsidh-rs: -------------------------------------------------------------------------------- 1 | secsidh/secsidh-rs -------------------------------------------------------------------------------- /measuring/archived-results/.gitignore: -------------------------------------------------------------------------------- 1 | *.tar.* 2 | data-* 3 | draft-* 4 | -------------------------------------------------------------------------------- /measuring/.gitignore: -------------------------------------------------------------------------------- 1 | bin/* 2 | data/* 3 | data-*/* 4 | *.tar.* 5 | processed/* 6 | __pycache__ 7 | -------------------------------------------------------------------------------- /measuring/scripts/setinitcwd.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | sudo ip netns exec srv_ns ip route change 10.99.0.0/24 via 10.99.0.1 dev srv_ve initcwnd $1 4 | sudo ip netns exec cli_ns ip route change 10.99.0.0/24 via 10.99.0.2 dev cli_ve initcwnd $1 5 | -------------------------------------------------------------------------------- /.dockerignore: -------------------------------------------------------------------------------- 1 | .git/* 2 | Dockerfile 3 | .dockerignore 4 | *.sh 5 | **/target/* 6 | mk-cert/*.crt 7 | mk-cert/*.pub 8 | mk-cert/*.bin 9 | mk-cert/*.key 10 | mk-cert/tbs* 11 | *.o 12 | *.a 13 | *.so 14 | measuring/* 15 | rustls/Cargo.lock 16 | webpki/Cargo.lock 17 | -------------------------------------------------------------------------------- /update_submodules.sh: -------------------------------------------------------------------------------- 1 | #!/bin/zsh 2 | 3 | pushd mk-cert 4 | git submodule update 5 | pushd signutil 6 | cargo update 7 | git add Cargo.lock 8 | popd 9 | pushd kemutil 10 | cargo update 11 | git add Cargo.lock 12 | popd 13 | 14 | git commit -m "Update Cargo lockfiles" 15 | git push 16 | 17 | popd 18 | git add mk-cert 19 | 20 | pushd rustls 21 | cargo update 22 | popd 23 | 24 | pushd webpki 25 | cargo update 26 | popd 27 | -------------------------------------------------------------------------------- /LICENSE.md: -------------------------------------------------------------------------------- 1 | # Licensing in this consolidated project 2 | 3 | Due to the nature of this project, making heavy use of (modified) existing software, 4 | there is no uniform licensing that we can make available. 5 | In general, we want to make our modifications available under an as permissive as 6 | possible license, and as such, we make them available under the CC0 license. 7 | 8 | In each of the subdirectories you should find one or more files named `LICENSE`. 9 | The terms in those files generally apply to the software contained within. 
10 | -------------------------------------------------------------------------------- /measuring/README.md: -------------------------------------------------------------------------------- 1 | # Scripts 2 | 3 | * `./create-binaries-and-certs.sh`: creates the necessary binaries and certificates. Uses Docker. 4 | * `experiment.py`: produces a `data` folder to be used by `process.py`. Sets up experiments as necessary. 5 | 6 | ## credit 7 | This is inspired by https://github.com/xvzcf/pq-tls-benchmark/tree/master/emulation-exp/ 8 | 9 | which is the code that accompanies 10 | 11 | Christian Paquin, Douglas Stebila, and Goutam Tamvada. Benchmarking post-quantum cryptography in TLS. IACR Cryptology ePrint Archive, Report 2019/1447. December, 2019. https://eprint.iacr.org/2019/1447. 12 | -------------------------------------------------------------------------------- /update_algorithms.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | set -e 3 | 4 | pushd mk-cert/kemutil 5 | cargo update 6 | popd 7 | pushd mk-cert/signutil 8 | cargo update 9 | popd 10 | 11 | pushd webpki/src 12 | python3 generate_schemes.py 13 | git add data generated 14 | popd 15 | 16 | pushd webpki 17 | cargo update 18 | cargo check 19 | popd 20 | 21 | pushd rustls 22 | python3 generate_schemes.py 23 | git add rustls/src/generated 24 | cargo update 25 | popd 26 | 27 | pushd rustls/test-ca 28 | 29 | for dir in kyber dilithium; do 30 | pushd $dir 31 | bash cp_from_mkcert.sh 32 | git add . 33 | popd 34 | done 35 | 36 | popd 37 | -------------------------------------------------------------------------------- /measuring/archived-results/upload.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # First list already existing files 4 | 5 | BUCKET=kemtls 6 | 7 | existing_files=$(aws s3 ls s3://$BUCKET/archived-results/ | grep -o ".*data-.*.tar.*") 8 | 9 | echo "#!/bin/sh" > ./download.sh 10 | 11 | for file in *.tar.*; do 12 | if [[ ! "${existing_files}" =~ "${file}" ]]; then 13 | echo "$file not already uploaded" 14 | aws s3 cp "$file" "s3://$BUCKET/archived-results/" 15 | else 16 | echo "$file already exists" 17 | fi 18 | echo "wget 'https://kemtls.s3.amazonaws.com/archived-results/$file'" >> ./download.sh 19 | done 20 | -------------------------------------------------------------------------------- /measuring/archived-results/README.md: -------------------------------------------------------------------------------- 1 | # Archived results 2 | 3 | This directory contains results that have been processed into results in (versions of) our publications. 4 | 5 | ## ERRATA 6 | 7 | The results from 2021-05 and 2021-12 are flawed; due to an implementation error they all use Kyber512 for ephemeral key exchange. 8 | 9 | ## Obtaining results 10 | 11 | Execute the `download.sh` script. 12 | 13 | ## Uploading new results 14 | 15 | Thom has the keys for the S3 bucket, he can upload results. 16 | 17 | ## Long-term access 18 | 19 | This repository is archived with Zenodo, and the archive files should also be there. 
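As a concrete sketch of obtaining and unpacking the S3 copies (assumes `wget` plus a GNU `tar` with xz and zstd support; the file names below are the ones listed in `download.sh`):

```sh
sh download.sh                                   # fetches every archived data-*.tar.* file
tar -xf data-2022-01-26.tar.xz                   # the older archives are xz-compressed
tar --zstd -xf secsidh-data-2023-10-11.tar.zst   # the 2023 archives use zstd
```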
20 | -------------------------------------------------------------------------------- /.gitmodules: -------------------------------------------------------------------------------- 1 | [submodule "webpki"] 2 | path = webpki 3 | url = https://github.com/thomwiggers/webpki.git 4 | branch = optls 5 | [submodule "mk-cert"] 6 | path = mk-cert 7 | url = https://github.com/thomwiggers/mk-cert 8 | [submodule "rustls"] 9 | path = rustls 10 | url = https://github.com/thomwiggers/rustls.git 11 | branch = secsidh-nike 12 | [submodule "ring"] 13 | path = ring 14 | url = https://github.com/thomwiggers/ring.git 15 | [submodule "oqs-rs"] 16 | path = oqs-rs 17 | url = https://github.com/thomwiggers/liboqs-rust.git 18 | [submodule "secsidh"] 19 | path = secsidh 20 | url = https://github.com/kemtls-secsidh/secsidh.git 21 | -------------------------------------------------------------------------------- /measuring/run_experiment.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -e 4 | 5 | echo "Make sure to setup the namespaces beforehand." 6 | echo "Run ./scripts/setup_ns.sh" 7 | 8 | echo "This will remove the current experiment data!" 9 | read -p "Are you sure? " -n 1 -r 10 | echo # (optional) move to a new line 11 | 12 | if [[ $REPLY =~ ^[Nn]$ ]] 13 | then 14 | exit 15 | fi 16 | 17 | rm -rf data 18 | if [ -d bin ]; then 19 | echo "Delete bin folder to recreate stuff" 20 | fi 21 | 22 | sudo killall -9 tlsserver || true 23 | 24 | ntfy="" 25 | if command -v ntfy > /dev/null; then 26 | ntfy="ntfy -b pushover done " 27 | fi 28 | 29 | $ntfy sudo -E python3.9 -u scripts/experiment.py $@ 30 | -------------------------------------------------------------------------------- /measuring/archived-results/download.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | wget 'https://kemtls.s3.amazonaws.com/archived-results/data-2020-05-03.tar.xz' 3 | wget 'https://kemtls.s3.amazonaws.com/archived-results/data-2020-06-12.tar.xz' 4 | wget 'https://kemtls.s3.amazonaws.com/archived-results/data-2021-05-07.tar.xz' 5 | wget 'https://kemtls.s3.amazonaws.com/archived-results/data-2021-12-13.tar.xz' 6 | wget 'https://kemtls.s3.amazonaws.com/archived-results/data-2022-01-26.tar.xz' 7 | wget 'https://kemtls.s3.amazonaws.com/archived-results/data-2023-04-14.tar.zst' 8 | wget 'https://kemtls.s3.amazonaws.com/archived-results/secsidh-data-2023-05-20.tar.zst' 9 | wget 'https://kemtls.s3.amazonaws.com/archived-results/secsidh-data-2023-10-11.tar.zst' 10 | -------------------------------------------------------------------------------- /measuring/scripts/create-experimental-setup.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -e 4 | 5 | ROOT=$(dirname $0)/../../ 6 | cd $ROOT 7 | 8 | export KEX_ALG="${1:-Kyber512}" 9 | export LEAF_ALG="${2:-Dilithium2}" 10 | export INT_SIGALG="${3:-Dilithium2}" 11 | export ROOT_SIGALG="${4:-Dilithium2}" 12 | export CLIENT_ALG="${5}" 13 | export CLIENT_CA_ALG="${6}" 14 | export KEYGEN_CACHE="${7}" 15 | 16 | tag=${KEX_ALG,,}-${LEAF_ALG,,}-${INT_SIGALG,,}-${ROOT_SIGALG,,} 17 | 18 | extra_args= 19 | if [ "$CLIENT_ALG" != "" ]; then 20 | tag=${tag}-clauth-${CLIENT_ALG,,}-${CLIENT_CA_ALG,,} 21 | extra_args="--build-arg CLIENT_ALG=$CLIENT_ALG --build-arg CLIENT_CA_ALG=$CLIENT_CA_ALG" 22 | fi 23 | 24 | if [ "$KEYGEN_CACHE" != "" ]; then 25 | tag=${tag}-keycache 26 | extra_args="$extra_args --build-arg RUSTLS_FEATURES=--features=lru" 27 | 
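    # RUSTLS_FEATURES is forwarded as a Docker build argument and ends up in the
    # Dockerfile's `cargo build` of the tlsserver/tlsclient examples, so setting
    # KEYGEN_CACHE builds them with the `lru` feature enabled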
fi 28 | 29 | dockertag="$(echo -n $tag | shasum - | cut -f1 -d' ')" 30 | 31 | docker build \ 32 | --build-arg ROOT_SIGALG=$ROOT_SIGALG \ 33 | --build-arg INT_SIGALG=$INT_SIGALG \ 34 | --build-arg LEAF_ALG=$LEAF_ALG \ 35 | --build-arg KEX_ALG=$KEX_ALG \ 36 | $extra_args \ 37 | --tag "pqtls-builder:$dockertag" . 38 | 39 | volumename=$PWD/measuring/bin/$tag 40 | echo $volumename 41 | rm -rf $volumename 42 | mkdir -p $volumename 43 | 44 | docker run --rm \ 45 | --user $(id -u):$(id -g) \ 46 | --volume $volumename:/output \ 47 | --workdir /output \ 48 | "pqtls-builder:$dockertag" \ 49 | bash -c "cp /usr/local/bin/tlsserver . && 50 | cp /usr/local/bin/tlsclient . && 51 | cp /certs/* ." 52 | 53 | if [ "$SUDO_USER" != "" ]; then 54 | chown -R $SUDO_USER:$SUDO_GID . 55 | fi -------------------------------------------------------------------------------- /measuring/scripts/setup_ns.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | set -x 3 | 4 | ########################## 5 | # Setup network namespaces 6 | ########################## 7 | 8 | SERVER_VETH_LL_ADDR=00:00:00:00:00:02 9 | SERVER_NS=srv_ns 10 | SERVER_VETH=srv_ve 11 | 12 | CLIENT_NS=cli_ns 13 | CLIENT_VETH_LL_ADDR=00:00:00:00:00:01 14 | CLIENT_VETH=cli_ve 15 | 16 | ip netns add ${SERVER_NS} 17 | ip netns add ${CLIENT_NS} 18 | ip link add \ 19 | name ${SERVER_VETH} \ 20 | address ${SERVER_VETH_LL_ADDR} \ 21 | netns ${SERVER_NS} type veth \ 22 | peer name ${CLIENT_VETH} \ 23 | address ${CLIENT_VETH_LL_ADDR} \ 24 | netns ${CLIENT_NS} 25 | 26 | ip netns exec ${SERVER_NS} \ 27 | ip link set dev ${SERVER_VETH} up 28 | ip netns exec ${SERVER_NS} \ 29 | ip link set dev lo up 30 | ip netns exec ${SERVER_NS} \ 31 | ip addr add 10.99.0.1/24 dev ${SERVER_VETH} 32 | 33 | ip netns exec ${CLIENT_NS} \ 34 | ip addr add 10.99.0.2/24 dev ${CLIENT_VETH} 35 | ip netns exec ${CLIENT_NS} \ 36 | ip link set dev lo up 37 | ip netns exec ${CLIENT_NS} \ 38 | ip link set dev ${CLIENT_VETH} up 39 | ip netns exec ${CLIENT_NS} \ 40 | ip link set dev lo up 41 | 42 | ip netns exec ${SERVER_NS} \ 43 | ip neigh add 10.99.0.2 \ 44 | lladdr ${CLIENT_VETH_LL_ADDR} \ 45 | dev ${SERVER_VETH} 46 | ip netns exec ${CLIENT_NS} \ 47 | ip neigh add 10.99.0.1 \ 48 | lladdr ${SERVER_VETH_LL_ADDR} \ 49 | dev ${CLIENT_VETH} 50 | 51 | # Turn off optimizations 52 | # that dent realism. 53 | ip netns exec ${CLIENT_NS} \ 54 | ethtool -K ${CLIENT_VETH} gso off gro off tso off 55 | 56 | ip netns exec ${SERVER_NS} \ 57 | ethtool -K ${SERVER_VETH} gso off gro off tso off 58 | 59 | ip netns exec ${CLIENT_NS} \ 60 | tc qdisc add \ 61 | dev ${CLIENT_VETH} \ 62 | root netem 63 | ip netns exec ${SERVER_NS} \ 64 | tc qdisc add \ 65 | dev ${SERVER_VETH} \ 66 | root netem 67 | 68 | if ! 
grep -q "servername" /etc/hosts; then 69 | echo "Adding servername to /etc/hosts" 70 | echo "10.99.0.1 servername" >> /etc/hosts 71 | fi 72 | -------------------------------------------------------------------------------- /CITATION.bib: -------------------------------------------------------------------------------- 1 | @inproceedings{CCS:SchSteWig20, 2 | author = {Schwabe, Peter and Stebila, Douglas and Wiggers, Thom}, 3 | title = {Post-Quantum {TLS} Without Handshake Signatures}, 4 | year = {2020}, 5 | isbn = {9781450370899}, 6 | publisher = {Association for Computing Machinery}, 7 | address = {New York, {NY}, {USA}}, 8 | url = {https://thomwiggers.nl/publication/kemtls/}, 9 | doi = {10.1145/3372297.3423350}, 10 | booktitle = {Proceedings of the 2020 {ACM} {SIGSAC} Conference on Computer and Communications Security}, 11 | pages = {1461–1480}, 12 | numpages = {20}, 13 | keywords = {transport layer security, key-encapsulation mechanism, {NIST PQC}, post-quantum cryptography}, 14 | location = {Virtual Event, {USA}}, 15 | series = {{CCS '20}} 16 | } 17 | 18 | @misc{EPRINT:SchSteWig20, 19 | author = {Peter Schwabe and Douglas Stebila and Thom Wiggers}, 20 | title = {Post-quantum {TLS} without handshake signatures}, 21 | year = 2022, 22 | month = mar, 23 | note = {full online version}, 24 | url = {https://ia.cr/2020/534}, 25 | } 26 | 27 | @inproceedings{ESORICS:SchSteWig21, 28 | title = {More efficient post-quantum {KEMTLS} with pre-distributed public keys}, 29 | author = {Peter Schwabe and Douglas Stebila and Thom Wiggers}, 30 | year = 2021, 31 | month = sep, 32 | url = {https://thomwiggers.nl/publication/kemtlspdk/}, 33 | editor = {Bertino, Elisa and Shulman, Haya and Waidner, Michael}, 34 | booktitle = {Computer Security -- ESORICS 2021}, 35 | series = {Lecture Notes in Computer Science}, 36 | publisher = {Springer International Publishing}, 37 | address = {Cham}, 38 | pages = {3--22}, 39 | isbn = {978-3-030-88418-5}, 40 | doi = {10.1007/978-3-030-88418-5_1}, 41 | } 42 | 43 | @misc{EPRINT:SchSteWig21, 44 | author = {Peter Schwabe and Douglas Stebila and Thom Wiggers}, 45 | title = {More efficient post-quantum {KEMTLS} with pre-distributed public keys}, 46 | howpublished = {Cryptology ePrint Archive, Paper 2021/779}, 47 | year = {2022}, 48 | month = mar, 49 | note = {full online version}, 50 | url = {https://eprint.iacr.org/2021/779} 51 | } 52 | 53 | @misc{EPRINT:CCCMRRSW23, 54 | author = {Fabio Campos and Jorge Chavez-Saab and Jesús-Javier Chi-Domínguez and Michael Meyer and Krijn Reijnders and Francisco Rodríguez-Henríquez and Peter Schwabe and Thom Wiggers}, 55 | title = {Optimizations and Practicality of High-Security {CSIDH}}, 56 | howpublished = {Cryptology ePrint Archive, Paper 2023/793}, 57 | year = {2023}, 58 | url = {https://eprint.iacr.org/2023/793} 59 | } 60 | 61 | @phdthesis{RU:Wiggers24, 62 | title = {Post-Quantum {TLS}}, 63 | author = {Thom Wiggers}, 64 | date = {2024-01-09}, 65 | school = {Radboud University}, 66 | address = {Nijmegen, The Netherlands}, 67 | url = {https://thomwiggers.nl/publication/thesis/} 68 | } 69 | 70 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | # Author: Thom Wiggers 2 | # LICENSE: CC0 3 | # 4 | FROM rust:1.66-bullseye AS builder 5 | 6 | SHELL ["/bin/bash", "-c"] 7 | 8 | EXPOSE 8443 443/tcp 9 | 10 | ADD https://apt.llvm.org/llvm-snapshot.gpg.key /llvm.key 11 | RUN apt-key add /llvm.key 12 | 13 | # Install requirements 
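# (clang-12/llvm-12 come from the apt.llvm.org repository configured below;
#  pipenv is needed further down for mk-cert's `pipenv install` and `encoder.py`)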
14 | RUN echo "deb http://apt.llvm.org/bullseye/ llvm-toolchain-bullseye-12 main" > /etc/apt/sources.list.d/llvm.list 15 | RUN apt-get update -qq 16 | RUN apt-get install -qq -y pipenv libssl-dev cmake clang-12 llvm-12 17 | 18 | # Default C compiler 19 | # XXX: Somehow clang breaks. 20 | ENV CC=gcc 21 | 22 | # Rust options 23 | ENV RUSTFLAGS "-C target-cpu=native -C link-arg=-s" 24 | ENV RUST_MIN_STACK "20971520" 25 | 26 | # Copy in the source 27 | COPY mk-cert /usr/src/pqtls/mk-cert 28 | 29 | # Cleanup mk-cert and install deps 30 | WORKDIR /usr/src/pqtls/mk-cert 31 | RUN pipenv install 32 | RUN ./clean.sh 33 | 34 | # populate cargo build caches 35 | WORKDIR /usr/src/pqtls/mk-cert/signutil 36 | RUN echo "pub use oqs::sig::Algorithm::Dilithium2 as alg;" > src/lib.rs 37 | RUN cargo update 38 | RUN cargo build --release --examples 39 | 40 | WORKDIR /usr/src/pqtls/mk-cert/kemutil 41 | RUN echo "pub use oqs::kem::Algorithm::Kyber512 as thealgorithm;" > src/kem.rs 42 | RUN cargo update 43 | RUN cargo build --release --features oqs 44 | 45 | COPY secsidh /usr/src/pqtls 46 | COPY secsidh-rs /usr/src/pqtls/secsidh-rs 47 | WORKDIR /usr/src/pqtls/mk-cert/csidhutil 48 | RUN echo "pub use csidh_rust::ctidh512 as csidh;" > src/instance.rs 49 | RUN cargo update 50 | RUN cargo build --features csidh-rust --release 51 | 52 | WORKDIR /usr/src/pqtls/mk-cert/xmss-rs 53 | RUN cargo build --release 54 | 55 | # Copy remaining sources 56 | COPY webpki /usr/src/pqtls/webpki 57 | COPY ring /usr/src/pqtls/ring 58 | COPY rustls /usr/src/pqtls/rustls 59 | 60 | # Generate rustls build cache 61 | WORKDIR /usr/src/pqtls/rustls/rustls-mio 62 | RUN cargo build --release --examples 63 | 64 | # pre-Compile tlsserver and tlsclient examples 65 | WORKDIR /usr/src/pqtls/rustls/rustls-mio/ 66 | RUN cargo build --release --example tlsserver && \ 67 | cargo build --release --example tlsclient 68 | 69 | # These must exactly match what is listed in the options of mk-cert/encoder.py 70 | # (and those follow from liboqs) 71 | ARG KEX_ALG="Kyber512" 72 | # re-export build args as env vars 73 | ENV KEX_ALG $KEX_ALG 74 | 75 | # Update the KEX alg 76 | RUN sed -i 's@NamedGroup::[[:alnum:]]\+@NamedGroup::'${KEX_ALG}'@' /usr/src/pqtls/rustls/rustls/src/client/default_group.rs 77 | 78 | ARG RUSTLS_FEATURES="" 79 | # Compile tlsserver and tlsclient examples 80 | RUN cargo build --release $RUSTLS_FEATURES --example tlsserver && \ 81 | cargo build --release $RUSTLS_FEATURES --example tlsclient 82 | 83 | # These must exactly match what is listed in the options of mk-cert/encoder.py 84 | # (and those follow from liboqs) 85 | ARG ROOT_SIGALG="Dilithium2" 86 | ARG INT_SIGALG="Dilithium2" 87 | ARG LEAF_ALG="Dilithium2" 88 | ARG CLIENT_ALG="Kyber512" 89 | ARG CLIENT_CA_ALG="Dilithium2" 90 | ENV ROOT_SIGALG $ROOT_SIGALG 91 | ENV INT_SIGALG $INT_SIGALG 92 | ENV LEAF_ALG $LEAF_ALG 93 | ENV CLIENT_ALG $CLIENT_ALG 94 | ENV CLIENT_CA_ALG $CLIENT_CA_ALG 95 | 96 | # actually generate the certificates 97 | WORKDIR /usr/src/pqtls/mk-cert 98 | RUN pipenv run python encoder.py 99 | 100 | # Set up clean environment 101 | FROM debian:bullseye 102 | 103 | # Install libssl1.1 104 | RUN apt-get update -qq \ 105 | && apt-get install -qq -y libssl1.1 \ 106 | && rm -rf /var/cache/apt 107 | 108 | COPY --from=builder /usr/src/pqtls/rustls/target/release/examples/tlsserver /usr/local/bin/tlsserver 109 | COPY --from=builder /usr/src/pqtls/rustls/target/release/examples/tlsclient /usr/local/bin/tlsclient 110 | COPY --from=builder /usr/src/pqtls/mk-cert/*.crt /certs/ 111 | 
COPY --from=builder /usr/src/pqtls/mk-cert/*.key /certs/ 112 | COPY --from=builder /usr/src/pqtls/mk-cert/*.pub /certs/ 113 | 114 | WORKDIR /certs 115 | CMD ["echo", "Run tls{server,client} for the rustls-mio server/client with KEX:", $KEX_ALG] 116 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Post-Quantum TLS without handshake signatures 2 | 3 | This repository accompanies 4 | 5 | * Peter Schwabe, Douglas Stebila and Thom Wiggers. **More efficient KEMTLS with pre-distributed public keys.** ESORICS 2021. 6 | * Peter Schwabe, Douglas Stebila and Thom Wiggers. **Post-quantum TLS without handshake signatures.** ACM CCS 2020. 7 | * Peter Schwabe, Douglas Stebila and Thom Wiggers. **More efficient KEMTLS with pre-distributed public keys.** IACR Cryptology ePrint Archive, Report 2021/779. Updated online version. March 2022. 8 | * Peter Schwabe, Douglas Stebila and Thom Wiggers. **Post-quantum TLS without handshake signatures.** IACR Cryptology ePrint Archive, Report 2020/534. Updated online version. March 2022. 9 | * Fabio Campos, Jorge Chavez-Saab, Jesús-Javier Chi-Domínguez, Michael Meyer, Krijn Reijnders, Francisco Rodríguez-Henríquez, Peter Schwabe, Thom Wiggers. **Optimizations and Practicality of High-Security CSIDH.** IACR Cryptology ePrint Archive, Report 2023/793. October 2023. 10 | * Thom Wiggers. **Post-Quantum TLS**. PhD thesis, January 2024. 11 | 12 | ```bibtex 13 | @inproceedings{CCS:SchSteWig20, 14 | author = {Schwabe, Peter and Stebila, Douglas and Wiggers, Thom}, 15 | title = {Post-Quantum {TLS} Without Handshake Signatures}, 16 | year = {2020}, 17 | isbn = {9781450370899}, 18 | publisher = {Association for Computing Machinery}, 19 | address = {New York, {NY}, {USA}}, 20 | url = {https://thomwiggers.nl/publication/kemtls/}, 21 | doi = {10.1145/3372297.3423350}, 22 | booktitle = {Proceedings of the 2020 {ACM} {SIGSAC} Conference on Computer and Communications Security}, 23 | pages = {1461–1480}, 24 | numpages = {20}, 25 | keywords = {transport layer security, key-encapsulation mechanism, {NIST PQC}, post-quantum cryptography}, 26 | location = {Virtual Event, {USA}}, 27 | series = {{CCS '20}} 28 | } 29 | 30 | @misc{EPRINT:SchSteWig20, 31 | author = {Peter Schwabe and Douglas Stebila and Thom Wiggers}, 32 | title = {Post-quantum {TLS} without handshake signatures}, 33 | year = 2022, 34 | month = mar, 35 | note = {full online version}, 36 | url = {https://ia.cr/2020/534}, 37 | } 38 | 39 | @inproceedings{ESORICS:SchSteWig21, 40 | title = {More efficient post-quantum {KEMTLS} with pre-distributed public keys}, 41 | author = {Peter Schwabe and Douglas Stebila and Thom Wiggers}, 42 | year = 2021, 43 | month = sep, 44 | url = {https://thomwiggers.nl/publication/kemtlspdk/}, 45 | editor = {Bertino, Elisa and Shulman, Haya and Waidner, Michael}, 46 | booktitle = {Computer Security -- ESORICS 2021}, 47 | series = {Lecture Notes in Computer Science}, 48 | publisher = {Springer International Publishing}, 49 | address = {Cham}, 50 | pages = {3--22}, 51 | isbn = {978-3-030-88418-5}, 52 | doi = {10.1007/978-3-030-88418-5_1}, 53 | } 54 | 55 | @misc{EPRINT:SchSteWig21, 56 | author = {Peter Schwabe and Douglas Stebila and Thom Wiggers}, 57 | title = {More efficient post-quantum {KEMTLS} with pre-distributed public keys}, 58 | howpublished = {Cryptology ePrint Archive, Paper 2021/779}, 59 | year = {2022}, 60 | month = mar, 61 | note = {full online version}, 62 | url 
= {https://eprint.iacr.org/2021/779} 63 | } 64 | 65 | @misc{EPRINT:CCCMRRSW23, 66 | author = {Fabio Campos and Jorge Chavez-Saab and Jesús-Javier Chi-Domínguez and Michael Meyer and Krijn Reijnders and Francisco Rodríguez-Henríquez and Peter Schwabe and Thom Wiggers}, 67 | title = {Optimizations and Practicality of High-Security {CSIDH}}, 68 | howpublished = {Cryptology ePrint Archive, Paper 2023/793}, 69 | year = {2023}, 70 | url = {https://eprint.iacr.org/2023/793} 71 | } 72 | 73 | @phdthesis{RU:Wiggers24, 74 | title = {Post-Quantum {TLS}}, 75 | author = {Thom Wiggers}, 76 | date = {2024-01-09}, 77 | school = {Radboud University}, 78 | address = {Nijmegen, The Netherlands}, 79 | url = {https://thomwiggers.nl/publication/thesis/} 80 | } 81 | 82 | ``` 83 | 84 | ## Overview of this repository 85 | 86 | The below are all [git submodules](https://git-scm.com/book/en/v2/Git-Tools-Submodules). 87 | If you want to make a fork of this repository, you will need to also fork the relevant submodules and update your `.gitmodules`. 88 | See also the notes below. 89 | 90 | ### Main folders 91 | 92 | * ``rustls``: modified Rustls TLS stack to implement KEMTLS and post-quantum versions of "normal" TLS 1.3 93 | * ``measuring``: The scripts to measure the above 94 | * ``ring``: Modified version of Ring to allow for longer DER-encoded strings than typically expected from TLS instances. 95 | * ``webpki``: Modified version of WebPKI to work with PQ and KEM public keys in certificates 96 | * ``mk-cert``: Utility scripts to create post-quantum PKI for pqtls and KEMTLS. 97 | 98 | ### Supporting repositories 99 | 100 | * [``oqs-rs``][]: Rust wrapper around ``liboqs``. Contains additional implementations of schemes (notably AVX2 implementations). 101 | * ``mk-cert/xmss-rs``: Rust wrapper around the XMSS reference code, with our custom parameter set (``src/settings.rs``) and utilities for keygen and signing. 102 | 103 | [``oqs-rs``]: https://github.com/open-quantum-safe/liboqs-rust 104 | 105 | ## Working with this repository 106 | 107 | * **MAKE SURE TO CLONE WITH __ALL__ SUBMODULES**. There are submodules _within_ submodules, so clone with ``--recurse-submodules``. 108 | * If you want to make a fork of this repository, you will need to also fork the relevant submodules and update your `.gitmodules`. 109 | * The Dockerfile serves as an example of how everything can be compiled and how test setups can be created. 110 | It is used by the ``./measuring/script/create-experimental-setup.sh`` script, which serves as an example of its use. 111 | * The `mk-certs` folder contains a python script, `encoder.py`, that can be used to create the required PKI. 112 | RSA certificates and X25519 certificates are available in subfolders. 113 | The certificates assume that the server hostname is ``servername``, so put this in your `/etc/hosts`. 114 | Alternatively, override it using the environment variables in the file (which is also how you set which algorithms are used). 115 | * Experimenting with ``rustls`` can be done directly; use the ``rustls-mio`` subfolders 116 | and run ``cargo run --example tlsserver -- --help`` or ``cargo run --example tlsclient -- --help``. 117 | * The measurement setup is handled in the `measuring/` folder. See the `./run_experiment.sh` script. 118 | * Processing of results is done by the `./scripts/process.py` folder. It expects a `data` folder as produced by `./scripts/experiment.py`. 
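  As a rough end-to-end sketch (run from `measuring/`; the namespace setup and the experiment itself need root, and the algorithm arguments shown are just the defaults of `create-experimental-setup.sh`):

  ```sh
  ./scripts/create-experimental-setup.sh Kyber512 Dilithium2 Dilithium2 Dilithium2  # builds binaries and certs into bin/
  sudo ./scripts/setup_ns.sh     # one-time creation of the cli_ns/srv_ns network namespaces
  ./run_experiment.sh            # wraps scripts/experiment.py and fills data/
  python3 scripts/process.py     # summarises data/ into processed/
  ```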
119 | * Downloading archived results can be done through the scripts in ``measuring/archived-results/`` 120 | -------------------------------------------------------------------------------- /measuring/LICENSE.md: -------------------------------------------------------------------------------- 1 | # SOFTWARE AND DATA LICENSE 2 | 3 | Software and data in this folder is available under CC0 License 4 | 5 | ## CC0 1.0 Universal 6 | 7 | CREATIVE COMMONS CORPORATION IS NOT A LAW FIRM AND DOES NOT PROVIDE LEGAL SERVICES. DISTRIBUTION OF THIS DOCUMENT DOES NOT CREATE AN ATTORNEY-CLIENT RELATIONSHIP. CREATIVE COMMONS PROVIDES THIS INFORMATION ON AN "AS-IS" BASIS. CREATIVE COMMONS MAKES NO WARRANTIES REGARDING THE USE OF THIS DOCUMENT OR THE INFORMATION OR WORKS PROVIDED HEREUNDER, AND DISCLAIMS LIABILITY FOR DAMAGES RESULTING FROM THE USE OF THIS DOCUMENT OR THE INFORMATION OR WORKS PROVIDED HEREUNDER. 8 | 9 | ### Statement of Purpose 10 | 11 | The laws of most jurisdictions throughout the world automatically confer exclusive Copyright and Related Rights (defined below) upon the creator and subsequent owner(s) (each and all, an "owner") of an original work of authorship and/or a database (each, a "Work"). 12 | 13 | Certain owners wish to permanently relinquish those rights to a Work for the purpose of contributing to a commons of creative, cultural and scientific works ("Commons") that the public can reliably and without fear of later claims of infringement build upon, modify, incorporate in other works, reuse and redistribute as freely as possible in any form whatsoever and for any purposes, including without limitation commercial purposes. These owners may contribute to the Commons to promote the ideal of a free culture and the further production of creative, cultural and scientific works, or to gain reputation or greater distribution for their Work in part through the use and efforts of others. 14 | 15 | For these and/or other purposes and motivations, and without any expectation of additional consideration or compensation, the person associating CC0 with a Work (the "Affirmer"), to the extent that he or she is an owner of Copyright and Related Rights in the Work, voluntarily elects to apply CC0 to the Work and publicly distribute the Work under its terms, with knowledge of his or her Copyright and Related Rights in the Work and the meaning and intended legal effect of CC0 on those rights. 16 | 17 | 1. __Copyright and Related Rights.__ A Work made available under CC0 may be protected by copyright and related or neighboring rights ("Copyright and Related Rights"). Copyright and Related Rights include, but are not limited to, the following: 18 | 19 | i. the right to reproduce, adapt, distribute, perform, display, communicate, and translate a Work; 20 | 21 | ii. moral rights retained by the original author(s) and/or performer(s); 22 | 23 | iii. publicity and privacy rights pertaining to a person's image or likeness depicted in a Work; 24 | 25 | iv. rights protecting against unfair competition in regards to a Work, subject to the limitations in paragraph 4(a), below; 26 | 27 | v. rights protecting the extraction, dissemination, use and reuse of data in a Work; 28 | 29 | vi. database rights (such as those arising under Directive 96/9/EC of the European Parliament and of the Council of 11 March 1996 on the legal protection of databases, and under any national implementation thereof, including any amended or successor version of such directive); and 30 | 31 | vii. 
other similar, equivalent or corresponding rights throughout the world based on applicable law or treaty, and any national implementations thereof. 32 | 33 | 2. __Waiver.__ To the greatest extent permitted by, but not in contravention of, applicable law, Affirmer hereby overtly, fully, permanently, irrevocably and unconditionally waives, abandons, and surrenders all of Affirmer's Copyright and Related Rights and associated claims and causes of action, whether now known or unknown (including existing as well as future claims and causes of action), in the Work (i) in all territories worldwide, (ii) for the maximum duration provided by applicable law or treaty (including future time extensions), (iii) in any current or future medium and for any number of copies, and (iv) for any purpose whatsoever, including without limitation commercial, advertising or promotional purposes (the "Waiver"). Affirmer makes the Waiver for the benefit of each member of the public at large and to the detriment of Affirmer's heirs and successors, fully intending that such Waiver shall not be subject to revocation, rescission, cancellation, termination, or any other legal or equitable action to disrupt the quiet enjoyment of the Work by the public as contemplated by Affirmer's express Statement of Purpose. 34 | 35 | 3. __Public License Fallback.__ Should any part of the Waiver for any reason be judged legally invalid or ineffective under applicable law, then the Waiver shall be preserved to the maximum extent permitted taking into account Affirmer's express Statement of Purpose. In addition, to the extent the Waiver is so judged Affirmer hereby grants to each affected person a royalty-free, non transferable, non sublicensable, non exclusive, irrevocable and unconditional license to exercise Affirmer's Copyright and Related Rights in the Work (i) in all territories worldwide, (ii) for the maximum duration provided by applicable law or treaty (including future time extensions), (iii) in any current or future medium and for any number of copies, and (iv) for any purpose whatsoever, including without limitation commercial, advertising or promotional purposes (the "License"). The License shall be deemed effective as of the date CC0 was applied by Affirmer to the Work. Should any part of the License for any reason be judged legally invalid or ineffective under applicable law, such partial invalidity or ineffectiveness shall not invalidate the remainder of the License, and in such case Affirmer hereby affirms that he or she will not (i) exercise any of his or her remaining Copyright and Related Rights in the Work or (ii) assert any associated claims and causes of action with respect to the Work, in either case contrary to Affirmer's express Statement of Purpose. 36 | 37 | 4. __Limitations and Disclaimers.__ 38 | 39 | a. No trademark or patent rights held by Affirmer are waived, abandoned, surrendered, licensed or otherwise affected by this document. 40 | 41 | b. Affirmer offers the Work as-is and makes no representations or warranties of any kind concerning the Work, express, implied, statutory or otherwise, including without limitation warranties of title, merchantability, fitness for a particular purpose, non infringement, or the absence of latent or other defects, accuracy, or the present or absence of errors, whether or not discoverable, all to the greatest extent permissible under applicable law. 42 | 43 | c. 
Affirmer disclaims responsibility for clearing rights of other persons that may apply to the Work or any use thereof, including without limitation any person's Copyright and Related Rights in the Work. Further, Affirmer disclaims responsibility for obtaining any necessary consents, permissions or other rights required for any use of the Work. 44 | 45 | d. Affirmer understands and acknowledges that Creative Commons is not a party to this document and has no duty or obligation with respect to this CC0 or use of the Work. 46 | -------------------------------------------------------------------------------- /measuring/scripts/measure-handshake-size.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | if ! [ -f "count-hs-bytes.py" ]; then 4 | echo "Need to run this from where I exist" 5 | exit 1 6 | fi 7 | 8 | PORT=4443 9 | 10 | TMPDIR="$(mktemp -d -t tmp.MESXXXXXXXX)" 11 | OUTFILE=/tmp/sizes.log 12 | 13 | if [[ "$(getent hosts servername)" != "127.0.0.1 servername" ]]; then 14 | echo "You need to probably fix /etc/hosts to set servername to 127.0.0.1"; 15 | exit 1; 16 | fi 17 | 18 | export LOGLEVEL=DEBUG 19 | 20 | loops=1 21 | 22 | rm -rf $OUTFILE 23 | 24 | for dir in $@; do 25 | pushd $dir; 26 | if [ -f "signing.crt" ]; then 27 | echo "PQTLS" 28 | if [[ "$dir" = *"classicmceliece"* ]]; then 29 | echo "Skipping McEliece" 30 | popd 31 | continue 32 | fi 33 | killall tlsserver tlsclient 2> /dev/null 34 | echo Starting TSHARK 35 | tshark -i lo -w $TMPDIR/pqtls-dump.pcap & 36 | sharkpid=$! 37 | sleep 1 38 | if [[ "$dir" = *"-clauth-"* ]]; then 39 | ./tlsserver --port $PORT --certs signing.chain.crt --auth client-ca.crt --require-auth --key signing.key http > $TMPDIR/server_pqtls_mut.log & 40 | SPID=$! 41 | sleep 0.5 42 | ./tlsclient --port $PORT --loops $loops --cafile signing-ca.crt --auth-certs client.crt --auth-key client.key --no-tickets --http servername > $TMPDIR/client_pqtls_mut 43 | echo "Done measuring mutual" 44 | measurementtype=pqtls-mut 45 | else 46 | ./tlsserver --port $PORT --certs signing.chain.crt --key signing.key http > $TMPDIR/server_pqtls.log & 47 | SPID=$! 48 | sleep 0.5 49 | ./tlsclient --port $PORT --loops $loops --cafile signing-ca.crt --no-tickets --http servername > $TMPDIR/client_pqtls.log 50 | echo "Done measuring unilateral" 51 | measurementtype=pqtls 52 | fi 53 | sleep 5 54 | kill -TERM $sharkpid $SPID 55 | tshark -r $TMPDIR/pqtls-dump.pcap -R tls -2 -Tjson --no-duplicate-keys > $TMPDIR/pqtls-dump.json 56 | popd > /dev/null 57 | echo "$dir PQTLS" >> $OUTFILE 58 | python3 count-hs-bytes.py $measurementtype $TMPDIR/pqtls-dump.json >> $OUTFILE 59 | ret=$? 60 | if [ $ret != 0 ]; then 61 | echo "Error occurred when processing" 62 | mv $TMPDIR/pqtls-dump.json /tmp 63 | mv $TMPDIR/pqtls-dump.pcap /tmp 64 | exit 1; 65 | fi 66 | pushd $dir > /dev/null 67 | fi 68 | if [ -f "signing.crt" ]; then 69 | echo "PQTLS with caching" 70 | if [[ "$dir" = *"classicmceliece"* ]]; then 71 | echo "Skipping McEliece" 72 | popd 73 | continue 74 | fi 75 | killall tlsserver tlsclient 2> /dev/null 76 | echo Starting TSHARK 77 | tshark -i lo -w $TMPDIR/pqtls-dump.pcap & 78 | sharkpid=$! 79 | sleep 1 80 | if [[ "$dir" = *"-clauth-"* ]]; then 81 | ./tlsserver --port $PORT --certs signing.chain.crt --auth client-ca.crt --require-auth --key signing.key http > $TMPDIR/server_pqtls_mut.log & 82 | SPID=$! 
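          # the short pause below lets the just-started server bind its port before the client
          # connects; this run also passes --cached-certs signing.chain.crt, i.e. the certificate
          # chain is presented as already cached on the client (the PQTLS-CACHED measurement)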
83 | sleep 0.5 84 | ./tlsclient --port $PORT --loops $loops --cafile signing-ca.crt --cached-certs signing.chain.crt --auth-certs client.crt --auth-key client.key --no-tickets --http servername > $TMPDIR/client_pqtls_mut 85 | echo "Done measuring mutual" 86 | measurementtype=pqtls-mut 87 | else 88 | ./tlsserver --port $PORT --certs signing.chain.crt --key signing.key http > $TMPDIR/server_pqtls.log & 89 | SPID=$! 90 | sleep 0.5 91 | ./tlsclient --port $PORT --loops $loops --cafile signing-ca.crt --cached-certs signing.chain.crt --no-tickets --http servername > $TMPDIR/client_pqtls.log 92 | echo "Done measuring unilateral" 93 | measurementtype=pqtls 94 | fi 95 | sleep 5 96 | kill -TERM $sharkpid $SPID 97 | tshark -r $TMPDIR/pqtls-dump.pcap -R tls -2 -Tjson --no-duplicate-keys > $TMPDIR/pqtls-dump.json 98 | popd > /dev/null 99 | echo "$dir PQTLS-CACHED" >> $OUTFILE 100 | python3 count-hs-bytes.py $measurementtype $TMPDIR/pqtls-dump.json >> $OUTFILE 101 | ret=$? 102 | if [ $ret != 0 ]; then 103 | echo "Error occurred when processing" 104 | mv $TMPDIR/pqtls-dump.json /tmp 105 | mv $TMPDIR/pqtls-dump.pcap /tmp 106 | exit 1; 107 | fi 108 | pushd $dir > /dev/null 109 | fi 110 | if [ -f "kem.crt" ]; then 111 | echo "KEMTLS" 112 | if [[ "$dir" = *"mceliece"* ]]; then 113 | echo "Skipping McEliece" 114 | popd 115 | continue 116 | fi 117 | killall tlsserver tlsclient > /dev/null 118 | echo Starting TSHARK 119 | tshark -i lo -w $TMPDIR/kemtls-dump.pcap & 120 | sharkpid=$! 121 | sleep 1 122 | if ! [[ "$dir" = *"clauth"* ]]; then 123 | ./tlsserver --port $PORT --certs kem.chain.crt --key kem.key http > $TMPDIR/server_kemtls.log & 124 | SPID=$! 125 | sleep 0.5 126 | ./tlsclient --port $PORT --loops $loops --cafile kem-ca.crt --no-tickets --http servername > $TMPDIR/client_kemtls.log 127 | echo "Done measuring unilateral" 128 | measurementtype=kemtls 129 | else 130 | ./tlsserver --port $PORT --certs kem.chain.crt --require-auth --auth client-ca.crt --key kem.key http > $TMPDIR/server_kemtls_mut.log & 131 | SPID=$! 132 | sleep 0.5 133 | ./tlsclient --port $PORT --loops $loops --cafile kem-ca.crt --no-tickets --auth-certs client.crt --auth-key client.key --http servername > $TMPDIR/client_kemtls_mut.log 134 | echo "Done measuring mutual" 135 | measurementtype=kemtls-mut 136 | fi 137 | sleep 5 138 | kill -TERM $sharkpid $SPID 139 | tshark -r $TMPDIR/kemtls-dump.pcap -R tls -2 -Tjson --no-duplicate-keys > $TMPDIR/kemtls-dump.json 140 | popd > /dev/null 141 | 142 | echo "$dir KEM" >> $OUTFILE 143 | python3 count-hs-bytes.py $measurementtype $TMPDIR/kemtls-dump.json >> $OUTFILE 144 | ret=$? 145 | if [ $ret != 0 ]; then 146 | echo "Error occurred when processing" 147 | mv $TMPDIR/kemtls-dump.json /tmp 148 | mv $TMPDIR/kemtls-dump.pcap /tmp 149 | exit 1; 150 | fi 151 | # go back up in expected state 152 | pushd $dir > /dev/null 153 | fi 154 | if [ -f "kem.crt" ]; then 155 | echo "KEMTLS-PDK" 156 | if [[ "$dir" = *"mceliece"* ]]; then 157 | echo "Skipping McEliece" 158 | popd 159 | continue 160 | fi 161 | killall tlsserver tlsclient > /dev/null 162 | echo Starting TSHARK 163 | tshark -i lo -w $TMPDIR/kemtls-dump.pcap & 164 | sharkpid=$! 165 | sleep 1 166 | if [[ "$dir" = *"-clauth-"* ]]; then 167 | ./tlsserver --port $PORT --certs kem.chain.crt --key kem.key --auth client-ca.crt --require-auth http > $TMPDIR/server_kemtlspdk_mut.log & 168 | SPID=$! 
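          # KEMTLS-PDK with mutual authentication: the client is handed the server's kem.crt up
          # front via --cached-certs and authenticates itself with client.crt/client.key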
169 | sleep 0.5 170 | ./tlsclient --port $PORT --loops $loops --cached-certs kem.crt --auth-certs client.crt --auth-key client.key --cafile kem-ca.crt --no-tickets --http servername > $TMPDIR/client_kemtlspdk_mut.log 171 | echo "Done measuring mutual" 172 | measurementtype=kemtls-pdk-mut 173 | else 174 | ./tlsserver --port $PORT --certs kem.chain.crt --key kem.key http > $TMPDIR/server_kemtlspdk.log & 175 | SPID=$! 176 | sleep 0.5 177 | ./tlsclient --port $PORT --loops $loops --cached-certs kem.crt --cafile kem-ca.crt --no-tickets --http servername > $TMPDIR/client_kemtlspdk.log 178 | echo "Done measuring" 179 | measurementtype=kemtls-pdk 180 | fi 181 | sleep 5 182 | kill -TERM $sharkpid $SPID 183 | tshark -r $TMPDIR/kemtls-dump.pcap -R tls -2 -Tjson --no-duplicate-keys > $TMPDIR/kemtls-dump.json 184 | popd > /dev/null 185 | echo "$dir KEMPDK" >> $OUTFILE 186 | python3 count-hs-bytes.py $measurementtype $TMPDIR/kemtls-dump.json >> $OUTFILE 187 | ret=$? 188 | if [ $ret != 0 ]; then 189 | echo "Error occurred when processing" 190 | mv $TMPDIR/kemtls-dump.json /tmp 191 | mv $TMPDIR/kemtls-dump.pcap /tmp 192 | exit 1; 193 | fi 194 | # go back up in expected state 195 | pushd $dir > /dev/null 196 | fi 197 | popd 198 | done 199 | 200 | #rm -rf $TMPDIR 201 | -------------------------------------------------------------------------------- /measuring/scripts/count-hs-bytes.py: -------------------------------------------------------------------------------- 1 | """ 2 | This script counts the handshake bytes from a json-dumped tshark pcap 3 | 4 | Measure handshakes using 5 | ``` 6 | tshark -i lo -w dump.pcap 7 | ``` 8 | 9 | Dump them to ``dump.json`` using 10 | ``` 11 | tshark -r dump.pcap -R tls -2 -Tjson --no-duplicate-keys > dump.json 12 | ``` 13 | 14 | (Requires a decently recent version of wireshark) 15 | 16 | and then process the dump using this script. 
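For example, for a KEMTLS capture (the first argument must be one of the handshake
types listed in SUPPORTED_TYPES below; `measure-handshake-size.sh` invokes this
script in the same way):

```
python3 count-hs-bytes.py kemtls dump.json
```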
17 | """ 18 | 19 | 20 | import itertools 21 | import json 22 | import os 23 | import sys 24 | 25 | import logging 26 | 27 | SUPPORTED_TYPES = ("pqtls", "pqtls-mut", "kemtls", "kemtls-mut", "kemtls-pdk", "kemtls-pdk-mut") 28 | 29 | if len(sys.argv) != 3 or sys.argv[1] not in SUPPORTED_TYPES: 30 | print(f"Usage: {sys.argv[0]} dump.json") 31 | sys.exit(1) 32 | 33 | logging.basicConfig(level=getattr(logging, os.environ.get('LOGLEVEL', 'DEBUG').upper())) 34 | 35 | with open(sys.argv[2]) as f: 36 | data = json.load(f) 37 | 38 | client_port = None 39 | server_port = None 40 | 41 | class Packet: 42 | 43 | def __init__(self, packet): 44 | self._packet = packet 45 | self._tcp = packet["_source"]["layers"]["tcp"] 46 | self._tls = packet["_source"]["layers"].get("tls") 47 | 48 | @property 49 | def srcport(self): 50 | return self._tcp["tcp.srcport"] 51 | 52 | @property 53 | def dstport(self): 54 | return self._tcp["tcp.dstport"] 55 | 56 | @property 57 | def is_tls(self): 58 | return self._tls is not None 59 | 60 | @property 61 | def tls_records(self): 62 | if not self.is_tls: 63 | raise ValueError 64 | if isinstance(self._tls, list): 65 | all_records = [] 66 | for tls_item in self._tls: 67 | records = tls_item['tls.record'] 68 | if isinstance(records, list): 69 | all_records.extend(records) 70 | else: 71 | all_records.append(records) 72 | return all_records 73 | # just a singular tls record 74 | records = self._tls['tls.record'] 75 | if isinstance(records, list): 76 | return records 77 | else: 78 | return [records] 79 | 80 | def is_css(self): 81 | return any(record.get('tls.change_cipher_spec', False) == "" for record in self.tls_records) 82 | 83 | @property 84 | def is_client_hello(self): 85 | hs = self.tls_records[0].get('tls.handshake') 86 | if not hs: 87 | return False 88 | return hs['tls.handshake.type'] == "1" 89 | 90 | @property 91 | def is_server_hello(self): 92 | hs = self.tls_records[0].get('tls.handshake') 93 | if not hs: 94 | return False 95 | return hs['tls.handshake.type'] == "2" 96 | 97 | @property 98 | def tcp_payload_size(self): 99 | return int(self._tcp['tcp.len']) 100 | 101 | handshakes = [] 102 | for packet in [Packet(p) for p in data]: 103 | if not packet.is_tls: 104 | continue 105 | logging.debug(f"Packet: {packet.srcport} -> {packet.dstport}: {packet.tcp_payload_size} bytes") 106 | if packet.is_client_hello: 107 | client_port = packet.srcport 108 | server_port = packet.dstport 109 | handshakes.append([]) 110 | handshakes[-1].append(packet) 111 | 112 | # Now handshake contains a full TLS handshake 113 | 114 | def length(record): 115 | return 5 + int(record['tls.record.length']) 116 | 117 | # if PQTLS 118 | TLS_TYPE = sys.argv[1] 119 | if TLS_TYPE == "pqtls" or TLS_TYPE == "pqtls-mut": 120 | for handshake in handshakes: 121 | size = 0 122 | # Client Hello 123 | clmsgs = list(filter(lambda p: p.dstport == server_port, handshake)) 124 | cmsgiter = itertools.chain.from_iterable(msg.tls_records for msg in clmsgs) 125 | 126 | assert clmsgs[0].is_client_hello 127 | size += (msgsize := length(next(cmsgiter))) 128 | logging.debug(f"Client hello size: {msgsize}") 129 | 130 | # Server Hello, CSS, EE, Cert, CertV, SFIN 131 | # chain all next server->client messages 132 | servmsgs = list(filter(lambda p: p.srcport == server_port, handshake)) 133 | smsgiter = itertools.chain.from_iterable(msg.tls_records for msg in servmsgs) 134 | assert servmsgs[0].is_server_hello 135 | 136 | size += (msgsize := length(next(smsgiter))) 137 | logging.debug(f"Server hello size: {msgsize}") 138 | 139 | size += 
(msgsize := length(next(smsgiter))) 140 | assert msgsize == 6, f"expected ccs to be 6 bytes instead of {msgsize}" 141 | logging.debug(f"ChangeCipherSpec size: {msgsize}") 142 | 143 | size += (msgsize := length(next(smsgiter))) 144 | logging.debug(f"EncryptedExtensions size: {msgsize}") 145 | if TLS_TYPE == "pqtls-mut": 146 | size += (msgsize := length(next(smsgiter))) 147 | logging.debug(f"CertificateRequest size: {msgsize}") 148 | 149 | cert_size = (msgsize := length(next(smsgiter))) 150 | while msgsize == 16406: # magic constant for large msgs that got fragmented by TLS 151 | cert_size += (msgsize := length(next(smsgiter))) 152 | size += cert_size 153 | logging.debug(f"Certificate size: {cert_size}") 154 | size += (msgsize := length(next(smsgiter))) 155 | logging.debug(f"CertificateVerify size: {msgsize}") 156 | size += (msgsize := length(next(smsgiter))) 157 | logging.debug(f"ServerFinished size: {msgsize}") 158 | assert msgsize == 58, f"Expected finished size to be 58 bytes instead of {msgsize}" 159 | 160 | # CSS, ClientFinished 161 | size += (msgsize := length(next(cmsgiter))) 162 | assert msgsize == 6, f"expected ccs to be 6 bytes instead of {msgsize}" 163 | logging.debug(f"ChangeCipherSpec size: {msgsize}") 164 | 165 | if TLS_TYPE == "pqtls-mut": 166 | cert_size += (msgsize := length(next(cmsgiter))) 167 | while msgsize == 16406: # magic constant for large msgs that got fragmented by TLS 168 | cert_size += (msgsize := length(next(cmsgiter))) 169 | size += cert_size 170 | logging.debug(f"Certificate size: {cert_size}") 171 | size += (msgsize := length(next(cmsgiter))) 172 | logging.debug(f"CertificateVerify size: {msgsize}") 173 | 174 | size += (msgsize := length(next(cmsgiter))) 175 | logging.debug(f"ClientFinished size: {msgsize}") 176 | assert msgsize == 58, f"Expected finished size to be 58 bytes instead of {msgsize}" 177 | 178 | print(f"Total size: {size}") 179 | 180 | 181 | if TLS_TYPE == "kemtls" or TLS_TYPE == "kemtls-mut": 182 | for handshake in handshakes: 183 | size = 0 184 | 185 | # Client msgs 186 | clmsgs = list(filter(lambda p: p.dstport == server_port, handshake)) 187 | cmsgiter = itertools.chain.from_iterable(msg.tls_records for msg in clmsgs) 188 | # Server msgs 189 | servmsgs = list(filter(lambda p: p.srcport == server_port, handshake)) 190 | smsgiter = itertools.chain.from_iterable(msg.tls_records for msg in servmsgs) 191 | 192 | # Client Hello 193 | ch = next(cmsgiter) 194 | assert clmsgs[0].is_client_hello 195 | size += (msgsize := length(ch)) 196 | logging.debug(f"Client hello size: {msgsize}") 197 | 198 | # Server Hello, CSS, EE, Cert 199 | assert servmsgs[0].is_server_hello 200 | size += (msgsize := length(next(smsgiter))) 201 | logging.debug(f"Server hello size: {msgsize}") 202 | size += (msgsize := length(next(smsgiter))) 203 | logging.debug(f"ChangeCipherSpec size: {msgsize}") 204 | assert msgsize == 6, f"expected ccs to be 6 bytes instead of {msgsize}" 205 | size += (msgsize := length(next(smsgiter))) 206 | logging.debug(f"EncryptedExtensions size: {msgsize}") 207 | 208 | if TLS_TYPE == "kemtls-mut": 209 | size += (msgsize := length(next(smsgiter))) 210 | logging.debug(f"CertificateRequest size: {msgsize}") 211 | 212 | cert_size = (msgsize := length(next(smsgiter))) 213 | while msgsize == 16406: # magic constant for large msgs that got fragmented by TLS 214 | cert_size += (msgsize := length(next(smsgiter))) 215 | size += cert_size 216 | logging.debug(f"Certificate size: {cert_size}") 217 | 218 | # CSS, CKEX 219 | size += (msgsize := 
length(next(cmsgiter))) 220 | assert msgsize == 6, f"expected ccs to be 6 bytes instead of {msgsize}" 221 | logging.debug(f"Client ChangeCipherSpec: {msgsize}") 222 | size += (msgsize := length(next(cmsgiter))) 223 | logging.debug(f"ClientCiphertext: {msgsize}") 224 | 225 | if TLS_TYPE == "kemtls-mut": 226 | # CCERT 227 | cert_size = (msgsize := length(next(cmsgiter))) 228 | while msgsize == 16406: # magic constant for large msgs that got fragmented by TLS 229 | cert_size += (msgsize := length(next(cmsgiter))) 230 | size += cert_size 231 | logging.debug("ClientCertificate size: %d", cert_size) 232 | 233 | # SKEX 234 | size += (msgsize := length(next(smsgiter))) 235 | logging.debug("ServerCiphertext size: %d", msgsize) 236 | 237 | # CFIN 238 | size += (msgsize := length(next(cmsgiter))) 239 | logging.debug(f"ClientFinished: {msgsize}") 240 | assert msgsize == 58, f"Expected finished size to be 58 bytes instead of {msgsize}" 241 | 242 | # ServerFinished 243 | size += (msgsize := length(next(smsgiter))) 244 | logging.debug(f"ServerFinished size: {msgsize}") 245 | assert msgsize == 58, f"Expected finished size to be 58 bytes instead of {msgsize}" 246 | 247 | 248 | print(f"Total size: {size}") 249 | 250 | if TLS_TYPE == "kemtls-pdk" or TLS_TYPE == "kemtls-pdk-mut": 251 | for handshake in handshakes: 252 | size = 0 253 | # Client messages 254 | clmsgs = list(filter(lambda p: p.dstport == server_port, handshake)) 255 | cmsgiter = itertools.chain.from_iterable(msg.tls_records for msg in clmsgs) 256 | # Server msgs 257 | servmsgs = list(filter(lambda p: p.srcport == server_port, handshake)) 258 | smsgiter = itertools.chain.from_iterable(msg.tls_records for msg in servmsgs) 259 | 260 | # Client hello 261 | ch = next(cmsgiter) 262 | assert clmsgs[0].is_client_hello 263 | size += (msgsize := length(ch)) 264 | logging.debug(f"ClientHello size (PDK): {msgsize}") 265 | 266 | if TLS_TYPE == "kemtls-pdk-mut": 267 | size += (msgsize := length(msg := next(cmsgiter))) 268 | logging.debug(f"ChangeCipherSpec: {msgsize}") 269 | 270 | cert_size = (msgsize := length(next(cmsgiter))) 271 | while msgsize == 16406: # magic constant for large msgs that got fragmented by TLS 272 | cert_size += (msgsize := length(next(cmsgiter))) 273 | size += cert_size 274 | logging.debug("ClientCertificate size: %d", cert_size) 275 | 276 | # SH, CSS, EE, [SKEX], SFIN 277 | assert servmsgs[0].is_server_hello 278 | size += (msgsize := length(next(smsgiter))) 279 | logging.debug(f"Server hello size: {msgsize}") 280 | size += (msgsize := length(next(smsgiter))) 281 | logging.debug(f"ChangeCipherSpec size: {msgsize}") 282 | assert msgsize == 6, f"expected ccs to be 6 bytes instead of {msgsize}" 283 | size += (msgsize := length(next(smsgiter))) 284 | logging.debug(f"EncryptedExtensions size: {msgsize}") 285 | 286 | if TLS_TYPE == "kemtls-pdk-mut": 287 | size += (msgsize := length(next(smsgiter))) 288 | logging.debug("ServerKemCiphertext size: %d", msgsize) 289 | 290 | size += (msgsize := length(next(smsgiter))) 291 | logging.debug(f"ServerFinished size: {msgsize}") 292 | assert msgsize == 58, f"Expected finished size to be 58 bytes instead of {msgsize}" 293 | 294 | # [CSS], CFIN 295 | if not TLS_TYPE == "kemtls-pdk-mut": 296 | size += (msgsize := length(msg := next(cmsgiter))) 297 | assert msgsize == 6, f"expected ccs to be 6 bytes instead of {msgsize}" 298 | logging.debug(f"ChangeCipherSpec: {msgsize}") 299 | size += (msgsize := length(next(cmsgiter))) 300 | logging.debug(f"ClientFinished: {msgsize}") 301 | assert msgsize == 58, f"Expected 
finished size to be 58 bytes instead of {msgsize}" 302 | 303 | print(f"Total size: {size}") 304 | -------------------------------------------------------------------------------- /measuring/scripts/process.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | from collections import defaultdict 4 | from itertools import chain 5 | import csv 6 | import json 7 | import os 8 | from pathlib import Path 9 | import re 10 | import statistics 11 | import multiprocessing 12 | 13 | from typing import Any, Literal, Optional, Union, cast 14 | 15 | from experiment import ALGORITHMS, Experiment 16 | 17 | BASEPATH = Path(__file__).parent.absolute().parent 18 | 19 | DATAPATH = BASEPATH / "data" 20 | PROCESSED_PATH = BASEPATH / "processed" 21 | 22 | 23 | #: Renames for the key exchange 24 | KEX_RENAMES: dict[str, str] = { 25 | "X25519": "E", 26 | "Kyber768": "Kiii", 27 | "Dilithium3": "Diii", 28 | "Falcon1024": "Fv", 29 | "SikeP434Compressed": "Sc", 30 | "CTIDH512": "Ctfivetwelve", 31 | "CTIDH1024": "Cttentwentyfour", 32 | "CSIDH2047M1L226": "Cs", 33 | "CTIDH2047M1L226": "Ct", 34 | "CSIDH4095M27L262": "CsIc", 35 | "CTIDH4095M27L262": "CtIc", 36 | "CTIDH5119M46L244": "CtIIf", 37 | } 38 | 39 | SIG_RENAMES: dict[str, str] = { 40 | "RainbowICircumzenithal": "Rcz", 41 | } 42 | 43 | #: Renames for the leaf algorithm: combination of signature schemes and KEX 44 | AUTH_RENAMES = dict() 45 | AUTH_RENAMES.update(KEX_RENAMES) 46 | AUTH_RENAMES.update(SIG_RENAMES) 47 | 48 | ExperimentType = Union[ 49 | Literal["sig"], 50 | Literal["pdk"], 51 | Literal["kemtls"], 52 | Literal["sigcache"], 53 | Literal["optls"], 54 | ] 55 | 56 | 57 | def get_experiment_name(experiment: dict[str, Any]) -> str: 58 | kex = experiment["kex"] 59 | leaf = experiment["leaf"] 60 | inter = experiment["int"] 61 | root = experiment["root"] 62 | clauth = experiment["clauth"] 63 | clca = experiment["clca"] 64 | keycache = experiment["keycache"] 65 | 66 | type: Optional[ExperimentType] = None 67 | if experiment["type"] == "pdk": 68 | type = "pdk" 69 | elif experiment["type"] == "kemtls": 70 | type = "kemtls" 71 | elif experiment["type"] == "sign-cached": 72 | type = "sigcache" 73 | elif experiment["type"] == "optls": 74 | type = "optls" 75 | else: 76 | assert experiment["type"] == "sign", f"{experiment['type']} unknown" 77 | type = "sig" 78 | 79 | kex = KEX_RENAMES.get(kex, kex[0].upper()) 80 | leaf = AUTH_RENAMES.get(leaf, leaf[0].upper()) 81 | if inter is not None and type not in ("pdk", "sigcache"): 82 | inter = SIG_RENAMES.get(inter, inter[0].upper()) 83 | elif inter is None or type in ("pdk", "sigcache"): 84 | inter = "" 85 | if root is not None and type != "pdk": 86 | root = SIG_RENAMES.get(root, root[0].upper()) 87 | elif root is None or type == "pdk": 88 | root = "" 89 | 90 | authpart = "" 91 | if clauth is not None: 92 | clauth = AUTH_RENAMES.get(clauth, clauth[0].upper()) 93 | clca = SIG_RENAMES.get(clca, clca[0].upper()) 94 | authpart = f"auth{clauth}{clca}" 95 | 96 | keycache = "" 97 | if experiment["keycache"]: 98 | keycache = "keycache" 99 | 100 | return f"{type}{kex}{leaf}{inter}{root}{authpart}{keycache}" 101 | 102 | 103 | def read_csv_lines(filename): 104 | """Read the entries from a csv""" 105 | with open(filename, "r") as file_: 106 | reader = csv.DictReader(file_) 107 | for line in reader: 108 | valid = True 109 | for key, val in line.items(): 110 | if not val: 111 | print("Invalid value for {} on line {}".format(key, line)) 112 | valid = False 113 | break 114 | if 
valid: 115 | yield line 116 | 117 | 118 | def get_averages(filename: Union[str, Path]) -> tuple[dict[str, float], int]: 119 | sums: defaultdict[str, list] = defaultdict(list) 120 | for line in read_csv_lines(filename): 121 | for key, val in line.items(): 122 | sums[key].append(int(val) / 1000) # convert to microseconds 123 | results: dict[str, float] = dict() 124 | key = None 125 | for key in sums.keys(): 126 | results[key] = round(statistics.mean(sums[key]), 3) 127 | results[f"{key} stdev"] = round(statistics.stdev(sums[key]), 3) 128 | results[f"{key} var%"] = round(statistics.stdev(sums[key])/statistics.mean(sums[key])*100, 3) 129 | assert key is not None 130 | 131 | return (results, len(sums[key])) 132 | 133 | 134 | AVG_FIELDS: list[str] = [ 135 | "type", 136 | "kex", 137 | "leaf", 138 | "int", 139 | "root", 140 | "clauth", 141 | "clca", 142 | "int-only", 143 | "keycache", 144 | "rtt", 145 | "drop_rate", 146 | "rate", 147 | "measurements", 148 | "name", 149 | "filename", 150 | # client keys 151 | *chain.from_iterable( 152 | (f"client {key}", f"client {key} stdev") 153 | for key in [ 154 | "start", 155 | "creating keyshares", 156 | "created keyshares", 157 | "created pdk encapsulation", 158 | "sending chelo", 159 | "received sh", 160 | "decapsulating ephemeral", 161 | "decapsulated ephemeral", 162 | "derived hs", 163 | "received cert", 164 | "submitted ckex to server", 165 | "encapsulating to cert", 166 | "encapsulated to cert", 167 | "derived ahs", 168 | "emit cert", 169 | "decapsulating from ccert", 170 | "decapsulated from ccert", 171 | "derived ms", 172 | "emitted finished", 173 | "received finished", 174 | "authenticated server", 175 | "handshake completed", 176 | "writing to server", 177 | "received server reply", 178 | ] 179 | ), 180 | # server keys 181 | *chain.from_iterable( 182 | (f"server {key}", f"server {key} stdev") 183 | for key in [ 184 | "received client hello", 185 | "encapsulating to ephemeral", 186 | "encapsulated to ephemeral", 187 | "emitted sh", 188 | "pdk decapsulating from certificate", 189 | "pdk decapsulating from certificate", 190 | "pdk decapsulated from certificate", 191 | "derived hs", 192 | "pdk encapsulating to ccert", 193 | "pdk encapsulated to ccert", 194 | "emitted certificate", 195 | "emitting certv", 196 | "received ckex", 197 | "decapsulating from certificate", 198 | "decapsulated from certificate", 199 | "derived ahs", 200 | "received certificate", 201 | "encapsulating to client", 202 | "submitted skex to client", 203 | "received certv", 204 | "received finished", 205 | "authenticated client", 206 | "emitted finished", 207 | "reading traffic", 208 | "writing to client", 209 | "handshake completed", 210 | ] 211 | ), 212 | ] 213 | 214 | 215 | def format_results_tex(avgs: dict[str, Any]): 216 | latency = float(avgs["rtt"]) 217 | loss: str = avgs["drop_rate"] 218 | rate: str = avgs["rate"] 219 | 220 | macro_name_base = "res" + ("slow" if latency > 50 else "fast") + avgs["name"] 221 | 222 | def macro(name, number): 223 | number = "%0.1f" % (number / 1000) 224 | return ( 225 | fr"\newcommand{{\{macro_name_base}{name}}}{{{number}}} % {avgs['filename']}" 226 | "\n" 227 | ) 228 | 229 | with open( 230 | PROCESSED_PATH / f"processed_results_{latency:0.1f}_{loss}_{rate}.tex", "a+" 231 | ) as texfile: 232 | texfile.write(macro("encrypting", avgs["client writing to server"])) 233 | 234 | texfile.write(macro("clientdone", avgs["client handshake completed"])) 235 | texfile.write(macro("serverdone", avgs["server handshake completed"])) 236 | texfile.write( 237 | 
macro("serverexplicitauthed", avgs["client authenticated server"]) 238 | ) 239 | texfile.write(macro("clientgotreply", avgs["client received server reply"])) 240 | 241 | 242 | def process_experiment( 243 | experiment: tuple[str, dict[str, Union[int, float, bool, str]]] 244 | ) -> dict[str, Union[int, float, bool, str]]: 245 | (filename, data) = experiment 246 | (the_avgs, count) = get_averages(filename) 247 | avgs = cast(dict[str, Union[float, int, bool, str]], the_avgs) 248 | print(f"processed {filename} and got {count} points") 249 | avgs["measurements"] = count 250 | avgs.update(data) 251 | return avgs 252 | 253 | 254 | # https://stackoverflow.com/a/54392761/248065 255 | def dump_lua(data) -> str: 256 | if type(data) is str: 257 | return f'"{data}"' 258 | if type(data) in (int, float): 259 | return f"{data}" 260 | if type(data) is bool: 261 | return data and "true" or "false" 262 | if type(data) is list: 263 | l = "{" 264 | l += ", ".join([dump_lua(item) for item in data]) 265 | l += "}" 266 | return l 267 | if type(data) is dict: 268 | t = "{" 269 | t += ", ".join([f'["{k}"]={dump_lua(v)}' for k, v in data.items()]) 270 | t += "}" 271 | return t 272 | 273 | assert False, f"Unknown type {type(data)}" 274 | 275 | 276 | def write_averages(experiments): 277 | names = set() 278 | 279 | with multiprocessing.Pool() as p: 280 | avgses = p.map(process_experiment, experiments) 281 | 282 | with open(PROCESSED_PATH / "avgs.csv", "w") as csvfile: 283 | writer = csv.DictWriter(csvfile, fieldnames=AVG_FIELDS) 284 | writer.writeheader() 285 | for avgs in avgses: 286 | name = avgs["name"] 287 | # print(f"{name}: from {avgs['filename']}") 288 | 289 | # Sanity check 290 | assert (name, avgs["rtt"]) not in names, f"Already seen {name}" 291 | names.add(name) 292 | 293 | print(f"{name}: {avgs['rtt']} Server reply: {avgs['client received server reply']:0.1f} (+- {avgs['client received server reply var%']:0.1f})") 294 | print(f"{name}: {avgs['rtt']} Server done: {avgs['server handshake completed']:0.1f}") 295 | writer.writerow({key: val for key, val in avgs.items() if "%" not in key}) 296 | format_results_tex(avgs) 297 | 298 | lua_table = dict() 299 | for avgs in avgses: 300 | item = lua_table 301 | for key_item in ("type", "rtt", "kex", "leaf", "int", "root", "clauth", "clca", "keycache"): 302 | key = avgs[key_item] or "none" 303 | if key_item == "rtt": 304 | key = f"{float(key):0.1f}" 305 | elif key_item == "keycache": 306 | key = "true" if avgs[key_item] else "false" 307 | if key not in item: 308 | item[key] = {} 309 | item = item[key] 310 | for key_item in avgs.keys(): 311 | if key_item.startswith("server ") or key_item.startswith("client "): 312 | val = avgs[key_item] 313 | assert isinstance(val, (int, float)) 314 | item[key_item] = val / 1000 315 | elif key_item == "measurements": 316 | val = avgs[key_item] 317 | item[key_item] = val 318 | 319 | with open(PROCESSED_PATH / "avgs.lua", "w") as luafile: 320 | luafile.write("measurement_results=") 321 | luafile.write(dump_lua(lua_table)) 322 | with open(PROCESSED_PATH / "avgs.json", "w") as fh: 323 | json.dump(lua_table, fh, indent=2) 324 | 325 | 326 | EXPERIMENT_REGEX = re.compile( 327 | r"(?P(kemtls|sign|sign-cached|optls|pdk))(-(?P(int-chain|int-only)))?(-(?Pkeycache))?/" 328 | r"(?P[^_]+)_(?P[^_]+)(_(?P[^_]+))?(_(?P[^_]+))?" 329 | r"(_clauth_(?P[^_]+)_(?P[^_]+))?" 
330 | r"_(?P\d+\.\d+)ms_(?P\d+(\.\d+)?)_(?P\d+mbit).csv" 331 | ) 332 | 333 | 334 | def get_experiments() -> list[tuple[Path, dict[str, Any]]]: 335 | filenames = DATAPATH.glob("*/*.csv") 336 | return [(filename, get_experiment(filename)) for filename in filenames] 337 | 338 | 339 | def get_experiment(filename) -> dict[str, Union[int, float, bool, str]]: 340 | relpath = str(filename.relative_to(DATAPATH)) 341 | matches = EXPERIMENT_REGEX.match(relpath) 342 | assert matches, f"Experiment '{relpath}' doesn't match regex" 343 | experiment: dict[str, Union[int, bool, str, float]] = {} 344 | experiment["filename"] = filename.name 345 | for item in [ 346 | "type", 347 | "kex", 348 | "leaf", 349 | "int", 350 | "root", 351 | "clauth", 352 | "clca", 353 | "rtt", 354 | "drop_rate", 355 | "rate", 356 | ]: 357 | experiment[item] = matches.group(item) 358 | 359 | if experiment["type"] not in ("pdk", "sign-cached"): 360 | experiment["int-only"] = matches.group("cached") == "int-only" 361 | else: 362 | experiment["int-only"] = True 363 | assert matches.group("cached") is None 364 | assert experiment["int"] is None 365 | assert experiment["root"] is None 366 | assert matches.group("int") is None 367 | 368 | experiment["keycache"] = matches.group("keycache") == "keycache" 369 | 370 | if experiment["int-only"]: 371 | assert experiment["root"] is None 372 | 373 | experiment["name"] = get_experiment_name(experiment) 374 | 375 | return experiment 376 | 377 | 378 | def create_handle(experiment: Experiment) -> str: 379 | def get_handle(alg: Optional[str]) -> str: 380 | if alg is None: 381 | return "" 382 | if alg == "X25519": 383 | return "e" 384 | if alg == "RSA2048": 385 | return "r" 386 | if alg.startswith("ClassicMcEliece"): 387 | return "M" 388 | if alg.startswith("Sphincs"): 389 | if "f" in alg: 390 | return "Sf" 391 | return "Ss" 392 | return alg[0] 393 | 394 | output = ( 395 | get_handle(experiment.kex) 396 | + get_handle(experiment.leaf) 397 | + get_handle(experiment.intermediate) 398 | + get_handle(experiment.root) 399 | ) 400 | if experiment.client_auth: 401 | output += "-" + get_handle(experiment.client_auth) 402 | output += get_handle(experiment.client_ca) 403 | 404 | return output 405 | 406 | 407 | def produce_experiment_list(): 408 | algs = [] 409 | for experiment in ALGORITHMS: 410 | alg = {"handle": create_handle(experiment)} 411 | for attr in ( 412 | "type", 413 | "level", 414 | "kex", 415 | "leaf", 416 | "intermediate", 417 | "root", 418 | "client_auth", 419 | "client_ca", 420 | ): 421 | if (a := getattr(experiment, attr)) is not None: 422 | alg[attr] = a 423 | algs.append(alg) 424 | with Path(BASEPATH / "processed" / "experiments.json").open("w") as fh: 425 | json.dump(algs, fh, indent=2) 426 | 427 | 428 | def main(): 429 | os.makedirs(DATAPATH / ".." 
/ "processed", exist_ok=True) 430 | write_averages(get_experiments()) 431 | produce_experiment_list() 432 | 433 | 434 | if __name__ == "__main__": 435 | main() 436 | -------------------------------------------------------------------------------- /measuring/scripts/experiment.py: -------------------------------------------------------------------------------- 1 | """Based on https://github.com/xvzcf/pq-tls-benchmark/blob/master/emulation-exp/code/kex/experiment.py""" 2 | 3 | import csv 4 | import datetime 5 | import io 6 | import itertools 7 | import logging 8 | import math 9 | import multiprocessing 10 | import os 11 | import re 12 | import socket 13 | import subprocess 14 | import sys 15 | import time 16 | from functools import partial 17 | from multiprocessing.connection import Connection 18 | from pathlib import Path 19 | from typing import ( 20 | Final, 21 | Iterable, 22 | List, 23 | Literal, 24 | NamedTuple, 25 | Optional, 26 | Tuple, 27 | Union, 28 | cast, 29 | ) 30 | 31 | ################################################################################################### 32 | ## SETTTINGS ###################################################################################### 33 | ################################################################################################### 34 | 35 | SECSIDH_PAPER = False 36 | 37 | # Original set of latencies 38 | # LATENCIES = ['2.684ms', '15.458ms', '39.224ms', '97.73ms'] 39 | # LATENCIES = ["2.0ms"] 40 | LATENCIES: Final[list[str]] = [ 41 | "15.458ms", 42 | "97.73ms", 43 | ] # ['2.684ms', '15.458ms', '97.73ms'] #['15.458ms', '97.73ms'] 44 | #: Loss rates are too annoying to include 45 | LOSS_RATES: Final[list[int]] = [0] 46 | #: Number of pings used for measuring latency 47 | NUM_PINGS: Final[int] = 20 # for measuring the practical latency 48 | #: Link speeds to use in experiments 49 | SPEEDS: Final[list[int]] = [1000, 10] 50 | 51 | 52 | START_PORT: Final[int] = 10000 53 | 54 | if not SECSIDH_PAPER: 55 | # xvzcf's experiment used POOL_SIZE = 40 56 | # We start as many servers as clients, so make sure to adjust accordingly 57 | POOL_SIZE: int = 40 58 | ITERATIONS: int = 1 59 | # Total iterations = ITERATIONS * POOL_SIZE * MEASUREMENTS_PER_ITERATION 60 | MEASUREMENTS_PER_ITERATION: int = 500 61 | MEASUREMENTS_PER_CLIENT: int = 500 62 | else: 63 | POOL_SIZE: int = 80 64 | ITERATIONS: int = 10 65 | MEASUREMENTS_PER_ITERATION: int = 10 66 | MEASUREMENTS_PER_CLIENT: int = 10 67 | 68 | ################################################################################################### 69 | 70 | ResultType = dict[str, str] 71 | ResultListType = list[ResultType] 72 | 73 | SCRIPTDIR: Path = Path(sys.path[0]).resolve() 74 | sys.path.append(str(SCRIPTDIR.parent.parent / "mk-cert")) 75 | 76 | SERVER_PORTS: Final[list[str]] = [ 77 | str(port) for port in range(START_PORT, START_PORT + POOL_SIZE) 78 | ] 79 | 80 | 81 | import algorithms 82 | 83 | hostname = "servername" 84 | 85 | #: UserID of the user so we don't end up with a bunch of root-owned files 86 | USERID: int = int(os.environ.get("SUDO_UID", 1001)) 87 | #: Group ID of the user so we don't end up with a bunch of root-owned files 88 | GROUPID: int = int(os.environ.get("SUDO_GID", 1001)) 89 | 90 | 91 | class CustomFormatter(logging.Formatter): 92 | """ 93 | Logging Formatter to add colors and count warning / errors 94 | 95 | https://stackoverflow.com/a/56944256/248065 96 | """ 97 | 98 | grey: Final[str] = "\x1b[38;21m" 99 | yellow: Final[str] = "\x1b[33;21m" 100 | red: Final[str] = "\x1b[31;21m" 101 | 
bold_red: Final[str] = "\x1b[31;1m" 102 | reset: Final[str] = "\x1b[0m" 103 | format_tpl: Final[ 104 | str 105 | ] = "%(asctime)s - %(levelname)-8s - %(message)-50s (%(filename)s:%(lineno)d)" 106 | 107 | FORMATS: Final[dict[int, str]] = { 108 | logging.DEBUG: grey + format_tpl + reset, 109 | logging.INFO: grey + format_tpl + reset, 110 | logging.WARNING: yellow + format_tpl + reset, 111 | logging.ERROR: red + format_tpl + reset, 112 | logging.CRITICAL: bold_red + format_tpl + reset, 113 | } 114 | 115 | def format(self, record: logging.LogRecord) -> str: 116 | log_fmt = self.FORMATS.get(record.levelno) 117 | formatter = logging.Formatter(log_fmt) 118 | return formatter.format(record) 119 | 120 | 121 | ExperimentType = Union[ 122 | Literal["sign"], 123 | Literal["pdk"], 124 | Literal["kemtls"], 125 | Literal["sign-cached"], 126 | Literal["optls"], 127 | ] 128 | 129 | NistLevel = Union[Literal[1], Literal[3], Literal[5]] 130 | 131 | 132 | class Experiment(NamedTuple): 133 | """Represents an experiment""" 134 | 135 | type: ExperimentType 136 | level: Union[NistLevel, Literal["n/a"]] 137 | kex: str 138 | leaf: str 139 | intermediate: Optional[str] = None 140 | root: Optional[str] = None 141 | client_auth: Optional[str] = None 142 | client_ca: Optional[str] = None 143 | keygen_cache: bool = False 144 | 145 | def all_algorithms(self) -> set[str]: 146 | algs = {self.kex, self.leaf} 147 | if self.intermediate is not None: 148 | algs.add(self.intermediate) 149 | if self.root is not None: 150 | algs.add(self.root) 151 | if self.client_auth is not None: 152 | algs.add(self.client_auth) 153 | if self.client_ca is not None: 154 | algs.add(self.client_ca) 155 | 156 | return algs 157 | 158 | 159 | FRODOS = [ 160 | f"FrodoKem{size.title()}{alg.title()}" 161 | for size in ("640", "976", "1344") 162 | for alg in ("aes", "shake") 163 | ] 164 | SMALLFRODOS = [frodo for frodo in FRODOS if "640" in frodo] 165 | KYBERS = ["Kyber512", "Kyber768", "Kyber1024"] 166 | KYBER = {1: "Kyber512", 3: "Kyber768", 5: "Kyber1024"} 167 | BIKES = ["BikeL1", "BikeL3"] # NOTE: IND-CPA! 
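# Naming convention for the tables above and below: plural lists (FRODOS, KYBERS,
# HQCS, DILITHIUMS, ...) enumerate whole parameter-set families, while the
# level-indexed dicts (KYBER, HQC, MCELIECES, DILITHIUM, FALCON, SPHINCS, XMSS)
# map a NIST security level (1, 3 or 5) to the parameter set(s) used at that
# level in the experiment definitions further down.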
168 | HQCS = ["Hqc128", "Hqc192", "Hqc256"] 169 | HQC = {1: "Hqc128", 3: "Hqc192", 5: "Hqc256"} 170 | MCELIECES_ = [ 171 | f"ClassicMcEliece{size}{variant}" 172 | for size in ("348864", "460896", "6688128", "6960119", "8192128") 173 | for variant in ["", "f"] 174 | ] 175 | MCELIECEL1 = [mc for mc in MCELIECES_ if "348864" in mc] 176 | MCELIECEL3 = [mc for mc in MCELIECES_ if "460896" in mc] 177 | MCELIECEL5 = [mc for mc in MCELIECES_ if mc not in (MCELIECEL1 + MCELIECEL3)] 178 | MCELIECES = {1: MCELIECEL1, 3: MCELIECEL3, 5: MCELIECEL5} 179 | 180 | DILITHIUMS = ["Dilithium2", "Dilithium3", "Dilithium5"] 181 | # yes I know D2 is level 2, but this is how we map the experiments 182 | DILITHIUM = {1: "Dilithium2", 3: "Dilithium3", 5: "Dilithium5"} 183 | FALCONS = ["Falcon512", "Falcon1024"] 184 | # Idem falcon 1024 which is level 5, but which we use in the L3 experiments 185 | FALCON = {1: "Falcon512", 3: "Falcon1024", 5: "Falcon1024"} 186 | SPHINCSES_ = [ 187 | f"SphincsHaraka{size}{var}Simple" for size in [128, 192, 256] for var in ["s", "f"] 188 | ] 189 | SPHINCSESL1 = [spx for spx in SPHINCSES_ if "128" in spx] 190 | SPHINCSESL3 = [spx for spx in SPHINCSES_ if "192" in spx] 191 | SPHINCSESL5 = [spx for spx in SPHINCSES_ if "256" in spx] 192 | SPHINCS = {1: SPHINCSESL1, 3: SPHINCSESL3, 5: SPHINCSESL5} 193 | 194 | XMSS = {1: "XMSS1", 3: "XMSS3", 5: "XMSS5"} 195 | 196 | UOVS_ = [ 197 | f"Pqov{size}{variant}" 198 | for size in ("1616064", "25611244", "25618472", "25624496") 199 | for variant in ["Classic"] 200 | ] 201 | UOVL1 = [uov for uov in UOVS_ if "1616064" in uov or "25611244" in uov] 202 | UOVL3 = [uov for uov in UOVS_ if "25618472" in uov] 203 | UOVL5 = [uov for uov in UOVS_ if "25624496" in uov] 204 | # UOVS = {1: UOVL1, 3: UOVL3, 5: UOVL5} 205 | UOVS = {1: [], 3: [], 5: []} 206 | 207 | # KEMS: list[str] = [ 208 | # *KYBERS, 209 | # *HQCS, 210 | # *BIKES, 211 | # *SMALLFRODOS, 212 | # ] 213 | 214 | KEMSL1 = [KYBERS[0], BIKES[0], HQCS[0], *SMALLFRODOS] 215 | KEMSL3 = [KYBERS[1], BIKES[1], HQCS[1]] 216 | KEMSL5 = [KYBERS[2], HQCS[2]] 217 | 218 | LEVELS: list[NistLevel] = [1, 3, 5] 219 | KEMS = {1: KEMSL1, 3: KEMSL3, 5: KEMSL5} 220 | 221 | # SIGS: list[str] = [*DILITHIUMS, *FALCONS, *SPHINCSES] 222 | 223 | SIGSL1 = [DILITHIUMS[0], FALCONS[0], *SPHINCSESL1] 224 | SIGSL3 = [DILITHIUMS[1], FALCONS[1], *SPHINCSESL3] 225 | SIGSL5 = [DILITHIUMS[2], FALCONS[1], *SPHINCSESL5] 226 | 227 | SIGS = {1: SIGSL1, 3: SIGSL3, 5: SIGSL5} 228 | 229 | ALGORITHMS: set[Experiment] = { 230 | # Need to specify leaf always as sigalg to construct correct binary directory 231 | # EXPERIMENT - KEX - LEAF - INT - ROOT - CLIENT AUTH - CLIENT CA 232 | # Smaller list of actually printed experiments 233 | Experiment("sign", "n/a", "X25519", "RSA2048", "RSA2048", "RSA2048"), 234 | Experiment( 235 | "sign", "n/a", "X25519", "RSA2048", "RSA2048", "RSA2048", "RSA2048", "RSA2048" 236 | ), 237 | # PQ experiments 238 | # KDDD & KFFF + KSfSfSf + KSsSsSs 239 | *( 240 | Experiment("sign", level, KYBER[level], sig, sig, sig) 241 | for level in LEVELS 242 | for sig in [DILITHIUM[level], FALCON[level], *SPHINCS[level]] 243 | ), 244 | # And the mutual variants 245 | *( 246 | Experiment("sign", level, KYBER[level], sig, sig, sig, sig, sig) 247 | for level in LEVELS 248 | for sig in [DILITHIUM[level], FALCON[level], *SPHINCS[level]] 249 | ), 250 | # KDFF 251 | *( 252 | Experiment( 253 | "sign", level, KYBER[level], DILITHIUM[level], FALCON[level], FALCON[level] 254 | ) 255 | for level in LEVELS 256 | ), 257 | # KDFFDF 258 | *( 259 | 
Experiment( 260 | "sign", 261 | level, 262 | KYBER[level], 263 | DILITHIUM[level], 264 | FALCON[level], 265 | FALCON[level], 266 | DILITHIUM[level], 267 | FALCON[level], 268 | ) 269 | for level in LEVELS 270 | ), 271 | # KSxXX + KDXX 272 | *( 273 | Experiment("sign", level, KYBER[level], sig, XMSS[level], XMSS[level]) 274 | for level in LEVELS 275 | for sig in [*SPHINCS[level], DILITHIUM[level]] 276 | ), 277 | # KSxXXSxX + KDXXDX 278 | *( 279 | Experiment( 280 | "sign", level, KYBER[level], sig, XMSS[level], XMSS[level], sig, XMSS[level] 281 | ) 282 | for level in LEVELS 283 | for sig in [*SPHINCS[level], DILITHIUM[level]] 284 | ), 285 | # HDDD 286 | *( 287 | Experiment( 288 | "sign", 289 | level, 290 | HQC[level], 291 | DILITHIUM[level], 292 | DILITHIUM[level], 293 | DILITHIUM[level], 294 | ) 295 | for level in LEVELS 296 | ), 297 | # HDDDDD 298 | *( 299 | Experiment( 300 | "sign", 301 | level, 302 | HQC[level], 303 | DILITHIUM[level], 304 | DILITHIUM[level], 305 | DILITHIUM[level], 306 | DILITHIUM[level], 307 | DILITHIUM[level], 308 | ) 309 | for level in LEVELS 310 | ), 311 | ## KEMTLS 312 | # KKDD + KKFF + KKSxSx + KKXX 313 | *( 314 | Experiment("kemtls", level, KYBER[level], KYBER[level], sig, sig) 315 | for level in LEVELS 316 | for sig in {DILITHIUM[level], FALCON[level], *SPHINCS[level], XMSS[level]} 317 | ), 318 | # KKDDKD + KKFFKF + KKSxSxKSx + KKXXKX 319 | *( 320 | Experiment( 321 | "kemtls", level, KYBER[level], KYBER[level], sig, sig, KYBER[level], sig 322 | ) 323 | for level in LEVELS 324 | for sig in {DILITHIUM[level], FALCON[level], *SPHINCS[level], XMSS[level]} 325 | ), 326 | *( 327 | Experiment( 328 | "kemtls", level, HQC[level], HQC[level], DILITHIUM[level], DILITHIUM[level] 329 | ) 330 | for level in LEVELS 331 | ), 332 | *( 333 | Experiment( 334 | "kemtls", 335 | level, 336 | HQC[level], 337 | HQC[level], 338 | DILITHIUM[level], 339 | DILITHIUM[level], 340 | HQC[level], 341 | DILITHIUM[level], 342 | ) 343 | for level in LEVELS 344 | ), 345 | ## PDK 346 | ## KK + HH 347 | *( 348 | Experiment("pdk", level, kem, kem) 349 | for level in LEVELS 350 | for kem in [KYBER[level], HQC[level]] 351 | ), 352 | ## KK-KD + HH-HD 353 | *( 354 | Experiment("pdk", level, kem, kem, client_auth=kem, client_ca=sig) 355 | for level in LEVELS 356 | for kem in [KYBER[level], HQC[level]] 357 | for sig in [DILITHIUM[level], FALCON[level]] 358 | ), 359 | # Mceliece + K 360 | *( 361 | Experiment("pdk", level, KYBER[level], kem) 362 | for level in LEVELS 363 | for kem in MCELIECES[level] 364 | ), 365 | # McEliece + K + KD 366 | *( 367 | Experiment( 368 | "pdk", 369 | level, 370 | KYBER[level], 371 | kem, 372 | client_auth=KYBER[level], 373 | client_ca=sig, 374 | ) 375 | for level in LEVELS 376 | for kem in MCELIECES[level] 377 | for sig in [DILITHIUM[level], FALCON[level]] 378 | ), 379 | # Sign-cached 380 | *( 381 | Experiment("sign-cached", level, kem, sig) 382 | for level in LEVELS 383 | for kem in [KYBER[level], HQC[level]] 384 | for sig in [DILITHIUM[level], FALCON[level]] 385 | ), 386 | *( 387 | Experiment("sign-cached", level, kem, sig, client_auth=sig, client_ca=sig) 388 | for level in LEVELS 389 | for kem in [KYBER[level], HQC[level]] 390 | for sig in [DILITHIUM[level], FALCON[level]] 391 | ), 392 | *( 393 | Experiment("optls", "n/a", nike, nike, "Falcon512", "Falcon512", keygen_cache=cached) 394 | for nike in [ 395 | "CTIDH512", 396 | "CTIDH1024", 397 | # "CSIDH2047M1L226", 398 | # "CTIDH2047M1L226", 399 | # "CSIDH4095M27L262", 400 | # "CTIDH4095M27L262", 401 | # "CTIDH5119M46L244", 402 | 
] 403 | for cached in [True, False] 404 | ), 405 | } 406 | 407 | if SECSIDH_PAPER: 408 | # Selection for secsidh paper 409 | ALGORITHMS = { 410 | Experiment("sign", 1, "Kyber512", "Falcon512", "Falcon512"), 411 | Experiment("sign", 1, "Kyber512", "Dilithium2", "Falcon512"), 412 | Experiment("sign", 3, "Kyber768", "Falcon1024", "Falcon512"), 413 | Experiment("sign", 3, "Kyber768", "Dilithium3", "Falcon512"), 414 | Experiment("kemtls", 1, "Kyber512", "Kyber512", "Falcon512"), 415 | Experiment("kemtls", 3, "Kyber768", "Kyber768", "Falcon512"), 416 | *( 417 | Experiment("optls", "n/a", nike, nike, "Falcon512", "Falcon512", keygen_cache=cached) 418 | for nike in [ 419 | "CTIDH512", 420 | "CTIDH1024", 421 | "CSIDH2047M1L226", 422 | "CSIDH4095M27L262", 423 | "CTIDH2047M1L226", 424 | "CTIDH4095M27L262", 425 | "CTIDH5119M46L244", 426 | ] 427 | for cached in [True, False] 428 | ), 429 | } 430 | 431 | BIG_LIST: set[Experiment] = { 432 | Experiment("sign", "n/a", "X25519", "RSA2048", "RSA2048", "RSA2048"), 433 | Experiment( 434 | "sign", "n/a", "X25519", "RSA2048", "RSA2048", "RSA2048", "RSA2048", "RSA2048" 435 | ), 436 | # KEMTLS paper 437 | # PQ Signed KEX 438 | *( 439 | Experiment("sign", level, kem, sig, sig, sig) 440 | for level in LEVELS 441 | for kem in KEMS[level] 442 | for sig in SIGS[level] 443 | ), 444 | # PQ Signed KEX with XMSS roots 445 | *( 446 | Experiment("sign", level, kem, sig, XMSS[level], XMSS[level]) 447 | for level in LEVELS 448 | for kem in KEMS[level] 449 | for sig in SIGS[level] 450 | ), 451 | ## Mutually authenticated 452 | *( 453 | Experiment("sign", level, kem, sig, sig, sig, sig, sig) 454 | for level in LEVELS 455 | for kem in KEMS[level] 456 | for sig in SIGS[level] 457 | ), 458 | # PQ Signed KEX with XMSS roots 459 | *( 460 | Experiment("sign", level, kem, sig, XMSS[level], XMSS[level], sig, XMSS[level]) 461 | for level in LEVELS 462 | for kem in KEMS[level] 463 | for sig in SIGS[level] 464 | ), 465 | # TLS with cached certs + client auth 466 | *( 467 | Experiment("sign-cached", level, kex, sig, client_auth=sig, client_ca=sig) 468 | for level in LEVELS 469 | for kex in KEMS[level] 470 | for sig in SIGS[level] 471 | ), 472 | # KEMTLS 473 | *( 474 | Experiment("kemtls", level, kex, kex, sig, sig) 475 | for level in LEVELS 476 | for kex in KEMS[level] 477 | for sig in [*SIGS[level], XMSS[level]] 478 | ), 479 | # KEMTLS 480 | # KEMTLS mutual 481 | *( 482 | Experiment("kemtls", level, kex, kex, sig, sig, kex, sig) 483 | for level in LEVELS 484 | for kex in KEMS[level] 485 | for sig in [*SIGS[level], XMSS[level]] 486 | ), 487 | # KEMTLS extra combinations L1 488 | *( 489 | Experiment("kemtls", 1, kex, kex, sig, sig2) 490 | for kex in KEMSL1 491 | for sig in [DILITHIUMS[0], FALCONS[0]] 492 | for sig2 in [FALCONS[0], *UOVS[1]] 493 | if sig2 != sig 494 | ), 495 | # KEMTLS extra L3 496 | *( 497 | Experiment("kemtls", 3, kex, kex, sig, sig2) 498 | for kex in KEMSL3 499 | for sig in [DILITHIUMS[1], FALCONS[1]] 500 | for sig2 in [FALCONS[1], *UOVS[3]] 501 | if sig2 != sig 502 | ), 503 | # KEMTLS extra L5 504 | *( 505 | Experiment("kemtls", 5, kex, kex, sig, sig2) 506 | for kex in KEMSL5 507 | for sig in [DILITHIUMS[2], FALCONS[1]] 508 | for sig2 in [FALCONS[1], *UOVS[5]] 509 | if sig2 != sig 510 | ), 511 | # KEMTLS MUTUAL extra combinations 512 | *( 513 | Experiment("kemtls", 1, kex, kex, sig, sig2, kex, sig2) 514 | for kex in KEMSL1 515 | for sig in [DILITHIUMS[0], FALCONS[0]] 516 | for sig2 in [DILITHIUMS[0], FALCONS[0], *UOVS[1]] 517 | ), 518 | # KEMTLS MUTUAL extra combinations 
519 | *( 520 | Experiment("kemtls", 3, kex, kex, sig, sig2, kex, sig2) 521 | for kex in KEMSL3 522 | for sig in [DILITHIUMS[1], FALCONS[1]] 523 | for sig2 in [DILITHIUMS[1], FALCONS[1], *UOVS[3]] 524 | if sig2 != sig 525 | ), 526 | *( 527 | Experiment("kemtls", 5, kex, kex, sig, sig2, kex, sig2) 528 | for kex in KEMSL5 529 | for sig in [DILITHIUMS[2], FALCONS[1]] 530 | for sig2 in [DILITHIUMS[2], FALCONS[1], *UOVS[5]] 531 | if sig2 != sig 532 | ), 533 | # PDK 534 | # Level 535 | *(Experiment("pdk", level, kex, kex) for level in LEVELS for kex in KEMS[level]), 536 | # With mutual auth 537 | *( 538 | Experiment("pdk", level, kex, kex, client_auth=kex, client_ca=sig) 539 | for level in LEVELS 540 | for kex in KEMS[level] 541 | for sig in [*SIGS[level], XMSS[level]] 542 | ), 543 | # Special combos with McEliece 544 | *( 545 | Experiment("pdk", level, kex, leaf=mceliece) 546 | for level in LEVELS 547 | for kex in KEMS[level] 548 | for mceliece in MCELIECES[level] 549 | ), 550 | # McEliece + Mutual 551 | *( 552 | Experiment("pdk", level, kex, mceliece, client_auth=kex, client_ca=sig) 553 | for level in LEVELS 554 | for kex in KEMS[level] 555 | for sig in [*SIGS[level], XMSS[level]] 556 | for mceliece in MCELIECES[level] 557 | ), 558 | # OPTLS 559 | *( 560 | Experiment("optls", 1, alg, alg, "Falcon512", "Falcon512", keygen_cache=True) 561 | for alg in ( 562 | "CSIDH2047M1L226", 563 | "CTIDH2047M1L226", 564 | ) 565 | ), 566 | *( 567 | Experiment("optls", 1, alg, alg, "Falcon512", "Falcon512", keygen_cache=False) 568 | for alg in ( 569 | "CSIDH2047M1L226", 570 | "CTIDH2047M1L226", 571 | ) 572 | ), 573 | } 574 | 575 | # Validate choices 576 | def __validate_experiments() -> None: 577 | nikes: list[str] = [alg.upper() for alg in algorithms.nikes] 578 | known_kexes: list[str] = [kem[1] for kem in algorithms.kems] + ["X25519"] + nikes 579 | known_sigs: list[str] = [sig[1] for sig in algorithms.signs] + ["RSA2048"] 580 | for (type, _, kex, leaf, int, root, client_auth, client_ca, _) in ALGORITHMS: 581 | assert ( 582 | kex in known_kexes 583 | ), f"{kex} is not a known KEM (not in {' '.join(known_kexes)})" 584 | assert (leaf in known_kexes and type in ("kemtls", "pdk", "optls")) or ( 585 | leaf in known_sigs and type not in ("kemtls", "pdk", "optls") 586 | ), f"{leaf} is not a known algorithm for hs authentication with {type}" 587 | assert ( 588 | int is None or int in known_sigs 589 | ), f"{int} is not a known signature algorithm" 590 | assert ( 591 | root is None or root in known_sigs 592 | ), f"{root} is not a known signature algorithm" 593 | assert ( 594 | client_auth is None 595 | or (client_auth in known_sigs and type not in ("kemtls", "pdk")) 596 | or (client_auth in known_kexes and type in ("kemtls", "pdk")) 597 | ), f"{client_auth} is not a known signature algorithm or KEM for {type}" 598 | assert ( 599 | client_ca is None or client_ca in known_sigs 600 | ), f"{client_ca} is not a known sigalg" 601 | 602 | 603 | __validate_experiments() 604 | 605 | 606 | def only_unique_experiments() -> None: 607 | """get unique experiments: one of each type""" 608 | global ALGORITHMS 609 | seen: set[tuple[ExperimentType, bool, bool]] = set() 610 | 611 | def update(exp: Experiment) -> Experiment: 612 | seen.add((exp.type, exp.client_auth is None, exp.keygen_cache)) 613 | return exp 614 | 615 | ALGORITHMS = { 616 | update(exp) 617 | for exp in ALGORITHMS 618 | if (exp.type, exp.client_auth is None, exp.keygen_cache) not in seen 619 | } 620 | 621 | 622 | TIMER_REGEX = re.compile(r"(?P