├── test-work ├── .gitignore └── ignore ├── test-resources ├── source-3 │ ├── file5.txt │ ├── file2.txt │ ├── file4.txt │ └── file1.txt ├── source-1 │ ├── file1.txt │ ├── file2.txt │ └── file3.txt └── source-2 │ ├── file2.txt │ ├── file4.txt │ └── file1.txt ├── .gitignore ├── pkg ├── test-scripts │ └── test-rrdpit.sh ├── debian │ ├── description.txt │ └── postinst └── rules │ ├── packages-to-test.yml │ ├── packages-to-build.yml │ └── docker-images-to-build.yml ├── src ├── lib.rs ├── main.rs ├── options.rs ├── sync.rs ├── xml.rs └── rrdp.rs ├── docker └── entrypoint.sh ├── .github └── workflows │ ├── ci.yml │ └── pkg.yml ├── LICENSE ├── Cargo.toml ├── Dockerfile ├── README.md └── Cargo.lock /test-work/.gitignore: -------------------------------------------------------------------------------- 1 | *.xml 2 | -------------------------------------------------------------------------------- /test-resources/source-3/file5.txt: -------------------------------------------------------------------------------- 1 | file5 -------------------------------------------------------------------------------- /test-resources/source-1/file1.txt: -------------------------------------------------------------------------------- 1 | file1 2 | -------------------------------------------------------------------------------- /test-resources/source-1/file2.txt: -------------------------------------------------------------------------------- 1 | file2 2 | -------------------------------------------------------------------------------- /test-resources/source-1/file3.txt: -------------------------------------------------------------------------------- 1 | file3 2 | -------------------------------------------------------------------------------- /test-resources/source-2/file2.txt: -------------------------------------------------------------------------------- 1 | file2 2 | -------------------------------------------------------------------------------- /test-resources/source-2/file4.txt: -------------------------------------------------------------------------------- 1 | file4 2 | -------------------------------------------------------------------------------- /test-resources/source-3/file2.txt: -------------------------------------------------------------------------------- 1 | file2 2 | -------------------------------------------------------------------------------- /test-resources/source-3/file4.txt: -------------------------------------------------------------------------------- 1 | file4 2 | -------------------------------------------------------------------------------- /test-resources/source-2/file1.txt: -------------------------------------------------------------------------------- 1 | file1-a 2 | -------------------------------------------------------------------------------- /test-resources/source-3/file1.txt: -------------------------------------------------------------------------------- 1 | file1-a 2 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | .cargo 2 | .idea/ 3 | .DS_Store 4 | target/ 5 | -------------------------------------------------------------------------------- /test-work/ignore: -------------------------------------------------------------------------------- 1 | test dir where xml files are saved (gitignored) 2 | -------------------------------------------------------------------------------- /pkg/test-scripts/test-rrdpit.sh: 
-------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | set -eo pipefail 4 | set -x 5 | 6 | case $1 in 7 | post-install) 8 | echo -e "\nRRDPIT VERSION:" 9 | rrdpit --version 10 | ;; 11 | 12 | post-upgrade) 13 | ;; 14 | esac -------------------------------------------------------------------------------- /src/lib.rs: -------------------------------------------------------------------------------- 1 | extern crate base64; 2 | extern crate bytes; 3 | extern crate clap; 4 | #[macro_use] 5 | extern crate derive_more; 6 | extern crate core; 7 | extern crate hex; 8 | extern crate ring; 9 | extern crate uuid; 10 | extern crate xml as xmlrs; 11 | 12 | pub mod options; 13 | pub mod rrdp; 14 | pub mod sync; 15 | pub mod xml; 16 | -------------------------------------------------------------------------------- /pkg/debian/description.txt: -------------------------------------------------------------------------------- 1 | "rrdpit" is a small little tool that can be pointed at a directory on your 2 | system, and produce RPKI RRDP (RFC 8182) notification, snapshot, and 3 | delta files. You will need to use an http server of your preferred 4 | flavour to deliver these files to the world. 5 | 6 | Read more here: 7 | https://github.com/NLnetLabs/rrdpit/ 8 | -------------------------------------------------------------------------------- /pkg/rules/packages-to-test.yml: -------------------------------------------------------------------------------- 1 | --- 2 | pkg: 3 | - "rrdpit" 4 | image: 5 | - "ubuntu:focal" # ubuntu/20.04 6 | - "ubuntu:jammy" # ubuntu/22.04 7 | - "ubuntu:noble" # ubuntu/24.04 8 | - "debian:bullseye" # debian/11 9 | - "debian:bookworm" # debian/12 10 | - "debian:trixie" # debian/12 11 | mode: 12 | - "fresh-install" 13 | target: 14 | - "x86_64" 15 | -------------------------------------------------------------------------------- /pkg/debian/postinst: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | set -e 3 | 4 | KRILL_HOME="/var/lib/rrdpit/" 5 | KRILL_USER="rrdpit" 6 | 7 | create_user() { 8 | if id ${KRILL_USER} > /dev/null 2>&1; then return; fi 9 | adduser --system --home "${KRILL_HOME}" --group ${KRILL_USER} 10 | } 11 | 12 | case "$1" in 13 | configure) 14 | create_user 15 | ;; 16 | esac 17 | 18 | #DEBHELPER# 19 | -------------------------------------------------------------------------------- /pkg/rules/packages-to-build.yml: -------------------------------------------------------------------------------- 1 | --- 2 | pkg: 3 | - "rrdpit" 4 | image: 5 | - "ubuntu:focal" # ubuntu/20.04 6 | - "ubuntu:jammy" # ubuntu/22.04 7 | - "ubuntu:noble" # ubuntu/24.04 8 | - "debian:bullseye" # debian/11 9 | - "debian:bookworm" # debian/12 10 | - "debian:trixie" # debian/13 11 | - "almalinux:8" 12 | - "almalinux:9" 13 | - "almalinux:10" 14 | target: 15 | - "x86_64" 16 | -------------------------------------------------------------------------------- /docker/entrypoint.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # run rrdpit with variable interpolation 3 | set -e 4 | DATA="${DATA:-/data}" 5 | SOURCE_DIR="${SOURCE_DIR:-$DATA/source}" 6 | TARGET_DIR="${TARGET_DIR:-$DATA/target}" 7 | RSYNC_URI="${RSYNC_URI:-rsync://example.org/test/}" 8 | HTTPS_URI="${HTTPS_URI:-https://example.org/}" 9 | 10 | exec /usr/local/bin/rrdpit \ 11 | --source ${SOURCE_DIR} \ 12 | --target ${TARGET_DIR} \ 13 | --rsync ${RSYNC_URI} \ 14 | --https ${HTTPS_URI} \ 15 
| "$@" 16 | -------------------------------------------------------------------------------- /pkg/rules/docker-images-to-build.yml: -------------------------------------------------------------------------------- 1 | # See: https://github.com/NLnetLabs/ploutos/blob/main/docs/docker_packaging.md#docker-build-rules 2 | --- 3 | include: 4 | - platform: 'linux/amd64' 5 | shortname: 'amd64' 6 | mode: 'build' 7 | 8 | - platform: 'linux/arm/v6' 9 | shortname: 'armv6' 10 | crosstarget: 'arm-unknown-linux-musleabihf' 11 | mode: 'copy' 12 | 13 | - platform: 'linux/arm/v7' 14 | shortname: 'armv7' 15 | crosstarget: 'armv7-unknown-linux-musleabihf' 16 | mode: 'copy' 17 | 18 | - platform: 'linux/arm64' 19 | shortname: 'arm64' 20 | crosstarget: 'aarch64-unknown-linux-musl' 21 | mode: 'copy' 22 | -------------------------------------------------------------------------------- /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | name: ci 2 | on: 3 | push: 4 | branches: 5 | - main 6 | - master 7 | pull_request: 8 | branches: 9 | - main 10 | - master 11 | jobs: 12 | test: 13 | name: test 14 | runs-on: ${{ matrix.os }} 15 | strategy: 16 | matrix: 17 | os: [ubuntu-latest] 18 | rust: [1.84.0, stable] 19 | steps: 20 | - name: Checkout repository 21 | uses: actions/checkout@v2 22 | - name: Install Rust 23 | uses: hecrj/setup-rust-action@v2 24 | with: 25 | rust-version: ${{ matrix.rust }} 26 | - if: matrix.rust == 'stable' 27 | run: rustup component add clippy 28 | - if: matrix.rust == 'stable' 29 | run: cargo clippy -- -D warnings 30 | - run: cargo build --verbose --locked 31 | - run: cargo test --verbose 32 | -------------------------------------------------------------------------------- /.github/workflows/pkg.yml: -------------------------------------------------------------------------------- 1 | name: Packaging 2 | 3 | on: 4 | push: 5 | branches: 6 | - main 7 | tags: 8 | - v* 9 | 10 | # Triggering on PRs and arbitrary branch pushes is not enabled because most of the time only the CI build should be 11 | # triggered, not the packaging build. In cases where you want to test changes to this workflow this trigger enables 12 | # you to manually invoke this workflow on an arbitrary branch as needed. 13 | workflow_dispatch: 14 | 15 | jobs: 16 | package: 17 | # See: https://github.com/NLnetLabs/ploutos 18 | uses: NLnetLabs/ploutos/.github/workflows/pkg-rust.yml@v8 19 | secrets: 20 | DOCKER_HUB_ID: ${{ vars.DOCKER_HUB_ID }} 21 | DOCKER_HUB_TOKEN: ${{ secrets.DOCKER_HUB_TOKEN }} 22 | with: 23 | cross_build_args: "" 24 | cross_max_wait_mins: 20 25 | 26 | docker_org: nlnetlabs 27 | docker_repo: rrdpit 28 | docker_build_rules: pkg/rules/docker-images-to-build.yml 29 | docker_sanity_check_command: rrdpit --version 30 | 31 | package_build_rules: pkg/rules/packages-to-build.yml 32 | package_test_rules: pkg/rules/packages-to-test.yml 33 | package_test_scripts_path: pkg/test-scripts/test-.sh 34 | 35 | deb_extra_build_packages: libssl-dev 36 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Copyright (c) 2018, NLnet Labs. All rights reserved. 2 | 3 | This software is open source. 
4 | 5 | Redistribution and use in source and binary forms, with or without 6 | modification, are permitted provided that the following conditions 7 | are met: 8 | 9 | Redistributions of source code must retain the above copyright notice, 10 | this list of conditions and the following disclaimer. 11 | 12 | Redistributions in binary form must reproduce the above copyright notice, 13 | this list of conditions and the following disclaimer in the documentation 14 | and/or other materials provided with the distribution. 15 | 16 | Neither the name of the NLNET LABS nor the names of its contributors may 17 | be used to endorse or promote products derived from this software without 18 | specific prior written permission. 19 | 20 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS 21 | "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT 22 | LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR 23 | A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT 24 | HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, 25 | SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED 26 | TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR 27 | PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF 28 | LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING 29 | NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS 30 | SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 31 | -------------------------------------------------------------------------------- /Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "rrdpit" 3 | version = "0.1.1" 4 | authors = ["The NLnet Labs RPKI Team "] 5 | description = "RRDP IT: A disk to RPKI Delta Protocol tool." 6 | repository = "https://github.com/NLnetLabs/rrdpit" 7 | keywords = ["rpki", "routing-security", "bgp"] 8 | categories = ["command-line-utilities"] 9 | edition = "2021" 10 | license = "BSD-3-Clause" 11 | readme = "README.md" 12 | 13 | [dependencies] 14 | base64 = "^0.22" 15 | bytes = "^1.10" 16 | clap = { version = "^4.5.39", features = ["cargo"] } 17 | derive_more = { version = "^2.0", features = ["full"] } 18 | hex = "^0.4.3" 19 | ring = "^0.17" 20 | uuid = { version = "^1.17", features = ["v4"] } 21 | xml-rs = "0.8.26" 22 | 23 | [dev-dependencies] 24 | tempfile = "^3.20" 25 | 26 | # ------------------------------------------------------------------------------ 27 | # START DEBIAN PACKAGING 28 | # 29 | # Configurations for the cargo-deb cargo plugin which builds Debian packages in 30 | # target/debian/ when invoked with: cargo deb. 31 | # 32 | # TODO: 33 | # - Build packages with GH Actions 34 | # - Add man page? 35 | # - Add changelog 36 | # 37 | # NOTE: 38 | # - There is a single binary only (no daemon yet) 39 | 40 | [package.metadata.deb] 41 | name = "rrdpit" 42 | priority = "optional" 43 | section = "net" 44 | extended-description-file = "pkg/debian/description.txt" 45 | license-file = ["LICENSE", "0"] 46 | depends = "" 47 | maintainer-scripts = "pkg/debian/" 48 | changelog = "target/debian/changelog" # this will be generated by the pkg workflow 49 | copyright = "Copyright (c) 2025, NLnet Labs. All rights reserved." 
50 | assets = [["target/release/rrdpit", "/usr/bin/rrdpit", "755"]] 51 | 52 | # List target variants 53 | [package.metadata.deb.variants.ubuntu-focal] 54 | 55 | [package.metadata.deb.variants.ubuntu-jammy] 56 | 57 | [package.metadata.deb.variants.ubuntu-noble] 58 | 59 | [package.metadata.deb.variants.debian-buster] 60 | 61 | [package.metadata.deb.variants.debian-bullseye] 62 | 63 | [package.metadata.deb.variants.debian-bookworm] 64 | 65 | # END DEBIAN PACKAGING 66 | # ------------------------------------------------------------------------------ 67 | 68 | # ------------------------------------------------------------------------------ 69 | # START RPM PACKAGING 70 | # 71 | # Configurations for the cargo-generate-rpm cargo plugin which builds RPM 72 | # packages in target/generate-rpm/ when invoked with: cargo generate-rpm 73 | # 74 | [package.metadata.generate-rpm] 75 | assets = [ 76 | { source = "target/release/rrdpit", dest = "/usr/bin/rrdpit", mode = "755" }, 77 | ] 78 | # END RPM PACKAGING 79 | # ------------------------------------------------------------------------------ 80 | -------------------------------------------------------------------------------- /src/main.rs: -------------------------------------------------------------------------------- 1 | #[macro_use] 2 | extern crate derive_more; 3 | extern crate rrdpit; 4 | extern crate uuid; 5 | 6 | use std::fmt; 7 | use std::path::PathBuf; 8 | use uuid::Uuid; 9 | 10 | use rrdpit::options::Options; 11 | use rrdpit::rrdp::{RepoState, Snapshot}; 12 | use rrdpit::sync::crawl_disk; 13 | use rrdpit::sync::RsyncUri; 14 | 15 | fn main() { 16 | match Options::from_args() { 17 | Ok(options) => match sync(options) { 18 | Ok(()) => {} 19 | Err(e) => { 20 | eprintln!("{e}"); 21 | ::std::process::exit(1); 22 | } 23 | }, 24 | Err(e) => { 25 | eprintln!("{e}"); 26 | ::std::process::exit(1); 27 | } 28 | } 29 | } 30 | 31 | fn snapshot( 32 | session: Uuid, 33 | serial: u64, 34 | source: &PathBuf, 35 | rsync: &RsyncUri, 36 | ) -> Result { 37 | let files = crawl_disk(source, rsync).map_err(Error::custom)?; 38 | Ok(Snapshot::new(session, serial, files)) 39 | } 40 | 41 | fn sync(options: Options) -> Result<(), Error> { 42 | let state = match RepoState::reconstitute(options.https.clone(), options.target.clone()) { 43 | Ok(mut state) => { 44 | let snapshot = snapshot( 45 | state.session(), 46 | state.serial() + 1, 47 | &options.source, 48 | &options.rsync, 49 | ) 50 | .map_err(Error::custom)?; 51 | state.apply(snapshot).map_err(Error::custom)?; 52 | state 53 | } 54 | Err(_) => { 55 | let snapshot = snapshot(Uuid::new_v4(), 1, &options.source, &options.rsync) 56 | .map_err(Error::custom)?; 57 | RepoState::new(snapshot, options.https.clone(), options.target.clone()) 58 | } 59 | }; 60 | 61 | state 62 | .save(options.max_deltas, options.clean) 63 | .map_err(Error::custom) 64 | } 65 | 66 | //------------ Error --------------------------------------------------------- 67 | #[derive(Debug, Display)] 68 | pub enum Error { 69 | #[display("{}", _0)] 70 | Custom(String), 71 | } 72 | 73 | impl Error { 74 | fn custom(e: impl fmt::Display) -> Self { 75 | Error::Custom(e.to_string()) 76 | } 77 | } 78 | 79 | #[cfg(test)] 80 | mod tests { 81 | use std::{fs::{self, File}, io::Write}; 82 | 83 | use rrdpit::options::Options; 84 | use tempfile::tempdir; 85 | use uuid::Uuid; 86 | 87 | use crate::sync; 88 | 89 | #[test] 90 | fn test_max_deltas() { 91 | let source = tempdir().unwrap(); 92 | let target = tempdir().unwrap(); 93 | 94 | fs::create_dir(target.path().join( 95 | 
Uuid::new_v4().to_string() 96 | )).unwrap(); 97 | 98 | for i in 1..25 { 99 | let mut file = File::create( 100 | source.path().join(format!("test{}", i)) 101 | ).unwrap(); 102 | file.write(b"test").unwrap(); 103 | let options = Options { 104 | source: source.path().to_path_buf(), 105 | target: target.path().to_path_buf(), 106 | rsync: "rsync://example.org/rrdpit".into(), 107 | https: "https://example.org/rrdpit/".into(), 108 | clean: i > 10, 109 | max_deltas: 5, 110 | }; 111 | sync(options).unwrap(); 112 | } 113 | 114 | let paths = fs::read_dir(&target).unwrap(); 115 | assert_eq!(2, paths.count()); 116 | } 117 | } 118 | -------------------------------------------------------------------------------- /src/options.rs: -------------------------------------------------------------------------------- 1 | use clap::{Arg, Command}; 2 | use std::path::PathBuf; 3 | use crate::sync::{HttpsUri, RsyncUri}; 4 | 5 | pub struct Options { 6 | pub source: PathBuf, 7 | pub target: PathBuf, 8 | pub rsync: RsyncUri, 9 | pub https: HttpsUri, 10 | pub clean: bool, 11 | pub max_deltas: usize, 12 | } 13 | 14 | impl Options { 15 | pub fn from_strs( 16 | source: &str, 17 | target: &str, 18 | rsync: &str, 19 | https: &str, 20 | clean: bool, 21 | max_deltas: &str, 22 | ) -> Result { 23 | let source = PathBuf::from(source); 24 | let target = PathBuf::from(target); 25 | 26 | let rsync = rsync.trim_end_matches("/").to_owned() + "/"; 27 | let https = https.trim_end_matches("/").to_owned() + "/"; 28 | 29 | let rsync = 30 | RsyncUri::base_uri(&rsync).map_err(|_| Error::RsyncBaseUri(rsync.to_string()))?; 31 | let https = 32 | HttpsUri::base_uri(&https).map_err(|_| Error::HttpsBaseUri(https.to_string()))?; 33 | 34 | let max_deltas = max_deltas 35 | .parse::() 36 | .map_err(|_| Error::CannotParseNumber(max_deltas.to_string()))?; 37 | 38 | if !source.is_dir() { 39 | Err(Error::cannot_read(source)) 40 | } else if !target.is_dir() { 41 | Err(Error::cannot_read(target)) 42 | } else { 43 | Ok(Options { 44 | source, 45 | target, 46 | rsync, 47 | https, 48 | clean, 49 | max_deltas, 50 | }) 51 | } 52 | } 53 | 54 | pub fn from_args() -> Result { 55 | let matches = Command::new("rrdpit") 56 | .version(env!("CARGO_PKG_VERSION")) 57 | .about("Dist to RPKI RRDP") 58 | .arg( 59 | Arg::new("source") 60 | .long("source") 61 | .value_name("dir") 62 | .help("source directory") 63 | .required(true), 64 | ) 65 | .arg( 66 | Arg::new("target") 67 | .long("target") 68 | .value_name("dir") 69 | .help("target directory") 70 | .required(true), 71 | ) 72 | .arg( 73 | Arg::new("rsync") 74 | .long("rsync") 75 | .value_name("uri") 76 | .help("base rsync uri") 77 | .required(true), 78 | ) 79 | .arg( 80 | Arg::new("https") 81 | .long("https") 82 | .value_name("uri") 83 | .help("base rrdp uri") 84 | .required(true), 85 | ) 86 | .arg( 87 | Arg::new("clean") 88 | .help("Clean up target dir (handle with care!)") 89 | .required(false), 90 | ) 91 | .arg( 92 | Arg::new("max_deltas") 93 | .long("max_deltas") 94 | .value_name("number") 95 | .help("Limit the maximum number of deltas kept. Default: 25. 
Minimum: 1") 96 | .required(false), 97 | ) 98 | .get_matches(); 99 | 100 | let source = matches.get_one::("source").unwrap(); 101 | let target = matches.get_one::("target").unwrap(); 102 | let rsync = matches.get_one::("rsync").unwrap(); 103 | let https = matches.get_one::("https").unwrap(); 104 | let max_deltas_default = "25".to_string(); 105 | let max_deltas = matches.get_one::("max_deltas").unwrap_or(&max_deltas_default); 106 | 107 | let clean = matches.contains_id("clean"); 108 | 109 | Self::from_strs(source, target, rsync, https, clean, max_deltas) 110 | } 111 | } 112 | 113 | //------------ Error --------------------------------------------------------- 114 | 115 | #[derive(Debug, Display)] 116 | pub enum Error { 117 | #[display("Not a directory: {}", _0)] 118 | CannotRead(String), 119 | 120 | #[display("Not a directory: {}", _0)] 121 | RsyncBaseUri(String), 122 | 123 | #[display("Not a directory: {}", _0)] 124 | HttpsBaseUri(String), 125 | 126 | #[display("Cannot parse number: {}", _0)] 127 | CannotParseNumber(String), 128 | 129 | #[display("max_deltas must be at least 1")] 130 | MaxDeltasMustBeOneOrHigher, 131 | } 132 | 133 | impl Error { 134 | fn cannot_read(source: PathBuf) -> Self { 135 | Error::CannotRead(source.to_string_lossy().to_string()) 136 | } 137 | } 138 | 139 | //------------ Tests --------------------------------------------------------- 140 | 141 | #[cfg(test)] 142 | pub mod tests { 143 | 144 | use super::*; 145 | 146 | #[test] 147 | fn parse_arguments() { 148 | Options::from_strs( 149 | "./test-resources/source-1", 150 | "./test-work", 151 | "rsync://localhost/repo/", 152 | "https://localhost/repo/", 153 | false, 154 | "25", 155 | ) 156 | .unwrap(); 157 | } 158 | } 159 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | # This is a multi-stage Dockerfile, with a selectable first stage. With this 2 | # approach we get: 3 | # 4 | # 1. Separation of dependencies needed to build our app in the 'build' stage 5 | # and those needed to run our app in the 'final' stage, as we don't want 6 | # the build-time dependencies to be included in the final Docker image. 7 | # 8 | # 2. Support for either building our app for the architecture of the base 9 | # image using MODE=build (the default) or for externally built app 10 | # binaries (e.g. cross-compiled) using MODE=copy. 11 | # 12 | # In total there are four stages consisting of: 13 | # - Two possible first stages: 'build' or 'copy'. 14 | # - A special 'source' stage which selects either 'build' or 'copy' as the 15 | # source of binaries to be used by ... 16 | # - The 'final' stage. 17 | 18 | 19 | ### 20 | ### ARG DEFINITIONS ########################################################### 21 | ### 22 | 23 | # This section defines arguments that can be overriden on the command line 24 | # when invoking `docker build` using the argument form: 25 | # 26 | # `--build-arg =`. 27 | 28 | # MODE 29 | # ==== 30 | # Supported values: build (default), copy 31 | # 32 | # By default this Dockerfile will build our app from sources. If the sources 33 | # have already been (cross) compiled by some external process and you wish to 34 | # use the resulting binaries from that process, then: 35 | # 36 | # 1. Create a directory on the host called 'dockerbin/$TARGETPLATFORM' 37 | # containing the already compiled app binaries (where $TARGETPLATFORM 38 | # is a special variable set by Docker BuiltKit). 39 | # 2. 
Supply arguments `--build-arg MODE=copy` to `docker build`. 40 | ARG MODE=build 41 | 42 | 43 | # BASE_IMG 44 | # ======== 45 | # 46 | # Only used when MODE=build. 47 | ARG BASE_IMG=alpine:3.21 48 | 49 | 50 | # CARGO_ARGS 51 | # ========== 52 | # 53 | # Only used when MODE=build. 54 | # 55 | # This ARG can be used to control the features enabled when compiling the app 56 | # or other compilation settings as necessary. 57 | ARG CARGO_ARGS 58 | 59 | 60 | ### 61 | ### BUILD STAGES ############################################################## 62 | ### 63 | 64 | 65 | # ----------------------------------------------------------------------------- 66 | # Docker stage: build 67 | # ----------------------------------------------------------------------------- 68 | # 69 | # Builds our app binaries from sources. 70 | FROM ${BASE_IMG} AS build 71 | ARG CARGO_ARGS 72 | 73 | RUN apk add --no-cache rust cargo openssl-dev 74 | 75 | WORKDIR /tmp/build 76 | COPY . . 77 | 78 | # `CARGO_HTTP_MULTIPLEXING` forces Cargo to use HTTP/1.1 without pipelining 79 | # instead of HTTP/2 with multiplexing. This seems to help with various 80 | # "spurious network error" warnings when Cargo attempts to fetch from crates.io 81 | # when building this image on Docker Hub and GitHub Actions build machines. 82 | # 83 | # `cargo install` is used instead of `cargo build` because it places just the 84 | # binaries we need into a predictable output directory. We can't control this 85 | # with arguments to cargo build as `--out-dir` is unstable and contentious and 86 | # `--target-dir` still requires us to know which profile and target the 87 | # binaries were built for. By using `cargo install` we can also avoid needing 88 | # to hard-code the set of binary names to copy so that if we add or remove 89 | # built binaries in future this will "just work". Note that `--root /tmp/out` 90 | # actually causes the binaries to be placed in `/tmp/out/bin/`. `cargo install` 91 | # will create the output directory for us. 92 | RUN CARGO_HTTP_MULTIPLEXING=false cargo install \ 93 | --locked \ 94 | --path . \ 95 | --root /tmp/out/ \ 96 | ${CARGO_ARGS} 97 | 98 | 99 | # ----------------------------------------------------------------------------- 100 | # Docker stage: copy 101 | # ----------------------------------------------------------------------------- 102 | # Only used when MODE=copy. 103 | # 104 | # Copy binaries from the host directory 'dockerbin/$TARGETPLATFORM' directory 105 | # into this build stage to the same predictable location that binaries would be 106 | # in if MODE were 'build'. 107 | # 108 | # Requires that `docker build` be invoked with variable `DOCKER_BUILDKIT=1` set 109 | # in the environment. This is necessary so that Docker will skip the unused 110 | # 'build' stage and so that the magic $TARGETPLATFORM ARG will be set for us. 111 | FROM ${BASE_IMG} AS copy 112 | ARG TARGETPLATFORM 113 | ONBUILD COPY dockerbin/$TARGETPLATFORM /tmp/out/bin/ 114 | 115 | 116 | # ----------------------------------------------------------------------------- 117 | # Docker stage: source 118 | # ----------------------------------------------------------------------------- 119 | # This is a "magic" build stage that "labels" a chosen prior build stage as the 120 | # one that the build stage after this one should copy application binaries 121 | # from. It also causes the ONBUILD COPY command from the 'copy' stage to be run 122 | # if needed. 
Finally, we ensure binaries have the executable flag set because 123 | # when copied in from outside they may not have the flag set, especially if 124 | # they were uploaded as a GH actions artifact then downloaded again which 125 | # causes file permissions to be lost. 126 | # See: https://github.com/actions/upload-artifact#permission-loss 127 | FROM ${MODE} AS source 128 | RUN chmod a+x /tmp/out/bin/* 129 | 130 | 131 | # ----------------------------------------------------------------------------- 132 | # Docker stage: final 133 | # ----------------------------------------------------------------------------- 134 | # Create an image containing just the binaries, configs & scripts needed to run 135 | # our app, and not the things needed to build it. 136 | # 137 | # The previous build stage from which binaries are copied is controlled by the 138 | # MODE ARG (see above). 139 | FROM ${BASE_IMG} AS final 140 | 141 | # Copy binaries from the 'source' build stage into the image we are building 142 | COPY --from=source /tmp/out/bin/* /usr/local/bin/ 143 | 144 | # Build variables for uid and guid of user to run container 145 | ARG RUN_USER=rrdpit 146 | ARG RUN_USER_UID=1012 147 | ARG RUN_USER_GID=1012 148 | 149 | # Install required runtime dependencies 150 | RUN apk add --no-cache bash libgcc openssl tini tzdata util-linux 151 | 152 | # Create the user and group to run the application as 153 | RUN addgroup -g ${RUN_USER_GID} ${RUN_USER} && \ 154 | adduser -D -u ${RUN_USER_UID} -G ${RUN_USER} ${RUN_USER} 155 | 156 | # Create the data directories and create a volume for them 157 | VOLUME /data 158 | RUN mkdir -p /data/source /data/target && \ 159 | chown -R ${RUN_USER_UID}:${RUN_USER_GID} /data 160 | 161 | # Install a Docker entrypoint script that will be executed when the container 162 | # runs 163 | COPY docker/entrypoint.sh /opt/ 164 | RUN chown ${RUN_USER}: /opt/entrypoint.sh 165 | 166 | # Switch to our applications user 167 | USER ${RUN_USER} 168 | 169 | # Use Tini to ensure that our application responds to CTRL-C when run in the 170 | # foreground without the Docker argument "--init" (which is actually another 171 | # way of activating Tini, but cannot be enabled from inside the Docker image). 
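#
# Usage sketch (illustrative only; the host paths and URIs below are examples,
# not defaults baked into this image). An image built from this Dockerfile,
# such as the nlnetlabs/rrdpit image configured in .github/workflows/pkg.yml,
# could be run as:
#
#   docker run --rm \
#     -v /path/to/publication/repo:/data/source \
#     -v /path/to/rrdp/docroot:/data/target \
#     -e RSYNC_URI=rsync://rpki.example.net/repo/ \
#     -e HTTPS_URI=https://rpki.example.net/rrdp/ \
#     nlnetlabs/rrdpit
#
# Any additional arguments given to `docker run` are appended to the rrdpit
# invocation by docker/entrypoint.sh ("$@"), for example the positional
# `clean` argument or `--max_deltas 10`.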
172 | ENTRYPOINT ["/sbin/tini", "--", "/opt/entrypoint.sh"] 173 | -------------------------------------------------------------------------------- /src/sync.rs: -------------------------------------------------------------------------------- 1 | use std::fs::File; 2 | use std::io::{Read, Write}; 3 | use std::path::{Path, PathBuf}; 4 | use std::str::from_utf8_unchecked; 5 | use std::{fmt, fs, io}; 6 | 7 | use base64::Engine; 8 | use bytes::Bytes; 9 | use ring::digest; 10 | 11 | //------------ RsyncUri ----------------------------------------------------- 12 | 13 | #[derive(Clone, Debug, Display, Eq, Hash, Ord, PartialEq, PartialOrd)] 14 | #[display("{}", _0)] 15 | pub struct RsyncUri(String); 16 | 17 | impl RsyncUri { 18 | pub fn base_uri(s: &str) -> Result { 19 | if s.starts_with("rsync://") && s.ends_with('/') { 20 | Ok(RsyncUri(s.to_string())) 21 | } else { 22 | Err(Error::InvalidRsyncBase) 23 | } 24 | } 25 | 26 | fn resolve(&self, s: &str) -> Self { 27 | RsyncUri(format!("{}{}", self.0, s)) 28 | } 29 | } 30 | 31 | impl From<&str> for RsyncUri { 32 | fn from(s: &str) -> Self { 33 | RsyncUri(s.to_string()) 34 | } 35 | } 36 | 37 | //------------ HttpsUri ----------------------------------------------------- 38 | 39 | #[derive(Clone, Debug, Display, Eq, Hash, PartialEq)] 40 | #[display("{}", _0)] 41 | pub struct HttpsUri(String); 42 | 43 | impl HttpsUri { 44 | pub fn base_uri(s: &str) -> Result { 45 | if s.starts_with("https://") && s.ends_with('/') { 46 | Ok(HttpsUri(s.to_string())) 47 | } else { 48 | Err(Error::InvalidHttpsBase) 49 | } 50 | } 51 | 52 | pub fn resolve(&self, s: &str) -> Self { 53 | HttpsUri(format!("{}{}", self.0, s)) 54 | } 55 | 56 | pub fn relative_to(&self, mut uri: String) -> Option { 57 | if uri.starts_with(&self.0) { 58 | Some(uri.split_off(self.0.len())) 59 | } else { 60 | None 61 | } 62 | } 63 | } 64 | 65 | impl From<&str> for HttpsUri { 66 | fn from(s: &str) -> Self { 67 | HttpsUri(s.to_string()) 68 | } 69 | } 70 | 71 | //------------ Base64 -------------------------------------------------------- 72 | 73 | /// This type contains a base64 encoded structure. The publication protocol 74 | /// deals with objects in their base64 encoded form. 75 | /// 76 | /// Note that we store this in a Bytes to make it cheap to clone this. 77 | #[derive(Clone, Debug, Eq, PartialEq)] 78 | pub struct Base64(Bytes); 79 | 80 | impl Base64 { 81 | pub fn from_content(content: &[u8]) -> Self { 82 | let base64 = base64::engine::general_purpose::STANDARD.encode(content); 83 | Base64(Bytes::from(base64)) 84 | } 85 | 86 | pub fn from_b64_str(s: &str) -> Self { 87 | Base64(Bytes::copy_from_slice(s.as_bytes())) 88 | } 89 | } 90 | 91 | impl fmt::Display for Base64 { 92 | fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { 93 | let s = unsafe { from_utf8_unchecked(self.0.as_ref()) }; 94 | s.fmt(f) 95 | } 96 | } 97 | 98 | //------------ EncodedHash --------------------------------------------------- 99 | 100 | /// This type contains a hex encoded sha256 hash. 101 | /// 102 | /// Note that we store this in a Bytes for cheap cloning. 
103 | #[derive(Clone, Debug, Eq, Hash, PartialEq)] 104 | pub struct EncodedHash(Bytes); 105 | 106 | impl EncodedHash { 107 | pub fn from_content(content: &[u8]) -> Self { 108 | let sha256 = Self::sha256(content); 109 | let hex = hex::encode(sha256); 110 | EncodedHash(Bytes::from(hex)) 111 | } 112 | 113 | pub fn sha256(object: &[u8]) -> Bytes { 114 | Bytes::copy_from_slice( 115 | digest::digest(&digest::SHA256, object).as_ref()) 116 | } 117 | } 118 | 119 | impl fmt::Display for EncodedHash { 120 | fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { 121 | let s = unsafe { from_utf8_unchecked(self.0.as_ref()) }; 122 | s.fmt(f) 123 | } 124 | } 125 | 126 | //------------ CurrentFile --------------------------------------------------- 127 | 128 | #[derive(Clone, Debug, Eq, PartialEq)] 129 | pub struct CurrentFile { 130 | /// The full uri for this file. 131 | uri: RsyncUri, 132 | 133 | /// The base64 encoded content of a file. 134 | base64: Base64, 135 | 136 | /// The hex encoded sha-256 hash of the file. 137 | hash: EncodedHash, 138 | } 139 | 140 | impl CurrentFile { 141 | pub fn new(uri: RsyncUri, content: &[u8]) -> Self { 142 | let base64 = Base64::from_content(content); 143 | let hash = EncodedHash::from_content(content); 144 | CurrentFile { uri, base64, hash } 145 | } 146 | 147 | pub fn uri(&self) -> &RsyncUri { 148 | &self.uri 149 | } 150 | pub fn base64(&self) -> &Base64 { 151 | &self.base64 152 | } 153 | pub fn hash(&self) -> &EncodedHash { 154 | &self.hash 155 | } 156 | } 157 | 158 | //------------ CurrentFile --------------------------------------------------- 159 | 160 | /// Reads a file to Bytes 161 | pub fn read(path: &PathBuf) -> Result { 162 | let mut f = File::open(path).map_err(|_| Error::cannot_read(path))?; 163 | let mut bytes = Vec::new(); 164 | f.read_to_end(&mut bytes)?; 165 | Ok(Bytes::from(bytes)) 166 | } 167 | 168 | fn create_file_with_path(path: &Path) -> Result { 169 | if !path.exists() { 170 | if let Some(parent) = path.parent() { 171 | fs::create_dir_all(parent)?; 172 | } 173 | } 174 | File::create(path) 175 | } 176 | 177 | /// Derive the path for this file. 178 | pub fn file_path(base_path: &Path, file_name: &str) -> PathBuf { 179 | let mut path = base_path.to_path_buf(); 180 | path.push(file_name); 181 | path 182 | } 183 | 184 | /// Saves a file, creating parent dirs as needed 185 | pub fn save(content: &[u8], full_path: &Path) -> Result<(), io::Error> { 186 | let mut f = create_file_with_path(full_path)?; 187 | f.write_all(content)?; 188 | Ok(()) 189 | } 190 | 191 | fn recurse_disk( 192 | base_path: &PathBuf, 193 | path: &PathBuf, 194 | rsync_base: &RsyncUri, 195 | ) -> Result, Error> { 196 | let mut res = Vec::new(); 197 | 198 | for entry in fs::read_dir(path).map_err(|_| Error::cannot_read(path))? 
{ 199 | let entry = entry.map_err(|_| Error::cannot_read(path))?; 200 | let path = entry.path(); 201 | if entry 202 | .file_name() 203 | .to_str() 204 | .map(|name| name.starts_with('.')) 205 | .unwrap_or(true) 206 | { 207 | // this is a hidden file / directory (by convention) so skip it 208 | } else if path.is_dir() { 209 | let mut other = recurse_disk(base_path, &path, rsync_base)?; 210 | res.append(&mut other); 211 | } else { 212 | let uri = derive_uri(base_path, &path, rsync_base)?; 213 | let content = read(&path).map_err(|_| Error::cannot_read(&path))?; 214 | let current_file = CurrentFile::new(uri, &content); 215 | 216 | res.push(current_file); 217 | } 218 | } 219 | 220 | Ok(res) 221 | } 222 | 223 | fn derive_uri(base_path: &Path, path: &Path, rsync_base: &RsyncUri) -> Result<RsyncUri, Error> { 224 | let rel_path = derive_relative_path(base_path, path)?; 225 | Ok(rsync_base.resolve(&rel_path)) 226 | } 227 | 228 | fn derive_relative_path(base_path: &Path, path: &Path) -> Result<String, Error> { 229 | let base_str = base_path.to_string_lossy().to_string(); 230 | let mut path_str = path.to_string_lossy().to_string(); 231 | 232 | if !path_str.starts_with(&base_str) { 233 | Err(Error::OutsideJail(path_str, base_str)) 234 | } else { 235 | let base_len = base_str.len(); 236 | let rel = path_str.split_off(base_len); 237 | Ok(rel) 238 | } 239 | } 240 | 241 | pub fn crawl_disk(base_path: &PathBuf, rsync_base: &RsyncUri) -> Result<Vec<CurrentFile>, Error> { 242 | recurse_disk(base_path, base_path, rsync_base) 243 | } 244 | 245 | /// Cleans up a directory, i.e. it retains any files and/or directories for which the 246 | /// predicate function returns 'true' 247 | pub fn retain_disk
<P>
(base_path: &PathBuf, keep: P) -> Result<(), Error> 248 | where 249 | P: Copy + FnOnce(String) -> bool, 250 | { 251 | for entry in fs::read_dir(base_path).map_err(|_| Error::cannot_read(base_path))? { 252 | let entry = entry.map_err(|_| Error::cannot_read(base_path))?; 253 | let rel = derive_relative_path(base_path, &entry.path())?; 254 | 255 | if !keep(rel) { 256 | let _res = fs::remove_dir_all(entry.path()); 257 | } 258 | } 259 | 260 | Ok(()) 261 | } 262 | 263 | //------------ Error --------------------------------------------------------- 264 | #[derive(Debug, Display)] 265 | pub enum Error { 266 | #[display("Invalid rsync uri")] 267 | InvalidRsyncUri, 268 | 269 | #[display("rsync base uri must start with rsync:// and end with a slash")] 270 | InvalidRsyncBase, 271 | 272 | #[display("https base uri must start with https:// and end with a slash")] 273 | InvalidHttpsBase, 274 | 275 | #[display("Cannot read: {}", _0)] 276 | CannotRead(String), 277 | 278 | #[display("Unsupported characters: {}", _0)] 279 | UnsupportedFileName(String), 280 | 281 | #[display("File: {} outside of jail: {}", _0, _1)] 282 | OutsideJail(String, String), 283 | } 284 | 285 | impl Error { 286 | fn cannot_read(path: &Path) -> Error { 287 | let str = path.to_string_lossy().to_string(); 288 | Error::CannotRead(str) 289 | } 290 | } 291 | 292 | impl std::error::Error for Error {} 293 | 294 | impl From<Error> for io::Error { 295 | fn from(e: Error) -> Self { 296 | io::Error::other(e) 297 | } 298 | } 299 | 300 | //------------ Tests --------------------------------------------------------- 301 | // 302 | #[cfg(test)] 303 | mod tests { 304 | use super::*; 305 | 306 | #[test] 307 | fn should_scan_disk() { 308 | let base_dir = PathBuf::from("./test-resources/"); 309 | let rsync_base = RsyncUri::base_uri("rsync://localhost/repo/").unwrap(); 310 | 311 | let files = crawl_disk(&base_dir, &rsync_base).unwrap(); 312 | 313 | let expected = vec![ 314 | "rsync://localhost/repo/source-1/file1.txt", 315 | "rsync://localhost/repo/source-1/file2.txt", 316 | "rsync://localhost/repo/source-1/file3.txt", 317 | "rsync://localhost/repo/source-2/file1.txt", 318 | "rsync://localhost/repo/source-2/file2.txt", 319 | "rsync://localhost/repo/source-2/file4.txt", 320 | "rsync://localhost/repo/source-3/file1.txt", 321 | "rsync://localhost/repo/source-3/file2.txt", 322 | "rsync://localhost/repo/source-3/file4.txt", 323 | "rsync://localhost/repo/source-3/file5.txt", 324 | ]; 325 | let mut expected: Vec<RsyncUri> = expected.into_iter().map(RsyncUri::from).collect(); 326 | expected.sort(); 327 | 328 | let mut found: Vec<RsyncUri> = files.iter().map(|f| f.uri.clone()).collect(); 329 | found.sort(); 330 | 331 | assert_eq!(expected, found); 332 | } 333 | } 334 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # RRDPIT 2 | 3 | "rrdpit" is a small tool that can be pointed at a directory on your 4 | system, and produce RPKI RRDP (RFC 8182) notification, snapshot, and 5 | delta files. You will need to use an HTTP server of your preferred 6 | flavour to deliver these files to the world. 7 | 8 | WARNING: 9 | ======== 10 | 11 | This tool is designed to be run *in between* publication runs, but not 12 | during. As part of the syncing process the source directory is crawled 13 | recursively. If there are any changes made to the paths during the sync 14 | then this can result in RRDP snapshots and deltas containing inconsistent 15 | repository state.
16 | 17 | This situation would resolve itself once rrdpit runs again while no 18 | changes are being made, but in the meantime it can cause errors and 19 | noise for RPKI validators. 20 | 21 | The best option is therefore to run rrdpit when it is known that no 22 | changes will be made to the source. For example, a non-RRDP-capable 23 | publication server could trigger rrdpit once it has finished writing 24 | content, and hold off writing new content until rrdpit is done (see the sketch after the changelog below). 25 | 26 | Of course, the safest option may still be to use an RRDP-capable RPKI 27 | Publication Server instead, in which case this extra helper tool is not needed. 28 | 29 | ## Changelog 30 | 31 | ### Release 0.1.1 32 | 33 | Added support for Debian Trixie and removed support for Debian Buster. 34 | 35 | Fixed a bug where `clean` would remove all folders. 36 | 37 | ### Release 0.1.0 38 | 39 | Updated all dependencies to their most recent version. 40 | 41 | The command line interface was rewritten. Some arguments may no longer work 42 | the same way as they did before (most notably shorthands such as `-h`). 43 | 44 | The `--rsync` and `--https` arguments have been normalised to always end in a 45 | `/`. 46 | 47 | There are now prebuilt packages for: 48 | - Debian (11/12/13) 49 | - Ubuntu (20.04/22.04/24.04) 50 | - RHEL (8/9/10) 51 | 52 | ### Release 0.0.4 53 | 54 | Updated _ring_ to 0.17. 55 | 56 | ### Release 0.0.3 57 | 58 | Added an option to limit the maximum number of deltas kept, using `--max_deltas`. 59 | Keeping too many deltas results in large RRDP notification files if 60 | the individual deltas are much smaller than the snapshot. This can have 61 | a big impact on the server if many RPs request a large notification file. 62 | 63 | The default limit is set to 25. This value works well if rrdpit runs 64 | every minute, as 25 deltas then cover more than twice the typical RP fetch 65 | interval (10 minutes). If rrdpit runs less frequently, this number 66 | can be lowered. Essentially, one should keep enough deltas so that returning 67 | RPs never need to load the snapshot. 68 | 69 | The minimum value of this setting is 1. 70 | 71 | ### Release 0.0.2 72 | 73 | Hidden files in the source directory are now ignored, i.e. all files 74 | and folders starting with a '.' character are excluded. 75 | 76 | ### Release 0.0.1 77 | 78 | Initial release.
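As a concrete illustration of the warning above and of the `--max_deltas` reasoning in release 0.0.3, here is a minimal wrapper sketch. It is not shipped with rrdpit: the paths and URIs are made up, and the use of cron and `flock` is an assumption about your setup. Coordinating with the publication process, so that the source is not being written to during the sync, remains your responsibility.

```bash
#!/usr/bin/env bash
# Hypothetical wrapper around rrdpit, e.g. run once per minute from cron:
#   * * * * * /usr/local/bin/rrdpit-sync.sh
# flock prevents two rrdpit runs from overlapping; it does not guard against
# the publication process writing to the source at the same time.
set -e
exec flock -n /run/lock/rrdpit.lock /usr/bin/rrdpit \
    --source /srv/publication/repo/ \
    --target /srv/rrdp/public/ \
    --rsync rsync://rpki.example.net/repo/ \
    --https https://rpki.example.net/rrdp/ \
    --max_deltas 25
```

If the publication server can invoke rrdpit itself, right after it finishes writing and before it starts writing again, the cron schedule and the lock are not needed.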
79 | 80 | 81 | ## Installing rrdpit 82 | 83 | ### NLnet Labs repository 84 | 85 | #### Debian / Ubuntu 86 | 87 | First update the `apt` package index: 88 | 89 | ``` bash 90 | sudo apt update 91 | ``` 92 | 93 | Then install packages to allow `apt` to use a repository over HTTPS: 94 | 95 | ``` bash 96 | sudo apt install \ 97 | ca-certificates \ 98 | curl \ 99 | gnupg \ 100 | lsb-release 101 | ``` 102 | 103 | Add the GPG key from NLnet Labs: 104 | 105 | ``` bash 106 | curl -fsSL https://packages.nlnetlabs.nl/aptkey.asc | sudo gpg --dearmor -o /usr/share/keyrings/nlnetlabs-archive-keyring.gpg 107 | ``` 108 | 109 | Now, use the following command to set up the *main* repository: 110 | 111 | ##### Debian 112 | 113 | ``` bash 114 | echo \ 115 | "deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/nlnetlabs-archive-keyring.gpg] https://packages.nlnetlabs.nl/linux/debian \ 116 | $(lsb_release -cs) main" | sudo tee /etc/apt/sources.list.d/nlnetlabs.list /dev/null 117 | ``` 118 | 119 | ##### Ubuntu 120 | 121 | ```bash 122 | echo \ 123 | "deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/nlnetlabs-archive-keyring.gpg] https://packages.nlnetlabs.nl/linux/ubuntu \ 124 | $(lsb_release -cs) main" | sudo tee /etc/apt/sources.list.d/nlnetlabs.list > /dev/null 125 | ``` 126 | 127 | Update the `apt` package index once more: 128 | 129 | ``` bash 130 | sudo apt update 131 | ``` 132 | 133 | You can now install rrdpit with: 134 | 135 | ``` bash 136 | sudo apt install rrdpit 137 | ``` 138 | 139 | 140 | #### RHEL / Rocky Linux / Alma Linux 141 | 142 | First create a file named `/etc/yum.repos.d/nlnetlabs.repo`, enter this 143 | configuration and save it: 144 | 145 | ``` text 146 | [nlnetlabs] 147 | name=NLnet Labs 148 | baseurl=https://packages.nlnetlabs.nl/linux/centos/$releasever/main/$basearch 149 | enabled=1 150 | ``` 151 | 152 | Add the GPG key from NLnet Labs: 153 | 154 | ``` bash 155 | sudo rpm --import https://packages.nlnetlabs.nl/aptkey.asc 156 | ``` 157 | 158 | You can now install rrdpit with: 159 | 160 | ``` bash 161 | sudo yum install -y rrdpit 162 | ``` 163 | 164 | 165 | ### Compile it yourself 166 | 167 | Assuming you have rsync and the C toolchain but not yet Rust, here’s how 168 | you get rust installed. 169 | 170 | ```bash 171 | curl https://sh.rustup.rs -sSf | sh 172 | source ~/.cargo/env 173 | ``` 174 | 175 | If you have an old version of rust installed you may have to update it. 176 | ```bash 177 | rustup update 178 | ``` 179 | 180 | To install 'rrdpit' under your user's home directory, use this: 181 | 182 | ```bash 183 | git clone git@github.com:NLnetLabs/rrdpit.git 184 | cd rrdpit 185 | cargo install 186 | ``` 187 | 188 | If you have an older version of rrdpit installed, you can update via 189 | 190 | ```bash 191 | cargo install -f 192 | ``` 193 | 194 | ## Using rrdpit 195 | 196 | You can ask rrdpit for help. 197 | 198 | ```bash 199 | rrdpit --help 200 | Dist to RPKI RRDP 201 | 202 | Usage: rrdpit [OPTIONS] --source
<dir>
--target --rsync --https [clean] 203 | 204 | Arguments: 205 | [clean] Clean up target dir (handle with care!) 206 | 207 | Options: 208 | --source source directory 209 | --target target directory 210 | --rsync base rsync uri 211 | --https base rrdp uri 212 | --max_deltas Limit the maximum number of deltas kept. Default: 25. Minimum: 1 213 | -h, --help Print help 214 | -V, --version Print version 215 | ``` 216 | 217 | Note that 'clean' is optional. If used rrdpit will try to clean out the target 218 | dir, i.e. it will remove unused session id dirs, and unused version directories 219 | for delta files which are no longer referenced. 220 | 221 | Use this option with care. You do NOT want to use this and accidentally use a 222 | system directory for the `--target` option. Especially if you run this as root, 223 | which would be ill-advised as well. 224 | 225 | ### Examples 226 | 227 | Sync the entire ARIN RPKI repository: 228 | ```bash 229 | 230 | $ mkdir -p tmp/arin 231 | $ cd tmp/arin/ 232 | $ mkdir source 233 | $ rsync -zarvh rsync://rpki.arin.net/repository ./source/ 234 | receiving file list ... done 235 | ./ 236 | arin-rpki-ta.cer 237 | arin-rpki-ta/ 238 | arin-rpki-ta/5e4a23ea-e80a-403e-b08c-2171da2157d3.cer 239 | arin-rpki-ta/arin-rpki-ta.crl 240 | arin-rpki-ta/arin-rpki-ta.mft 241 | arin-rpki-ta/5e4a23ea-e80a-403e-b08c-2171da2157d3/ 242 | ..... 243 | ..... 244 | 245 | sent 158.78K bytes received 11.08M bytes 976.85K bytes/sec 246 | total size is 14.25M speedup is 1.27 247 | ``` 248 | 249 | Now create RRDP files in a target dir: 250 | ```bash 251 | $ mkdir target 252 | $ time rrdpit --https https://rpki.arin.net/rrdp/ \ 253 | --rsync rsync://rpki.arin.net/repository/ \ 254 | --source ./source/ \ 255 | --target ./target/ 256 | 257 | real 0m0.848s 258 | user 0m0.385s 259 | sys 0m0.258s 260 | ``` 261 | 262 | Check that all expected files are there, or well, at least the number: 263 | ```bash 264 | $ find ./source/ -type f | wc -l 265 | 7031 266 | 267 | $ grep uri ./target/8e142e20-236c-4694-8430-b05693fab150/1/snapshot.xml | wc -l 268 | 7031 269 | ``` 270 | 271 | (note that that uuid is a randomly generated session id, used when the target dir is empty) 272 | 273 | ```bash 274 | $ rm source/arin-rpki-ta.cer 275 | $ time rrdpit --https https://rpki.arin.net/rrdp/ \ 276 | --rsync rsync://rpki.arin.net/repository/ \ 277 | --source ./source/ \ 278 | --target ./target/ 279 | 280 | real 0m1.484s 281 | user 0m1.285s 282 | sys 0m0.186s 283 | 284 | $ find target 285 | target 286 | target/8e142e20-236c-4694-8430-b05693fab150 287 | target/8e142e20-236c-4694-8430-b05693fab150/1 288 | target/8e142e20-236c-4694-8430-b05693fab150/1/snapshot.xml 289 | target/8e142e20-236c-4694-8430-b05693fab150/2 290 | target/8e142e20-236c-4694-8430-b05693fab150/2/snapshot.xml 291 | target/8e142e20-236c-4694-8430-b05693fab150/2/delta.xml 292 | target/notification.xml 293 | 294 | $ cat ./target/notification.xml 295 | 296 | 297 | 298 | 299 | 300 | $ cat ./target/8e142e20-236c-4694-8430-b05693fab150/2/delta.xml 301 | 302 | 303 | 304 | ``` 305 | 306 | Note that if you sync again, and there are no changes in the source dir, no deltas will be written: 307 | 308 | ```bash 309 | $ time rrdpit --https https://rpki.arin.net/rrdp/ \ 310 | --rsync rsync://rpki.arin.net/repository/ \ 311 | --source ./source/ \ 312 | --target ./target/ 313 | 314 | real 0m1.495s 315 | user 0m1.292s 316 | sys 0m0.190s 317 | 318 | $ find target 319 | target 320 | target/8e142e20-236c-4694-8430-b05693fab150 321 | 
target/8e142e20-236c-4694-8430-b05693fab150/1 322 | target/8e142e20-236c-4694-8430-b05693fab150/1/snapshot.xml 323 | target/8e142e20-236c-4694-8430-b05693fab150/2 324 | target/8e142e20-236c-4694-8430-b05693fab150/2/snapshot.xml 325 | target/8e142e20-236c-4694-8430-b05693fab150/2/delta.xml 326 | target/notification.xml 327 | ``` 328 | 329 | rrdpit will also perform some sanity checks on the existing RRDP files, and if it finds an issue it will use a new session: 330 | 331 | ```bash 332 | $ find target -type f 333 | target/8e142e20-236c-4694-8430-b05693fab150/1/snapshot.xml 334 | target/8e142e20-236c-4694-8430-b05693fab150/2/snapshot.xml 335 | target/8e142e20-236c-4694-8430-b05693fab150/2/delta.xml 336 | target/notification.xml 337 | 338 | $ echo "corrupt" > target//8e142e20-236c-4694-8430-b05693fab150/2/delta.xml 339 | 340 | 341 | $ rrdpit --https https://rpki.arin.net/rrdp/ \ 342 | --rsync rsync://rpki.arin.net/repository/ \ 343 | --source ./source/ \ 344 | --target ./target/ 345 | 346 | $ find target -type f 347 | target/07cfc1ce-e7d9-4bec-8a70-9feb76778700/1/snapshot.xml 348 | target/8e142e20-236c-4694-8430-b05693fab150/1/snapshot.xml 349 | target/8e142e20-236c-4694-8430-b05693fab150/2/snapshot.xml 350 | target/8e142e20-236c-4694-8430-b05693fab150/2/delta.xml 351 | target/notification.xml 352 | ``` 353 | 354 | Optionally you can let rrdpit clean up old files as well: 355 | ```bash 356 | $ rrdpit --https https://rpki.arin.net/rrdp/ \ 357 | --rsync rsync://rpki.arin.net/repository/ \ 358 | --source ./source/ \ 359 | --target ./target/ \ 360 | clean 361 | 362 | $ find target -type f 363 | target/07cfc1ce-e7d9-4bec-8a70-9feb76778700/1/snapshot.xml 364 | target/notification.xml 365 | ``` 366 | 367 | 368 | 369 | ## Future 370 | 371 | This code can possibly use more testing. And some things can be cleaned up. However, it seems to 372 | work well from the testing we have done. 373 | 374 | Of course you can create issues, but given that our main effort is directed at Krill for the 375 | moment, which includes its own RRDP server, we cannot guarantee that issues will get a high 376 | priority. Pull requests may get more mileage ;) 377 | -------------------------------------------------------------------------------- /Cargo.lock: -------------------------------------------------------------------------------- 1 | # This file is automatically @generated by Cargo. 2 | # It is not intended for manual editing. 
3 | version = 4 4 | 5 | [[package]] 6 | name = "anstream" 7 | version = "0.6.18" 8 | source = "registry+https://github.com/rust-lang/crates.io-index" 9 | checksum = "8acc5369981196006228e28809f761875c0327210a891e941f4c683b3a99529b" 10 | dependencies = [ 11 | "anstyle", 12 | "anstyle-parse", 13 | "anstyle-query", 14 | "anstyle-wincon", 15 | "colorchoice", 16 | "is_terminal_polyfill", 17 | "utf8parse", 18 | ] 19 | 20 | [[package]] 21 | name = "anstyle" 22 | version = "1.0.10" 23 | source = "registry+https://github.com/rust-lang/crates.io-index" 24 | checksum = "55cc3b69f167a1ef2e161439aa98aed94e6028e5f9a59be9a6ffb47aef1651f9" 25 | 26 | [[package]] 27 | name = "anstyle-parse" 28 | version = "0.2.6" 29 | source = "registry+https://github.com/rust-lang/crates.io-index" 30 | checksum = "3b2d16507662817a6a20a9ea92df6652ee4f94f914589377d69f3b21bc5798a9" 31 | dependencies = [ 32 | "utf8parse", 33 | ] 34 | 35 | [[package]] 36 | name = "anstyle-query" 37 | version = "1.1.2" 38 | source = "registry+https://github.com/rust-lang/crates.io-index" 39 | checksum = "79947af37f4177cfead1110013d678905c37501914fba0efea834c3fe9a8d60c" 40 | dependencies = [ 41 | "windows-sys 0.59.0", 42 | ] 43 | 44 | [[package]] 45 | name = "anstyle-wincon" 46 | version = "3.0.8" 47 | source = "registry+https://github.com/rust-lang/crates.io-index" 48 | checksum = "6680de5231bd6ee4c6191b8a1325daa282b415391ec9d3a37bd34f2060dc73fa" 49 | dependencies = [ 50 | "anstyle", 51 | "once_cell_polyfill", 52 | "windows-sys 0.59.0", 53 | ] 54 | 55 | [[package]] 56 | name = "base64" 57 | version = "0.22.1" 58 | source = "registry+https://github.com/rust-lang/crates.io-index" 59 | checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6" 60 | 61 | [[package]] 62 | name = "bitflags" 63 | version = "2.9.1" 64 | source = "registry+https://github.com/rust-lang/crates.io-index" 65 | checksum = "1b8e56985ec62d17e9c1001dc89c88ecd7dc08e47eba5ec7c29c7b5eeecde967" 66 | 67 | [[package]] 68 | name = "bumpalo" 69 | version = "3.17.0" 70 | source = "registry+https://github.com/rust-lang/crates.io-index" 71 | checksum = "1628fb46dfa0b37568d12e5edd512553eccf6a22a78e8bde00bb4aed84d5bdbf" 72 | 73 | [[package]] 74 | name = "bytes" 75 | version = "1.10.1" 76 | source = "registry+https://github.com/rust-lang/crates.io-index" 77 | checksum = "d71b6127be86fdcfddb610f7182ac57211d4b18a3e9c82eb2d17662f2227ad6a" 78 | 79 | [[package]] 80 | name = "cc" 81 | version = "1.2.25" 82 | source = "registry+https://github.com/rust-lang/crates.io-index" 83 | checksum = "d0fc897dc1e865cc67c0e05a836d9d3f1df3cbe442aa4a9473b18e12624a4951" 84 | dependencies = [ 85 | "shlex", 86 | ] 87 | 88 | [[package]] 89 | name = "cfg-if" 90 | version = "1.0.0" 91 | source = "registry+https://github.com/rust-lang/crates.io-index" 92 | checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" 93 | 94 | [[package]] 95 | name = "clap" 96 | version = "4.5.39" 97 | source = "registry+https://github.com/rust-lang/crates.io-index" 98 | checksum = "fd60e63e9be68e5fb56422e397cf9baddded06dae1d2e523401542383bc72a9f" 99 | dependencies = [ 100 | "clap_builder", 101 | ] 102 | 103 | [[package]] 104 | name = "clap_builder" 105 | version = "4.5.39" 106 | source = "registry+https://github.com/rust-lang/crates.io-index" 107 | checksum = "89cc6392a1f72bbeb820d71f32108f61fdaf18bc526e1d23954168a67759ef51" 108 | dependencies = [ 109 | "anstream", 110 | "anstyle", 111 | "clap_lex", 112 | "strsim", 113 | ] 114 | 115 | [[package]] 116 | name = "clap_lex" 117 | version = "0.7.4" 118 | 
source = "registry+https://github.com/rust-lang/crates.io-index" 119 | checksum = "f46ad14479a25103f283c0f10005961cf086d8dc42205bb44c46ac563475dca6" 120 | 121 | [[package]] 122 | name = "colorchoice" 123 | version = "1.0.3" 124 | source = "registry+https://github.com/rust-lang/crates.io-index" 125 | checksum = "5b63caa9aa9397e2d9480a9b13673856c78d8ac123288526c37d7839f2a86990" 126 | 127 | [[package]] 128 | name = "convert_case" 129 | version = "0.7.1" 130 | source = "registry+https://github.com/rust-lang/crates.io-index" 131 | checksum = "bb402b8d4c85569410425650ce3eddc7d698ed96d39a73f941b08fb63082f1e7" 132 | dependencies = [ 133 | "unicode-segmentation", 134 | ] 135 | 136 | [[package]] 137 | name = "derive_more" 138 | version = "2.0.1" 139 | source = "registry+https://github.com/rust-lang/crates.io-index" 140 | checksum = "093242cf7570c207c83073cf82f79706fe7b8317e98620a47d5be7c3d8497678" 141 | dependencies = [ 142 | "derive_more-impl", 143 | ] 144 | 145 | [[package]] 146 | name = "derive_more-impl" 147 | version = "2.0.1" 148 | source = "registry+https://github.com/rust-lang/crates.io-index" 149 | checksum = "bda628edc44c4bb645fbe0f758797143e4e07926f7ebf4e9bdfbd3d2ce621df3" 150 | dependencies = [ 151 | "convert_case", 152 | "proc-macro2", 153 | "quote", 154 | "syn", 155 | "unicode-xid", 156 | ] 157 | 158 | [[package]] 159 | name = "errno" 160 | version = "0.3.13" 161 | source = "registry+https://github.com/rust-lang/crates.io-index" 162 | checksum = "778e2ac28f6c47af28e4907f13ffd1e1ddbd400980a9abd7c8df189bf578a5ad" 163 | dependencies = [ 164 | "libc", 165 | "windows-sys 0.59.0", 166 | ] 167 | 168 | [[package]] 169 | name = "fastrand" 170 | version = "2.3.0" 171 | source = "registry+https://github.com/rust-lang/crates.io-index" 172 | checksum = "37909eebbb50d72f9059c3b6d82c0463f2ff062c9e95845c43a6c9c0355411be" 173 | 174 | [[package]] 175 | name = "getrandom" 176 | version = "0.2.16" 177 | source = "registry+https://github.com/rust-lang/crates.io-index" 178 | checksum = "335ff9f135e4384c8150d6f27c6daed433577f86b4750418338c01a1a2528592" 179 | dependencies = [ 180 | "cfg-if", 181 | "libc", 182 | "wasi 0.11.0+wasi-snapshot-preview1", 183 | ] 184 | 185 | [[package]] 186 | name = "getrandom" 187 | version = "0.3.3" 188 | source = "registry+https://github.com/rust-lang/crates.io-index" 189 | checksum = "26145e563e54f2cadc477553f1ec5ee650b00862f0a58bcd12cbdc5f0ea2d2f4" 190 | dependencies = [ 191 | "cfg-if", 192 | "libc", 193 | "r-efi", 194 | "wasi 0.14.2+wasi-0.2.4", 195 | ] 196 | 197 | [[package]] 198 | name = "hex" 199 | version = "0.4.3" 200 | source = "registry+https://github.com/rust-lang/crates.io-index" 201 | checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70" 202 | 203 | [[package]] 204 | name = "is_terminal_polyfill" 205 | version = "1.70.1" 206 | source = "registry+https://github.com/rust-lang/crates.io-index" 207 | checksum = "7943c866cc5cd64cbc25b2e01621d07fa8eb2a1a23160ee81ce38704e97b8ecf" 208 | 209 | [[package]] 210 | name = "js-sys" 211 | version = "0.3.77" 212 | source = "registry+https://github.com/rust-lang/crates.io-index" 213 | checksum = "1cfaf33c695fc6e08064efbc1f72ec937429614f25eef83af942d0e227c3a28f" 214 | dependencies = [ 215 | "once_cell", 216 | "wasm-bindgen", 217 | ] 218 | 219 | [[package]] 220 | name = "libc" 221 | version = "0.2.172" 222 | source = "registry+https://github.com/rust-lang/crates.io-index" 223 | checksum = "d750af042f7ef4f724306de029d18836c26c1765a54a6a3f094cbd23a7267ffa" 224 | 225 | [[package]] 226 | name = "linux-raw-sys" 227 | 
version = "0.9.4" 228 | source = "registry+https://github.com/rust-lang/crates.io-index" 229 | checksum = "cd945864f07fe9f5371a27ad7b52a172b4b499999f1d97574c9fa68373937e12" 230 | 231 | [[package]] 232 | name = "log" 233 | version = "0.4.27" 234 | source = "registry+https://github.com/rust-lang/crates.io-index" 235 | checksum = "13dc2df351e3202783a1fe0d44375f7295ffb4049267b0f3018346dc122a1d94" 236 | 237 | [[package]] 238 | name = "once_cell" 239 | version = "1.21.3" 240 | source = "registry+https://github.com/rust-lang/crates.io-index" 241 | checksum = "42f5e15c9953c5e4ccceeb2e7382a716482c34515315f7b03532b8b4e8393d2d" 242 | 243 | [[package]] 244 | name = "once_cell_polyfill" 245 | version = "1.70.1" 246 | source = "registry+https://github.com/rust-lang/crates.io-index" 247 | checksum = "a4895175b425cb1f87721b59f0f286c2092bd4af812243672510e1ac53e2e0ad" 248 | 249 | [[package]] 250 | name = "proc-macro2" 251 | version = "1.0.95" 252 | source = "registry+https://github.com/rust-lang/crates.io-index" 253 | checksum = "02b3e5e68a3a1a02aad3ec490a98007cbc13c37cbe84a3cd7b8e406d76e7f778" 254 | dependencies = [ 255 | "unicode-ident", 256 | ] 257 | 258 | [[package]] 259 | name = "quote" 260 | version = "1.0.40" 261 | source = "registry+https://github.com/rust-lang/crates.io-index" 262 | checksum = "1885c039570dc00dcb4ff087a89e185fd56bae234ddc7f056a945bf36467248d" 263 | dependencies = [ 264 | "proc-macro2", 265 | ] 266 | 267 | [[package]] 268 | name = "r-efi" 269 | version = "5.2.0" 270 | source = "registry+https://github.com/rust-lang/crates.io-index" 271 | checksum = "74765f6d916ee2faa39bc8e68e4f3ed8949b48cccdac59983d287a7cb71ce9c5" 272 | 273 | [[package]] 274 | name = "ring" 275 | version = "0.17.14" 276 | source = "registry+https://github.com/rust-lang/crates.io-index" 277 | checksum = "a4689e6c2294d81e88dc6261c768b63bc4fcdb852be6d1352498b114f61383b7" 278 | dependencies = [ 279 | "cc", 280 | "cfg-if", 281 | "getrandom 0.2.16", 282 | "libc", 283 | "untrusted", 284 | "windows-sys 0.52.0", 285 | ] 286 | 287 | [[package]] 288 | name = "rrdpit" 289 | version = "0.1.1" 290 | dependencies = [ 291 | "base64", 292 | "bytes", 293 | "clap", 294 | "derive_more", 295 | "hex", 296 | "ring", 297 | "tempfile", 298 | "uuid", 299 | "xml-rs", 300 | ] 301 | 302 | [[package]] 303 | name = "rustix" 304 | version = "1.0.7" 305 | source = "registry+https://github.com/rust-lang/crates.io-index" 306 | checksum = "c71e83d6afe7ff64890ec6b71d6a69bb8a610ab78ce364b3352876bb4c801266" 307 | dependencies = [ 308 | "bitflags", 309 | "errno", 310 | "libc", 311 | "linux-raw-sys", 312 | "windows-sys 0.59.0", 313 | ] 314 | 315 | [[package]] 316 | name = "rustversion" 317 | version = "1.0.21" 318 | source = "registry+https://github.com/rust-lang/crates.io-index" 319 | checksum = "8a0d197bd2c9dc6e53b84da9556a69ba4cdfab8619eb41a8bd1cc2027a0f6b1d" 320 | 321 | [[package]] 322 | name = "shlex" 323 | version = "1.3.0" 324 | source = "registry+https://github.com/rust-lang/crates.io-index" 325 | checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64" 326 | 327 | [[package]] 328 | name = "strsim" 329 | version = "0.11.1" 330 | source = "registry+https://github.com/rust-lang/crates.io-index" 331 | checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f" 332 | 333 | [[package]] 334 | name = "syn" 335 | version = "2.0.101" 336 | source = "registry+https://github.com/rust-lang/crates.io-index" 337 | checksum = "8ce2b7fc941b3a24138a0a7cf8e858bfc6a992e7978a068a5c760deb0ed43caf" 338 | dependencies = [ 339 | 
"proc-macro2", 340 | "quote", 341 | "unicode-ident", 342 | ] 343 | 344 | [[package]] 345 | name = "tempfile" 346 | version = "3.20.0" 347 | source = "registry+https://github.com/rust-lang/crates.io-index" 348 | checksum = "e8a64e3985349f2441a1a9ef0b853f869006c3855f2cda6862a94d26ebb9d6a1" 349 | dependencies = [ 350 | "fastrand", 351 | "getrandom 0.3.3", 352 | "once_cell", 353 | "rustix", 354 | "windows-sys 0.59.0", 355 | ] 356 | 357 | [[package]] 358 | name = "unicode-ident" 359 | version = "1.0.18" 360 | source = "registry+https://github.com/rust-lang/crates.io-index" 361 | checksum = "5a5f39404a5da50712a4c1eecf25e90dd62b613502b7e925fd4e4d19b5c96512" 362 | 363 | [[package]] 364 | name = "unicode-segmentation" 365 | version = "1.12.0" 366 | source = "registry+https://github.com/rust-lang/crates.io-index" 367 | checksum = "f6ccf251212114b54433ec949fd6a7841275f9ada20dddd2f29e9ceea4501493" 368 | 369 | [[package]] 370 | name = "unicode-xid" 371 | version = "0.2.6" 372 | source = "registry+https://github.com/rust-lang/crates.io-index" 373 | checksum = "ebc1c04c71510c7f702b52b7c350734c9ff1295c464a03335b00bb84fc54f853" 374 | 375 | [[package]] 376 | name = "untrusted" 377 | version = "0.9.0" 378 | source = "registry+https://github.com/rust-lang/crates.io-index" 379 | checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1" 380 | 381 | [[package]] 382 | name = "utf8parse" 383 | version = "0.2.2" 384 | source = "registry+https://github.com/rust-lang/crates.io-index" 385 | checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821" 386 | 387 | [[package]] 388 | name = "uuid" 389 | version = "1.17.0" 390 | source = "registry+https://github.com/rust-lang/crates.io-index" 391 | checksum = "3cf4199d1e5d15ddd86a694e4d0dffa9c323ce759fea589f00fef9d81cc1931d" 392 | dependencies = [ 393 | "getrandom 0.3.3", 394 | "js-sys", 395 | "wasm-bindgen", 396 | ] 397 | 398 | [[package]] 399 | name = "wasi" 400 | version = "0.11.0+wasi-snapshot-preview1" 401 | source = "registry+https://github.com/rust-lang/crates.io-index" 402 | checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" 403 | 404 | [[package]] 405 | name = "wasi" 406 | version = "0.14.2+wasi-0.2.4" 407 | source = "registry+https://github.com/rust-lang/crates.io-index" 408 | checksum = "9683f9a5a998d873c0d21fcbe3c083009670149a8fab228644b8bd36b2c48cb3" 409 | dependencies = [ 410 | "wit-bindgen-rt", 411 | ] 412 | 413 | [[package]] 414 | name = "wasm-bindgen" 415 | version = "0.2.100" 416 | source = "registry+https://github.com/rust-lang/crates.io-index" 417 | checksum = "1edc8929d7499fc4e8f0be2262a241556cfc54a0bea223790e71446f2aab1ef5" 418 | dependencies = [ 419 | "cfg-if", 420 | "once_cell", 421 | "rustversion", 422 | "wasm-bindgen-macro", 423 | ] 424 | 425 | [[package]] 426 | name = "wasm-bindgen-backend" 427 | version = "0.2.100" 428 | source = "registry+https://github.com/rust-lang/crates.io-index" 429 | checksum = "2f0a0651a5c2bc21487bde11ee802ccaf4c51935d0d3d42a6101f98161700bc6" 430 | dependencies = [ 431 | "bumpalo", 432 | "log", 433 | "proc-macro2", 434 | "quote", 435 | "syn", 436 | "wasm-bindgen-shared", 437 | ] 438 | 439 | [[package]] 440 | name = "wasm-bindgen-macro" 441 | version = "0.2.100" 442 | source = "registry+https://github.com/rust-lang/crates.io-index" 443 | checksum = "7fe63fc6d09ed3792bd0897b314f53de8e16568c2b3f7982f468c0bf9bd0b407" 444 | dependencies = [ 445 | "quote", 446 | "wasm-bindgen-macro-support", 447 | ] 448 | 449 | [[package]] 450 | name = "wasm-bindgen-macro-support" 
451 | version = "0.2.100" 452 | source = "registry+https://github.com/rust-lang/crates.io-index" 453 | checksum = "8ae87ea40c9f689fc23f209965b6fb8a99ad69aeeb0231408be24920604395de" 454 | dependencies = [ 455 | "proc-macro2", 456 | "quote", 457 | "syn", 458 | "wasm-bindgen-backend", 459 | "wasm-bindgen-shared", 460 | ] 461 | 462 | [[package]] 463 | name = "wasm-bindgen-shared" 464 | version = "0.2.100" 465 | source = "registry+https://github.com/rust-lang/crates.io-index" 466 | checksum = "1a05d73b933a847d6cccdda8f838a22ff101ad9bf93e33684f39c1f5f0eece3d" 467 | dependencies = [ 468 | "unicode-ident", 469 | ] 470 | 471 | [[package]] 472 | name = "windows-sys" 473 | version = "0.52.0" 474 | source = "registry+https://github.com/rust-lang/crates.io-index" 475 | checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" 476 | dependencies = [ 477 | "windows-targets", 478 | ] 479 | 480 | [[package]] 481 | name = "windows-sys" 482 | version = "0.59.0" 483 | source = "registry+https://github.com/rust-lang/crates.io-index" 484 | checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b" 485 | dependencies = [ 486 | "windows-targets", 487 | ] 488 | 489 | [[package]] 490 | name = "windows-targets" 491 | version = "0.52.6" 492 | source = "registry+https://github.com/rust-lang/crates.io-index" 493 | checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973" 494 | dependencies = [ 495 | "windows_aarch64_gnullvm", 496 | "windows_aarch64_msvc", 497 | "windows_i686_gnu", 498 | "windows_i686_gnullvm", 499 | "windows_i686_msvc", 500 | "windows_x86_64_gnu", 501 | "windows_x86_64_gnullvm", 502 | "windows_x86_64_msvc", 503 | ] 504 | 505 | [[package]] 506 | name = "windows_aarch64_gnullvm" 507 | version = "0.52.6" 508 | source = "registry+https://github.com/rust-lang/crates.io-index" 509 | checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3" 510 | 511 | [[package]] 512 | name = "windows_aarch64_msvc" 513 | version = "0.52.6" 514 | source = "registry+https://github.com/rust-lang/crates.io-index" 515 | checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469" 516 | 517 | [[package]] 518 | name = "windows_i686_gnu" 519 | version = "0.52.6" 520 | source = "registry+https://github.com/rust-lang/crates.io-index" 521 | checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b" 522 | 523 | [[package]] 524 | name = "windows_i686_gnullvm" 525 | version = "0.52.6" 526 | source = "registry+https://github.com/rust-lang/crates.io-index" 527 | checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66" 528 | 529 | [[package]] 530 | name = "windows_i686_msvc" 531 | version = "0.52.6" 532 | source = "registry+https://github.com/rust-lang/crates.io-index" 533 | checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66" 534 | 535 | [[package]] 536 | name = "windows_x86_64_gnu" 537 | version = "0.52.6" 538 | source = "registry+https://github.com/rust-lang/crates.io-index" 539 | checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78" 540 | 541 | [[package]] 542 | name = "windows_x86_64_gnullvm" 543 | version = "0.52.6" 544 | source = "registry+https://github.com/rust-lang/crates.io-index" 545 | checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d" 546 | 547 | [[package]] 548 | name = "windows_x86_64_msvc" 549 | version = "0.52.6" 550 | source = "registry+https://github.com/rust-lang/crates.io-index" 551 | checksum = 
"589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" 552 | 553 | [[package]] 554 | name = "wit-bindgen-rt" 555 | version = "0.39.0" 556 | source = "registry+https://github.com/rust-lang/crates.io-index" 557 | checksum = "6f42320e61fe2cfd34354ecb597f86f413484a798ba44a8ca1165c58d42da6c1" 558 | dependencies = [ 559 | "bitflags", 560 | ] 561 | 562 | [[package]] 563 | name = "xml-rs" 564 | version = "0.8.26" 565 | source = "registry+https://github.com/rust-lang/crates.io-index" 566 | checksum = "a62ce76d9b56901b19a74f19431b0d8b3bc7ca4ad685a746dfd78ca8f4fc6bda" 567 | -------------------------------------------------------------------------------- /src/xml.rs: -------------------------------------------------------------------------------- 1 | //! Support for RPKI XML structures. 2 | use base64::{self, Engine}; 3 | use base64::DecodeError; 4 | use bytes::Bytes; 5 | use hex; 6 | use hex::FromHexError; 7 | use std::fs::File; 8 | use std::path::Path; 9 | use std::{fs, io}; 10 | use xmlrs::attribute::OwnedAttribute; 11 | use xmlrs::reader::XmlEvent; 12 | use xmlrs::{reader, writer}; 13 | use xmlrs::{EmitterConfig, EventReader, EventWriter, ParserConfig}; 14 | 15 | //------------ XmlReader ----------------------------------------------------- 16 | 17 | /// A convenience wrapper for RPKI XML parsing 18 | /// 19 | /// This type only exposes things we need for the RPKI XML structures. 20 | pub struct XmlReader { 21 | /// The underlying xml-rs reader 22 | reader: EventReader, 23 | 24 | /// Placeholder for an event so that 'peak' can be supported, as 25 | /// well as temporarily caching a close event in case a list of 26 | /// inner elements is processed. 27 | cached_event: Option, 28 | 29 | /// Name of the next start element, if any 30 | next_start_name: Option, 31 | } 32 | 33 | /// Reader methods 34 | impl XmlReader { 35 | /// Gets the next XmlEvent 36 | /// 37 | /// Will take cached event if there is one 38 | fn next(&mut self) -> Result { 39 | match self.cached_event.take() { 40 | Some(e) => Ok(e), 41 | None => Ok(self.reader.next()?), 42 | } 43 | } 44 | 45 | /// Puts an XmlEvent back so that it can be retrieved by 'next' 46 | fn cache(&mut self, e: XmlEvent) { 47 | self.cached_event = Some(e); 48 | } 49 | } 50 | 51 | /// Basic operations to parse the XML. 52 | /// 53 | /// These methods are private because they are used by the higher level 54 | /// closure based methods, defined below, that one should use to parse 55 | /// XML safely. 56 | impl XmlReader { 57 | /// Takes the next element and expects a start of document. 58 | fn start_document(&mut self) -> Result<(), XmlReaderErr> { 59 | match self.next() { 60 | Ok(reader::XmlEvent::StartDocument { .. }) => Ok(()), 61 | _ => Err(XmlReaderErr::ExpectedStartDocument), 62 | } 63 | } 64 | 65 | /// Takes the next element and expects a start element with the given name. 66 | fn expect_element(&mut self) -> Result<(Tag, Attributes), XmlReaderErr> { 67 | match self.next() { 68 | Ok(reader::XmlEvent::StartElement { 69 | name, attributes, .. 70 | }) => Ok(( 71 | Tag { 72 | name: name.local_name, 73 | }, 74 | Attributes { attributes }, 75 | )), 76 | _ => Err(XmlReaderErr::ExpectedStart), 77 | } 78 | } 79 | 80 | /// Takes the next element and expects a close element with the given name. 81 | fn expect_close(&mut self, tag: Tag) -> Result<(), XmlReaderErr> { 82 | match self.next() { 83 | Ok(reader::XmlEvent::EndElement { name, .. 
}) => { 84 | if name.local_name == tag.name { 85 | Ok(()) 86 | } else { 87 | Err(XmlReaderErr::ExpectedClose(tag.name)) 88 | } 89 | } 90 | _ => Err(XmlReaderErr::ExpectedClose(tag.name)), 91 | } 92 | } 93 | 94 | /// Takes the next element and expects the end of document. 95 | /// 96 | /// Returns Ok(true) if the element is the end of document, or 97 | /// an error otherwise. 98 | fn end_document(&mut self) -> Result<(), XmlReaderErr> { 99 | match self.next() { 100 | Ok(reader::XmlEvent::EndDocument) => Ok(()), 101 | _ => Err(XmlReaderErr::ExpectedEnd), 102 | } 103 | } 104 | } 105 | 106 | /// Closure based parsing of XML. 107 | /// 108 | /// This approach ensures that the consumer can only get opening tags, or 109 | /// content (such as Characters), and process the enclosed content. In 110 | /// particular it ensures that the consumer cannot accidentally get close 111 | /// tags - so it forces that execution returns. 112 | impl XmlReader { 113 | /// Decodes an XML structure 114 | /// 115 | /// This method checks that the document starts, then passes a reader 116 | /// instance to the provided closure, and will return the result from 117 | /// that after checking that the XML document is fully processed. 118 | pub fn decode(source: R, op: F) -> Result 119 | where 120 | F: FnOnce(&mut Self) -> Result, 121 | E: From, 122 | { 123 | let mut config = ParserConfig::new(); 124 | config.trim_whitespace = true; 125 | config.ignore_comments = true; 126 | 127 | let mut xml = XmlReader { 128 | reader: config.create_reader(source), 129 | cached_event: None, 130 | next_start_name: None, 131 | }; 132 | 133 | xml.start_document()?; 134 | let res = op(&mut xml)?; 135 | xml.end_document()?; 136 | 137 | Ok(res) 138 | } 139 | 140 | /// Takes an element and process it in a closure 141 | /// 142 | /// This method checks that the next element is indeed a Start Element, 143 | /// and passes the Tag and Attributes and this reader to a closure. After 144 | /// the closure completes it will verify that the next element is the 145 | /// Close Element for this Tag, and returns the result from the closure. 146 | pub fn take_element(&mut self, op: F) -> Result 147 | where 148 | F: FnOnce(&Tag, Attributes, &mut Self) -> Result, 149 | E: From, 150 | { 151 | let (tag, attr) = self.expect_element()?; 152 | let res = op(&tag, attr, self)?; 153 | self.expect_close(tag)?; 154 | Ok(res) 155 | } 156 | 157 | /// Takes a named element and process it in a closure 158 | /// 159 | /// Checks that the element has the expected name and passed the closure 160 | /// to the generic take_element method. 161 | pub fn take_named_element(&mut self, name: &str, op: F) -> Result 162 | where 163 | F: FnOnce(Attributes, &mut Self) -> Result, 164 | E: From, 165 | { 166 | self.take_element(|t, a, r| { 167 | if t.name != name { 168 | Err(XmlReaderErr::ExpectedNamedStart(name.to_string()).into()) 169 | } else { 170 | op(a, r) 171 | } 172 | }) 173 | } 174 | 175 | /// Takes the next element that is part of a list of elements under the 176 | /// current element, and processes it using a closure. When the end of the 177 | /// list is encountered, i.e. the next element is not a start element, then 178 | /// the closure is not executed and Ok(None) is returned. The element is 179 | /// put back on the cache for processing by the parent structure. 180 | /// 181 | /// Note: This will break if we encounter a parent XML element that has 182 | /// both a list of children XML elements *and* some (character) content. 
183 | /// However, this is not used by the RPKI XML structures. Also, provided 184 | /// that a 'take_*' method with a closure was used for the parent element, 185 | /// then we will get a clear error there (expect end element). 186 | pub fn take_opt_element(&mut self, op: F) -> Result, E> 187 | where 188 | F: FnOnce(&Tag, Attributes, &mut Self) -> Result, E>, 189 | E: From, 190 | { 191 | let n = self.next()?; 192 | match n { 193 | XmlEvent::StartElement { 194 | name, attributes, .. 195 | } => { 196 | let tag = Tag { 197 | name: name.local_name, 198 | }; 199 | let res = op(&tag, Attributes { attributes }, self)?; 200 | self.expect_close(tag)?; 201 | Ok(res) 202 | } 203 | _ => { 204 | self.cache(n); 205 | Ok(None) 206 | } 207 | } 208 | } 209 | 210 | /// Takes characters 211 | pub fn take_chars(&mut self) -> Result { 212 | match self.next() { 213 | Ok(reader::XmlEvent::Characters(chars)) => Ok(chars), 214 | _ => Err(XmlReaderErr::ExpectedCharacters), 215 | } 216 | } 217 | 218 | /// Takes base64 encoded bytes from the next 'characters' event. 219 | pub fn take_bytes_std(&mut self) -> Result { 220 | self.take_bytes(base64::engine::general_purpose::STANDARD_NO_PAD) 221 | } 222 | 223 | /// Takes base64 encoded bytes from the next 'characters' event. 224 | pub fn take_bytes_url_safe_pad(&mut self) -> Result { 225 | self.take_bytes(base64::engine::general_purpose::URL_SAFE_NO_PAD) 226 | } 227 | 228 | fn take_bytes(&mut self, config: base64::engine::GeneralPurpose) -> Result { 229 | let chars = self.take_chars()?; 230 | // strip whitespace and padding (we are liberal in what we accept here) 231 | // TODO: Avoid allocation, pass in an AsRef<[u8]> that 232 | // removes any whitespace on the fly. 233 | let chars: Vec = chars 234 | .into_bytes() 235 | .into_iter() 236 | .filter(|c| !b" \n\t\r\x0b\x0c=".contains(c)) 237 | .collect(); 238 | 239 | let b64 = config.decode(&chars)?; 240 | Ok(Bytes::from(b64)) 241 | } 242 | 243 | pub fn take_empty(&mut self) -> Result<(), XmlReaderErr> { 244 | Ok(()) 245 | } 246 | 247 | /// Returns the name of the next start element or None if the next 248 | /// element is not a start element. Also ensures that the next element 249 | /// is kept in the cache for normal subsequent processing. 250 | pub fn next_start_name(&mut self) -> Option<&str> { 251 | match self.next() { 252 | Err(_) => None, 253 | Ok(e) => { 254 | if let XmlEvent::StartElement { ref name, .. } = e { 255 | // XXX not the most efficient.. but need a different 256 | // underlying XML parser to get around ownership 257 | // issues. 258 | self.next_start_name = Some(name.local_name.clone()) 259 | } else { 260 | self.next_start_name = None; 261 | } 262 | self.cache(e); 263 | self.next_start_name.as_ref().map(AsRef::as_ref) 264 | } 265 | } 266 | } 267 | } 268 | 269 | impl XmlReader { 270 | /// Opens a file and decodes it as an XML file. 
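// Illustration (added; not part of the original source): the closure-based
// reader above is used elsewhere in this crate roughly as follows, e.g. to
// pull the "serial" attribute off a minimal element. `path` is a placeholder
// and errors are reduced to XmlReaderErr:
//
//     let serial: String = XmlReader::open(path, |r| {
//         r.take_named_element("notification", |mut a, _r| {
//             a.take_req("serial").map_err(XmlReaderErr::from)
//         })
//     })?;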
271 | pub fn open(path: P, op: F) -> Result 272 | where 273 | F: FnOnce(&mut Self) -> Result, 274 | P: AsRef, 275 | E: From + From, 276 | { 277 | Self::decode(fs::File::open(path)?, op) 278 | } 279 | } 280 | 281 | //------------ XmlReaderErr -------------------------------------------------- 282 | 283 | #[derive(Debug, Display)] 284 | pub enum XmlReaderErr { 285 | #[display("Expected Start of Document")] 286 | ExpectedStartDocument, 287 | 288 | #[display("Expected Start Element")] 289 | ExpectedStart, 290 | 291 | #[display("Expected Start Element with name: {}", _0)] 292 | ExpectedNamedStart(String), 293 | 294 | #[display("Expected Characters Element")] 295 | ExpectedCharacters, 296 | 297 | #[display("Expected Close Element with name: {}", _0)] 298 | ExpectedClose(String), 299 | 300 | #[display("Expected End of Document")] 301 | ExpectedEnd, 302 | 303 | #[display("Error reading file: {}", _0)] 304 | IoError(io::Error), 305 | 306 | #[display("Attributes Error: {}", _0)] 307 | AttributesError(AttributesError), 308 | 309 | #[display("XML Reader Error: {}", _0)] 310 | ReaderError(reader::Error), 311 | 312 | #[display("Base64 decoding issue: {}", _0)] 313 | Base64Error(DecodeError), 314 | } 315 | 316 | impl From for XmlReaderErr { 317 | fn from(e: io::Error) -> XmlReaderErr { 318 | XmlReaderErr::IoError(e) 319 | } 320 | } 321 | 322 | impl From for XmlReaderErr { 323 | fn from(e: AttributesError) -> XmlReaderErr { 324 | XmlReaderErr::AttributesError(e) 325 | } 326 | } 327 | 328 | impl From for XmlReaderErr { 329 | fn from(e: reader::Error) -> XmlReaderErr { 330 | XmlReaderErr::ReaderError(e) 331 | } 332 | } 333 | 334 | impl From for XmlReaderErr { 335 | fn from(e: DecodeError) -> XmlReaderErr { 336 | XmlReaderErr::Base64Error(e) 337 | } 338 | } 339 | 340 | //------------ Attributes ---------------------------------------------------- 341 | 342 | /// A convenient wrapper for XML tag attributes 343 | pub struct Attributes { 344 | /// The underlying xml-rs structure 345 | attributes: Vec, 346 | } 347 | 348 | impl Attributes { 349 | /// Takes an optional attribute by name 350 | pub fn take_opt(&mut self, name: &str) -> Option { 351 | let i = self 352 | .attributes 353 | .iter() 354 | .position(|a| a.name.local_name == name); 355 | match i { 356 | Some(i) => { 357 | let a = self.attributes.swap_remove(i); 358 | Some(a.value) 359 | } 360 | None => None, 361 | } 362 | } 363 | 364 | /// Takes an optional hexencoded attribute and converts it to Bytes 365 | pub fn take_opt_hex(&mut self, name: &str) -> Option { 366 | self.take_req_hex(name).ok() 367 | } 368 | 369 | /// Takes a required attribute by name 370 | pub fn take_req(&mut self, name: &str) -> Result { 371 | self.take_opt(name) 372 | .ok_or_else(|| AttributesError::MissingAttribute(name.to_string())) 373 | } 374 | 375 | /// Takes a required hexencoded attribute and converts it to Bytes 376 | pub fn take_req_hex(&mut self, name: &str) -> Result { 377 | match hex::decode(self.take_req(name)?) 
{ 378 | Err(e) => Err(AttributesError::HexError(e)), 379 | Ok(b) => Ok(Bytes::from(b)), 380 | } 381 | } 382 | 383 | /// Verifies that there are no more attributes 384 | pub fn exhausted(&self) -> Result<(), AttributesError> { 385 | if self.attributes.is_empty() { 386 | Ok(()) 387 | } else { 388 | Err(AttributesError::extras(&self.attributes)) 389 | } 390 | } 391 | } 392 | 393 | //------------ AttributesError ----------------------------------------------- 394 | 395 | #[derive(Debug, Display)] 396 | pub enum AttributesError { 397 | #[display("Required attribute missing: {}", _0)] 398 | MissingAttribute(String), 399 | 400 | #[display("Extra attributes found: {}", _0)] 401 | ExtraAttributes(String), 402 | 403 | #[display("Wrong hex encoding: {}", _0)] 404 | HexError(FromHexError), 405 | } 406 | 407 | impl AttributesError { 408 | fn extras(atts: &[OwnedAttribute]) -> Self { 409 | let atts: Vec = atts.iter().map(|a| format!("{a}")).collect(); 410 | let atts = atts.join(", "); 411 | AttributesError::ExtraAttributes(atts) 412 | } 413 | } 414 | 415 | //------------ Tag ----------------------------------------------------------- 416 | 417 | pub struct Tag { 418 | pub name: String, 419 | } 420 | 421 | //------------ XmlWriter ----------------------------------------------------- 422 | 423 | /// A convenience wrapper for RPKI XML generation 424 | /// 425 | /// This type only exposes things we need for the RPKI XML structures. 426 | pub struct XmlWriter { 427 | /// The underlying xml-rs writer 428 | writer: EventWriter, 429 | } 430 | 431 | /// Generate the XML. 432 | impl XmlWriter { 433 | fn unwrap_emitter_error(r: Result) -> Result { 434 | match r { 435 | Ok(t) => Ok(t), 436 | Err(e) => { 437 | match e { 438 | writer::Error::Io(io) => Err(io), 439 | _ => { 440 | // The other errors can only happen for stuff like 441 | // not closing tags, starting a doc twice etc. But 442 | // the XmlWriter lib already ensures that these things 443 | // do not happen. They are not dependent on input. 444 | panic!("XmlWriter library error: {e:?}") 445 | } 446 | } 447 | } 448 | } 449 | } 450 | 451 | /// Adds an element 452 | pub fn put_element( 453 | &mut self, 454 | name: &str, 455 | attr: Option<&[(&str, &str)]>, 456 | op: F, 457 | ) -> Result<(), io::Error> 458 | where 459 | F: FnOnce(&mut Self) -> Result<(), io::Error>, 460 | { 461 | let mut start = writer::XmlEvent::start_element(name); 462 | 463 | if let Some(v) = attr { 464 | for a in v { 465 | start = start.attr(a.0, a.1); 466 | } 467 | } 468 | 469 | Self::unwrap_emitter_error(self.writer.write(start))?; 470 | op(self)?; 471 | Self::unwrap_emitter_error(self.writer.write(writer::XmlEvent::end_element()))?; 472 | 473 | Ok(()) 474 | } 475 | 476 | /// Puts some String in a characters element 477 | pub fn put_text(&mut self, text: &str) -> Result<(), io::Error> { 478 | Self::unwrap_emitter_error(self.writer.write(writer::XmlEvent::Characters(text)))?; 479 | Ok(()) 480 | } 481 | 482 | /// Converts bytes to base64 encoded Characters as the content, using the 483 | /// Standard character set, without padding. 484 | pub fn put_base64_std(&mut self, bytes: &Bytes) -> Result<(), io::Error> { 485 | let b64 = base64::engine::general_purpose::STANDARD.encode(bytes); 486 | self.put_text(b64.as_ref()) 487 | } 488 | 489 | /// Converts bytes to base64 encoded Characters as the content, using the 490 | /// URL safe character set and padding. 
491 | pub fn put_base64_url_safe(&mut self, bytes: &[u8]) -> Result<(), io::Error> { 492 | let b64 = base64::engine::general_purpose::URL_SAFE.encode(bytes); 493 | self.put_text(b64.as_ref()) 494 | } 495 | 496 | /// Use this for convenience where empty content is required 497 | pub fn empty(&mut self) -> Result<(), io::Error> { 498 | Ok(()) 499 | } 500 | 501 | /// Sets up the writer config and returns a closure that is expected 502 | /// to add the actual content of the XML. 503 | /// 504 | /// This method is private because one should use the pub encode_vec 505 | /// method, and in future others like it, to set up the writer for a 506 | /// specific type (Vec, File, etc.). 507 | fn encode(w: W, op: F) -> Result<(), io::Error> 508 | where 509 | F: FnOnce(&mut Self) -> Result<(), io::Error>, 510 | { 511 | let writer = EmitterConfig::new() 512 | .write_document_declaration(false) 513 | .normalize_empty_elements(true) 514 | .perform_indent(true) 515 | .create_writer(w); 516 | 517 | let mut x = XmlWriter { writer }; 518 | 519 | op(&mut x) 520 | } 521 | } 522 | 523 | impl XmlWriter<()> { 524 | /// Call this to encode XML into a Vec 525 | pub fn encode_vec(op: F) -> Vec 526 | where 527 | F: FnOnce(&mut XmlWriter<&mut Vec>) -> Result<(), io::Error>, 528 | { 529 | let mut b = Vec::new(); 530 | XmlWriter::encode(&mut b, op).unwrap(); // IO error impossible for vec 531 | b 532 | } 533 | 534 | pub fn encode_to_file(file: &mut File, op: F) -> Result<(), io::Error> 535 | where 536 | F: FnOnce(&mut XmlWriter<&mut File>) -> Result<(), io::Error>, 537 | { 538 | XmlWriter::encode(file, op) 539 | } 540 | } 541 | 542 | //------------ Tests --------------------------------------------------------- 543 | 544 | #[cfg(test)] 545 | mod tests { 546 | 547 | use super::*; 548 | use std::str; 549 | 550 | #[test] 551 | fn should_write_xml() { 552 | let xml = XmlWriter::encode_vec(|w| { 553 | w.put_element("a", Some(&[("xmlns", "http://ns/"), ("c", "d")]), |w| { 554 | w.put_element("b", None, |w| w.put_base64_std(&Bytes::from("X"))) 555 | }) 556 | }); 557 | 558 | assert_eq!( 559 | str::from_utf8(&xml).unwrap(), 560 | "\n WA==\n" 561 | ); 562 | } 563 | } 564 | -------------------------------------------------------------------------------- /src/rrdp.rs: -------------------------------------------------------------------------------- 1 | //! Data objects used in the (RRDP) repository. I.e. the publish, update, and 2 | //! withdraw elements, as well as the notification, snapshot and delta file 3 | //! definitions. 4 | use std::collections::{HashMap, VecDeque}; 5 | use std::num::ParseIntError; 6 | use std::path::PathBuf; 7 | use std::str::FromStr; 8 | use std::{fmt, io}; 9 | 10 | use base64::Engine; 11 | use bytes::Bytes; 12 | use uuid::Uuid; 13 | 14 | use crate::sync::{self, Base64, CurrentFile, EncodedHash, HttpsUri, RsyncUri}; 15 | use crate::xml::{AttributesError, XmlReader, XmlReaderErr, XmlWriter}; 16 | 17 | const VERSION: &str = "1"; 18 | const NS: &str = "http://www.ripe.net/rpki/rrdp"; 19 | 20 | //------------ PublishElement ------------------------------------------------ 21 | 22 | /// The publishes as used in the RRDP protocol. 23 | /// 24 | /// Note that the difference with the publication protocol is the absence of 25 | /// the tag. 
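// Added note (illustration, not in the original source): the three element
// types defined below are produced by Snapshot::to() further down in this
// file when two consecutive snapshots are compared:
//   - an object only present in the new snapshot          -> PublishElement
//   - an object present in both, but with changed content -> UpdateElement
//     (carries the hash of the object being replaced)
//   - an object only present in the old snapshot          -> WithdrawElement
//     (carries the hash of the withdrawn object)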
26 | #[derive(Clone, Debug, Eq, PartialEq)] 27 | pub struct PublishElement { 28 | base64: Base64, 29 | uri: RsyncUri, 30 | } 31 | 32 | impl PublishElement { 33 | pub fn new(base64: Base64, uri: RsyncUri) -> Self { 34 | PublishElement { base64, uri } 35 | } 36 | 37 | pub fn base64(&self) -> &Base64 { 38 | &self.base64 39 | } 40 | pub fn uri(&self) -> &RsyncUri { 41 | &self.uri 42 | } 43 | } 44 | 45 | //------------ UpdateElement ------------------------------------------------- 46 | 47 | /// The updates as used in the RRDP protocol. 48 | /// 49 | /// Note that the difference with the publication protocol is the absence of 50 | /// the tag. 51 | #[derive(Clone, Debug, Eq, PartialEq)] 52 | pub struct UpdateElement { 53 | uri: RsyncUri, 54 | hash: EncodedHash, 55 | base64: Base64, 56 | } 57 | 58 | impl UpdateElement { 59 | pub fn uri(&self) -> &RsyncUri { 60 | &self.uri 61 | } 62 | pub fn hash(&self) -> &EncodedHash { 63 | &self.hash 64 | } 65 | pub fn base64(&self) -> &Base64 { 66 | &self.base64 67 | } 68 | } 69 | 70 | //------------ WithdrawElement ----------------------------------------------- 71 | 72 | /// The withdraws as used in the RRDP protocol. 73 | /// 74 | /// Note that the difference with the publication protocol is the absence of 75 | /// the tag. 76 | #[derive(Clone, Debug, Eq, PartialEq)] 77 | pub struct WithdrawElement { 78 | uri: RsyncUri, 79 | hash: EncodedHash, 80 | } 81 | 82 | impl WithdrawElement { 83 | pub fn uri(&self) -> &RsyncUri { 84 | &self.uri 85 | } 86 | pub fn hash(&self) -> &EncodedHash { 87 | &self.hash 88 | } 89 | } 90 | 91 | //------------ Notification -------------------------------------------------- 92 | 93 | #[derive(Clone, Debug)] 94 | pub struct Notification { 95 | session: Uuid, 96 | serial: u64, 97 | snapshot: SnapshotRef, 98 | deltas: VecDeque, 99 | } 100 | 101 | impl Notification { 102 | pub fn new( 103 | session: Uuid, 104 | serial: u64, 105 | snapshot: SnapshotRef, 106 | deltas: VecDeque, 107 | ) -> Self { 108 | Notification { 109 | session, 110 | serial, 111 | snapshot, 112 | deltas, 113 | } 114 | } 115 | 116 | pub fn write_xml(&self) -> Bytes { 117 | Bytes::from(XmlWriter::encode_vec(|w| { 118 | let a = [ 119 | ("xmlns", NS), 120 | ("version", VERSION), 121 | ("session_id", &format!("{}", self.session)), 122 | ("serial", &format!("{}", self.serial)), 123 | ]; 124 | 125 | w.put_element("notification", Some(&a), |w| { 126 | { 127 | // snapshot ref 128 | let uri = self.snapshot.uri.to_string(); 129 | let hash = self.snapshot.hash.to_string(); 130 | let a = [("uri", uri.as_str()), ("hash", hash.as_str())]; 131 | w.put_element("snapshot", Some(&a), |w| w.empty())?; 132 | } 133 | 134 | { 135 | // delta refs 136 | for delta in &self.deltas { 137 | let serial = format!("{}", delta.serial); 138 | let uri = delta.file_ref.uri.to_string(); 139 | let hash = delta.file_ref.hash.to_string(); 140 | let a = [ 141 | ("serial", serial.as_ref()), 142 | ("uri", uri.as_str()), 143 | ("hash", hash.as_str()), 144 | ]; 145 | w.put_element("delta", Some(&a), |w| w.empty())?; 146 | } 147 | } 148 | 149 | Ok(()) 150 | }) 151 | })) 152 | } 153 | } 154 | 155 | //------------ RepoState ------------------------------------------------------ 156 | 157 | /// This type defines the state of the RRDP repository. It can be saved to disk 158 | /// to save the new notification file, snapshot and delta. It can also purge any 159 | /// deprecated delta files and/or files for deprecated sessions. 
160 | /// 161 | /// It can be reconstituted by reading the current state from disk starting with 162 | /// a notification file, and ensuring that the included snapshot and deltas all 163 | /// exist and are not tempered with. 164 | /// 165 | /// In case the current state cannot be reconstituted this way, a new RepoState, 166 | /// using a new session id will be used. 167 | #[derive(Clone, Debug, Eq, PartialEq)] 168 | pub struct RepoState { 169 | session: Uuid, 170 | serial: u64, 171 | snapshot: Snapshot, 172 | new_delta: Option, 173 | deltas: VecDeque, 174 | base_uri: HttpsUri, 175 | base_dir: PathBuf, 176 | } 177 | 178 | /// # Data Access 179 | /// 180 | impl RepoState { 181 | pub fn session(&self) -> Uuid { 182 | self.session 183 | } 184 | pub fn serial(&self) -> u64 { 185 | self.serial 186 | } 187 | } 188 | 189 | impl RepoState { 190 | /// Creates a new repo state, with a new session id, and serial starting at 1. 191 | pub fn new(snapshot: Snapshot, base_uri: HttpsUri, base_dir: PathBuf) -> Self { 192 | let session = snapshot.session; 193 | let serial = 1; 194 | 195 | let new_delta = None; 196 | let deltas = VecDeque::new(); 197 | 198 | RepoState { 199 | session, 200 | serial, 201 | snapshot, 202 | new_delta, 203 | deltas, 204 | base_uri, 205 | base_dir, 206 | } 207 | } 208 | 209 | /// Saves a notification file, the snapshot, and the optional new delta to disk. 210 | /// 211 | /// If clean is true, this will also delete old sessions and delta/snapshot dirs for 212 | /// old versions which are no longer referenced in the notification file. 213 | pub fn save(mut self, max_deltas: usize, clean: bool) -> Result<(), io::Error> { 214 | let serial = self.serial; 215 | let session = self.session; 216 | 217 | // Save new snapshot 218 | let snapshot_xml = self.snapshot.write_xml(); 219 | let snapshot_ref = SnapshotRef::new(self.snapshot_uri(serial), &snapshot_xml); 220 | let snapshot_path = self.snapshot_path(serial); 221 | sync::save(snapshot_xml.as_ref(), &snapshot_path)?; 222 | 223 | // If there is a new delta, save it and add it to top of the list of delta references 224 | if let Some(delta) = &self.new_delta { 225 | let delta_xml = delta.write_xml(); 226 | let delta_file_ref = FileRef::new(self.delta_uri(serial), &delta_xml); 227 | let delta_ref = DeltaRef::new(serial, delta_file_ref); 228 | let delta_path = self.delta_path(serial); 229 | 230 | sync::save(delta_xml.as_ref(), &delta_path)?; 231 | self.deltas.push_front(delta_ref); 232 | } 233 | 234 | // First purge deltas in excess of snapshot size 235 | let snapshot_size = snapshot_ref.size(); 236 | let mut deltas_size = 0; 237 | self.deltas.retain(|d| { 238 | let add = snapshot_size > deltas_size; 239 | deltas_size += d.size(); 240 | add 241 | }); 242 | 243 | // Truncate any deltas that exceed the max_deltas number 244 | self.deltas.truncate(max_deltas); 245 | 246 | let last_serial = self.deltas.back().map(|d| d.serial); 247 | 248 | let notification_path = self.notification_path(); 249 | let notification = Notification::new(self.session, self.serial, snapshot_ref, self.deltas); 250 | let notification_xml = notification.write_xml(); 251 | 252 | sync::save(notification_xml.as_ref(), ¬ification_path)?; 253 | 254 | if clean { 255 | // Clean up disk: unused session uuid dirs and unused delta dirs 256 | let session_str = session.to_string(); 257 | sync::retain_disk(&self.base_dir, 258 | |name| name.contains(&session_str))?; 259 | 260 | if let Some(last_serial) = last_serial { 261 | let session_dir = self.base_dir.join(format!("{}/", 
self.session)); 262 | sync::retain_disk(&session_dir, |name| { 263 | if let Ok(dir_serial) = u64::from_str(&name) { 264 | dir_serial >= last_serial 265 | } else { 266 | eprintln!("Found dir: {}", &name); 267 | true // keep any other things the user might have added 268 | } 269 | })?; 270 | } 271 | } 272 | 273 | Ok(()) 274 | } 275 | 276 | fn notification_path(&self) -> PathBuf { 277 | self.base_dir.join(PathBuf::from("notification.xml")) 278 | } 279 | 280 | fn snapshot_uri(&self, serial: u64) -> HttpsUri { 281 | self.base_uri.resolve(&self.snapshot_rel(serial)) 282 | } 283 | 284 | fn snapshot_path(&self, serial: u64) -> PathBuf { 285 | self.base_dir.join(PathBuf::from(self.snapshot_rel(serial))) 286 | } 287 | 288 | fn snapshot_rel(&self, serial: u64) -> String { 289 | format!("{}/{}/snapshot.xml", &self.session, serial) 290 | } 291 | 292 | fn delta_uri(&self, serial: u64) -> HttpsUri { 293 | self.base_uri.resolve(&self.delta_rel(serial)) 294 | } 295 | 296 | fn delta_path(&self, serial: u64) -> PathBuf { 297 | self.base_dir.join(PathBuf::from(self.delta_rel(serial))) 298 | } 299 | 300 | fn delta_rel(&self, serial: u64) -> String { 301 | format!("{}/{}/delta.xml", &self.session, serial) 302 | } 303 | 304 | pub fn reconstitute(base_uri: HttpsUri, base_dir: PathBuf) -> Result { 305 | let notification_path = base_dir.join("notification.xml"); 306 | let notification = sync::read(¬ification_path).map_err(|_| Error::InvalidRepoState)?; 307 | 308 | XmlReader::decode(notification.as_ref(), |r| { 309 | r.take_named_element("notification", |mut a, r| { 310 | let version = a.take_req("version")?; 311 | if version != "1" { 312 | return Err(Error::InvalidRepoState); 313 | } 314 | 315 | let session = a.take_req("session_id")?; 316 | let session = Uuid::parse_str(&session)?; 317 | 318 | let serial = a.take_req("serial")?; 319 | let serial = u64::from_str(&serial)?; 320 | 321 | a.exhausted().map_err(Error::invalid_xml)?; 322 | 323 | let snapshot = r.take_named_element("snapshot", |mut a, _r| { 324 | let uri = a.take_req("uri")?; 325 | let hash = a.take_req("hash")?; 326 | a.exhausted()?; 327 | 328 | let snapshot_rel = base_uri.relative_to(uri).ok_or(Error::InvalidRepoState)?; 329 | let snapshot_path = base_dir.join(snapshot_rel); 330 | let snapshot = 331 | sync::read(&snapshot_path).map_err(|_| Error::InvalidRepoState)?; 332 | 333 | let snapshot_hash = EncodedHash::from_content(snapshot.as_ref()); 334 | 335 | if snapshot_hash.to_string() != hash { 336 | return Err(Error::InvalidRepoState); 337 | } 338 | 339 | Snapshot::from_xml(snapshot) 340 | })?; 341 | 342 | let new_delta = None; 343 | 344 | let mut deltas = VecDeque::new(); 345 | 346 | while let Some(delta) = 347 | r.take_opt_element(|t, mut a, _r| match t.name.as_ref() { 348 | "delta" => { 349 | let serial = a.take_req("serial")?; 350 | let serial = u64::from_str(&serial)?; 351 | 352 | let uri = a.take_req("uri")?; 353 | let hash = a.take_req("hash")?; 354 | a.exhausted()?; 355 | 356 | let rel = base_uri.relative_to(uri).ok_or(Error::InvalidRepoState)?; 357 | 358 | let uri = base_uri.resolve(&rel); 359 | let path = base_dir.join(rel); 360 | 361 | let file = sync::read(&path).map_err(|_| Error::InvalidRepoState)?; 362 | let file_ref = FileRef::new(uri, &file); 363 | 364 | if file_ref.hash().to_string() != hash { 365 | return Err(Error::InvalidRepoState); 366 | } 367 | 368 | Ok(Some(DeltaRef::new(serial, file_ref))) 369 | } 370 | _ => Err(Error::InvalidXml(format!("Unexpected tag: {}", t.name))), 371 | })? 
372 | { 373 | deltas.push_back(delta) 374 | } 375 | 376 | Ok(RepoState { 377 | session, 378 | serial, 379 | snapshot, 380 | new_delta, 381 | deltas, 382 | base_uri, 383 | base_dir, 384 | }) 385 | }) 386 | }) 387 | } 388 | 389 | /// Update this RepoState with new snapshot. This will derive the delta. 390 | /// Returns an error in case the new snapshot is not for the next serial in 391 | /// the current session. 392 | pub fn apply(&mut self, new_snapshot: Snapshot) -> Result<(), Error> { 393 | // Cannot have any pending stuff. One delta only! 394 | if self.new_delta.is_some() { 395 | return Err(Error::InvalidDelta); 396 | } 397 | 398 | // Must be the next snapshot for this state. 399 | if new_snapshot.serial != self.serial + 1 || new_snapshot.session != self.session { 400 | return Err(Error::InvalidDelta); 401 | } 402 | 403 | let delta = self.snapshot.to(&new_snapshot)?; 404 | 405 | if !delta.is_empty() { 406 | self.snapshot = new_snapshot; 407 | self.new_delta = Some(delta); 408 | self.serial += 1; 409 | } 410 | 411 | Ok(()) 412 | } 413 | } 414 | 415 | //------------ FileRef ------------------------------------------------------- 416 | 417 | #[derive(Clone, Debug, Eq, PartialEq)] 418 | pub struct FileRef { 419 | uri: HttpsUri, 420 | hash: EncodedHash, 421 | size: usize, 422 | } 423 | 424 | impl FileRef { 425 | pub fn new(uri: HttpsUri, bytes: &Bytes) -> Self { 426 | let hash = EncodedHash::from_content(bytes.as_ref()); 427 | let size = bytes.len(); 428 | 429 | FileRef { uri, hash, size } 430 | } 431 | pub fn uri(&self) -> &HttpsUri { 432 | &self.uri 433 | } 434 | 435 | pub fn hash(&self) -> &EncodedHash { 436 | &self.hash 437 | } 438 | 439 | pub fn size(&self) -> usize { 440 | self.size 441 | } 442 | } 443 | 444 | pub type SnapshotRef = FileRef; 445 | 446 | #[derive(Clone, Debug, Eq, PartialEq)] 447 | pub struct DeltaRef { 448 | serial: u64, 449 | file_ref: FileRef, 450 | } 451 | 452 | impl DeltaRef { 453 | pub fn new(serial: u64, file_ref: FileRef) -> Self { 454 | DeltaRef { serial, file_ref } 455 | } 456 | 457 | pub fn serial(&self) -> u64 { 458 | self.serial 459 | } 460 | 461 | pub fn size(&self) -> usize { 462 | self.file_ref.size() 463 | } 464 | } 465 | 466 | impl AsRef for DeltaRef { 467 | fn as_ref(&self) -> &FileRef { 468 | &self.file_ref 469 | } 470 | } 471 | 472 | //------------ Snapshot ------------------------------------------------------ 473 | 474 | /// A structure to contain the RRDP snapshot data. 
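// Illustration (added; not part of the original source): write_xml() below
// renders a snapshot roughly like this, with the session id shortened and
// each <publish> carrying the base64-encoded object content:
//
//     <snapshot xmlns="http://www.ripe.net/rpki/rrdp" version="1"
//               session_id="..." serial="1">
//       <publish uri="rsync://localhost/repo/file1.txt">ZmlsZTEK</publish>
//     </snapshot>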
475 | #[derive(Clone, Debug, Eq, PartialEq)] 476 | pub struct Snapshot { 477 | session: Uuid, 478 | serial: u64, 479 | current_objects: Vec, 480 | } 481 | 482 | impl Snapshot { 483 | pub fn new(session: Uuid, serial: u64, current_objects: Vec) -> Self { 484 | Snapshot { 485 | session, 486 | serial, 487 | current_objects, 488 | } 489 | } 490 | 491 | pub fn to(&self, new_snapshot: &Snapshot) -> Result { 492 | if self.serial != new_snapshot.serial - 1 || self.session != new_snapshot.session { 493 | return Err(Error::InvalidDelta); 494 | } 495 | 496 | let old_files: HashMap<_, _> = self.current_objects.iter().map(|o| (o.uri(), o)).collect(); 497 | 498 | let mut new_files: HashMap<_, _> = new_snapshot 499 | .current_objects 500 | .iter() 501 | .map(|o| (o.uri(), o)) 502 | .collect(); 503 | 504 | let mut publishes = vec![]; 505 | let mut updates = vec![]; 506 | let mut withdraws = vec![]; 507 | 508 | for (uri, old_file) in old_files.into_iter() { 509 | match new_files.remove(uri) { 510 | Some(new_file) => { 511 | if new_file != old_file { 512 | updates.push(UpdateElement { 513 | uri: uri.clone(), 514 | hash: old_file.hash().clone(), 515 | base64: new_file.base64().clone(), 516 | }) 517 | } 518 | } 519 | None => withdraws.push(WithdrawElement { 520 | uri: uri.clone(), 521 | hash: old_file.hash().clone(), 522 | }), 523 | } 524 | } 525 | 526 | for (uri, new_file) in new_files.into_iter() { 527 | publishes.push(PublishElement { 528 | uri: uri.clone(), 529 | base64: new_file.base64().clone(), 530 | }) 531 | } 532 | 533 | let elements = DeltaElements { 534 | publishes, 535 | updates, 536 | withdraws, 537 | }; 538 | 539 | Ok(Delta { 540 | session: new_snapshot.session, 541 | serial: new_snapshot.serial, 542 | elements, 543 | }) 544 | } 545 | 546 | pub fn len(&self) -> usize { 547 | self.current_objects.len() 548 | } 549 | 550 | pub fn is_empty(&self) -> bool { 551 | self.current_objects.is_empty() 552 | } 553 | 554 | pub fn write_xml(&self) -> Bytes { 555 | Bytes::from(XmlWriter::encode_vec(|w| { 556 | let a = [ 557 | ("xmlns", NS), 558 | ("version", VERSION), 559 | ("session_id", &format!("{}", self.session)), 560 | ("serial", &format!("{}", self.serial)), 561 | ]; 562 | 563 | w.put_element("snapshot", Some(&a), |w| { 564 | for el in &self.current_objects { 565 | let uri = el.uri().to_string(); 566 | let b64 = el.base64().to_string(); 567 | let atr = [("uri", uri.as_ref())]; 568 | w.put_element("publish", Some(&atr), |w| w.put_text(&b64))?; 569 | } 570 | Ok(()) 571 | }) 572 | })) 573 | } 574 | 575 | pub fn from_xml(bytes: Bytes) -> Result { 576 | XmlReader::decode(bytes.as_ref(), |r| { 577 | r.take_named_element("snapshot", |mut a, r| { 578 | let _version = a.take_req("version")?; 579 | let session = a.take_req("session_id")?; 580 | let session = Uuid::from_str(&session)?; 581 | let serial = a.take_req("serial")?; 582 | let serial = u64::from_str(serial.as_str())?; 583 | a.exhausted()?; 584 | 585 | let mut files = vec![]; 586 | while let Some(file) = r.take_opt_element(|t, mut a, r| match t.name.as_ref() { 587 | "publish" => { 588 | let uri = a.take_req("uri")?; 589 | let uri = RsyncUri::from(uri.as_str()); 590 | a.exhausted()?; 591 | 592 | let base64 = r.take_chars()?; 593 | let content = base64::engine::general_purpose::STANDARD.decode(&base64)?; 594 | 595 | Ok(Some(CurrentFile::new(uri, &content))) 596 | } 597 | _ => Err(Error::InvalidXml(format!("Unexpected tag: {}", t.name))), 598 | })? 
{ 599 | files.push(file); 600 | } 601 | 602 | Ok(Snapshot::new(session, serial, files)) 603 | }) 604 | }) 605 | } 606 | } 607 | 608 | //------------ DeltaElements ------------------------------------------------- 609 | 610 | /// Defines the elements for an RRDP delta. 611 | #[derive(Clone, Debug, Eq, PartialEq)] 612 | pub struct DeltaElements { 613 | publishes: Vec, 614 | updates: Vec, 615 | withdraws: Vec, 616 | } 617 | 618 | impl DeltaElements { 619 | pub fn unwrap( 620 | self, 621 | ) -> ( 622 | Vec, 623 | Vec, 624 | Vec, 625 | ) { 626 | (self.publishes, self.updates, self.withdraws) 627 | } 628 | 629 | pub fn len(&self) -> usize { 630 | self.publishes.len() + self.updates.len() + self.withdraws.len() 631 | } 632 | 633 | pub fn is_empty(&self) -> bool { 634 | self.len() == 0 635 | } 636 | 637 | pub fn publishes(&self) -> &Vec { 638 | &self.publishes 639 | } 640 | 641 | pub fn updates(&self) -> &Vec { 642 | &self.updates 643 | } 644 | 645 | pub fn withdraws(&self) -> &Vec { 646 | &self.withdraws 647 | } 648 | } 649 | 650 | //------------ Delta --------------------------------------------------------- 651 | 652 | /// Defines an RRDP delta. 653 | #[derive(Clone, Debug, Eq, PartialEq)] 654 | pub struct Delta { 655 | session: Uuid, 656 | serial: u64, 657 | elements: DeltaElements, 658 | } 659 | 660 | impl Delta { 661 | pub fn new(session: Uuid, serial: u64, elements: DeltaElements) -> Self { 662 | Delta { 663 | session, 664 | serial, 665 | elements, 666 | } 667 | } 668 | 669 | pub fn session(&self) -> &Uuid { 670 | &self.session 671 | } 672 | pub fn serial(&self) -> u64 { 673 | self.serial 674 | } 675 | pub fn elements(&self) -> &DeltaElements { 676 | &self.elements 677 | } 678 | 679 | /// Total number of elements 680 | /// 681 | /// This is a cheap approximation of the size of the delta that can help 682 | /// in determining the choice of how many deltas to include in a 683 | /// notification file. 
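// Illustration (added; not part of the original source): write_xml() below
// renders a delta roughly like this, mirroring the diff_snapshot test at the
// end of this file (hashes and session id shortened). Note that updates are
// written as <publish> elements carrying the hash of the object they replace:
//
//     <delta xmlns="http://www.ripe.net/rpki/rrdp" version="1"
//            session_id="..." serial="2">
//       <publish uri="rsync://localhost/repo/file4.txt">ZmlsZTQK</publish>
//       <publish uri="rsync://localhost/repo/file1.txt" hash="...">ZmlsZTEtYQo=</publish>
//       <withdraw uri="rsync://localhost/repo/file3.txt" hash="..."/>
//     </delta>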
684 | pub fn len(&self) -> usize { 685 | self.elements.len() 686 | } 687 | 688 | pub fn is_empty(&self) -> bool { 689 | self.elements.is_empty() 690 | } 691 | 692 | pub fn unwrap(self) -> (Uuid, u64, DeltaElements) { 693 | (self.session, self.serial, self.elements) 694 | } 695 | 696 | pub fn write_xml(&self) -> Bytes { 697 | Bytes::from(XmlWriter::encode_vec(|w| { 698 | let a = [ 699 | ("xmlns", NS), 700 | ("version", VERSION), 701 | ("session_id", &format!("{}", self.session)), 702 | ("serial", &format!("{}", self.serial)), 703 | ]; 704 | 705 | w.put_element("delta", Some(&a), |w| { 706 | for el in &self.elements.publishes { 707 | let uri = el.uri.to_string(); 708 | let b64 = el.base64.to_string(); 709 | let atr = [("uri", uri.as_ref())]; 710 | w.put_element("publish", Some(&atr), |w| w.put_text(&b64))?; 711 | } 712 | 713 | for el in &self.elements.updates { 714 | let uri = el.uri.to_string(); 715 | let b64 = el.base64.to_string(); 716 | let hash = el.hash.to_string(); 717 | let atr = [("uri", uri.as_ref()), ("hash", hash.as_ref())]; 718 | w.put_element("publish", Some(&atr), |w| w.put_text(&b64))?; 719 | } 720 | 721 | for el in &self.elements.withdraws { 722 | let uri = el.uri.to_string(); 723 | let hash = el.hash.to_string(); 724 | 725 | let atr = [("uri", uri.as_ref()), ("hash", hash.as_ref())]; 726 | w.put_element("withdraw", Some(&atr), |w| w.empty())?; 727 | } 728 | 729 | Ok(()) 730 | }) 731 | })) 732 | } 733 | } 734 | 735 | //------------ Error --------------------------------------------------------- 736 | #[derive(Debug, Display)] 737 | pub enum Error { 738 | #[display("Invalid XML: {}", _0)] 739 | InvalidXml(String), 740 | 741 | #[display("Invalid delta for current session and serial")] 742 | InvalidDelta, 743 | 744 | #[display("No valid repo state found on disk")] 745 | InvalidRepoState, 746 | } 747 | 748 | impl Error { 749 | fn invalid_xml(e: impl fmt::Display) -> Self { 750 | Error::InvalidXml(e.to_string()) 751 | } 752 | } 753 | 754 | impl From for Error { 755 | fn from(e: XmlReaderErr) -> Self { 756 | Error::invalid_xml(e) 757 | } 758 | } 759 | 760 | impl From for Error { 761 | fn from(e: AttributesError) -> Self { 762 | Error::invalid_xml(e) 763 | } 764 | } 765 | 766 | impl From for Error { 767 | fn from(e: base64::DecodeError) -> Self { 768 | Error::invalid_xml(e) 769 | } 770 | } 771 | 772 | impl From for Error { 773 | fn from(e: ParseIntError) -> Self { 774 | Error::invalid_xml(e) 775 | } 776 | } 777 | 778 | impl From for Error { 779 | fn from(e: uuid::Error) -> Self { 780 | Error::invalid_xml(e) 781 | } 782 | } 783 | 784 | //------------ Tests --------------------------------------------------------- 785 | // 786 | #[cfg(test)] 787 | mod tests { 788 | use super::*; 789 | use crate::rrdp::Snapshot; 790 | use crate::sync; 791 | 792 | const SOURCE_1: &str = "./test-resources/source-1/"; 793 | const SOURCE_2: &str = "./test-resources/source-2/"; 794 | const SOURCE_3: &str = "./test-resources/source-3/"; 795 | 796 | const RSYNC_BASE: &str = "rsync://localhost/repo/"; 797 | const RSYNC_FILE1: &str = "rsync://localhost/repo/file1.txt"; 798 | const RSYNC_FILE3: &str = "rsync://localhost/repo/file3.txt"; 799 | const RSYNC_FILE4: &str = "rsync://localhost/repo/file4.txt"; 800 | 801 | fn snapshot_source_1() -> Snapshot { 802 | let base_dir = PathBuf::from(SOURCE_1); 803 | let rsync_base = RsyncUri::base_uri(RSYNC_BASE).unwrap(); 804 | 805 | let session = Uuid::new_v4(); 806 | let serial = 1; 807 | let files = sync::crawl_disk(&base_dir, &rsync_base).unwrap(); 808 | 809 | 
Snapshot::new(session, serial, files) 810 | } 811 | 812 | fn snapshot_from_src(session: Uuid, serial: u64, source: &str) -> Snapshot { 813 | let base_dir = PathBuf::from(source); 814 | let rsync_base = RsyncUri::base_uri(RSYNC_BASE).unwrap(); 815 | 816 | let files = sync::crawl_disk(&base_dir, &rsync_base).unwrap(); 817 | 818 | Snapshot::new(session, serial, files) 819 | } 820 | 821 | #[test] 822 | fn save_and_reload_snapshot() { 823 | let snapshot = snapshot_source_1(); 824 | 825 | let xml = snapshot.write_xml(); 826 | let target = PathBuf::from("./test-work/snapshot.xml"); 827 | 828 | sync::save(xml.as_ref(), &target).unwrap(); 829 | 830 | let bytes = sync::read(&target).unwrap(); 831 | let loaded_snapshot = Snapshot::from_xml(bytes).unwrap(); 832 | 833 | assert_eq!(snapshot, loaded_snapshot); 834 | } 835 | 836 | #[test] 837 | fn diff_snapshot() { 838 | let snapshot_1 = snapshot_source_1(); 839 | let snapshot_2 = snapshot_from_src(snapshot_1.session, snapshot_1.serial + 1, SOURCE_2); 840 | 841 | let delta = snapshot_1.to(&snapshot_2).unwrap(); 842 | 843 | assert_eq!(2, delta.serial); 844 | 845 | let elements = delta.elements; 846 | 847 | let (publishes, updates, withdraws) = elements.unwrap(); 848 | 849 | assert_eq!(1, publishes.len()); 850 | assert_eq!( 851 | &RsyncUri::from(RSYNC_FILE4), 852 | publishes.first().unwrap().uri() 853 | ); 854 | 855 | assert_eq!(1, updates.len()); 856 | assert_eq!(&RsyncUri::from(RSYNC_FILE1), updates.first().unwrap().uri()); 857 | 858 | assert_eq!(1, withdraws.len()); 859 | assert_eq!( 860 | &RsyncUri::from(RSYNC_FILE3), 861 | withdraws.first().unwrap().uri() 862 | ); 863 | } 864 | 865 | #[test] 866 | fn save_and_reload_current_state() { 867 | let snapshot_1 = snapshot_source_1(); 868 | 869 | let state = RepoState::new( 870 | snapshot_1, 871 | HttpsUri::from("https://localhost/rrdp/"), 872 | PathBuf::from("./test-work/"), 873 | ); 874 | let target_dir_1 = PathBuf::from(format!("./test-work/{}/1", state.session)); 875 | 876 | state.clone().save(25, true).unwrap(); 877 | 878 | let mut loaded_state = RepoState::reconstitute( 879 | HttpsUri::from("https://localhost/rrdp/"), 880 | PathBuf::from("./test-work/"), 881 | ) 882 | .unwrap(); 883 | 884 | assert_eq!(state, loaded_state); 885 | 886 | let snapshot_2 = snapshot_from_src(loaded_state.session, loaded_state.serial + 1, SOURCE_2); 887 | let target_dir_2 = PathBuf::from(format!("./test-work/{}/2", state.session)); 888 | 889 | loaded_state.apply(snapshot_2).unwrap(); 890 | loaded_state.save(25, true).unwrap(); 891 | 892 | let mut state = RepoState::reconstitute( 893 | HttpsUri::from("https://localhost/rrdp/"), 894 | PathBuf::from("./test-work/"), 895 | ) 896 | .unwrap(); 897 | let target_dir_3 = PathBuf::from(format!("./test-work/{}/3", state.session)); 898 | 899 | let snapshot_3 = snapshot_from_src(state.session, state.serial + 1, SOURCE_3); 900 | state.apply(snapshot_3).unwrap(); 901 | state.save(25, true).unwrap(); 902 | 903 | assert!(!target_dir_1.exists()); // dir 1 should be cleaned up (too much space) 904 | assert!(target_dir_3.exists()); 905 | 906 | // Applying a zero delta should be a no-op, so the new target dir should not exist 907 | // Furthermore, delta 2 should be removed if we limit the max_deltas to 1. I.e. 908 | // we will only have target dir 3 remaining. 
909 | let mut state = RepoState::reconstitute( 910 | HttpsUri::from("https://localhost/rrdp/"), 911 | PathBuf::from("./test-work/"), 912 | ) 913 | .unwrap(); 914 | 915 | let target_dir_4 = PathBuf::from(format!("./test-work/{}/4", state.session)); 916 | 917 | let snapshot_4 = snapshot_from_src(state.session, state.serial + 1, SOURCE_3); 918 | state.apply(snapshot_4).unwrap(); 919 | state.save(1, true).unwrap(); 920 | 921 | assert!(!target_dir_2.exists()); 922 | assert!(target_dir_3.exists()); 923 | assert!(!target_dir_4.exists()); 924 | } 925 | } 926 | --------------------------------------------------------------------------------
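As a closing illustration (not part of the repository itself), the sketch below shows how the pieces above fit together, mirroring the unit tests in src/rrdp.rs. The paths and URIs are placeholders, and it assumes that the helpers used by those tests (sync::crawl_disk, RsyncUri::base_uri, HttpsUri::from) are reachable through the rrdpit library crate declared in src/lib.rs.

use std::path::PathBuf;
use uuid::Uuid;

use rrdpit::rrdp::{RepoState, Snapshot};
use rrdpit::sync::{self, HttpsUri, RsyncUri};

fn main() {
    // Crawl the source directory into a snapshot with a fresh session id.
    let source = PathBuf::from("./test-resources/source-1/");
    let rsync_base = RsyncUri::base_uri("rsync://localhost/repo/").unwrap();
    let files = sync::crawl_disk(&source, &rsync_base).unwrap();
    let snapshot = Snapshot::new(Uuid::new_v4(), 1, files);

    // Start a new repository state and write notification.xml and the
    // snapshot under ./test-work/, keeping at most 25 deltas and cleaning
    // up superseded files.
    let state = RepoState::new(
        snapshot,
        HttpsUri::from("https://localhost/rrdp/"),
        PathBuf::from("./test-work/"),
    );
    state.save(25, true).unwrap();

    // An existing repository would instead be loaded with
    // RepoState::reconstitute(..) and extended with apply(next_snapshot),
    // where next_snapshot uses the same session id and serial + 1.
}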