├── .cargo ├── audit.toml └── config.toml ├── .github ├── install-arm-linkers.yml ├── renovate.json └── workflows │ ├── audit.yml │ ├── ci.yml │ ├── cross-ci.yml │ ├── lint-docs.yml │ ├── nightly.yml │ ├── prebuilt-pr.yml │ ├── release-plz.yml │ ├── release.yml │ └── triage.yml ├── .gitignore ├── .justfile ├── CHANGELOG.md ├── CONTRIBUTING.md ├── Cargo.lock ├── Cargo.toml ├── Cross.toml ├── LICENSE-APACHE ├── LICENSE-MIT ├── README.md ├── build-dependencies.just ├── cliff.toml ├── committed.toml ├── config └── rustic_scheduler.toml ├── deny.toml ├── dist-workspace.toml ├── dprint.json ├── release-plz.toml ├── src ├── application.rs ├── bin │ └── rustic-scheduler.rs ├── commands.rs ├── commands │ ├── client.rs │ └── server.rs ├── config.rs ├── error.rs ├── lib.rs ├── message.rs ├── prelude.rs └── scheduler.rs ├── templates └── client.stpl ├── tests └── acceptance.rs └── wix └── main.wxs /.cargo/audit.toml: -------------------------------------------------------------------------------- 1 | [advisories] 2 | ignore = [ 3 | # FIXME!: See https://github.com/RustCrypto/RSA/issues/19#issuecomment-1822995643. 4 | # There is no workaround available yet. 
5 | "RUSTSEC-2023-0071", 6 | ] 7 | -------------------------------------------------------------------------------- /.cargo/config.toml: -------------------------------------------------------------------------------- 1 | [build] 2 | rustdocflags = ["--document-private-items"] 3 | # rustflags = "-C target-cpu=native -D warnings" 4 | # incremental = true 5 | 6 | [target.armv7-unknown-linux-gnueabihf] 7 | linker = "arm-linux-gnueabihf-gcc" 8 | 9 | [target.armv7-unknown-linux-musleabihf] 10 | linker = "arm-linux-gnueabihf-gcc" 11 | 12 | [target.aarch64-unknown-linux-gnu] 13 | linker = "aarch64-linux-gnu-gcc" 14 | 15 | [target.aarch64-unknown-linux-musl] 16 | linker = "aarch64-linux-gnu-gcc" 17 | 18 | [target.i686-unknown-linux-gnu] 19 | linker = "i686-linux-gnu-gcc" 20 | 21 | [env] 22 | CC_i686-unknown-linux-gnu = "i686-linux-gnu-gcc" 23 | CC_aarch64_unknown_linux_musl = "aarch64-linux-gnu-gcc" 24 | CC_armv7_unknown_linux_gnueabihf = "arm-linux-gnueabihf-gcc" 25 | CC_armv7_unknown_linux_musleabihf = "arm-linux-gnueabihf-gcc" 26 | -------------------------------------------------------------------------------- /.github/install-arm-linkers.yml: -------------------------------------------------------------------------------- 1 | - name: Install armv7 and aarch64 Linkers 2 | if: runner.os == 'Linux' 3 | run: | 4 | sudo apt install gcc-aarch64-linux-gnu 5 | sudo apt install gcc-arm-linux-gnueabihf 6 | -------------------------------------------------------------------------------- /.github/renovate.json: -------------------------------------------------------------------------------- 1 | { 2 | "$schema": "https://docs.renovatebot.com/renovate-schema.json", 3 | "extends": ["local>rustic-rs/.github:renovate-config"] 4 | } 5 | -------------------------------------------------------------------------------- /.github/workflows/audit.yml: -------------------------------------------------------------------------------- 1 | name: Security audit 2 | 3 | on: 4 | pull_request: 5 | 
schedule: 6 | # Runs at 00:00 UTC everyday 7 | - cron: "0 0 * * *" 8 | push: 9 | paths: 10 | - "**/Cargo.toml" 11 | - "**/Cargo.lock" 12 | - "crates/**/Cargo.toml" 13 | - "crates/**/Cargo.lock" 14 | merge_group: 15 | types: [checks_requested] 16 | 17 | concurrency: 18 | group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} 19 | cancel-in-progress: true 20 | 21 | jobs: 22 | audit: 23 | if: ${{ github.repository_owner == 'rustic-rs' }} 24 | name: Run cargo audit 25 | runs-on: ubuntu-latest 26 | steps: 27 | - name: Checkout repository 28 | uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 29 | # Ensure that the latest version of Cargo is installed 30 | - name: Install Rust toolchain 31 | uses: dtolnay/rust-toolchain@1482605bfc5719782e1267fd0c0cc350fe7646b8 # v1 32 | with: 33 | toolchain: stable 34 | - uses: Swatinem/rust-cache@82a92a6e8fbeee089604da2575dc567ae9ddeaab # v2 35 | - uses: rustsec/audit-check@69366f33c96575abad1ee0dba8212993eecbe998 # v2.0.0 36 | with: 37 | token: ${{ secrets.GITHUB_TOKEN }} 38 | 39 | cargo-deny: 40 | if: ${{ github.repository_owner == 'rustic-rs' }} 41 | name: Run cargo-deny 42 | runs-on: ubuntu-latest 43 | steps: 44 | - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 45 | 46 | - uses: EmbarkStudios/cargo-deny-action@8371184bd11e21dcf8ac82ebf8c9c9f74ebf7268 # v2 47 | with: 48 | command: check bans licenses sources 49 | 50 | result: 51 | if: ${{ github.repository_owner == 'rustic-rs' }} 52 | name: Result (Audit) 53 | runs-on: ubuntu-latest 54 | needs: 55 | - audit 56 | - cargo-deny 57 | steps: 58 | - name: Mark the job as successful 59 | run: exit 0 60 | if: success() 61 | - name: Mark the job as unsuccessful 62 | run: exit 1 63 | if: "!success()" 64 | -------------------------------------------------------------------------------- /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | name: Continuous Integration 2 | 3 | on: 4 
| pull_request: 5 | paths-ignore: 6 | - "**/*.md" 7 | push: 8 | branches: 9 | - main 10 | - "renovate/**" 11 | paths-ignore: 12 | - "**/*.md" 13 | schedule: 14 | - cron: "0 0 * * 0" 15 | merge_group: 16 | types: [checks_requested] 17 | 18 | concurrency: 19 | group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} 20 | cancel-in-progress: true 21 | 22 | jobs: 23 | fmt: 24 | name: Rustfmt 25 | runs-on: ubuntu-latest 26 | steps: 27 | - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 28 | - name: Install Rust toolchain 29 | uses: dtolnay/rust-toolchain@1482605bfc5719782e1267fd0c0cc350fe7646b8 # v1 30 | with: 31 | toolchain: stable 32 | - run: rustup component add rustfmt 33 | - name: Run Cargo Fmt 34 | run: cargo fmt --all -- --check 35 | 36 | clippy: 37 | name: Clippy 38 | runs-on: ubuntu-latest 39 | 40 | steps: 41 | - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 42 | - name: Install Rust toolchain 43 | uses: dtolnay/rust-toolchain@1482605bfc5719782e1267fd0c0cc350fe7646b8 # v1 44 | with: 45 | toolchain: stable 46 | components: clippy 47 | - uses: Swatinem/rust-cache@82a92a6e8fbeee089604da2575dc567ae9ddeaab # v2 48 | - name: Run clippy 49 | run: cargo clippy --all-targets --all-features -- -D warnings 50 | 51 | test: 52 | name: Test 53 | runs-on: ${{ matrix.job.os }} 54 | strategy: 55 | matrix: 56 | rust: [stable] 57 | job: 58 | - os: macos-latest 59 | - os: ubuntu-latest 60 | - os: windows-latest 61 | steps: 62 | - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 63 | if: github.event_name != 'pull_request' 64 | with: 65 | fetch-depth: 0 66 | 67 | - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 68 | if: github.event_name == 'pull_request' 69 | with: 70 | ref: ${{ github.event.pull_request.head.sha }} 71 | fetch-depth: 0 72 | 73 | - name: Install Rust toolchain 74 | uses: dtolnay/rust-toolchain@1482605bfc5719782e1267fd0c0cc350fe7646b8 # v1 75 | with: 76 | toolchain: 
stable 77 | - uses: Swatinem/rust-cache@82a92a6e8fbeee089604da2575dc567ae9ddeaab # v2 78 | - name: Run Cargo Test 79 | run: cargo test -r --all-targets --all-features --workspace 80 | 81 | result: 82 | name: Result (CI) 83 | runs-on: ubuntu-latest 84 | needs: 85 | - fmt 86 | - clippy 87 | - test 88 | steps: 89 | - name: Mark the job as successful 90 | run: exit 0 91 | if: success() 92 | - name: Mark the job as unsuccessful 93 | run: exit 1 94 | if: "!success()" 95 | -------------------------------------------------------------------------------- /.github/workflows/cross-ci.yml: -------------------------------------------------------------------------------- 1 | name: Cross CI 2 | 3 | on: 4 | pull_request: 5 | paths-ignore: 6 | - "**/*.md" 7 | push: 8 | branches: 9 | - main 10 | - "renovate/**" 11 | - "release/**" 12 | paths-ignore: 13 | - "**/*.md" 14 | merge_group: 15 | types: [checks_requested] 16 | 17 | defaults: 18 | run: 19 | shell: bash 20 | 21 | concurrency: 22 | group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} 23 | cancel-in-progress: true 24 | 25 | jobs: 26 | cross-check: 27 | name: Cross checking ${{ matrix.job.target }} on ${{ matrix.rust }} 28 | runs-on: ${{ matrix.job.os }} 29 | strategy: 30 | fail-fast: false 31 | matrix: 32 | rust: [stable] 33 | job: 34 | - os: windows-latest 35 | os-name: windows 36 | target: x86_64-pc-windows-msvc 37 | architecture: x86_64 38 | use-cross: false 39 | - os: windows-latest 40 | os-name: windows 41 | target: x86_64-pc-windows-gnu 42 | architecture: x86_64 43 | use-cross: false 44 | - os: macos-13 45 | os-name: macos 46 | target: x86_64-apple-darwin 47 | architecture: x86_64 48 | use-cross: false 49 | - os: macos-latest 50 | os-name: macos 51 | target: aarch64-apple-darwin 52 | architecture: arm64 53 | use-cross: true 54 | - os: ubuntu-latest 55 | os-name: linux 56 | target: x86_64-unknown-linux-gnu 57 | architecture: x86_64 58 | use-cross: false 59 | - os: ubuntu-latest 60 | os-name: linux 61 | 
target: x86_64-unknown-linux-musl 62 | architecture: x86_64 63 | use-cross: false 64 | - os: ubuntu-latest 65 | os-name: linux 66 | target: aarch64-unknown-linux-gnu 67 | architecture: arm64 68 | use-cross: true 69 | - os: ubuntu-latest 70 | os-name: linux 71 | target: aarch64-unknown-linux-musl 72 | architecture: arm64 73 | use-cross: true 74 | - os: ubuntu-latest 75 | os-name: linux 76 | target: i686-unknown-linux-gnu 77 | architecture: i386 78 | use-cross: true 79 | - os: ubuntu-latest 80 | os-name: linux 81 | target: armv7-unknown-linux-gnueabihf 82 | architecture: armv7 83 | use-cross: true 84 | 85 | steps: 86 | - name: Checkout repository 87 | uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 88 | 89 | - name: Run Cross-CI action 90 | uses: rustic-rs/cross-ci-action@main 91 | with: 92 | toolchain: ${{ matrix.rust }} 93 | target: ${{ matrix.job.target }} 94 | use-cross: ${{ matrix.job.use-cross }} 95 | project-cache-key: "rustic_scheduler" 96 | 97 | result: 98 | name: Result (Cross-CI) 99 | runs-on: ubuntu-latest 100 | needs: cross-check 101 | steps: 102 | - name: Mark the job as successful 103 | run: exit 0 104 | if: success() 105 | - name: Mark the job as unsuccessful 106 | run: exit 1 107 | if: "!success()" 108 | -------------------------------------------------------------------------------- /.github/workflows/lint-docs.yml: -------------------------------------------------------------------------------- 1 | name: Lint Markdown / Toml 2 | 3 | on: 4 | pull_request: 5 | push: 6 | branches: [main] 7 | merge_group: 8 | types: [checks_requested] 9 | 10 | concurrency: 11 | group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} 12 | cancel-in-progress: true 13 | 14 | jobs: 15 | style: 16 | runs-on: ubuntu-latest 17 | steps: 18 | - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 19 | 20 | - uses: dprint/check@2f1cf31537886c3bfb05591c031f7744e48ba8a1 # v2.2 21 | 22 | result: 23 | name: Result (Style) 24 | 
runs-on: ubuntu-latest 25 | needs: 26 | - style 27 | steps: 28 | - name: Mark the job as successful 29 | run: exit 0 30 | if: success() 31 | - name: Mark the job as unsuccessful 32 | run: exit 1 33 | if: "!success()" 34 | -------------------------------------------------------------------------------- /.github/workflows/nightly.yml: -------------------------------------------------------------------------------- 1 | name: Continuous Deployment 2 | 3 | on: 4 | workflow_dispatch: 5 | schedule: 6 | # “At 00:05.” 7 | # https://crontab.guru/#5_0_*_*_* 8 | - cron: "5 0 * * *" 9 | 10 | defaults: 11 | run: 12 | shell: bash 13 | 14 | env: 15 | BINARY_NAME: rustic-scheduler 16 | BINARY_NIGHTLY_DIR: rustic_scheduler 17 | 18 | jobs: 19 | publish: 20 | if: ${{ github.repository_owner == 'rustic-rs' && github.ref == 'refs/heads/main' }} 21 | name: Publishing ${{ matrix.job.target }} 22 | runs-on: ${{ matrix.job.os }} 23 | strategy: 24 | fail-fast: false 25 | matrix: 26 | rust: [stable] 27 | job: 28 | - os: windows-latest 29 | os-name: windows 30 | target: x86_64-pc-windows-msvc 31 | architecture: x86_64 32 | binary-postfix: ".exe" 33 | use-cross: false 34 | - os: windows-latest 35 | os-name: windows 36 | target: x86_64-pc-windows-gnu 37 | architecture: x86_64 38 | binary-postfix: ".exe" 39 | use-cross: true 40 | - os: macos-13 41 | os-name: macos 42 | target: x86_64-apple-darwin 43 | architecture: x86_64 44 | binary-postfix: "" 45 | use-cross: false 46 | - os: macos-latest 47 | os-name: macos 48 | target: aarch64-apple-darwin 49 | architecture: arm64 50 | binary-postfix: "" 51 | use-cross: false 52 | - os: ubuntu-latest 53 | os-name: linux 54 | target: x86_64-unknown-linux-gnu 55 | architecture: x86_64 56 | binary-postfix: "" 57 | use-cross: false 58 | - os: ubuntu-latest 59 | os-name: linux 60 | target: x86_64-unknown-linux-musl 61 | architecture: x86_64 62 | binary-postfix: "" 63 | use-cross: false 64 | - os: ubuntu-latest 65 | os-name: linux 66 | target: 
aarch64-unknown-linux-gnu 67 | architecture: arm64 68 | binary-postfix: "" 69 | use-cross: true 70 | - os: ubuntu-latest 71 | os-name: linux 72 | target: i686-unknown-linux-gnu 73 | architecture: i386 74 | binary-postfix: "" 75 | use-cross: true 76 | - os: ubuntu-latest 77 | os-name: linux 78 | target: armv7-unknown-linux-gnueabihf 79 | architecture: armv7 80 | binary-postfix: "" 81 | use-cross: true 82 | 83 | steps: 84 | - name: Checkout repository 85 | uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 86 | with: 87 | fetch-depth: 0 # fetch all history so that git describe works 88 | - name: Create binary artifact 89 | uses: rustic-rs/create-binary-artifact-action@main # dev 90 | with: 91 | toolchain: ${{ matrix.rust }} 92 | target: ${{ matrix.job.target }} 93 | use-cross: ${{ matrix.job.use-cross }} 94 | describe-tag-suffix: -nightly 95 | binary-postfix: ${{ matrix.job.binary-postfix }} 96 | os: ${{ runner.os }} 97 | binary-name: ${{ env.BINARY_NAME }} 98 | package-secondary-name: nightly-${{ matrix.job.target}} 99 | github-token: ${{ secrets.GITHUB_TOKEN }} 100 | gpg-release-private-key: ${{ secrets.GPG_RELEASE_PRIVATE_KEY }} 101 | gpg-passphrase: ${{ secrets.GPG_PASSPHRASE }} 102 | rsign-release-private-key: ${{ secrets.RSIGN_RELEASE_PRIVATE_KEY }} 103 | rsign-passphrase: ${{ secrets.RSIGN_PASSPHRASE }} 104 | github-ref: ${{ github.ref }} 105 | sign-release: true 106 | hash-release: true 107 | use-project-version: false 108 | 109 | publish-nightly: 110 | if: ${{ github.repository_owner == 'rustic-rs' && github.ref == 'refs/heads/main' }} 111 | name: Publishing nightly builds 112 | needs: publish 113 | runs-on: ubuntu-latest 114 | steps: 115 | - name: Download all workflow run artifacts 116 | uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4 117 | - name: Releasing nightly builds 118 | shell: bash 119 | run: | 120 | # set up some directories 121 | WORKING_DIR=$(mktemp -d) 122 | DEST_DIR=$BINARY_NIGHTLY_DIR 123 | 124 
| # set up the github deploy key 125 | mkdir -p ~/.ssh 126 | echo "${{ secrets.NIGHTLY_RELEASE_KEY }}" > ~/.ssh/id_ed25519 127 | chmod 600 ~/.ssh/id_ed25519 128 | 129 | # set up git 130 | git config --global user.name "${{ github.actor }}" 131 | git config --global user.email "${{ github.actor }}" 132 | ssh-keyscan -H github.com > ~/.ssh/known_hosts 133 | GIT_SSH='ssh -i ~/.ssh/id_ed25519 -o UserKnownHostsFile=~/.ssh/known_hosts' 134 | 135 | # clone the repo into our working directory 136 | # we use --depth 1 to avoid cloning the entire history 137 | # and only the main branch to avoid cloning all branches 138 | GIT_SSH_COMMAND=$GIT_SSH git clone git@github.com:rustic-rs/nightly.git --branch main --single-branch --depth 1 $WORKING_DIR 139 | 140 | # ensure destination directory exists 141 | mkdir -p $WORKING_DIR/$DEST_DIR 142 | 143 | # do the copy 144 | for i in binary-*; do cp -a $i/* $WORKING_DIR/$DEST_DIR; done 145 | 146 | # create the commit 147 | cd $WORKING_DIR 148 | git add . 149 | git commit -m "${{ github.job }} from https://github.com/${{ github.repository }}/commit/${{ github.sha }}" || echo 150 | GIT_SSH_COMMAND=$GIT_SSH git pull --rebase 151 | GIT_SSH_COMMAND=$GIT_SSH git push 152 | -------------------------------------------------------------------------------- /.github/workflows/prebuilt-pr.yml: -------------------------------------------------------------------------------- 1 | name: Create PR artifacts 2 | 3 | on: 4 | pull_request: 5 | types: [labeled] 6 | branches: 7 | - main 8 | paths-ignore: 9 | - "**/*.md" 10 | - "docs/**/*" 11 | workflow_dispatch: 12 | 13 | env: 14 | BINARY_NAME: rustic-scheduler 15 | 16 | concurrency: 17 | group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} 18 | cancel-in-progress: true 19 | 20 | jobs: 21 | pr-build: 22 | if: ${{ github.event.label.name == 'S-build' && github.repository_owner == 'rustic-rs' }} 23 | name: Build PR on ${{ matrix.job.target }} 24 | runs-on: ${{ matrix.job.os }} 25 | strategy: 26 
| matrix: 27 | rust: [stable] 28 | job: 29 | - os: windows-latest 30 | os-name: windows 31 | target: x86_64-pc-windows-msvc 32 | architecture: x86_64 33 | binary-postfix: ".exe" 34 | use-cross: false 35 | - os: windows-latest 36 | os-name: windows 37 | target: x86_64-pc-windows-gnu 38 | architecture: x86_64 39 | binary-postfix: ".exe" 40 | use-cross: true 41 | - os: macos-13 42 | os-name: macos 43 | target: x86_64-apple-darwin 44 | architecture: x86_64 45 | binary-postfix: "" 46 | use-cross: false 47 | - os: macos-latest 48 | os-name: macos 49 | target: aarch64-apple-darwin 50 | architecture: arm64 51 | binary-postfix: "" 52 | use-cross: false 53 | - os: ubuntu-latest 54 | os-name: linux 55 | target: x86_64-unknown-linux-gnu 56 | architecture: x86_64 57 | binary-postfix: "" 58 | use-cross: false 59 | - os: ubuntu-latest 60 | os-name: linux 61 | target: x86_64-unknown-linux-musl 62 | architecture: x86_64 63 | binary-postfix: "" 64 | use-cross: false 65 | - os: ubuntu-latest 66 | os-name: linux 67 | target: aarch64-unknown-linux-gnu 68 | architecture: arm64 69 | binary-postfix: "" 70 | use-cross: true 71 | - os: ubuntu-latest 72 | os-name: linux 73 | target: i686-unknown-linux-gnu 74 | architecture: i386 75 | binary-postfix: "" 76 | use-cross: true 77 | - os: ubuntu-latest 78 | os-name: linux 79 | target: armv7-unknown-linux-gnueabihf 80 | architecture: armv7 81 | binary-postfix: "" 82 | use-cross: true 83 | 84 | steps: 85 | - name: Checkout repository 86 | uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 87 | with: 88 | fetch-depth: 0 # fetch all history so that git describe works 89 | - name: Create binary artifact 90 | uses: rustic-rs/create-binary-artifact-action@main # dev 91 | with: 92 | toolchain: ${{ matrix.rust }} 93 | target: ${{ matrix.job.target }} 94 | use-cross: ${{ matrix.job.use-cross }} 95 | describe-tag-suffix: -${{ github.run_id }}-${{ github.run_attempt }} 96 | binary-postfix: ${{ matrix.job.binary-postfix }} 97 | os: ${{ 
runner.os }} 98 | binary-name: ${{ env.BINARY_NAME }} 99 | package-secondary-name: ${{ matrix.job.target}} 100 | github-token: ${{ secrets.GITHUB_TOKEN }} 101 | github-ref: ${{ github.ref }} 102 | sign-release: false 103 | hash-release: true 104 | use-project-version: false # not being used in rustic_scheduler 105 | 106 | remove-build-label: 107 | name: Remove build label 108 | needs: pr-build 109 | permissions: 110 | contents: read 111 | issues: write 112 | pull-requests: write 113 | runs-on: ubuntu-latest 114 | if: | 115 | always() && 116 | ! contains(needs.*.result, 'skipped') && 117 | github.repository_owner == 'rustic-rs' 118 | steps: 119 | - name: Remove label 120 | env: 121 | GH_TOKEN: ${{ github.token }} 122 | run: | 123 | gh api \ 124 | --method DELETE \ 125 | -H "Accept: application/vnd.github+json" \ 126 | -H "X-GitHub-Api-Version: 2022-11-28" \ 127 | /repos/${{ github.repository }}/issues/${{ github.event.number }}/labels/S-build 128 | -------------------------------------------------------------------------------- /.github/workflows/release-plz.yml: -------------------------------------------------------------------------------- 1 | name: Release-plz 2 | 3 | permissions: 4 | pull-requests: write 5 | contents: write 6 | 7 | on: 8 | push: 9 | branches: 10 | - main 11 | 12 | jobs: 13 | release-plz: 14 | name: Release-plz 15 | if: ${{ github.repository_owner == 'rustic-rs' }} 16 | runs-on: ubuntu-latest 17 | steps: 18 | - name: Generate GitHub token 19 | uses: actions/create-github-app-token@5d869da34e18e7287c1daad50e0b8ea0f506ce69 # v1 20 | id: generate-token 21 | with: 22 | app-id: ${{ secrets.RELEASE_PLZ_APP_ID }} 23 | private-key: ${{ secrets.RELEASE_PLZ_APP_PRIVATE_KEY }} 24 | - name: Checkout repository 25 | uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 26 | with: 27 | fetch-depth: 0 28 | token: ${{ steps.generate-token.outputs.token }} 29 | - name: Install Rust toolchain 30 | uses: dtolnay/rust-toolchain@stable 31 | 32 | - 
name: Run release-plz 33 | uses: MarcoIeni/release-plz-action@301fd6d8c641b97f25b5ade37651a478a5faa7da # v0.5 34 | env: 35 | GITHUB_TOKEN: ${{ steps.generate-token.outputs.token }} 36 | CARGO_REGISTRY_TOKEN: ${{ secrets.CARGO_REGISTRY_TOKEN }} -------------------------------------------------------------------------------- /.github/workflows/release.yml: -------------------------------------------------------------------------------- 1 | # This file was autogenerated by dist: https://opensource.axo.dev/cargo-dist/ 2 | # 3 | # Copyright 2022-2024, axodotdev 4 | # SPDX-License-Identifier: MIT or Apache-2.0 5 | # 6 | # CI that: 7 | # 8 | # * checks for a Git Tag that looks like a release 9 | # * builds artifacts with dist (archives, installers, hashes) 10 | # * uploads those artifacts to temporary workflow zip 11 | # * on success, uploads the artifacts to a GitHub Release 12 | # 13 | # Note that the GitHub Release will be created with a generated 14 | # title/body based on your changelogs. 15 | 16 | name: Release 17 | permissions: 18 | "attestations": "write" 19 | "contents": "write" 20 | "id-token": "write" 21 | 22 | # This task will run whenever you push a git tag that looks like a version 23 | # like "1.0.0", "v0.1.0-prerelease.1", "my-app/0.1.0", "releases/v1.0.0", etc. 24 | # Various formats will be parsed into a VERSION and an optional PACKAGE_NAME, where 25 | # PACKAGE_NAME must be the name of a Cargo package in your workspace, and VERSION 26 | # must be a Cargo-style SemVer Version (must have at least major.minor.patch). 27 | # 28 | # If PACKAGE_NAME is specified, then the announcement will be for that 29 | # package (erroring out if it doesn't have the given version or isn't dist-able). 
30 | # 31 | # If PACKAGE_NAME isn't specified, then the announcement will be for all 32 | # (dist-able) packages in the workspace with that version (this mode is 33 | # intended for workspaces with only one dist-able package, or with all dist-able 34 | # packages versioned/released in lockstep). 35 | # 36 | # If you push multiple tags at once, separate instances of this workflow will 37 | # spin up, creating an independent announcement for each one. However, GitHub 38 | # will hard limit this to 3 tags per commit, as it will assume more tags is a 39 | # mistake. 40 | # 41 | # If there's a prerelease-style suffix to the version, then the release(s) 42 | # will be marked as a prerelease. 43 | on: 44 | pull_request: 45 | push: 46 | tags: 47 | - '**[0-9]+.[0-9]+.[0-9]+*' 48 | 49 | jobs: 50 | # Run 'dist plan' (or host) to determine what tasks we need to do 51 | plan: 52 | runs-on: "ubuntu-20.04" 53 | outputs: 54 | val: ${{ steps.plan.outputs.manifest }} 55 | tag: ${{ !github.event.pull_request && github.ref_name || '' }} 56 | tag-flag: ${{ !github.event.pull_request && format('--tag={0}', github.ref_name) || '' }} 57 | publishing: ${{ !github.event.pull_request }} 58 | env: 59 | GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} 60 | steps: 61 | - uses: actions/checkout@v4 62 | with: 63 | submodules: recursive 64 | - name: Install dist 65 | # we specify bash to get pipefail; it guards against the `curl` command 66 | # failing. otherwise `sh` won't catch that `curl` returned non-0 67 | shell: bash 68 | run: "curl --proto '=https' --tlsv1.2 -LsSf https://github.com/axodotdev/cargo-dist/releases/download/v0.25.1/cargo-dist-installer.sh | sh" 69 | - name: Cache dist 70 | uses: actions/upload-artifact@v4 71 | with: 72 | name: cargo-dist-cache 73 | path: ~/.cargo/bin/dist 74 | # sure would be cool if github gave us proper conditionals... 
75 | # so here's a doubly-nested ternary-via-truthiness to try to provide the best possible 76 | # functionality based on whether this is a pull_request, and whether it's from a fork. 77 | # (PRs run on the *source* but secrets are usually on the *target* -- that's *good* 78 | # but also really annoying to build CI around when it needs secrets to work right.) 79 | - id: plan 80 | run: | 81 | dist ${{ (!github.event.pull_request && format('host --steps=create --tag={0}', github.ref_name)) || 'plan' }} --output-format=json > plan-dist-manifest.json 82 | echo "dist ran successfully" 83 | cat plan-dist-manifest.json 84 | echo "manifest=$(jq -c "." plan-dist-manifest.json)" >> "$GITHUB_OUTPUT" 85 | - name: "Upload dist-manifest.json" 86 | uses: actions/upload-artifact@v4 87 | with: 88 | name: artifacts-plan-dist-manifest 89 | path: plan-dist-manifest.json 90 | 91 | # Build and packages all the platform-specific things 92 | build-local-artifacts: 93 | name: build-local-artifacts (${{ join(matrix.targets, ', ') }}) 94 | # Let the initial task tell us to not run (currently very blunt) 95 | needs: 96 | - plan 97 | if: ${{ fromJson(needs.plan.outputs.val).ci.github.artifacts_matrix.include != null && (needs.plan.outputs.publishing == 'true' || fromJson(needs.plan.outputs.val).ci.github.pr_run_mode == 'upload') }} 98 | strategy: 99 | fail-fast: false 100 | # Target platforms/runners are computed by dist in create-release. 
101 | # Each member of the matrix has the following arguments: 102 | # 103 | # - runner: the github runner 104 | # - dist-args: cli flags to pass to dist 105 | # - install-dist: expression to run to install dist on the runner 106 | # 107 | # Typically there will be: 108 | # - 1 "global" task that builds universal installers 109 | # - N "local" tasks that build each platform's binaries and platform-specific installers 110 | matrix: ${{ fromJson(needs.plan.outputs.val).ci.github.artifacts_matrix }} 111 | runs-on: ${{ matrix.runner }} 112 | env: 113 | GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} 114 | BUILD_MANIFEST_NAME: target/distrib/${{ join(matrix.targets, '-') }}-dist-manifest.json 115 | steps: 116 | - name: enable windows longpaths 117 | run: | 118 | git config --global core.longpaths true 119 | - uses: actions/checkout@v4 120 | with: 121 | submodules: recursive 122 | - name: "Install armv7 and aarch64 Linkers" 123 | if: "runner.os == 'Linux'" 124 | run: | 125 | sudo apt install gcc-aarch64-linux-gnu 126 | sudo apt install gcc-arm-linux-gnueabihf 127 | - uses: swatinem/rust-cache@v2 128 | with: 129 | key: ${{ join(matrix.targets, '-') }} 130 | cache-provider: ${{ matrix.cache_provider }} 131 | - name: Install dist 132 | run: ${{ matrix.install_dist }} 133 | # Get the dist-manifest 134 | - name: Fetch local artifacts 135 | uses: actions/download-artifact@v4 136 | with: 137 | pattern: artifacts-* 138 | path: target/distrib/ 139 | merge-multiple: true 140 | - name: Install dependencies 141 | run: | 142 | ${{ matrix.packages_install }} 143 | - name: Build artifacts 144 | run: | 145 | # Actually do builds and make zips and whatnot 146 | dist build ${{ needs.plan.outputs.tag-flag }} --print=linkage --output-format=json ${{ matrix.dist_args }} > dist-manifest.json 147 | echo "dist ran successfully" 148 | - name: Attest 149 | uses: actions/attest-build-provenance@v1 150 | with: 151 | subject-path: "target/distrib/*${{ join(matrix.targets, ', ') }}*" 152 | - id: cargo-dist 153 | 
name: Post-build 154 | # We force bash here just because github makes it really hard to get values up 155 | # to "real" actions without writing to env-vars, and writing to env-vars has 156 | # inconsistent syntax between shell and powershell. 157 | shell: bash 158 | run: | 159 | # Parse out what we just built and upload it to scratch storage 160 | echo "paths<<EOF" >> "$GITHUB_OUTPUT" 161 | jq --raw-output ".upload_files[]" dist-manifest.json >> "$GITHUB_OUTPUT" 162 | echo "EOF" >> "$GITHUB_OUTPUT" 163 | 164 | cp dist-manifest.json "$BUILD_MANIFEST_NAME" 165 | - name: "Upload artifacts" 166 | uses: actions/upload-artifact@v4 167 | with: 168 | name: artifacts-build-local-${{ join(matrix.targets, '_') }} 169 | path: | 170 | ${{ steps.cargo-dist.outputs.paths }} 171 | ${{ env.BUILD_MANIFEST_NAME }} 172 | 173 | # Build and package all the platform-agnostic(ish) things 174 | build-global-artifacts: 175 | needs: 176 | - plan 177 | - build-local-artifacts 178 | runs-on: "ubuntu-20.04" 179 | env: 180 | GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} 181 | BUILD_MANIFEST_NAME: target/distrib/global-dist-manifest.json 182 | steps: 183 | - uses: actions/checkout@v4 184 | with: 185 | submodules: recursive 186 | - name: Install cached dist 187 | uses: actions/download-artifact@v4 188 | with: 189 | name: cargo-dist-cache 190 | path: ~/.cargo/bin/ 191 | - run: chmod +x ~/.cargo/bin/dist 192 | # Get all the local artifacts for the global tasks to use (for e.g. 
checksums) 193 | - name: Fetch local artifacts 194 | uses: actions/download-artifact@v4 195 | with: 196 | pattern: artifacts-* 197 | path: target/distrib/ 198 | merge-multiple: true 199 | - id: cargo-dist 200 | shell: bash 201 | run: | 202 | dist build ${{ needs.plan.outputs.tag-flag }} --output-format=json "--artifacts=global" > dist-manifest.json 203 | echo "dist ran successfully" 204 | 205 | # Parse out what we just built and upload it to scratch storage 206 | echo "paths<<EOF" >> "$GITHUB_OUTPUT" 207 | jq --raw-output ".upload_files[]" dist-manifest.json >> "$GITHUB_OUTPUT" 208 | echo "EOF" >> "$GITHUB_OUTPUT" 209 | 210 | cp dist-manifest.json "$BUILD_MANIFEST_NAME" 211 | - name: "Upload artifacts" 212 | uses: actions/upload-artifact@v4 213 | with: 214 | name: artifacts-build-global 215 | path: | 216 | ${{ steps.cargo-dist.outputs.paths }} 217 | ${{ env.BUILD_MANIFEST_NAME }} 218 | # Determines if we should publish/announce 219 | host: 220 | needs: 221 | - plan 222 | - build-local-artifacts 223 | - build-global-artifacts 224 | # Only run if we're "publishing", and only if local and global didn't fail (skipped is fine) 225 | if: ${{ always() && needs.plan.outputs.publishing == 'true' && (needs.build-global-artifacts.result == 'skipped' || needs.build-global-artifacts.result == 'success') && (needs.build-local-artifacts.result == 'skipped' || needs.build-local-artifacts.result == 'success') }} 226 | env: 227 | GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} 228 | runs-on: "ubuntu-20.04" 229 | outputs: 230 | val: ${{ steps.host.outputs.manifest }} 231 | steps: 232 | - uses: actions/checkout@v4 233 | with: 234 | submodules: recursive 235 | - name: Install cached dist 236 | uses: actions/download-artifact@v4 237 | with: 238 | name: cargo-dist-cache 239 | path: ~/.cargo/bin/ 240 | - run: chmod +x ~/.cargo/bin/dist 241 | # Fetch artifacts from scratch-storage 242 | - name: Fetch artifacts 243 | uses: actions/download-artifact@v4 244 | with: 245 | pattern: artifacts-* 246 | path: 
target/distrib/ 247 | merge-multiple: true 248 | - id: host 249 | shell: bash 250 | run: | 251 | dist host ${{ needs.plan.outputs.tag-flag }} --steps=upload --steps=release --output-format=json > dist-manifest.json 252 | echo "artifacts uploaded and released successfully" 253 | cat dist-manifest.json 254 | echo "manifest=$(jq -c "." dist-manifest.json)" >> "$GITHUB_OUTPUT" 255 | - name: "Upload dist-manifest.json" 256 | uses: actions/upload-artifact@v4 257 | with: 258 | # Overwrite the previous copy 259 | name: artifacts-dist-manifest 260 | path: dist-manifest.json 261 | # Create a GitHub Release while uploading all files to it 262 | - name: "Download GitHub Artifacts" 263 | uses: actions/download-artifact@v4 264 | with: 265 | pattern: artifacts-* 266 | path: artifacts 267 | merge-multiple: true 268 | - name: Cleanup 269 | run: | 270 | # Remove the granular manifests 271 | rm -f artifacts/*-dist-manifest.json 272 | - name: Create GitHub Release 273 | env: 274 | PRERELEASE_FLAG: "${{ fromJson(steps.host.outputs.manifest).announcement_is_prerelease && '--prerelease' || '' }}" 275 | ANNOUNCEMENT_TITLE: "${{ fromJson(steps.host.outputs.manifest).announcement_title }}" 276 | ANNOUNCEMENT_BODY: "${{ fromJson(steps.host.outputs.manifest).announcement_github_body }}" 277 | RELEASE_COMMIT: "${{ github.sha }}" 278 | run: | 279 | # Write and read notes from a file to avoid quoting breaking things 280 | echo "$ANNOUNCEMENT_BODY" > $RUNNER_TEMP/notes.txt 281 | 282 | gh release create "${{ needs.plan.outputs.tag }}" --target "$RELEASE_COMMIT" $PRERELEASE_FLAG --title "$ANNOUNCEMENT_TITLE" --notes-file "$RUNNER_TEMP/notes.txt" artifacts/* 283 | 284 | publish-homebrew-formula: 285 | needs: 286 | - plan 287 | - host 288 | runs-on: "ubuntu-20.04" 289 | env: 290 | GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} 291 | PLAN: ${{ needs.plan.outputs.val }} 292 | GITHUB_USER: "axo bot" 293 | GITHUB_EMAIL: "admin+bot@axo.dev" 294 | if: ${{ !fromJson(needs.plan.outputs.val).announcement_is_prerelease 
|| fromJson(needs.plan.outputs.val).publish_prereleases }} 295 | steps: 296 | - uses: actions/checkout@v4 297 | with: 298 | repository: "rustic-rs/homebrew-tap" 299 | token: ${{ secrets.HOMEBREW_TAP_TOKEN }} 300 | # So we have access to the formula 301 | - name: Fetch homebrew formulae 302 | uses: actions/download-artifact@v4 303 | with: 304 | pattern: artifacts-* 305 | path: Formula/ 306 | merge-multiple: true 307 | # This is extra complex because you can make your Formula name not match your app name 308 | # so we need to find releases with a *.rb file, and publish with that filename. 309 | - name: Commit formula files 310 | run: | 311 | git config --global user.name "${GITHUB_USER}" 312 | git config --global user.email "${GITHUB_EMAIL}" 313 | 314 | for release in $(echo "$PLAN" | jq --compact-output '.releases[] | select([.artifacts[] | endswith(".rb")] | any)'); do 315 | filename=$(echo "$release" | jq '.artifacts[] | select(endswith(".rb"))' --raw-output) 316 | name=$(echo "$filename" | sed "s/\.rb$//") 317 | version=$(echo "$release" | jq .app_version --raw-output) 318 | 319 | export PATH="/home/linuxbrew/.linuxbrew/bin:$PATH" 320 | brew update 321 | # We avoid reformatting user-provided data such as the app description and homepage. 322 | brew style --except-cops FormulaAudit/Homepage,FormulaAudit/Desc,FormulaAuditStrict --fix "Formula/${filename}" || true 323 | 324 | git add "Formula/${filename}" 325 | git commit -m "${name} ${version}" 326 | done 327 | git push 328 | 329 | announce: 330 | needs: 331 | - plan 332 | - host 333 | - publish-homebrew-formula 334 | # use "always() && ..." to allow us to wait for all publish jobs while 335 | # still allowing individual publish jobs to skip themselves (for prereleases). 336 | # "host" however must run to completion, no skipping allowed!
337 | if: ${{ always() && needs.host.result == 'success' && (needs.publish-homebrew-formula.result == 'skipped' || needs.publish-homebrew-formula.result == 'success') }} 338 | runs-on: "ubuntu-20.04" 339 | env: 340 | GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} 341 | steps: 342 | - uses: actions/checkout@v4 343 | with: 344 | submodules: recursive 345 | -------------------------------------------------------------------------------- /.github/workflows/triage.yml: -------------------------------------------------------------------------------- 1 | on: 2 | issues: 3 | types: 4 | - opened 5 | 6 | jobs: 7 | label_issue: 8 | if: ${{ github.repository_owner == 'rustic-rs' }} 9 | name: Label issue 10 | runs-on: ubuntu-latest 11 | steps: 12 | - env: 13 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 14 | ISSUE_URL: ${{ github.event.issue.html_url }} 15 | run: | 16 | # check if issue doesn't have any labels 17 | if [[ $(gh issue view $ISSUE_URL --json labels -q '.labels | length') -eq 0 ]]; then 18 | # add S-triage label 19 | gh issue edit $ISSUE_URL --add-label "S-triage" 20 | fi 21 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | /target 2 | .vscode 3 | **/*.rs.bk -------------------------------------------------------------------------------- /.justfile: -------------------------------------------------------------------------------- 1 | # 'Just' Configuration 2 | # Loads .env file for variables to be used in 3 | # in this just file 4 | 5 | set dotenv-load := true 6 | 7 | # Ignore recipes that are commented out 8 | 9 | set ignore-comments := true 10 | 11 | # Set shell for Windows OSs: 12 | # If you have PowerShell Core installed and want to use it, 13 | # use `pwsh.exe` instead of `powershell.exe` 14 | 15 | set windows-shell := ["powershell.exe", "-NoLogo", "-Command"] 16 | 17 | # Set shell for non-Windows OSs: 18 | 19 | set shell := ["bash", "-uc"] 20 
| 21 | export RUST_BACKTRACE := "1" 22 | export RUST_LOG := "" 23 | export CI := "1" 24 | 25 | build: 26 | cargo build --all-features 27 | cargo build -r --all-features 28 | 29 | b: build 30 | 31 | check: 32 | cargo check --no-default-features 33 | cargo check --all-features 34 | 35 | c: check 36 | 37 | ci: 38 | just loop . dev 39 | 40 | dev: format lint test 41 | 42 | d: dev 43 | 44 | test: check lint 45 | cargo test --all-targets --all-features --workspace 46 | 47 | test-ignored: check lint 48 | cargo test --all-targets --all-features --workspace -- --ignored 49 | 50 | t: test test-ignored 51 | 52 | lint: check 53 | cargo clippy --no-default-features -- -D warnings 54 | cargo clippy --all-targets --all-features -- -D warnings 55 | 56 | 57 | format-dprint: 58 | dprint fmt 59 | 60 | format-cargo: 61 | cargo fmt --all 62 | 63 | format: format-cargo format-dprint 64 | 65 | fmt: format 66 | 67 | rev: 68 | cargo insta review 69 | 70 | inverse-deps crate: 71 | cargo tree -e features -i {{ crate }} 72 | 73 | 74 | loop dir action: 75 | watchexec -w {{ dir }} -- "just {{ action }}" 76 | -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | # Changelog 2 | 3 | All notable changes to this project will be documented in this file. 
4 | 5 | ## [unreleased] 6 | 7 | ## [0.2.1](https://github.com/rustic-rs/rustic_scheduler/compare/v0.2.0...v0.2.1) - 2024-11-30 8 | 9 | ### Other 10 | 11 | - add support for i686 and armv7 targets in CI workflows and configuration ([#69](https://github.com/rustic-rs/rustic_scheduler/pull/69)) 12 | 13 | ## [0.2.0](https://github.com/rustic-rs/rustic_scheduler/compare/v0.1.2...v0.2.0) - 2024-11-29 14 | 15 | ### Fixed 16 | 17 | - rename executable and remove unused component reference in main.wxs 18 | 19 | ### Other 20 | 21 | - remove i686-unknown-linux-gnu, armv7-unknown-linux-gnueabihf, and x86_64-unknown-netbsd targets from CI workflows and update target list 22 | - add installation of dependencies for i686-unknown-linux-gnu 23 | - add targets ([#67](https://github.com/rustic-rs/rustic_scheduler/pull/67)) 24 | - update Rust version to 1.74.0 in Cargo.toml to use lints 25 | - Update dist-workspace.toml 26 | - enable GitHub attestations in release workflow and configuration 27 | - add Homebrew formula publishing to release workflow and update installers 28 | - *(deps)* update dependencies 29 | - remove Unicode-DFS-2016 from allow list and deny aws-lc-rs and aws-lc-sys due to build issues 30 | - update included files to include LICENSE-MIT and LICENSE-APACHE 31 | - add rust-cache action to release workflow and streamline target platforms 32 | - reorganize target platforms and add support for aarch64-unknown-linux-gnu 33 | - update target platforms and add dependencies for aarch64 architecture 34 | - *(deps)* update rustic_core to version 0.7.0 and rustic_backend to 0.5.1 35 | - *(deps)* update dependencies and add support for aarch64 architecture 36 | - add advisory ignore for RUSTSEC-2023-0071 due to lack of workaround 37 | - dprint fmt 38 | - *(deps)* update dependency 39 | - add CC0-1.0 to allowed licenses in deny.toml 40 | - update url dependency to version 2.5.4 and add dead code allowance in scheduler 41 | - add workspace linting configurations for Rust and 
Clippy 42 | - update installation instructions and improve client command usage 43 | - Migrate to abscissa framework (I) 44 | - update cross-compilation comments to include `aws-lc-sys` dependency issues and remove builds 45 | - update dependencies and refactor repository options structure to match rustic 0.6.0 46 | - add .vscode to .gitignore 47 | - add Justfile for build, check, and test automation 48 | - include extra static files in dist workspace configuration 49 | - add installation instructions for default feature on x86_64-unknown-linux-musl 50 | - *(release)* disable git releases, as cargo-dist does it 51 | 52 | ## [0.1.2](https://github.com/rustic-rs/rustic_scheduler/compare/v0.1.1...v0.1.2) - 2024-11-09 53 | 54 | ### Other 55 | 56 | - dprint fmt 57 | - *(release)* add cargo dist packaging 58 | 59 | ## [0.1.1](https://github.com/rustic-rs/rustic_scheduler/compare/v0.1.0...v0.1.1) - 2024-11-09 60 | 61 | ### Fixed 62 | 63 | - *(docs)* update readme 64 | - *(docs)* update readme 65 | - *(deps)* build sha2-asm on windows-gnu 66 | - *(deps)* build sha2-asm on windows-gnu 67 | - *(deps)* explicitly pull in sha2 asm extensions for non-windows targets 68 | - *(deps)* update rust crate chrono to v0.4.38 ([#46](https://github.com/rustic-rs/rustic_scheduler/pull/46)) 69 | - *(deps)* update rust crate axum to v0.7.6 ([#45](https://github.com/rustic-rs/rustic_scheduler/pull/45)) 70 | - *(deps)* update rust crate anyhow to v1.0.89 ([#44](https://github.com/rustic-rs/rustic_scheduler/pull/44)) 71 | - typo in readme ([#37](https://github.com/rustic-rs/rustic_scheduler/pull/37)) 72 | - *(deps)* update rust crate serde_derive to 1.0.194 ([#33](https://github.com/rustic-rs/rustic_scheduler/pull/33)) 73 | - *(deps)* update rust crate tungstenite to 0.21 ([#34](https://github.com/rustic-rs/rustic_scheduler/pull/34)) 74 | - *(deps)* update rust crate axum to 0.7.3 ([#29](https://github.com/rustic-rs/rustic_scheduler/pull/29)) 75 | - *(deps)* update rust crate url to 2.5
([#28](https://github.com/rustic-rs/rustic_scheduler/pull/28)) 76 | - *(deps)* update rust crate serde_derive to 1.0.193 ([#27](https://github.com/rustic-rs/rustic_scheduler/pull/27)) 77 | - *(ci)* remove unmaintained `actions-rs` ci actions 78 | - *(deps)* update rust crate toml to 0.8.8 ([#25](https://github.com/rustic-rs/rustic_scheduler/pull/25)) 79 | - *(deps)* update rust crate serde_derive to 1.0.192 ([#24](https://github.com/rustic-rs/rustic_scheduler/pull/24)) 80 | - *(deps)* update rust crate clap_derive to 4.4.7 ([#23](https://github.com/rustic-rs/rustic_scheduler/pull/23)) 81 | - Remove redundant clone() 82 | - *(deps)* update rust crate chrono to 0.4.31 ([#16](https://github.com/rustic-rs/rustic_scheduler/pull/16)) 83 | - *(deps)* update rust crate toml to 0.8.0 ([#12](https://github.com/rustic-rs/rustic_scheduler/pull/12)) 84 | - cloning 85 | - build and clippy lints 86 | 87 | ### Other 88 | 89 | - don't build on netbsd again, still fails due to `cannot find -lexecinfo` 90 | - update readme 91 | - *(deps)* update deps ([#60](https://github.com/rustic-rs/rustic_scheduler/pull/60)) 92 | - try fix build problems ([#58](https://github.com/rustic-rs/rustic_scheduler/pull/58)) 93 | - *(deps)* update actions ([#59](https://github.com/rustic-rs/rustic_scheduler/pull/59)) 94 | - use runners according to available images and target triple ([#57](https://github.com/rustic-rs/rustic_scheduler/pull/57)) 95 | - update dprint config ([#56](https://github.com/rustic-rs/rustic_scheduler/pull/56)) 96 | - add triage label to new issues only if no label has been set when creating it ([#55](https://github.com/rustic-rs/rustic_scheduler/pull/55)) 97 | - *(deps)* lock file maintenance rust dependencies ([#54](https://github.com/rustic-rs/rustic_scheduler/pull/54)) 98 | - ignore CHANGELOG.md in dprint formatting 99 | - Update renovate.json 100 | - use release-plz 101 | - *(deps)* upgrade deps 102 | - *(deps)* lockfile maintenance 103 | - update deny.toml 104 | - update 
deny.toml 105 | - Revert "chore(deps): lock file maintenance" ([#49](https://github.com/rustic-rs/rustic_scheduler/pull/49)) 106 | - *(deps)* lock file maintenance ([#48](https://github.com/rustic-rs/rustic_scheduler/pull/48)) 107 | - *(deps)* update embarkstudios/cargo-deny-action action to v2 ([#47](https://github.com/rustic-rs/rustic_scheduler/pull/47)) 108 | - *(deps)* update taiki-e/install-action digest to 18ab6bd ([#43](https://github.com/rustic-rs/rustic_scheduler/pull/43)) 109 | - *(deps)* update swatinem/rust-cache digest to 23bce25 ([#42](https://github.com/rustic-rs/rustic_scheduler/pull/42)) 110 | - *(deps)* update obi1kenobi/cargo-semver-checks-action digest to 7272cc2 ([#41](https://github.com/rustic-rs/rustic_scheduler/pull/41)) 111 | - *(deps)* update embarkstudios/cargo-deny-action digest to 3f4a782 ([#40](https://github.com/rustic-rs/rustic_scheduler/pull/40)) 112 | - *(deps)* update actions/download-artifact digest to fa0a91b ([#39](https://github.com/rustic-rs/rustic_scheduler/pull/39)) 113 | - *(deps)* update actions/checkout digest to 692973e ([#38](https://github.com/rustic-rs/rustic_scheduler/pull/38)) 114 | - *(deps)* lock file maintenance ([#35](https://github.com/rustic-rs/rustic_scheduler/pull/35)) 115 | - break old ci jobs when new commits are pushed so we don't fill up the queue 116 | - add project-cache-key for better caching in ci 117 | - *(deps)* update taiki-e/install-action digest to 56ab793 ([#32](https://github.com/rustic-rs/rustic_scheduler/pull/32)) 118 | - *(deps)* update taiki-e/install-action digest to a9ad291 ([#31](https://github.com/rustic-rs/rustic_scheduler/pull/31)) 119 | - *(deps)* update actions/download-artifact action to v4 ([#30](https://github.com/rustic-rs/rustic_scheduler/pull/30)) 120 | - dprint fmt 121 | - automerge lockfile maintenance 122 | - activate automerge for github action digest update 123 | - activate automerge for github action digest update 124 | - *(fmt)* upgrade dprint config 125 | - *(deps)* 
update taiki-e/install-action digest to d211c4b ([#26](https://github.com/rustic-rs/rustic_scheduler/pull/26)) 126 | - netbsd nightly builds fail due to missing execinfo, so we don't build on it for now 127 | - update rustsec/audit-check 128 | - update taiki-e/install-action 129 | - update dtolnay/rust-toolchain 130 | - Run actions that need secrets.GITHUB_TOKEN only on rustic-rs org 131 | - lockfile maintenance 132 | - *(deps)* update embarkstudios/cargo-deny-action digest to 1e59595 ([#22](https://github.com/rustic-rs/rustic_scheduler/pull/22)) 133 | - *(deps)* update taiki-e/install-action digest to 4d85042 ([#21](https://github.com/rustic-rs/rustic_scheduler/pull/21)) 134 | - *(deps)* update actions/checkout digest to b4ffde6 ([#20](https://github.com/rustic-rs/rustic_scheduler/pull/20)) 135 | - add results to ci 136 | - update dprint plugins 137 | - compile dependencies with optimizations in dev mode 138 | - Add client/ site for client statistics ([#19](https://github.com/rustic-rs/rustic_scheduler/pull/19)) 139 | - Update README.md 140 | - *(readme)* remove note about rustic_core not being published 141 | - add x86_64-pc-windows-gnu target 142 | - *(deps)* upgrade deps 143 | - dprint fmt 144 | - update cross ci 145 | - *(cargo)* remove special os-dependent linker/compiler settings 146 | - *(manifest)* use rustic_core from crates.io 147 | - sign binaries using rsign2 as well 148 | - add contributing 149 | - fix list indent 150 | - rewrite contributing remark 151 | - relink to new image location 152 | - *(deps)* update dependencies 153 | - *(deps)* update actions/checkout action to v4 ([#18](https://github.com/rustic-rs/rustic_scheduler/pull/18)) 154 | - *(deps)* pin dependencies ([#17](https://github.com/rustic-rs/rustic_scheduler/pull/17)) 155 | - fmt 156 | - add cargo deny 157 | - *(deps)* update taiki-e/install-action digest to de0d48b ([#15](https://github.com/rustic-rs/rustic_scheduler/pull/15)) 158 | - add merge queue checks 159 | - *(deps)* update 
actions/checkout action to v4 ([#13](https://github.com/rustic-rs/rustic_scheduler/pull/13)) 160 | - *(deps)* update taiki-e/install-action action to v2 ([#14](https://github.com/rustic-rs/rustic_scheduler/pull/14)) 161 | - *(deps)* update swatinem/rust-cache digest to a95ba19 ([#11](https://github.com/rustic-rs/rustic_scheduler/pull/11)) 162 | - *(deps)* pin dependencies ([#10](https://github.com/rustic-rs/rustic_scheduler/pull/10)) 163 | - run workflow on renovate branches 164 | - update changelog 165 | - run release checks also on release subbranches 166 | - add triaging of issues 167 | - run git-cliff with latest tag during release 168 | - add dev tooling 169 | - add changelog generation 170 | - fix woggly github action comparison 171 | - use bash substring comparison to determine package name from branch 172 | - set right package 173 | - fix github refs 174 | - decrease build times on windows 175 | - fix workflow name for create-binary-artifact action, and check breaking changes package dependent 176 | - *(changelog)* add generated changelog 177 | - add release CD 178 | - fix comment being wrongly attributed 179 | - update ci to reflect changes and optimizations from rustic_server 180 | - remove lint from ci workflow and keep it separate, replace underscore in workflow file 181 | - add comment about shallow clone 182 | - declutter and reorganize 183 | - add signature and shallow clones to nightly 184 | - update header 185 | - add link to nightly downloads in documentation 186 | - *(readme)* add link to binaries and badge 187 | - nightly builds 188 | - refactor to library and client + server binaries 189 | - fmt 190 | - fix binary builds 191 | - *(readme)* rewrite comment for rustic_core, uses git dependency now 192 | - *(deps)* use workspace dependency on git repo 193 | - add rustic to artifact build 194 | - pull in rustic manually for now 195 | - *(deps)* update dependencies 196 | - add licenses, fix manifest 197 | - fmt 198 | - add ci 199 | - Update 
rustic_scheduler.toml 200 | - Update rustic_scheduler.toml 201 | - initial commit 202 | 203 | ### Bug Fixes 204 | 205 | - Build and clippy lints 206 | - Cloning 207 | 208 | ### Documentation 209 | 210 | - Add licenses, fix manifest 211 | - Rewrite comment for rustic_core, uses git dependency now 212 | - Add link to binaries and badge 213 | - Add link to nightly downloads in documentation 214 | - Add generated changelog 215 | 216 | ### Miscellaneous Tasks 217 | 218 | - Add ci 219 | - Pull in rustic manually for now 220 | - Add rustic to artifact build 221 | - Fix binary builds 222 | - Nightly builds 223 | - Update header 224 | - Add signature and shallow clones to nightly 225 | - Declutter and reorganize 226 | - Add comment about shallow clone 227 | - Remove lint from ci workflow and keep it separate, replace underscore in 228 | workflow file 229 | - Update ci to reflect changes and optimizations from rustic_server 230 | - Fix comment being wrongly attributed 231 | - Add release CD 232 | - Fix workflow name for create-binary-artifact action, and check breaking 233 | changes package dependent 234 | - Decrease build times on windows 235 | - Fix github refs 236 | - Set right package 237 | - Use bash substring comparison to determine package name from branch 238 | - Fix woggly github action comparison 239 | - Add changelog generation 240 | - Add dev tooling 241 | - Run git-cliff with latest tag during release 242 | - Add triaging of issues 243 | - Run release checks also on release subbranches 244 | 245 | ### Refactor 246 | 247 | - Refactor to library and client + server binaries 248 | 249 | 250 | -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # Contributing to `rustic_scheduler` 2 | 3 | Thank you for your interest in contributing to `rustic_scheduler`! 4 | 5 | We appreciate your help in making this project better. 
6 | 7 | Please read the 8 | [contribution guide](https://rustic.cli.rs/docs/contributing-to-rustic.html) to 9 | get started. 10 | -------------------------------------------------------------------------------- /Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "rustic_scheduler" 3 | version = "0.2.1" 4 | authors = ["the rustic-rs team"] 5 | categories = ["command-line-utilities"] 6 | edition = "2021" 7 | homepage = "https://rustic.cli.rs/" 8 | keywords = [ 9 | "backup", 10 | "restic", 11 | "cli", 12 | "scheduler", 13 | ] 14 | license = "Apache-2.0 OR MIT" 15 | repository = "https://github.com/rustic-rs/rustic_scheduler" 16 | rust-version = "1.74.0" 17 | description = """ 18 | rustic scheduler - a client/server application to schedule regular backups on 19 | many clients to one identical repository controlled by a central scheduling 20 | server. 21 | """ 22 | 23 | [package.metadata.wix] 24 | upgrade-guid = "C23C558D-97A1-454A-8B23-DA1368EE51A0" 25 | path-guid = "D3BCB70C-354E-405C-A25E-431453E4CB58" 26 | license = false 27 | eula = false 28 | 29 | [dependencies] 30 | abscissa_tokio = "0.8.0" 31 | anyhow = "1" 32 | axum = { version = "0.7.9", features = ["ws"] } 33 | chrono = "0.4.38" 34 | clap = "4" 35 | clap_derive = "4.5.18" 36 | cron = "0.13.0" 37 | env_logger = "0.11" 38 | gethostname = "0.5.0" 39 | log = "0.4.22" 40 | rustic_backend = { version = "0.5.2", features = ["merge"] } 41 | rustic_core = "0.7.1" 42 | sailfish = "0.9.0" 43 | serde = "1" 44 | serde_derive = "1" 45 | serde_json = "1" 46 | serde_with = "3" 47 | thiserror = "2" 48 | tokio = { version = "1", features = ["full"] } 49 | toml = "0.8.19" 50 | tungstenite = "0.24.0" 51 | url = "2.5" 52 | 53 | [target.'cfg(not(windows))'.dependencies] 54 | sha2 = { version = "0.10.8", features = ["asm"] } 55 | 56 | [target.'cfg(windows)'.dependencies] 57 | # unfortunately, the asm extensions do not build on Windows, see 
https://github.com/RustCrypto/asm-hashes/issues/17 58 | # and https://github.com/RustCrypto/asm-hashes/pull/78 59 | sha2 = "0.10.8" 60 | 61 | [dependencies.abscissa_core] 62 | version = "0.8.1" 63 | # optional: use `gimli` to capture backtraces 64 | # see https://github.com/rust-lang/backtrace-rs/issues/189 65 | # features = ["gimli-backtrace"] 66 | 67 | [dev-dependencies] 68 | abscissa_core = { version = "0.8.1", features = ["testing"] } 69 | once_cell = "1.20" 70 | 71 | # see: https://nnethercote.github.io/perf-book/build-configuration.html 72 | [profile.dev] 73 | opt-level = 0 74 | debug = true 75 | rpath = false 76 | lto = false 77 | debug-assertions = true 78 | codegen-units = 4 79 | 80 | # compile dependencies with optimizations in dev mode 81 | # see: https://doc.rust-lang.org/stable/cargo/reference/profiles.html#overrides 82 | [profile.dev.package."*"] 83 | opt-level = 3 84 | debug = true 85 | 86 | [profile.release] 87 | opt-level = 3 88 | debug = false # true for profiling 89 | rpath = false 90 | lto = "fat" 91 | debug-assertions = false 92 | codegen-units = 1 93 | strip = true 94 | panic = "abort" 95 | 96 | [profile.test] 97 | opt-level = 1 98 | debug = true 99 | rpath = false 100 | lto = false 101 | debug-assertions = true 102 | codegen-units = 4 103 | 104 | [profile.bench] 105 | opt-level = 3 106 | debug = true # true for profiling 107 | rpath = false 108 | lto = true 109 | debug-assertions = false 110 | codegen-units = 1 111 | 112 | # The profile that 'dist' will build with 113 | [profile.dist] 114 | inherits = "release" 115 | lto = "thin" 116 | 117 | [workspace.lints.rust] 118 | unsafe_code = "forbid" 119 | missing_docs = "warn" 120 | rust_2018_idioms = { level = "warn", priority = -1 } 121 | trivial_casts = "warn" 122 | unused_lifetimes = "warn" 123 | unused_qualifications = "warn" 124 | bad_style = "warn" 125 | dead_code = "allow" # TODO: "warn" 126 | improper_ctypes = "warn" 127 | missing_copy_implementations = "warn" 128 |
missing_debug_implementations = "warn" 129 | non_shorthand_field_patterns = "warn" 130 | no_mangle_generic_items = "warn" 131 | overflowing_literals = "warn" 132 | path_statements = "warn" 133 | patterns_in_fns_without_body = "warn" 134 | trivial_numeric_casts = "warn" 135 | unused_results = "warn" 136 | unused_extern_crates = "warn" 137 | unused_import_braces = "warn" 138 | unconditional_recursion = "warn" 139 | unused = { level = "warn", priority = -1 } 140 | unused_allocation = "warn" 141 | unused_comparisons = "warn" 142 | unused_parens = "warn" 143 | while_true = "warn" 144 | unreachable_pub = "allow" 145 | non_local_definitions = "allow" 146 | 147 | [workspace.lints.clippy] 148 | redundant_pub_crate = "allow" 149 | pedantic = { level = "warn", priority = -1 } 150 | nursery = { level = "warn", priority = -1 } 151 | # expect_used = "warn" # TODO! 152 | # unwrap_used = "warn" # TODO! 153 | enum_glob_use = "warn" 154 | correctness = { level = "warn", priority = -1 } 155 | suspicious = { level = "warn", priority = -1 } 156 | complexity = { level = "warn", priority = -1 } 157 | perf = { level = "warn", priority = -1 } 158 | cast_lossless = "warn" 159 | default_trait_access = "warn" 160 | doc_markdown = "warn" 161 | manual_string_new = "warn" 162 | match_same_arms = "warn" 163 | semicolon_if_nothing_returned = "warn" 164 | trivially_copy_pass_by_ref = "warn" 165 | module_name_repetitions = "allow" 166 | # TODO: Remove when Windows support landed 167 | # mostly Windows-related functionality is missing `const` 168 | # as it's only OK(()), but doesn't make it reasonable to 169 | # have a breaking change in the future. They won't be const. 
170 | missing_const_for_fn = "allow" 171 | needless_raw_string_hashes = "allow" 172 | 173 | [workspace.lints.rustdoc] 174 | # We run rustdoc with `--document-private-items` so we can document private items 175 | private_intra_doc_links = "allow" 176 | -------------------------------------------------------------------------------- /Cross.toml: -------------------------------------------------------------------------------- 1 | [target.i686-unknown-linux-gnu] 2 | image = "ghcr.io/cross-rs/i686-unknown-linux-gnu:edge" 3 | pre-build = [ 4 | "dpkg --add-architecture $CROSS_DEB_ARCH", 5 | "apt-get update && apt-get --assume-yes install gcc-multilib-i686-linux-gnu gcc-i686-linux-gnu", 6 | ] 7 | -------------------------------------------------------------------------------- /LICENSE-APACHE: -------------------------------------------------------------------------------- 1 | 2 | Apache License 3 | Version 2.0, January 2004 4 | http://www.apache.org/licenses/ 5 | 6 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 7 | 8 | 1. Definitions. 9 | 10 | "License" shall mean the terms and conditions for use, reproduction, 11 | and distribution as defined by Sections 1 through 9 of this document. 12 | 13 | "Licensor" shall mean the copyright owner or entity authorized by 14 | the copyright owner that is granting the License. 15 | 16 | "Legal Entity" shall mean the union of the acting entity and all 17 | other entities that control, are controlled by, or are under common 18 | control with that entity. For the purposes of this definition, 19 | "control" means (i) the power, direct or indirect, to cause the 20 | direction or management of such entity, whether by contract or 21 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 22 | outstanding shares, or (iii) beneficial ownership of such entity. 23 | 24 | "You" (or "Your") shall mean an individual or Legal Entity 25 | exercising permissions granted by this License. 
26 | 27 | "Source" form shall mean the preferred form for making modifications, 28 | including but not limited to software source code, documentation 29 | source, and configuration files. 30 | 31 | "Object" form shall mean any form resulting from mechanical 32 | transformation or translation of a Source form, including but 33 | not limited to compiled object code, generated documentation, 34 | and conversions to other media types. 35 | 36 | "Work" shall mean the work of authorship, whether in Source or 37 | Object form, made available under the License, as indicated by a 38 | copyright notice that is included in or attached to the work 39 | (an example is provided in the Appendix below). 40 | 41 | "Derivative Works" shall mean any work, whether in Source or Object 42 | form, that is based on (or derived from) the Work and for which the 43 | editorial revisions, annotations, elaborations, or other modifications 44 | represent, as a whole, an original work of authorship. For the purposes 45 | of this License, Derivative Works shall not include works that remain 46 | separable from, or merely link (or bind by name) to the interfaces of, 47 | the Work and Derivative Works thereof. 48 | 49 | "Contribution" shall mean any work of authorship, including 50 | the original version of the Work and any modifications or additions 51 | to that Work or Derivative Works thereof, that is intentionally 52 | submitted to Licensor for inclusion in the Work by the copyright owner 53 | or by an individual or Legal Entity authorized to submit on behalf of 54 | the copyright owner. 
For the purposes of this definition, "submitted" 55 | means any form of electronic, verbal, or written communication sent 56 | to the Licensor or its representatives, including but not limited to 57 | communication on electronic mailing lists, source code control systems, 58 | and issue tracking systems that are managed by, or on behalf of, the 59 | Licensor for the purpose of discussing and improving the Work, but 60 | excluding communication that is conspicuously marked or otherwise 61 | designated in writing by the copyright owner as "Not a Contribution." 62 | 63 | "Contributor" shall mean Licensor and any individual or Legal Entity 64 | on behalf of whom a Contribution has been received by Licensor and 65 | subsequently incorporated within the Work. 66 | 67 | 2. Grant of Copyright License. Subject to the terms and conditions of 68 | this License, each Contributor hereby grants to You a perpetual, 69 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 70 | copyright license to reproduce, prepare Derivative Works of, 71 | publicly display, publicly perform, sublicense, and distribute the 72 | Work and such Derivative Works in Source or Object form. 73 | 74 | 3. Grant of Patent License. Subject to the terms and conditions of 75 | this License, each Contributor hereby grants to You a perpetual, 76 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 77 | (except as stated in this section) patent license to make, have made, 78 | use, offer to sell, sell, import, and otherwise transfer the Work, 79 | where such license applies only to those patent claims licensable 80 | by such Contributor that are necessarily infringed by their 81 | Contribution(s) alone or by combination of their Contribution(s) 82 | with the Work to which such Contribution(s) was submitted. 
If You 83 | institute patent litigation against any entity (including a 84 | cross-claim or counterclaim in a lawsuit) alleging that the Work 85 | or a Contribution incorporated within the Work constitutes direct 86 | or contributory patent infringement, then any patent licenses 87 | granted to You under this License for that Work shall terminate 88 | as of the date such litigation is filed. 89 | 90 | 4. Redistribution. You may reproduce and distribute copies of the 91 | Work or Derivative Works thereof in any medium, with or without 92 | modifications, and in Source or Object form, provided that You 93 | meet the following conditions: 94 | 95 | (a) You must give any other recipients of the Work or 96 | Derivative Works a copy of this License; and 97 | 98 | (b) You must cause any modified files to carry prominent notices 99 | stating that You changed the files; and 100 | 101 | (c) You must retain, in the Source form of any Derivative Works 102 | that You distribute, all copyright, patent, trademark, and 103 | attribution notices from the Source form of the Work, 104 | excluding those notices that do not pertain to any part of 105 | the Derivative Works; and 106 | 107 | (d) If the Work includes a "NOTICE" text file as part of its 108 | distribution, then any Derivative Works that You distribute must 109 | include a readable copy of the attribution notices contained 110 | within such NOTICE file, excluding those notices that do not 111 | pertain to any part of the Derivative Works, in at least one 112 | of the following places: within a NOTICE text file distributed 113 | as part of the Derivative Works; within the Source form or 114 | documentation, if provided along with the Derivative Works; or, 115 | within a display generated by the Derivative Works, if and 116 | wherever such third-party notices normally appear. The contents 117 | of the NOTICE file are for informational purposes only and 118 | do not modify the License. 
You may add Your own attribution 119 | notices within Derivative Works that You distribute, alongside 120 | or as an addendum to the NOTICE text from the Work, provided 121 | that such additional attribution notices cannot be construed 122 | as modifying the License. 123 | 124 | You may add Your own copyright statement to Your modifications and 125 | may provide additional or different license terms and conditions 126 | for use, reproduction, or distribution of Your modifications, or 127 | for any such Derivative Works as a whole, provided Your use, 128 | reproduction, and distribution of the Work otherwise complies with 129 | the conditions stated in this License. 130 | 131 | 5. Submission of Contributions. Unless You explicitly state otherwise, 132 | any Contribution intentionally submitted for inclusion in the Work 133 | by You to the Licensor shall be under the terms and conditions of 134 | this License, without any additional terms or conditions. 135 | Notwithstanding the above, nothing herein shall supersede or modify 136 | the terms of any separate license agreement you may have executed 137 | with Licensor regarding such Contributions. 138 | 139 | 6. Trademarks. This License does not grant permission to use the trade 140 | names, trademarks, service marks, or product names of the Licensor, 141 | except as required for reasonable and customary use in describing the 142 | origin of the Work and reproducing the content of the NOTICE file. 143 | 144 | 7. Disclaimer of Warranty. Unless required by applicable law or 145 | agreed to in writing, Licensor provides the Work (and each 146 | Contributor provides its Contributions) on an "AS IS" BASIS, 147 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 148 | implied, including, without limitation, any warranties or conditions 149 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 150 | PARTICULAR PURPOSE. 
You are solely responsible for determining the 151 | appropriateness of using or redistributing the Work and assume any 152 | risks associated with Your exercise of permissions under this License. 153 | 154 | 8. Limitation of Liability. In no event and under no legal theory, 155 | whether in tort (including negligence), contract, or otherwise, 156 | unless required by applicable law (such as deliberate and grossly 157 | negligent acts) or agreed to in writing, shall any Contributor be 158 | liable to You for damages, including any direct, indirect, special, 159 | incidental, or consequential damages of any character arising as a 160 | result of this License or out of the use or inability to use the 161 | Work (including but not limited to damages for loss of goodwill, 162 | work stoppage, computer failure or malfunction, or any and all 163 | other commercial damages or losses), even if such Contributor 164 | has been advised of the possibility of such damages. 165 | 166 | 9. Accepting Warranty or Additional Liability. While redistributing 167 | the Work or Derivative Works thereof, You may choose to offer, 168 | and charge a fee for, acceptance of support, warranty, indemnity, 169 | or other liability obligations and/or rights consistent with this 170 | License. However, in accepting such obligations, You may act only 171 | on Your own behalf and on Your sole responsibility, not on behalf 172 | of any other Contributor, and only if You agree to indemnify, 173 | defend, and hold each Contributor harmless for any liability 174 | incurred by, or claims asserted against, such Contributor by reason 175 | of your accepting any such warranty or additional liability. 176 | 177 | END OF TERMS AND CONDITIONS 178 | 179 | APPENDIX: How to apply the Apache License to your work. 180 | 181 | To apply the Apache License to your work, attach the following 182 | boilerplate notice, with the fields enclosed by brackets "[]" 183 | replaced with your own identifying information. 
(Don't include 184 | the brackets!) The text should be enclosed in the appropriate 185 | comment syntax for the file format. We also recommend that a 186 | file or class name and description of purpose be included on the 187 | same "printed page" as the copyright notice for easier 188 | identification within third-party archives. 189 | 190 | Copyright [yyyy] [name of copyright owner] 191 | 192 | Licensed under the Apache License, Version 2.0 (the "License"); 193 | you may not use this file except in compliance with the License. 194 | You may obtain a copy of the License at 195 | 196 | http://www.apache.org/licenses/LICENSE-2.0 197 | 198 | Unless required by applicable law or agreed to in writing, software 199 | distributed under the License is distributed on an "AS IS" BASIS, 200 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 201 | See the License for the specific language governing permissions and 202 | limitations under the License. -------------------------------------------------------------------------------- /LICENSE-MIT: -------------------------------------------------------------------------------- 1 | The MIT License (MIT) 2 | 3 | Copyright (c) 2022 Alexander Weiss 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in 13 | all copies or substantial portions of the Software. 
14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN 21 | THE SOFTWARE. 22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 |

2 | 3 |

4 |

centrally schedule rustic backups

5 |

6 | 7 | 8 | 9 | 10 |

11 |

12 | 13 |

14 | 15 | ## About 16 | 17 | rustic scheduler is a client/server application to schedule regular backups on 18 | many clients to one identical repository controlled by a central scheduling 19 | server. 20 | 21 | It allows you to define client groups which are all backed up the same way. 22 | 23 | **Note**: rustic scheduler is in an early development stage. 24 | 25 | ## Contact 26 | 27 | | Contact | Where? | 28 | | ------------- | --------------------------------------------------------------------------------------------- | 29 | | Issue Tracker | [GitHub Issues](https://github.com/rustic-rs/rustic_scheduler/issues) | 30 | | Discord | [![Discord](https://dcbadge.vercel.app/api/server/WRUWENZnzQ)](https://discord.gg/WRUWENZnzQ) | 31 | | Discussions | [GitHub Discussions](https://github.com/rustic-rs/rustic/discussions) | 32 | 33 | ### Installation | 34 | 35 | Copy the `rustic-scheduler` binary to your backup schedule server and to all 36 | your clients. You can download the latest version from the 37 | [releases page](https://github.com/rustic-rs/rustic_scheduler/releases) 38 | 39 | ## Getting started 40 | 41 | - Create a config file `./config/rustic_scheduler.toml` on your backup schedule 42 | server (example config is available in the `config/` dir) 43 | 44 | - Run the `rustic-scheduler` binary on your server in the dir containing the 45 | config. 46 | 47 | - On each client, run `rustic-scheduler client --url <URL>`, where `<URL>` is 48 | the websocket address to connect, e.g. 49 | `rustic-scheduler client --url ws://server.localdomain:3012/ws`. 50 | 51 | - Backups on your clients are automatically started based on the configured 52 | schedule(s). 53 | 54 | - Statistics for a specific client are available under `/client/%client`, e.g. 55 | `http://server.localdomain:3012/client/my_server1`. 56 | 57 | ## Contributing 58 | 59 | Tried rustic-scheduler and not satisfied? Don't just walk away! 
You can help: 60 | 61 | - You can report issues or suggest new features on our 62 | [Discord server](https://discord.gg/WRUWENZnzQ) or using 63 | [GitHub Issues](https://github.com/rustic-rs/rustic_scheduler/issues/new/choose)! 64 | 65 | Do you know how to code or have an idea for an improvement? Don't keep it to 66 | yourself! 67 | 68 | - Contribute fixes or new features via a pull request! 69 | 70 | Please make sure that you read the 71 | [contribution guide](https://rustic.cli.rs/docs/contributing-to-rustic.html). 72 | 73 | ## License 74 | 75 | Licensed under either of: 76 | 77 | - [Apache License, Version 2.0](./LICENSE-APACHE) 78 | - [MIT license](./LICENSE-MIT) 79 | 80 | at your option. 81 | -------------------------------------------------------------------------------- /build-dependencies.just: -------------------------------------------------------------------------------- 1 | ### DEFAULT ### 2 | 3 | # Install dependencies for the default feature on x86_64-unknown-linux-musl 4 | install-default-x86_64-unknown-linux-musl: 5 | sudo apt-get update 6 | sudo apt-get install -y musl-tools 7 | 8 | # Install dependencies for the default feature on aarch64-unknown-linux-musl 9 | install-default-aarch64-unknown-linux-musl: 10 | sudo apt-get update 11 | sudo apt-get install -y musl-tools 12 | 13 | # Install dependencies for the default feature on i686-unknown-linux-gnu 14 | install-default-i686-unknown-linux-gnu: 15 | sudo apt-get update 16 | sudo apt-get install -y gcc-multilib-i686-linux-gnu gcc-i686-linux-gnu 17 | -------------------------------------------------------------------------------- /cliff.toml: -------------------------------------------------------------------------------- 1 | # git-cliff ~ default configuration file 2 | # https://git-cliff.org/docs/configuration 3 | # 4 | # Lines starting with "#" are comments. 5 | # Configuration options are organized into tables and keys. 6 | # See documentation for more information on available options. 
7 | 8 | [changelog] 9 | # changelog header 10 | header = """ 11 | # Changelog\n 12 | All notable changes to this project will be documented in this file.\n 13 | """ 14 | # template for the changelog body 15 | # https://tera.netlify.app/docs 16 | body = """ 17 | {% if version %}\ 18 | ## [{{ version | trim_start_matches(pat="v") }}] - {{ timestamp | date(format="%Y-%m-%d") }} 19 | {% else %}\ 20 | ## [unreleased] 21 | {% endif %}\ 22 | {% for group, commits in commits | group_by(attribute="group") %} 23 | ### {{ group | upper_first }} 24 | {% for commit in commits %} 25 | - {% if commit.breaking %}[**breaking**] {% endif %}{{ commit.message | upper_first }}\ 26 | {% endfor %} 27 | {% endfor %}\n 28 | """ 29 | # remove the leading and trailing whitespace from the template 30 | trim = true 31 | # changelog footer 32 | footer = """ 33 | 34 | """ 35 | # postprocessors 36 | postprocessors = [ 37 | { pattern = '', replace = "https://github.com/rustic-rs/rustic_scheduler" }, 38 | ] 39 | [git] 40 | # parse the commits based on https://www.conventionalcommits.org 41 | conventional_commits = true 42 | # filter out the commits that are not conventional 43 | filter_unconventional = true 44 | # process each line of a commit as an individual commit 45 | split_commits = false 46 | # regex for preprocessing the commit messages 47 | commit_preprocessors = [ 48 | { pattern = '\((\w+\s)?#([0-9]+)\)', replace = "([#${2}](/issues/${2}))" }, # replace issue numbers 49 | ] 50 | # regex for parsing and grouping commits 51 | commit_parsers = [ 52 | { message = "^feat", group = "Features" }, 53 | { message = "^fix", group = "Bug Fixes" }, 54 | { message = "^doc", group = "Documentation" }, 55 | { message = "^perf", group = "Performance" }, 56 | { message = "^refactor", group = "Refactor" }, 57 | { message = "^style", group = "Styling", skip = true }, 58 | { message = "^test", group = "Testing" }, 59 | { message = "^chore\\(release\\): prepare for", skip = true }, 60 | { message = 
"^chore\\(deps\\)", skip = true }, 61 | { message = "^chore\\(pr\\)", skip = true }, 62 | { message = "^chore\\(pull\\)", skip = true }, 63 | { message = "^chore|ci", group = "Miscellaneous Tasks" }, 64 | { body = ".*security", group = "Security" }, 65 | { message = "^revert", group = "Revert" }, 66 | ] 67 | # protect breaking changes from being skipped due to matching a skipping commit_parser 68 | protect_breaking_commits = false 69 | # filter out the commits that are not matched by commit parsers 70 | filter_commits = false 71 | # glob pattern for matching git tags 72 | tag_pattern = "[0-9]*" 73 | # regex for skipping tags 74 | skip_tags = "v0.1.0-beta.1" 75 | # regex for ignoring tags 76 | ignore_tags = "" 77 | # sort the tags topologically 78 | topo_order = false 79 | # sort the commits inside sections by oldest/newest order 80 | sort_commits = "oldest" 81 | # limit the number of commits included in the changelog. 82 | # limit_commits = 42 83 | -------------------------------------------------------------------------------- /committed.toml: -------------------------------------------------------------------------------- 1 | subject_length = 50 2 | subject_capitalized = false 3 | subject_not_punctuated = true 4 | imperative_subject = true 5 | no_fixup = true 6 | no_wip = true 7 | hard_line_length = 0 8 | line_length = 80 9 | style = "none" 10 | allowed_types = [ 11 | "fix", 12 | "feat", 13 | "chore", 14 | "docs", 15 | "style", 16 | "refactor", 17 | "perf", 18 | "test", 19 | ] 20 | merge_commit = true 21 | -------------------------------------------------------------------------------- /config/rustic_scheduler.toml: -------------------------------------------------------------------------------- 1 | [global] 2 | address = "127.0.0.1:3012" 3 | # log-level = "debug" # not yet implemented 4 | # log-file = "/log/rustic.log" # not yet implemented 5 | 6 | # repository options: Note that all clients must be able to access this repository! 
7 | [repository] 8 | repository = "rest:http://storage-server/repo" 9 | password = "test" 10 | 11 | # You can define as many clientgroups as you like 12 | [clientgroup.myservers] 13 | clients = ["my_server1", "my_server2", "my_server3"] 14 | 15 | # Define sources, schedules and backup options to be used for all machines in the clientgroup 16 | [[clientgroup.myservers.sources]] 17 | source = "/data/dir" 18 | schedule = "custom" 19 | options = "default" 20 | 21 | [[clientgroup.myservers.sources]] 22 | source = "/home/global" 23 | schedule = "daily" 24 | options = "global" 25 | 26 | # Define the schedules used for the sources - using crontab schedules 27 | [schedules] 28 | daily = "0 0 0 * * * *" 29 | custom = "5,25,45,55 * * * * * *" # Test schedule which backs up 4x per minute 30 | custom2 = "10,45 * * * * * *" # Test schedule which backs up 2x per minute 31 | 32 | # Define the options used for the sources 33 | [options.default] 34 | # empty for default options 35 | 36 | [options.global] 37 | git-ignore = true 38 | -------------------------------------------------------------------------------- /deny.toml: -------------------------------------------------------------------------------- 1 | # This template contains all of the possible sections and their default values 2 | 3 | # Note that all fields that take a lint level have these possible values: 4 | # * deny - An error will be produced and the check will fail 5 | # * warn - A warning will be produced, but the check will not fail 6 | # * allow - No warning or error will be produced, though in some cases a note 7 | # will be 8 | 9 | # The values provided in this template are the default values that will be used 10 | # when any section or field is not specified in your own configuration 11 | 12 | # Root options 13 | 14 | # The graph table configures how the dependency graph is constructed and thus 15 | # which crates the checks are performed against 16 | [graph] 17 | # If 1 or more target triples (and optionally, 
target_features) are specified, 18 | # only the specified targets will be checked when running `cargo deny check`. 19 | # This means, if a particular package is only ever used as a target specific 20 | # dependency, such as, for example, the `nix` crate only being used via the 21 | # `target_family = "unix"` configuration, that only having windows targets in 22 | # this list would mean the nix crate, as well as any of its exclusive 23 | # dependencies not shared by any other crates, would be ignored, as the target 24 | # list here is effectively saying which targets you are building for. 25 | targets = [ 26 | # The triple can be any string, but only the target triples built in to 27 | # rustc (as of 1.40) can be checked against actual config expressions 28 | # "x86_64-unknown-linux-musl", 29 | # You can also specify which target_features you promise are enabled for a 30 | # particular target. target_features are currently not validated against 31 | # the actual valid features supported by the target architecture. 32 | # { triple = "wasm32-unknown-unknown", features = ["atomics"] }, 33 | ] 34 | # When creating the dependency graph used as the source of truth when checks are 35 | # executed, this field can be used to prune crates from the graph, removing them 36 | # from the view of cargo-deny. This is an extremely heavy hammer, as if a crate 37 | # is pruned from the graph, all of its dependencies will also be pruned unless 38 | # they are connected to another crate in the graph that hasn't been pruned, 39 | # so it should be used with care. The identifiers are [Package ID Specifications] 40 | # (https://doc.rust-lang.org/cargo/reference/pkgid-spec.html) 41 | # exclude = [] 42 | # If true, metadata will be collected with `--all-features`. 
Note that this can't 43 | # be toggled off if true, if you want to conditionally enable `--all-features` it 44 | # is recommended to pass `--all-features` on the cmd line instead 45 | all-features = true 46 | # If true, metadata will be collected with `--no-default-features`. The same 47 | # caveat with `all-features` applies 48 | no-default-features = false 49 | # If set, these feature will be enabled when collecting metadata. If `--features` 50 | # is specified on the cmd line they will take precedence over this option. 51 | # features = [] 52 | 53 | # The output table provides options for how/if diagnostics are outputted 54 | [output] 55 | # When outputting inclusion graphs in diagnostics that include features, this 56 | # option can be used to specify the depth at which feature edges will be added. 57 | # This option is included since the graphs can be quite large and the addition 58 | # of features from the crate(s) to all of the graph roots can be far too verbose. 59 | # This option can be overridden via `--feature-depth` on the cmd line 60 | feature-depth = 1 61 | 62 | # This section is considered when running `cargo deny check advisories` 63 | # More documentation for the advisories section can be found here: 64 | # https://embarkstudios.github.io/cargo-deny/checks/advisories/cfg.html 65 | [advisories] 66 | # The path where the advisory databases are cloned/fetched into 67 | # db-path = "$CARGO_HOME/advisory-dbs" 68 | # The url(s) of the advisory databases to use 69 | # db-urls = ["https://github.com/rustsec/advisory-db"] 70 | # A list of advisory IDs to ignore. Note that ignored advisories will still 71 | # output a note when they are encountered. 72 | ignore = [ 73 | # FIXME!: See https://github.com/RustCrypto/RSA/issues/19#issuecomment-1822995643. 74 | # There is no workaround available yet. 
75 | "RUSTSEC-2023-0071", 76 | # { id = "RUSTSEC-0000-0000", reason = "you can specify a reason the advisory is ignored" }, 77 | # "a-crate-that-is-yanked@0.1.1", # you can also ignore yanked crate versions if you wish 78 | # { crate = "a-crate-that-is-yanked@0.1.1", reason = "you can specify why you are ignoring the yanked crate" }, 79 | ] 80 | # If this is true, then cargo deny will use the git executable to fetch advisory database. 81 | # If this is false, then it uses a built-in git library. 82 | # Setting this to true can be helpful if you have special authentication requirements that cargo-deny does not support. 83 | # See Git Authentication for more information about setting up git authentication. 84 | # git-fetch-with-cli = true 85 | 86 | # This section is considered when running `cargo deny check licenses` 87 | # More documentation for the licenses section can be found here: 88 | # https://embarkstudios.github.io/cargo-deny/checks/licenses/cfg.html 89 | [licenses] 90 | # List of explicitly allowed licenses 91 | # See https://spdx.org/licenses/ for list of possible licenses 92 | # [possible values: any SPDX 3.11 short identifier (+ optional exception)]. 93 | allow = [ 94 | "MIT", 95 | "Apache-2.0", 96 | "Apache-2.0 WITH LLVM-exception", 97 | "ISC", 98 | "BSD-3-Clause", 99 | "Unicode-3.0", 100 | "OpenSSL", 101 | "MPL-2.0", 102 | "CC0-1.0", 103 | ] 104 | # The confidence threshold for detecting a license from license text. 105 | # The higher the value, the more closely the license text must be to the 106 | # canonical license text of a valid SPDX license file. 107 | # [possible values: any between 0.0 and 1.0]. 
108 | confidence-threshold = 0.8 109 | # Allow 1 or more licenses on a per-crate basis, so that particular licenses 110 | # aren't accepted for every possible crate as with the normal allow list 111 | exceptions = [ 112 | # Each entry is the crate and version constraint, and its specific allow 113 | # list 114 | # { allow = ["Zlib"], crate = "adler32" }, 115 | ] 116 | 117 | # Some crates don't have (easily) machine readable licensing information, 118 | # adding a clarification entry for it allows you to manually specify the 119 | # licensing information 120 | [[licenses.clarify]] 121 | # The package spec the clarification applies to 122 | crate = "ring" 123 | # The SPDX expression for the license requirements of the crate 124 | expression = "MIT AND ISC AND OpenSSL" 125 | # One or more files in the crate's source used as the "source of truth" for 126 | # the license expression. If the contents match, the clarification will be used 127 | # when running the license check, otherwise the clarification will be ignored 128 | # and the crate will be checked normally, which may produce warnings or errors 129 | # depending on the rest of your configuration 130 | license-files = [ 131 | # Each entry is a crate relative path, and the (opaque) hash of its contents 132 | { path = "LICENSE", hash = 0xbd0eed23 }, 133 | ] 134 | 135 | [licenses.private] 136 | # If true, ignores workspace crates that aren't published, or are only 137 | # published to private registries. 138 | # To see how to mark a crate as unpublished (to the official registry), 139 | # visit https://doc.rust-lang.org/cargo/reference/manifest.html#the-publish-field. 
140 | ignore = false 141 | # One or more private registries that you might publish crates to, if a crate 142 | # is only published to private registries, and ignore is true, the crate will 143 | # not have its license(s) checked 144 | registries = [ 145 | # "https://sekretz.com/registry 146 | ] 147 | 148 | # This section is considered when running `cargo deny check bans`. 149 | # More documentation about the 'bans' section can be found here: 150 | # https://embarkstudios.github.io/cargo-deny/checks/bans/cfg.html 151 | [bans] 152 | # Lint level for when multiple versions of the same crate are detected 153 | multiple-versions = "allow" 154 | # Lint level for when a crate version requirement is `*` 155 | wildcards = "allow" 156 | # The graph highlighting used when creating dotgraphs for crates 157 | # with multiple versions 158 | # * lowest-version - The path to the lowest versioned duplicate is highlighted 159 | # * simplest-path - The path to the version with the fewest edges is highlighted 160 | # * all - Both lowest-version and simplest-path are used 161 | highlight = "all" 162 | # The default lint level for `default` features for crates that are members of 163 | # the workspace that is being checked. This can be overridden by allowing/denying 164 | # `default` on a crate-by-crate basis if desired. 165 | workspace-default-features = "allow" 166 | # The default lint level for `default` features for external crates that are not 167 | # members of the workspace. This can be overridden by allowing/denying `default` 168 | # on a crate-by-crate basis if desired. 169 | external-default-features = "allow" 170 | # List of crates that are allowed. Use with care! 
171 | allow = [ 172 | # "ansi_term@0.11.0", 173 | # { crate = "ansi_term@0.11.0", reason = "you can specify a reason it is allowed" }, 174 | ] 175 | # List of crates to deny 176 | deny = [ 177 | { crate = "aws-lc-rs", reason = "this crate introduces exorbitant build effort and breaks cross-compilation" }, 178 | { crate = "aws-lc-sys", reason = "this crate introduces exorbitant build effort and breaks cross-compilation" }, 179 | ] 180 | 181 | # List of features to allow/deny 182 | # Each entry the name of a crate and a version range. If version is 183 | # not specified, all versions will be matched. 184 | # [[bans.features]] 185 | # crate = "reqwest" 186 | # Features to not allow 187 | # deny = ["json"] 188 | # Features to allow 189 | # allow = [ 190 | # "rustls", 191 | # "__rustls", 192 | # "__tls", 193 | # "hyper-rustls", 194 | # "rustls", 195 | # "rustls-pemfile", 196 | # "rustls-tls-webpki-roots", 197 | # "tokio-rustls", 198 | # "webpki-roots", 199 | # ] 200 | # If true, the allowed features must exactly match the enabled feature set. If 201 | # this is set there is no point setting `deny` 202 | # exact = true 203 | 204 | # Certain crates/versions that will be skipped when doing duplicate detection. 205 | skip = [ 206 | # "ansi_term@0.11.0", 207 | # { crate = "ansi_term@0.11.0", reason = "you can specify a reason why it can't be updated/removed" }, 208 | ] 209 | # Similarly to `skip` allows you to skip certain crates during duplicate 210 | # detection. Unlike skip, it also includes the entire tree of transitive 211 | # dependencies starting at the specified crate, up to a certain depth, which is 212 | # by default infinite. 213 | skip-tree = [ 214 | # "ansi_term@0.11.0", # will be skipped along with _all_ of its direct and transitive dependencies 215 | # { crate = "ansi_term@0.11.0", depth = 20 }, 216 | ] 217 | 218 | # This section is considered when running `cargo deny check sources`. 
219 | # More documentation about the 'sources' section can be found here: 220 | # https://embarkstudios.github.io/cargo-deny/checks/sources/cfg.html 221 | [sources] 222 | # Lint level for what to happen when a crate from a crate registry that is not 223 | # in the allow list is encountered 224 | unknown-registry = "warn" 225 | # Lint level for what to happen when a crate from a git repository that is not 226 | # in the allow list is encountered 227 | unknown-git = "warn" 228 | # List of URLs for allowed crate registries. Defaults to the crates.io index 229 | # if not specified. If it is specified but empty, no registries are allowed. 230 | allow-registry = ["https://github.com/rust-lang/crates.io-index"] 231 | # List of URLs for allowed Git repositories 232 | allow-git = [] 233 | 234 | [sources.allow-org] 235 | # github.com organizations to allow git sources for 236 | github = [] 237 | # gitlab.com organizations to allow git sources for 238 | gitlab = [] 239 | # bitbucket.org organizations to allow git sources for 240 | bitbucket = [] 241 | -------------------------------------------------------------------------------- /dist-workspace.toml: -------------------------------------------------------------------------------- 1 | [workspace] 2 | members = ["cargo:."] 3 | 4 | # Config for 'dist' 5 | [dist] 6 | # The preferred dist version to use in CI (Cargo.toml SemVer syntax) 7 | cargo-dist-version = "0.25.1" 8 | # Whether to enable GitHub Attestations 9 | github-attestations = true 10 | # CI backends to support 11 | ci = "github" 12 | # The installers to generate for each app 13 | installers = ["shell", "powershell", "homebrew", "msi"] 14 | # Target platforms to build apps for (Rust target-triple syntax) 15 | targets = ["aarch64-apple-darwin", "aarch64-unknown-linux-gnu", "x86_64-apple-darwin", "x86_64-unknown-linux-gnu", "x86_64-pc-windows-gnu", "x86_64-unknown-linux-musl", "x86_64-pc-windows-msvc", "i686-unknown-linux-gnu"] 16 | # Path that installers should place 
binaries in 17 | install-path = "CARGO_HOME" 18 | # Whether to install an updater program 19 | install-updater = true 20 | # Extra static files to include in each App (path relative to this Cargo.toml's dir) 21 | include = ["./config/", "./LICENSE-MIT", "./LICENSE-APACHE"] 22 | # Which actions to run on pull requests 23 | pr-run-mode = "upload" 24 | # A GitHub repo to push Homebrew formulas to 25 | tap = "rustic-rs/homebrew-tap" 26 | # Publish jobs to run in CI 27 | publish-jobs = ["homebrew"] 28 | github-build-setup = "../install-arm-linkers.yml" 29 | 30 | [dist.github-custom-runners] 31 | aarch64-apple-darwin = "macos-latest" 32 | aarch64-unknown-linux-gnu = "ubuntu-latest" 33 | aarch64-unknown-linux-musl = "ubuntu-latest" 34 | armv7-unknown-linux-gnueabihf = "ubuntu-latest" 35 | armv7-unknown-linux-musleabihf = "ubuntu-latest" 36 | i686-unknown-linux-gnu = "ubuntu-latest" 37 | x86_64-apple-darwin = "macos-13" 38 | x86_64-pc-windows-gnu = "windows-latest" 39 | x86_64-pc-windows-msvc = "windows-latest" 40 | x86_64-unknown-linux-gnu = "ubuntu-latest" 41 | x86_64-unknown-linux-musl = "ubuntu-latest" 42 | 43 | [dist.dependencies.chocolatey] 44 | nasm = '*' # Required for building `aws-lc-sys` on Windows 45 | 46 | [dist.dependencies.apt] 47 | gcc-aarch64-linux-gnu = { version = '*', targets = ["aarch64-unknown-linux-gnu", "aarch64-unknown-linux-musl"] } 48 | gcc-arm-linux-gnueabihf = { version = '*', targets = ["armv7-unknown-linux-gnueabihf", "armv7-unknown-linux-musleabihf"] } 49 | gcc-i686-linux-gnu = { version = '*', targets = ["i686-unknown-linux-gnu"] } 50 | gcc-multilib-i686-linux-gnu = { version = '*', targets = ["i686-unknown-linux-gnu"] } 51 | musl-tools = { version = '*', targets = ["aarch64-unknown-linux-musl", "x86_64-unknown-linux-musl", "armv7-unknown-linux-musleabihf"] } 52 | musl-dev = { version = '*', targets = ["aarch64-unknown-linux-musl", "x86_64-unknown-linux-musl", "armv7-unknown-linux-musleabihf"] } 53 | 
-------------------------------------------------------------------------------- /dprint.json: -------------------------------------------------------------------------------- 1 | { 2 | "lineWidth": 80, 3 | "markdown": { 4 | "lineWidth": 80, 5 | "emphasisKind": "asterisks", 6 | "strongKind": "asterisks", 7 | "textWrap": "always" 8 | }, 9 | "toml": { 10 | "lineWidth": 80 11 | }, 12 | "json": { 13 | "lineWidth": 80, 14 | "indentWidth": 4 15 | }, 16 | "includes": [ 17 | "**/*.{md}", 18 | "**/*.{toml}", 19 | "**/*.{json}" 20 | ], 21 | "excludes": [ 22 | "target/**/*", 23 | "CHANGELOG.md", 24 | "dist-workspace.toml" 25 | ], 26 | "plugins": [ 27 | "https://plugins.dprint.dev/markdown-0.17.8.wasm", 28 | "https://plugins.dprint.dev/toml-0.6.3.wasm", 29 | "https://plugins.dprint.dev/json-0.19.4.wasm" 30 | ] 31 | } 32 | -------------------------------------------------------------------------------- /release-plz.toml: -------------------------------------------------------------------------------- 1 | # configuration spec can be found here https://release-plz.ieni.dev/docs/config 2 | 3 | [workspace] 4 | pr_draft = true 5 | dependencies_update = true 6 | git_release_enable = false # disable GitHub/Gitea releases 7 | # changelog_config = "cliff.toml" # Don't use this for now, as it will override the default changelog config 8 | 9 | [changelog] 10 | protect_breaking_commits = true 11 | -------------------------------------------------------------------------------- /src/application.rs: -------------------------------------------------------------------------------- 1 | //! 
RusticScheduler Abscissa Application 2 | 3 | use crate::{commands::EntryPoint, config::RusticSchedulerConfig}; 4 | use abscissa_core::{ 5 | application::{self, AppCell}, 6 | config::{self, CfgCell}, 7 | trace, Application, FrameworkError, StandardPaths, 8 | }; 9 | use abscissa_tokio::TokioComponent; 10 | 11 | /// Application state 12 | pub static RUSTIC_SCHEDULER_APP: AppCell = AppCell::new(); 13 | 14 | /// RusticScheduler Application 15 | #[derive(Debug)] 16 | pub struct RusticSchedulerApp { 17 | /// Application configuration. 18 | config: CfgCell, 19 | 20 | /// Application state. 21 | state: application::State, 22 | } 23 | 24 | /// Initialize a new application instance. 25 | /// 26 | /// By default no configuration is loaded, and the framework state is 27 | /// initialized to a default, empty state (no components, threads, etc). 28 | impl Default for RusticSchedulerApp { 29 | fn default() -> Self { 30 | Self { 31 | config: CfgCell::default(), 32 | state: application::State::default(), 33 | } 34 | } 35 | } 36 | 37 | impl Application for RusticSchedulerApp { 38 | /// Entrypoint command for this application. 39 | type Cmd = EntryPoint; 40 | 41 | /// Application configuration. 42 | type Cfg = RusticSchedulerConfig; 43 | 44 | /// Paths to resources within the application. 45 | type Paths = StandardPaths; 46 | 47 | /// Accessor for application configuration. 48 | fn config(&self) -> config::Reader { 49 | self.config.read() 50 | } 51 | 52 | /// Borrow the application state immutably. 53 | fn state(&self) -> &application::State { 54 | &self.state 55 | } 56 | 57 | /// Register all components used by this application. 58 | /// 59 | /// If you would like to add additional components to your application 60 | /// beyond the default ones provided by the framework, this is the place 61 | /// to do so. 
62 | fn register_components(&mut self, command: &Self::Cmd) -> Result<(), FrameworkError> { 63 | let mut components = self.framework_components(command)?; 64 | 65 | // Create `TokioComponent` and add it to your app's components here: 66 | components.push(Box::new(TokioComponent::new()?)); 67 | 68 | self.state.components_mut().register(components) 69 | } 70 | 71 | /// Post-configuration lifecycle callback. 72 | /// 73 | /// Called regardless of whether config is loaded to indicate this is the 74 | /// time in app lifecycle when configuration would be loaded if 75 | /// possible. 76 | fn after_config(&mut self, config: Self::Cfg) -> Result<(), FrameworkError> { 77 | // Configure components 78 | let mut components = self.state.components_mut(); 79 | components.after_config(&config)?; 80 | self.config.set_once(config); 81 | Ok(()) 82 | } 83 | 84 | /// Get tracing configuration from command-line options 85 | fn tracing_config(&self, command: &EntryPoint) -> trace::Config { 86 | if command.verbose { 87 | trace::Config::verbose() 88 | } else { 89 | trace::Config::default() 90 | } 91 | } 92 | } 93 | -------------------------------------------------------------------------------- /src/bin/rustic-scheduler.rs: -------------------------------------------------------------------------------- 1 | //! Main entry point for RusticScheduler 2 | 3 | #![deny(warnings, missing_docs, trivial_casts, unused_qualifications)] 4 | #![forbid(unsafe_code)] 5 | 6 | use rustic_scheduler::application::RUSTIC_SCHEDULER_APP; 7 | 8 | /// Boot RusticScheduler 9 | fn main() { 10 | abscissa_core::boot(&RUSTIC_SCHEDULER_APP); 11 | } 12 | -------------------------------------------------------------------------------- /src/commands.rs: -------------------------------------------------------------------------------- 1 | //! RusticScheduler Subcommands 2 | //! 3 | //! This is where you specify the subcommands of your application. 4 | //! 5 | //! The default application comes with two subcommands: 6 | //! 
7 | //! - `start`: launches the application 8 | //! - `--version`: print application version 9 | //! 10 | //! See the `impl Configurable` below for how to specify the path to the 11 | //! application's configuration file. 12 | 13 | mod client; 14 | mod server; 15 | 16 | use crate::{ 17 | commands::{client::ClientCmd, server::ServerCmd}, 18 | config::RusticSchedulerConfig, 19 | }; 20 | use abscissa_core::{Command, Configurable, FrameworkError, Runnable}; 21 | use std::path::PathBuf; 22 | 23 | /// RusticScheduler Configuration Filename 24 | pub const CONFIG_FILE: &str = "rustic_scheduler.toml"; 25 | 26 | /// RusticScheduler Subcommands 27 | /// Subcommands need to be listed in an enum. 28 | #[derive(clap::Parser, Command, Debug, Runnable)] 29 | pub enum RusticSchedulerCmd { 30 | /// Start the client 31 | Client(ClientCmd), 32 | 33 | /// Start the server 34 | Server(ServerCmd), 35 | } 36 | 37 | /// Entry point for the application. It needs to be a struct to allow using subcommands! 38 | #[derive(clap::Parser, Command, Debug)] 39 | #[command(author, about, version)] 40 | pub struct EntryPoint { 41 | #[command(subcommand)] 42 | cmd: RusticSchedulerCmd, 43 | 44 | /// Enable verbose logging 45 | #[arg(short, long)] 46 | pub verbose: bool, 47 | 48 | /// Use the specified config file 49 | #[arg(short, long)] 50 | pub config: Option, 51 | } 52 | 53 | impl Runnable for EntryPoint { 54 | fn run(&self) { 55 | self.cmd.run() 56 | } 57 | } 58 | 59 | /// This trait allows you to define how application configuration is loaded. 60 | impl Configurable for EntryPoint { 61 | /// Location of the configuration file 62 | fn config_path(&self) -> Option { 63 | // Check if the config file exists, and if it does not, ignore it. 64 | // If you'd like for a missing configuration file to be a hard error 65 | // instead, always return `Some(CONFIG_FILE)` here. 
66 | let filename = self 67 | .config 68 | .as_ref() 69 | .map(PathBuf::from) 70 | .unwrap_or_else(|| CONFIG_FILE.into()); 71 | 72 | if filename.exists() { 73 | Some(filename) 74 | } else { 75 | None 76 | } 77 | } 78 | 79 | /// Apply changes to the config after it's been loaded, e.g. overriding 80 | /// values in a config file using command-line options. 81 | /// 82 | /// This can be safely deleted if you don't want to override config 83 | /// settings from command-line options. 84 | fn process_config( 85 | &self, 86 | config: RusticSchedulerConfig, 87 | ) -> Result { 88 | // match &self.cmd { 89 | // RusticSchedulerCmd::Start(cmd) => cmd.override_config(config), 90 | // 91 | // If you don't need special overrides for some 92 | // subcommands, you can just use a catch all 93 | // _ => Ok(config), 94 | // } 95 | 96 | Ok(config) 97 | } 98 | } 99 | -------------------------------------------------------------------------------- /src/commands/client.rs: -------------------------------------------------------------------------------- 1 | //! `client` subcommand 2 | use std::thread::sleep; 3 | use std::time::Duration; 4 | 5 | use abscissa_core::{ 6 | config::Override, status_err, Application, Command, FrameworkError, Runnable, Shutdown, 7 | }; 8 | use anyhow::Result; 9 | use clap::Parser; 10 | use gethostname::gethostname; 11 | use log::{info, warn}; 12 | use tungstenite::{connect, Message}; 13 | use url::Url; 14 | 15 | use rustic_core::{repofile::SnapshotFile, PathList, Repository}; 16 | 17 | use crate::{ 18 | config::RusticSchedulerConfig, 19 | message::{BackupMessage, BackupResultMessage, HandshakeMessage}, 20 | prelude::RUSTIC_SCHEDULER_APP, 21 | }; 22 | 23 | /// `client` subcommand 24 | /// 25 | /// The `Parser` proc macro generates an option parser based on the struct 26 | /// definition, and is defined in the `clap` crate. 
See their documentation 27 | /// for a more comprehensive example: 28 | /// 29 | /// 30 | #[derive(Command, Debug, Parser)] 31 | pub struct ClientCmd { 32 | /// Set client name. Default: hostname 33 | #[clap(short)] 34 | name: Option, 35 | 36 | /// Server websocket URL to connect to, e.g. ws://host:3012/ws 37 | #[clap(long)] 38 | url: Url, 39 | } 40 | 41 | impl Override for ClientCmd { 42 | fn override_config( 43 | &self, 44 | config: RusticSchedulerConfig, 45 | ) -> std::result::Result { 46 | // TODO - override config with CLI settings 47 | 48 | Ok(config) 49 | } 50 | } 51 | 52 | impl Runnable for ClientCmd { 53 | /// Start the application. 54 | fn run(&self) { 55 | let res = || -> Result<()> { 56 | let name = self 57 | .name 58 | .clone() 59 | .unwrap_or_else(|| gethostname().to_string_lossy().to_string()); 60 | 61 | // TODO: retry with backoff 62 | loop { 63 | if let Err(err) = connect_client(self.url.clone(), name.clone()) { 64 | eprintln!("{err}"); 65 | warn!("error {err}, retrying..."); 66 | // retry conneting after 5s 67 | sleep(Duration::from_secs(5)); 68 | } 69 | } 70 | }; 71 | 72 | if let Err(err) = res() { 73 | status_err!("{}", err); 74 | RUSTIC_SCHEDULER_APP.shutdown(Shutdown::Crash); 75 | }; 76 | } 77 | } 78 | 79 | fn connect_client(server: Url, name: String) -> Result<()> { 80 | let (mut socket, _) = connect(server.as_str())?; 81 | 82 | info!("Connected to the server"); 83 | println!("Connected to the server"); 84 | 85 | // handshake 86 | let handshake_msg = HandshakeMessage { client: name }; 87 | let handshake_msg = serde_json::to_string(&handshake_msg)?; 88 | socket.send(handshake_msg.into())?; 89 | 90 | loop { 91 | let msg = socket.read()?; 92 | 93 | match msg { 94 | Message::Ping(..) 
=> socket.send(Message::Pong(Vec::new()))?, 95 | _ => { 96 | let msg = msg.into_data(); 97 | let backup_msg: BackupMessage = serde_json::from_slice(&msg)?; 98 | 99 | let snap_msg = match do_backup(backup_msg) { 100 | Ok(snap) => { 101 | println!("{snap:?}"); 102 | BackupResultMessage::Ok { 103 | snapshot: Box::new(snap), 104 | } 105 | } 106 | Err(err) => BackupResultMessage::Error { 107 | message: err.to_string(), 108 | }, 109 | }; 110 | let snap_msg = serde_json::to_string(&snap_msg)?; 111 | socket.send(snap_msg.into())?; 112 | } 113 | } 114 | } 115 | // socket.close(None); 116 | } 117 | 118 | fn do_backup(message: BackupMessage) -> Result { 119 | let backends = message.repo_opts.be.to_backends()?; 120 | 121 | let repo_opts = message.repo_opts.repo; 122 | 123 | let repo = Repository::new(&repo_opts, &backends)? 124 | .open()? 125 | .to_indexed_ids()?; 126 | 127 | let source = PathList::from_string(&message.source)?.sanitize()?; 128 | 129 | let snap = message.snapshot_opts.to_snapshot()?; 130 | 131 | let snap = repo.backup(&message.backup_opts, &source, snap)?; 132 | 133 | Ok(snap) 134 | } 135 | -------------------------------------------------------------------------------- /src/commands/server.rs: -------------------------------------------------------------------------------- 1 | //! 
`server` subcommand 2 | 3 | use abscissa_core::{status_err, Application, Command, Runnable, Shutdown}; 4 | use anyhow::Result; 5 | use axum::{ 6 | extract::{ 7 | ws::{WebSocket, WebSocketUpgrade}, 8 | Path, State, 9 | }, 10 | response::{Html, Response}, 11 | routing::get, 12 | Router, 13 | }; 14 | use chrono::Local; 15 | use clap::Parser; 16 | use log::warn; 17 | use sailfish::TemplateOnce; 18 | use std::{collections::HashMap, time::Duration}; 19 | use tokio::{ 20 | net::TcpListener, 21 | spawn, 22 | sync::{mpsc, oneshot}, 23 | time::sleep, 24 | }; 25 | 26 | use crate::{ 27 | config::AllBackupOptions, 28 | message::{BackupMessage, BackupResultMessage, ClientMessage, HandshakeMessage, NotifyMessage}, 29 | prelude::RUSTIC_SCHEDULER_APP, 30 | scheduler::{Client, Clients, Source, SourceBackupStatus}, 31 | }; 32 | 33 | /// `server` subcommand 34 | /// 35 | /// The `Parser` proc macro generates an option parser based on the struct 36 | /// definition, and is defined in the `clap` crate. See their documentation 37 | /// for a more comprehensive example: 38 | /// 39 | /// 40 | #[derive(Command, Debug, Parser)] 41 | pub struct ServerCmd { 42 | // /// Option foobar. Doc comments are the help description 43 | // #[clap(short)] 44 | // foobar: Option 45 | 46 | // /// Baz path 47 | // #[clap(long)] 48 | // baz: Option 49 | 50 | // "free" arguments don't need a macro 51 | // free_args: Vec, 52 | } 53 | 54 | // ? Make dedicated `serve`/`start` command 55 | impl Runnable for ServerCmd { 56 | /// Start the application. 
57 | fn run(&self) { 58 | if let Err(tokio_err) = abscissa_tokio::run(&RUSTIC_SCHEDULER_APP, async { 59 | if let Err(err) = self.inner_run().await { 60 | status_err!("{}", err); 61 | RUSTIC_SCHEDULER_APP.shutdown(Shutdown::Crash); 62 | } 63 | }) { 64 | status_err!("{}", tokio_err); 65 | RUSTIC_SCHEDULER_APP.shutdown(Shutdown::Crash); 66 | }; 67 | } 68 | } 69 | 70 | impl ServerCmd { 71 | async fn inner_run(&self) -> Result<()> { 72 | let config = RUSTIC_SCHEDULER_APP.config(); 73 | config.validate().unwrap(); 74 | 75 | // Add clients from config file to scheduler 76 | let mut options_mapper = HashMap::new(); 77 | let mut clients = Clients::new(); 78 | for (_, cg) in config.clientgroup.iter() { 79 | for name in &cg.clients { 80 | let mut client = Client::new(); 81 | for source in &cg.sources { 82 | client.add_source(Source::new( 83 | source.source.clone(), 84 | config.schedules[&source.schedule].clone(), 85 | )); 86 | options_mapper.insert( 87 | (name.clone(), source.source.clone()), 88 | source.options.clone(), 89 | ); 90 | } 91 | clients.add_client(name.clone(), client); 92 | } 93 | } 94 | 95 | let (wtx, mut rx) = mpsc::channel(1); 96 | 97 | // The backup loop handling the schedules 98 | spawn(async move { 99 | let mut client_channels: HashMap> = HashMap::new(); 100 | let sleep_timer = sleep(Duration::ZERO); 101 | tokio::pin!(sleep_timer); 102 | 103 | loop { 104 | tokio::select! 
{ 105 | _ = &mut sleep_timer => { 106 | if let Some((client, source)) = clients.process_next(Local::now()) { 107 | let repo_opts = config.repository.clone(); 108 | 109 | let AllBackupOptions { 110 | backup_opts, 111 | snapshot_opts, 112 | } = config.options[&options_mapper[&(client.clone(), source.clone())]] 113 | .clone(); 114 | 115 | let msg = BackupMessage { 116 | repo_opts, 117 | backup_opts, 118 | snapshot_opts, 119 | source, 120 | }; 121 | client_channels.get(&client).unwrap().send(ClientMessage::Backup { client, msg }).await.unwrap(); 122 | } 123 | } 124 | Some(res) = rx.recv() => { 125 | match res { 126 | NotifyMessage::BackupResult{client, msg:BackupResultMessage::Ok {snapshot} } => { 127 | println!("backup to {client}, {} finished successfully. Got snapshot {}", snapshot.paths, snapshot.id); 128 | clients.finish_process(client, Local::now(), SourceBackupStatus::Ok(*snapshot.id)); 129 | } 130 | NotifyMessage::BackupResult{client, msg:BackupResultMessage::Error {message} } => { 131 | println!("backup to {client} failed: {}", message); 132 | clients.finish_process(client, Local::now(), SourceBackupStatus::Error(message)); 133 | } 134 | NotifyMessage::Connect{client, channel} => { 135 | if let Err(err) = clients.connect_client(client.clone()){ 136 | eprintln!("Error: {err}, continuing..."); 137 | warn!("Error: {err}, continuing..."); 138 | } else { 139 | println!("client {client} connected."); 140 | client_channels.insert(client, channel); 141 | } 142 | } 143 | NotifyMessage::Disconnect{client} => { 144 | println!("reading websocket failed; disconnect client {client}"); 145 | client_channels.remove(&client); 146 | clients.disconnect_client(client); 147 | } 148 | NotifyMessage::StatsRequest{client, channel} => { 149 | channel.send(clients.client_stats(client)).unwrap(); 150 | } 151 | } 152 | } 153 | } 154 | 155 | let wait_time = clients.wait_time(Local::now()).unwrap_or_else(|err| { 156 | warn!("Error determining wait time: {err}"); 157 | 
Duration::from_secs(5) 158 | }); 159 | println!("waiting {wait_time:?}"); 160 | sleep_timer.set(sleep(wait_time)); 161 | } 162 | }); 163 | 164 | // build our application with a single route 165 | let app = Router::new() 166 | .route("/ws", get(ws_handler)) 167 | .route("/client/:client", get(client_handler)) 168 | .with_state(wtx); 169 | 170 | // run it with hyper on localhost:3012 171 | let listener = TcpListener::bind(&RUSTIC_SCHEDULER_APP.config().global.address) 172 | .await 173 | .unwrap(); 174 | 175 | println!( 176 | "Listening on http://{}", 177 | RUSTIC_SCHEDULER_APP.config().global.address 178 | ); 179 | 180 | axum::serve(listener, app.into_make_service()) 181 | .await 182 | .unwrap(); 183 | 184 | Ok(()) 185 | } 186 | } 187 | 188 | async fn client_handler( 189 | Path(client): Path, 190 | State(wtx): State>, 191 | ) -> Html { 192 | let (tx, wrx) = oneshot::channel(); 193 | 194 | wtx.send(NotifyMessage::StatsRequest { 195 | client, 196 | channel: tx, 197 | }) 198 | .await 199 | .unwrap(); 200 | 201 | let stats = wrx.await.unwrap().unwrap(); 202 | Html(stats.render_once().unwrap()) 203 | } 204 | 205 | async fn ws_handler( 206 | ws: WebSocketUpgrade, 207 | State(wtx): State>, 208 | ) -> Response { 209 | ws.on_upgrade(|socket| handle_socket(socket, wtx)) 210 | } 211 | 212 | async fn handle_socket(mut socket: WebSocket, wtx: mpsc::Sender) { 213 | let (tx, mut wrx) = mpsc::channel(1); 214 | 215 | // handshake 216 | let handshake_msg = socket.recv().await.unwrap().unwrap().into_data(); 217 | let handshake_msg: HandshakeMessage = serde_json::from_slice(&handshake_msg).unwrap(); 218 | let client_name = handshake_msg.client; 219 | println!("client {client_name} wants to connected."); 220 | wtx.send(NotifyMessage::Connect { 221 | client: client_name.clone(), 222 | channel: tx, 223 | }) 224 | .await 225 | .unwrap(); 226 | 227 | loop { 228 | tokio::select! 
{ 229 | msg = socket.recv() => { 230 | match msg { 231 | None | Some(Err(_)) => { 232 | wtx.send(NotifyMessage::Disconnect { client: client_name.clone() }).await.unwrap(); 233 | } 234 | Some(Ok(_)) => { 235 | // ignore message 236 | } 237 | } 238 | 239 | } 240 | msg = wrx.recv() => { 241 | match msg { 242 | Some(ClientMessage::Backup{ client, msg}) => { 243 | let data = serde_json::to_string(&msg).unwrap(); 244 | if let Err(err) = socket.send(data.into()).await { 245 | println!("writing websocket failed; disconnect client {client}: {err}"); 246 | break; 247 | } 248 | println!("waiting for backup to {client}, {} to finish...", msg.source); 249 | 250 | match socket.recv().await { 251 | Some(Ok(result)) => { 252 | let result: BackupResultMessage = 253 | serde_json::from_slice(&result.into_data()).unwrap(); 254 | wtx.send(NotifyMessage::BackupResult { client: client.clone(), msg: result }).await.unwrap(); 255 | } 256 | Some(Err(err)) => { 257 | println!("reading websocket failed; disconnect client {client}: {err}"); 258 | break; 259 | } 260 | None => { 261 | println!("client {client} disconnected"); 262 | break; 263 | } 264 | } 265 | } 266 | None => { 267 | println!("client {client_name} disconnected"); 268 | break; 269 | } 270 | } 271 | 272 | } 273 | }; 274 | wtx.send(NotifyMessage::Disconnect { 275 | client: client_name.clone(), 276 | }) 277 | .await 278 | .unwrap(); 279 | } 280 | } 281 | -------------------------------------------------------------------------------- /src/config.rs: -------------------------------------------------------------------------------- 1 | //! RusticScheduler Config 2 | //! 3 | //! See instructions in `commands.rs` to specify the path to your 4 | //! application's configuration file and/or command-line options 5 | //! for specifying it. 
6 | 7 | use std::collections::HashMap; 8 | 9 | use anyhow::{bail, Result}; 10 | use cron::Schedule; 11 | use rustic_backend::BackendOptions; 12 | use rustic_core::{BackupOptions, RepositoryOptions, SnapshotOptions}; 13 | use serde::{Deserialize, Serialize}; 14 | use serde_with::{serde_as, DisplayFromStr}; 15 | 16 | #[serde_as] 17 | #[derive(Clone, Debug, Deserialize, Serialize, Default)] 18 | #[serde(deny_unknown_fields)] 19 | pub struct RusticSchedulerConfig { 20 | pub global: GlobalOptions, 21 | pub repository: AllRepositoryOptions, 22 | pub clientgroup: HashMap, 23 | #[serde_as(as = "HashMap<_,DisplayFromStr>")] 24 | pub schedules: HashMap, 25 | pub options: HashMap, 26 | } 27 | 28 | #[derive(Clone, Debug, Deserialize, Serialize, Default)] 29 | #[serde(deny_unknown_fields)] 30 | pub struct GlobalOptions { 31 | pub address: String, 32 | } 33 | 34 | #[derive(Clone, Debug, Deserialize, Serialize, Default)] 35 | #[serde(deny_unknown_fields)] 36 | pub struct ClientGroupOptions { 37 | pub clients: Vec, 38 | pub sources: Vec, 39 | } 40 | 41 | #[derive(Clone, Debug, Deserialize, Serialize, Default)] 42 | #[serde(deny_unknown_fields)] 43 | pub struct SourceOptions { 44 | pub source: String, 45 | pub schedule: String, 46 | pub options: String, 47 | } 48 | 49 | #[derive(Clone, Debug, Deserialize, Serialize, Default)] 50 | #[serde(deny_unknown_fields)] 51 | pub struct AllBackupOptions { 52 | #[serde(flatten)] 53 | pub backup_opts: BackupOptions, 54 | #[serde(flatten)] 55 | pub snapshot_opts: SnapshotOptions, 56 | } 57 | 58 | #[derive(Clone, Debug, Deserialize, Serialize, Default)] 59 | #[serde(deny_unknown_fields)] 60 | pub struct AllRepositoryOptions { 61 | #[serde(flatten)] 62 | pub be: BackendOptions, 63 | #[serde(flatten)] 64 | pub repo: RepositoryOptions, 65 | } 66 | 67 | impl RusticSchedulerConfig { 68 | pub fn validate(&self) -> Result<()> { 69 | for (name, cg) in self.clientgroup.iter() { 70 | for source in &cg.sources { 71 | if 
!self.options.contains_key(&source.options) { 72 | bail!( 73 | "Clientgroup {name}, Source {}: Options {} are undefined!", 74 | source.source, 75 | source.options 76 | ); 77 | } 78 | if !self.schedules.contains_key(&source.schedule) { 79 | bail!( 80 | "Clientgroup {name}, Source {}: Scheduler {} is undefined!", 81 | source.source, 82 | source.schedule 83 | ); 84 | } 85 | } 86 | } 87 | 88 | Ok(()) 89 | } 90 | } 91 | -------------------------------------------------------------------------------- /src/error.rs: -------------------------------------------------------------------------------- 1 | //! Error types 2 | 3 | use abscissa_core::error::{BoxError, Context}; 4 | use std::{ 5 | fmt::{self, Display}, 6 | io, 7 | ops::Deref, 8 | }; 9 | use thiserror::Error; 10 | 11 | /// Kinds of errors 12 | #[derive(Copy, Clone, Debug, Eq, Error, PartialEq)] 13 | pub enum ErrorKind { 14 | /// Error in configuration file 15 | #[error("config error")] 16 | Config, 17 | 18 | /// Input/output error 19 | #[error("I/O error")] 20 | Io, 21 | } 22 | 23 | impl ErrorKind { 24 | /// Create an error context from this error 25 | pub fn context(self, source: impl Into) -> Context { 26 | Context::new(self, Some(source.into())) 27 | } 28 | } 29 | 30 | /// Error type 31 | #[derive(Debug)] 32 | pub struct Error(Box>); 33 | 34 | impl Deref for Error { 35 | type Target = Context; 36 | 37 | fn deref(&self) -> &Context { 38 | &self.0 39 | } 40 | } 41 | 42 | impl Display for Error { 43 | fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { 44 | self.0.fmt(f) 45 | } 46 | } 47 | 48 | impl std::error::Error for Error { 49 | fn source(&self) -> Option<&(dyn std::error::Error + 'static)> { 50 | self.0.source() 51 | } 52 | } 53 | 54 | impl From for Error { 55 | fn from(kind: ErrorKind) -> Self { 56 | Context::new(kind, None).into() 57 | } 58 | } 59 | 60 | impl From> for Error { 61 | fn from(context: Context) -> Self { 62 | Error(Box::new(context)) 63 | } 64 | } 65 | 66 | impl From for Error { 67 | fn 
from(err: io::Error) -> Self { 68 | ErrorKind::Io.context(err).into() 69 | } 70 | } 71 | -------------------------------------------------------------------------------- /src/lib.rs: -------------------------------------------------------------------------------- 1 | //! RusticScheduler 2 | //! 3 | //! Application based on the [Abscissa] framework. 4 | //! 5 | //! [Abscissa]: https://github.com/iqlusioninc/abscissa 6 | 7 | #![allow(non_local_definitions)] 8 | 9 | pub mod application; 10 | pub mod commands; 11 | pub mod config; 12 | pub mod error; 13 | pub mod prelude; 14 | 15 | pub(crate) mod message; 16 | pub(crate) mod scheduler; 17 | -------------------------------------------------------------------------------- /src/message.rs: -------------------------------------------------------------------------------- 1 | use crate::{config::AllRepositoryOptions, scheduler::ClientStats}; 2 | use anyhow::Result; 3 | use rustic_core::{repofile::SnapshotFile, BackupOptions, SnapshotOptions}; 4 | use serde::{Deserialize, Serialize}; 5 | use tokio::sync::{mpsc, oneshot}; 6 | 7 | #[derive(Debug, Serialize, Deserialize)] 8 | pub struct HandshakeMessage { 9 | pub client: String, 10 | } 11 | 12 | #[derive(Debug, Serialize, Deserialize)] 13 | #[serde(tag = "result")] 14 | pub enum HandshakeResultMessage { 15 | Ok, 16 | Error { message: String }, 17 | } 18 | 19 | #[derive(Debug, Serialize, Deserialize)] 20 | pub struct BackupMessage { 21 | pub repo_opts: AllRepositoryOptions, 22 | pub backup_opts: BackupOptions, 23 | pub snapshot_opts: SnapshotOptions, 24 | pub source: String, 25 | } 26 | 27 | #[derive(Debug, Serialize, Deserialize)] 28 | #[serde(tag = "result")] 29 | pub enum BackupResultMessage { 30 | Ok { snapshot: Box }, 31 | Error { message: String }, 32 | } 33 | 34 | pub(crate) enum ClientMessage { 35 | Backup { client: String, msg: BackupMessage }, 36 | } 37 | 38 | pub(crate) enum NotifyMessage { 39 | Connect { 40 | client: String, 41 | channel: mpsc::Sender, 42 | }, 43 | 
Disconnect {
        client: String,
    },
    BackupResult {
        client: String,
        msg: BackupResultMessage,
    },
    StatsRequest {
        client: String,
        channel: oneshot::Sender<Result<ClientStats>>,
    },
}
--------------------------------------------------------------------------------
/src/prelude.rs:
--------------------------------------------------------------------------------
//! Application-local prelude: conveniently import types/functions/macros
//! which are generally useful and should be available in every module with
//! `use crate::prelude::*;`

/// Abscissa core prelude
pub use abscissa_core::prelude::*;

/// Application state
pub use crate::application::RUSTIC_SCHEDULER_APP;
--------------------------------------------------------------------------------
/src/scheduler.rs:
--------------------------------------------------------------------------------
use anyhow::{anyhow, bail, Result};
use chrono::{DateTime, Local};
use cron::Schedule;
use rustic_core::Id;
use sailfish::TemplateOnce;
use std::{cmp::Ordering, collections::HashMap, time::Duration};

/// Upper bound for the scheduler's idle sleep between schedule checks.
const MAX_WAIT_TIME: Duration = Duration::from_secs(3600);
type Time = DateTime<Local>;

/// Per-client statistics, rendered via the `client.stpl` template.
#[derive(Debug, TemplateOnce)]
#[template(path = "client.stpl")]
pub struct ClientStats {
    name: String,
    client: Client,
    backup_stats: BackupStats,
}

#[derive(Debug, Clone)]
pub struct BackupStats {
    ok: usize,
    missed: usize,
    error: usize,
}

#[derive(Debug, Clone)]
pub enum ClientState {
    NotConnected,
    Idle,
    Processing(Time),
}

#[derive(Debug, Clone)]
pub struct Client {
    state: ClientState,
    sources: Vec<Source>, // ordered by next_invocation!
37 | } 38 | 39 | impl Client { 40 | pub fn new() -> Self { 41 | Self { 42 | state: ClientState::NotConnected, 43 | sources: Vec::new(), 44 | } 45 | } 46 | 47 | fn connect(&mut self) -> Result<()> { 48 | match self.state { 49 | ClientState::NotConnected => { 50 | self.state = ClientState::Idle; 51 | Ok(()) 52 | } 53 | _ => bail!("client is already connected"), 54 | } 55 | } 56 | 57 | fn disconnect(&mut self) { 58 | if let ClientState::Idle = self.state { 59 | self.state = ClientState::NotConnected; 60 | } 61 | } 62 | 63 | fn next_invocation(&self) -> Option