├── .github └── workflows │ ├── main.yml │ └── release.yml ├── .gitignore ├── LICENSE ├── README.md ├── build.gradle ├── gradle └── wrapper │ ├── gradle-wrapper.jar │ └── gradle-wrapper.properties ├── gradlew ├── gradlew.bat ├── settings.gradle └── src ├── main ├── java │ └── spb │ │ ├── Backup.java │ │ ├── BackupFolderSummary.java │ │ ├── ConfigProvider.java │ │ ├── FolderToBackupConfig.java │ │ ├── GenerateKey.java │ │ ├── Impl.java │ │ ├── ListFiles.java │ │ ├── LogbackConfigurator.java │ │ ├── Main.java │ │ ├── PropertiesConfigProvider.java │ │ ├── Restore.java │ │ ├── Spb.java │ │ ├── SpbVersionProvider.java │ │ ├── Util.java │ │ └── Verify.java └── resources │ └── META-INF │ └── services │ └── ch.qos.logback.classic.spi.Configurator └── test └── java ├── RandomUUID.java └── spb ├── E2ETest.java ├── ImplTest.java └── TestConfigProvider.java /.github/workflows/main.yml: -------------------------------------------------------------------------------- 1 | name: Main build and test 2 | on: 3 | push: 4 | branches: 5 | - main 6 | 7 | jobs: 8 | build: 9 | runs-on: ${{ matrix.os }} 10 | continue-on-error: true 11 | strategy: 12 | matrix: 13 | os: [ macos-12, windows-latest, ubuntu-latest ] 14 | env: 15 | BUCKET_NAME: 'spb-github-test-1e4f5ff1-ee54' 16 | permissions: 17 | id-token: write # This is required for requesting the JWT 18 | contents: read # This is required for actions/checkout 19 | steps: 20 | - uses: actions/checkout@v3 21 | - uses: gradle/wrapper-validation-action@v1 22 | - name: configure aws credentials 23 | uses: aws-actions/configure-aws-credentials@v1-node16 24 | with: 25 | role-to-assume: arn:aws:iam::391059136496:role/SpbGithubTesting 26 | aws-region: ap-southeast-2 27 | - uses: graalvm/setup-graalvm@v1 28 | with: 29 | version: '22.3.1' 30 | java-version: '19' 31 | components: 'native-image' 32 | github-token: ${{ secrets.GITHUB_TOKEN }} 33 | native-image-job-reports: 'true' 34 | - name: build and test 35 | run: ./gradlew nativeTest --info 36 | - name: native compile 37 | run: ./gradlew nativeCompile --info 38 | 39 | -------------------------------------------------------------------------------- /.github/workflows/release.yml: -------------------------------------------------------------------------------- 1 | name: Release a new version 2 | on: 3 | workflow_dispatch: 4 | inputs: 5 | version: 6 | description: 'the version to be released' 7 | required: true 8 | 9 | jobs: 10 | build: 11 | runs-on: ${{ matrix.os }} 12 | continue-on-error: true 13 | strategy: 14 | matrix: 15 | os: [ macos-latest, ubuntu-latest ] 16 | env: 17 | BUCKET_NAME: 'spb-github-test-1e4f5ff1-ee54' 18 | RELEASE_VERSION: ${{ github.event.inputs.version }} 19 | permissions: 20 | id-token: write 21 | contents: read 22 | steps: 23 | - uses: actions/checkout@v3 24 | - uses: gradle/wrapper-validation-action@v1 25 | - name: configure aws credentials 26 | uses: aws-actions/configure-aws-credentials@v1-node16 27 | with: 28 | role-to-assume: arn:aws:iam::391059136496:role/SpbGithubTesting 29 | aws-region: ap-southeast-2 30 | - uses: graalvm/setup-graalvm@v1 31 | with: 32 | version: '22.3.1' 33 | java-version: '19' 34 | components: 'native-image' 35 | github-token: ${{ secrets.GITHUB_TOKEN }} 36 | native-image-job-reports: 'true' 37 | # - name: build and test 38 | # run: ./gradlew nativeTest --info 39 | - name: native compile 40 | run: ./gradlew nativeCompile --info 41 | - uses: actions/upload-artifact@v3 42 | with: 43 | name: spb-${{ matrix.os }} 44 | path: build/native/nativeCompile/spb 45 | 46 | build-windows: 
47 | runs-on: windows-latest 48 | continue-on-error: true 49 | env: 50 | BUCKET_NAME: 'spb-github-test-1e4f5ff1-ee54' 51 | RELEASE_VERSION: ${{ github.event.inputs.version }} 52 | permissions: 53 | id-token: write 54 | contents: read 55 | steps: 56 | - uses: actions/checkout@v3 57 | - uses: gradle/wrapper-validation-action@v1 58 | - name: configure aws credentials 59 | uses: aws-actions/configure-aws-credentials@v1-node16 60 | with: 61 | role-to-assume: arn:aws:iam::391059136496:role/SpbGithubTesting 62 | aws-region: ap-southeast-2 63 | - uses: graalvm/setup-graalvm@v1 64 | with: 65 | version: '22.3.1' 66 | java-version: '19' 67 | components: 'native-image' 68 | github-token: ${{ secrets.GITHUB_TOKEN }} 69 | native-image-job-reports: 'true' 70 | # - name: build and test 71 | # run: ./gradlew nativeTest --info 72 | - name: native compile 73 | run: ./gradlew nativeCompile --info 74 | - uses: actions/upload-artifact@v3 75 | with: 76 | name: spb-windows 77 | path: build/native/nativeCompile/spb.exe 78 | 79 | release: 80 | needs: [ build, build-windows ] 81 | runs-on: ubuntu-latest 82 | permissions: 83 | contents: write 84 | steps: 85 | - uses: actions/download-artifact@v3 86 | with: 87 | name: spb-macos-latest 88 | - run: ls -R 89 | - run: mv spb spb-macos 90 | 91 | - uses: actions/download-artifact@v3 92 | with: 93 | name: spb-ubuntu-latest 94 | - run: ls -R 95 | - run: mv spb spb-linux 96 | 97 | - uses: actions/download-artifact@v3 98 | with: 99 | name: spb-windows 100 | - run: ls -R 101 | - run: mv spb.exe spb-windows.exe 102 | 103 | - name: Release 104 | uses: softprops/action-gh-release@v1 105 | with: 106 | tag_name: ${{ github.event.inputs.version }} 107 | files: | 108 | spb-macos 109 | spb-linux 110 | spb-windows.exe 111 | 112 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | /test-data 2 | .idea 3 | .gradle 4 | out 5 | build 6 | .DS_Store 7 | restore-test -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2023 Andreas Marek 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
 22 | 
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
  1 | # Simple and secure personal backup
  2 | 
  3 | Spb is a simple and secure personal backup tool.
  4 | It backs up folders into S3 while encrypting the files on the client side.
  5 | 
  6 | ## Overview
  7 | 
  8 | The fundamental idea is simple: spb scans a local folder and uploads files into an S3 bucket if needed.
  9 | By leveraging [S3 versioning](https://docs.aws.amazon.com/AmazonS3/latest/userguide/Versioning.html) it provides
 10 | a full history of all files ever backed up.
 11 | 
 12 | A file is encrypted client side (with AES-GCM) before being backed up and only uploaded if its SHA256 value has
 13 | changed
 14 | since the last backup.
 15 | 
 16 | Spb aims to be as simple as possible while remaining practically usable and secure.
 17 | 
 18 | It is written in Java, compiled to a binary via [GraalVM](https://www.graalvm.org/) and available as a CLI for Mac,
 19 | Windows and Linux.
 20 | 
 21 | For the critical encryption logic it relies on as few dependencies as possible:
 22 | 
 23 | 1. the JDK's built-in encryption algorithms
 24 | 2. [AWS encryption sdk](https://github.com/aws/aws-encryption-sdk-java) (which itself relies only on Bouncy Castle)
 25 | 3. [Bouncy Castle](https://www.bouncycastle.org/)
 26 | 
 27 | Additionally, it relies on
 28 | 
 29 | 1. [AWS SDK for Java 2.0](https://github.com/aws/aws-sdk-java-v2) for accessing S3
 30 | 2. [Logback](https://logback.qos.ch/) and [SLF4J](https://www.slf4j.org/) to write log files
 31 | 3. [picocli](https://picocli.info/) for providing a nice command line interface
 32 | 
 33 | ## How to use it
 34 | 
 35 | Download a [spb release](https://github.com/andimarek/spb/releases) for Mac, Linux or Windows.
 36 | 
 37 | In addition to the terminal output, spb writes a log file with all the details to `~/spb.log`.
 38 | 
 39 | Spb requires a config file `~/spb.config`.
 40 | It allows you to configure the S3 bucket to write the backup to, the secret key to use for the encryption and
 41 | the list of folders to back up.
 42 | 
 43 | Each folder to back up is considered "one backup".
 44 | 
 45 | Example config file:
 46 | 
 47 | ```properties
 48 | bucket.name=my-backups-123ABC
 49 | secret.key=<256-bit-secret-key-in-base64>
 50 | backup.0.folder=/Users/andi/my-data
 51 | backup.0.name=data
 52 | backup.1.folder=/Users/andi/texts
 53 | backup.1.name=texts
 54 | ```
 55 | 
 56 | `bucket.name` is the S3 bucket name.
 57 | `secret.key` is the private 256-bit secret key, Base64 encoded, which is used to encrypt all data
 58 | sent to S3. You can generate a new key with `spb generate-key`.
 59 | 
 60 | `backup.N.name` and `backup.N.folder` configure the folders to back up. Each folder has a name
 61 | assigned, which identifies the backup and which is also used as the S3 folder name.
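The `secret.key` value above is nothing more than 32 random bytes encoded as Base64. The sketch below mirrors what `spb generate-key` prints (see `GenerateKey.java`); the class name is made up for this illustration, and you can just as well use the built-in command:

```java
import java.security.SecureRandom;
import java.util.Base64;

// Hypothetical standalone equivalent of `spb generate-key`:
// generates a 256 bit key (32 random bytes) and prints it Base64 encoded.
public class GenerateKeyExample {
    public static void main(String[] args) {
        byte[] key = new byte[32];
        new SecureRandom().nextBytes(key);
        System.out.println("new 256 bit key, Base64 encoded: " + Base64.getEncoder().encodeToString(key));
    }
}
```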
 62 | 
 63 | Spb requires access to the S3 bucket named in the config with the following actions:
 64 | 
 65 | ```
 66 | "s3:PutObject",
 67 | "s3:GetObject",
 68 | "s3:ListBucketVersions",
 69 | "s3:ListBucket",
 70 | "s3:DeleteObject",
 71 | "s3:GetObjectVersion"
 72 | ```
 73 | 
 74 | The credentials for the S3 access can be loaded via:
 75 | 
 76 | - Environment Variables - AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY
 77 | - Web Identity Token credentials from system properties or environment variables
 78 | - Credential profiles file at the default location (~/.aws/credentials) shared by all AWS SDKs and the AWS CLI
 79 | 
 80 | spb offers the following commands:
 81 | 
 82 | ```
 83 | backup         initiate backup of a folder
 84 | restore        restore previously backed up files
 85 | list           list all backed up files
 86 | verify         verify backups
 87 | generate-key   generate a new random key
 88 | ```
 89 | 
 90 | Example to initiate a backup:
 91 | 
 92 | ```shell
 93 | spb backup
 94 | ```
 95 | 
 96 | The `verify` command restores all the backed up files and verifies their integrity by comparing
 97 | the downloaded SHA256 checksums with the expected checksums.
 98 | 
 99 | ```shell
100 | spb verify
101 | ```
102 | 
103 | To restore a specific file from the backup `documents`:
104 | 
105 | ```shell
106 | spb restore --backup-name=documents --target-folder=documents-restored --file=tax/tax-2020.pdf
107 | ```
108 | 
109 | To restore all files from the backup `documents`:
110 | 
111 | ```shell
112 | spb restore --backup-name=documents --target-folder=documents-restored
113 | ```
114 | 
115 | To find a specific file in the backups:
116 | 
117 | ```shell
118 | spb list --file-pattern="folder/important.txt"
119 | ```
120 | 
121 | To find a specific file in the backups including history:
122 | 
123 | ```shell
124 | spb list --file-pattern=".*/important.*" --historical
125 | ```
126 | 
127 | This will show all versions of all matching files including the version id.
128 | The version id can then be used to restore this specific version:
129 | 
130 | ```shell
131 | spb restore --backup-name=documents --file-name="folder/important.txt" --target-folder="out" --version-id="sesN1qhjd6h13bsG.IIUfXeAFYE5AX7h"
132 | ```
133 | 
134 | The full list of options is available via `spb --help` and `spb <command> --help`.
135 | 
136 | ## Details
137 | 
138 | Every file is saved as two different objects in S3: one metadata object and one content object.
139 | These two objects are always written together, every time a file is backed up.
140 | 
141 | The full keys in the S3 bucket are `/backup-name/<name-hash>/metadata` and `/backup-name/<name-hash>/content`.
142 | The `backup-name` is the name of the backup configured in the spb config file.
143 | 
144 | The `<name-hash>` is an `AESCMAC` hash generated with the spb secret key and the relative file path
145 | of the backed up file.
146 | 
147 | The content object contains the content of the backed up file, encrypted
148 | as an [AWS Encryption SDK](https://docs.aws.amazon.com/encryption-sdk/latest/developer-guide/concepts.html)
149 | message. The message format is
150 | documented [here](https://docs.aws.amazon.com/encryption-sdk/latest/developer-guide/message-format.html).
151 | 
152 | The algorithm used is `AES_256_GCM_HKDF_SHA512_COMMIT_KEY`, which basically means it is encrypted with AES-GCM.
153 | The AES key used is the spb secret key.
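Because the content objects are plain AWS Encryption SDK messages, a downloaded content object can also be decrypted outside of spb as long as you have the configured secret key. The following is a minimal, hypothetical sketch (the class name and argument handling are made up); the provider name `Spb` and key id `SpbSecretKey` are the values spb itself uses (see `Impl.java`):

```java
import com.amazonaws.encryptionsdk.AwsCrypto;
import com.amazonaws.encryptionsdk.CommitmentPolicy;
import com.amazonaws.encryptionsdk.CryptoResult;
import com.amazonaws.encryptionsdk.jce.JceMasterKey;

import javax.crypto.spec.SecretKeySpec;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Base64;

// Hypothetical recovery tool: decrypts one downloaded "content" object.
// args[0] = the Base64 secret.key value, args[1] = encrypted input file, args[2] = plaintext output file
public class DecryptContentObjectSketch {
    public static void main(String[] args) throws Exception {
        byte[] rawKey = Base64.getDecoder().decode(args[0]);
        JceMasterKey masterKey = JceMasterKey.getInstance(
                new SecretKeySpec(rawKey, "AES"), "Spb", "SpbSecretKey", "AES/GCM/NoPadding");

        AwsCrypto crypto = AwsCrypto.builder()
                .withCommitmentPolicy(CommitmentPolicy.RequireEncryptRequireDecrypt)
                .build();

        byte[] encrypted = Files.readAllBytes(Path.of(args[1]));
        CryptoResult<byte[], JceMasterKey> result = crypto.decryptData(masterKey, encrypted);
        Files.write(Path.of(args[2]), result.getResult());
    }
}
```

This sketch loads the whole object into memory; for large files the SDK also offers a streaming `CryptoInputStream`, which `Impl.java` imports.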
154 | 
155 | The metadata object contains encrypted metadata about the backed up file:
156 | 
157 | - the file path
158 | - the file SHA256 checksum
159 | - the size of the file in bytes
160 | - the S3 versionId of the corresponding content object
161 | 
162 | This information is used, for example, to determine whether a file needs to be backed up again (by comparing the SHA256
163 | value).
164 | 
165 | 
166 | 
--------------------------------------------------------------------------------
/build.gradle:
--------------------------------------------------------------------------------
 1 | import java.text.SimpleDateFormat
 2 | 
 3 | plugins {
 4 |     id 'java'
 5 |     id 'application'
 6 |     id 'org.graalvm.buildtools.native' version '0.9.19'
 7 | }
 8 | 
 9 | group 'org.example'
10 | 
11 | def getDevelopmentVersion() {
12 |     def gitCheckOutput = new StringBuilder()
13 |     def gitCheckError = new StringBuilder()
14 |     def gitCheck = ["git", "rev-parse", "--is-inside-work-tree"].execute()
15 |     gitCheck.waitForProcessOutput(gitCheckOutput, gitCheckError)
16 |     def isGit = gitCheckOutput.toString().trim()
17 |     if (isGit != "true") {
18 |         def version = "0.0.0-" + new SimpleDateFormat('yyyy-MM-dd\'T\'HH-mm-ss').format(new Date()) + "-no-git"
19 |         println "created development version: $version"
20 |         return version
21 |     }
22 | 
23 |     def gitHashOutput = new StringBuilder()
24 |     def gitHashError = new StringBuilder()
25 |     def gitShortHash = ["git", "-C", projectDir.toString(), "rev-parse", "--short", "HEAD"].execute()
26 |     gitShortHash.waitForProcessOutput(gitHashOutput, gitHashError)
27 |     def gitHash = gitHashOutput.toString().trim()
28 |     if (gitHash.isEmpty()) {
29 |         println "git hash is empty: error: ${gitHashError.toString()}"
30 |         throw new IllegalStateException("git hash could not be determined")
31 |     }
32 |     def version = "0.0.0-" + new SimpleDateFormat('yyyy-MM-dd\'T\'HH-mm-ss').format(new Date()) + "-" + gitHash
33 |     println "created development version: $version"
34 |     version
35 | }
36 | 
37 | def releaseVersion = System.env.RELEASE_VERSION
38 | version = releaseVersion ?
releaseVersion : getDevelopmentVersion() 39 | 40 | mainClassName = "spb.Main" 41 | 42 | repositories { 43 | mavenCentral() 44 | } 45 | 46 | java { 47 | toolchain { 48 | languageVersion = JavaLanguageVersion.of(19) 49 | vendor = JvmVendorSpec.matching("GraalVM Community") 50 | } 51 | } 52 | 53 | tasks.jar { 54 | manifest { 55 | attributes("Main-Class": "spb.Main", 56 | "Implementation-Version": archiveVersion 57 | ) 58 | } 59 | } 60 | 61 | 62 | dependencies { 63 | compileOnly 'org.jetbrains:annotations:23.0.0' 64 | implementation('com.amazonaws:aws-encryption-sdk-java:2.4.0') { 65 | exclude group: 'org.bouncycastle', module: 'bcprov-ext-jdk15on' 66 | } 67 | // compileOnly("org.graalvm.nativeimage:svm:22.3.1") 68 | implementation "org.bouncycastle:bcprov-jdk18on:1.72" 69 | implementation platform('software.amazon.awssdk:bom:2.19.21') 70 | implementation 'software.amazon.awssdk:s3' 71 | implementation "info.picocli:picocli:4.7.0" 72 | implementation 'ch.qos.logback:logback-classic:1.4.5' 73 | annotationProcessor "info.picocli:picocli-codegen:4.7.0" 74 | 75 | 76 | testImplementation 'org.junit.jupiter:junit-jupiter-api:5.8.1' 77 | testRuntimeOnly 'org.junit.jupiter:junit-jupiter-engine:5.8.1' 78 | testImplementation("org.assertj:assertj-core:3.24.0") 79 | 80 | } 81 | 82 | test { 83 | useJUnitPlatform() 84 | 85 | } 86 | 87 | 88 | graalvmNative { 89 | binaries { 90 | test { 91 | buildArgs.add('--initialize-at-build-time=org.slf4j,ch.qos.logback') 92 | } 93 | main { 94 | buildArgs.add('--initialize-at-build-time=org.slf4j,ch.qos.logback') 95 | } 96 | } 97 | } -------------------------------------------------------------------------------- /gradle/wrapper/gradle-wrapper.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/andimarek/spb/3f1923c16a5f642457a4e0f1563301dae770b466/gradle/wrapper/gradle-wrapper.jar -------------------------------------------------------------------------------- /gradle/wrapper/gradle-wrapper.properties: -------------------------------------------------------------------------------- 1 | distributionBase=GRADLE_USER_HOME 2 | distributionPath=wrapper/dists 3 | distributionUrl=https\://services.gradle.org/distributions/gradle-7.6-bin.zip 4 | zipStoreBase=GRADLE_USER_HOME 5 | zipStorePath=wrapper/dists 6 | -------------------------------------------------------------------------------- /gradlew: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | # 4 | # Copyright © 2015-2021 the original authors. 5 | # 6 | # Licensed under the Apache License, Version 2.0 (the "License"); 7 | # you may not use this file except in compliance with the License. 8 | # You may obtain a copy of the License at 9 | # 10 | # https://www.apache.org/licenses/LICENSE-2.0 11 | # 12 | # Unless required by applicable law or agreed to in writing, software 13 | # distributed under the License is distributed on an "AS IS" BASIS, 14 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | # See the License for the specific language governing permissions and 16 | # limitations under the License. 17 | # 18 | 19 | ############################################################################## 20 | # 21 | # Gradle start up script for POSIX generated by Gradle. 22 | # 23 | # Important for running: 24 | # 25 | # (1) You need a POSIX-compliant shell to run this script. 
If your /bin/sh is 26 | # noncompliant, but you have some other compliant shell such as ksh or 27 | # bash, then to run this script, type that shell name before the whole 28 | # command line, like: 29 | # 30 | # ksh Gradle 31 | # 32 | # Busybox and similar reduced shells will NOT work, because this script 33 | # requires all of these POSIX shell features: 34 | # * functions; 35 | # * expansions «$var», «${var}», «${var:-default}», «${var+SET}», 36 | # «${var#prefix}», «${var%suffix}», and «$( cmd )»; 37 | # * compound commands having a testable exit status, especially «case»; 38 | # * various built-in commands including «command», «set», and «ulimit». 39 | # 40 | # Important for patching: 41 | # 42 | # (2) This script targets any POSIX shell, so it avoids extensions provided 43 | # by Bash, Ksh, etc; in particular arrays are avoided. 44 | # 45 | # The "traditional" practice of packing multiple parameters into a 46 | # space-separated string is a well documented source of bugs and security 47 | # problems, so this is (mostly) avoided, by progressively accumulating 48 | # options in "$@", and eventually passing that to Java. 49 | # 50 | # Where the inherited environment variables (DEFAULT_JVM_OPTS, JAVA_OPTS, 51 | # and GRADLE_OPTS) rely on word-splitting, this is performed explicitly; 52 | # see the in-line comments for details. 53 | # 54 | # There are tweaks for specific operating systems such as AIX, CygWin, 55 | # Darwin, MinGW, and NonStop. 56 | # 57 | # (3) This script is generated from the Groovy template 58 | # https://github.com/gradle/gradle/blob/master/subprojects/plugins/src/main/resources/org/gradle/api/internal/plugins/unixStartScript.txt 59 | # within the Gradle project. 60 | # 61 | # You can find Gradle at https://github.com/gradle/gradle/. 62 | # 63 | ############################################################################## 64 | 65 | # Attempt to set APP_HOME 66 | 67 | # Resolve links: $0 may be a link 68 | app_path=$0 69 | 70 | # Need this for daisy-chained symlinks. 71 | while 72 | APP_HOME=${app_path%"${app_path##*/}"} # leaves a trailing /; empty if no leading path 73 | [ -h "$app_path" ] 74 | do 75 | ls=$( ls -ld "$app_path" ) 76 | link=${ls#*' -> '} 77 | case $link in #( 78 | /*) app_path=$link ;; #( 79 | *) app_path=$APP_HOME$link ;; 80 | esac 81 | done 82 | 83 | APP_HOME=$( cd "${APP_HOME:-./}" && pwd -P ) || exit 84 | 85 | APP_NAME="Gradle" 86 | APP_BASE_NAME=${0##*/} 87 | 88 | # Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. 89 | DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"' 90 | 91 | # Use the maximum available, or set MAX_FD != -1 to use that value. 92 | MAX_FD=maximum 93 | 94 | warn () { 95 | echo "$*" 96 | } >&2 97 | 98 | die () { 99 | echo 100 | echo "$*" 101 | echo 102 | exit 1 103 | } >&2 104 | 105 | # OS specific support (must be 'true' or 'false'). 106 | cygwin=false 107 | msys=false 108 | darwin=false 109 | nonstop=false 110 | case "$( uname )" in #( 111 | CYGWIN* ) cygwin=true ;; #( 112 | Darwin* ) darwin=true ;; #( 113 | MSYS* | MINGW* ) msys=true ;; #( 114 | NONSTOP* ) nonstop=true ;; 115 | esac 116 | 117 | CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar 118 | 119 | 120 | # Determine the Java command to use to start the JVM. 121 | if [ -n "$JAVA_HOME" ] ; then 122 | if [ -x "$JAVA_HOME/jre/sh/java" ] ; then 123 | # IBM's JDK on AIX uses strange locations for the executables 124 | JAVACMD=$JAVA_HOME/jre/sh/java 125 | else 126 | JAVACMD=$JAVA_HOME/bin/java 127 | fi 128 | if [ ! 
-x "$JAVACMD" ] ; then 129 | die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME 130 | 131 | Please set the JAVA_HOME variable in your environment to match the 132 | location of your Java installation." 133 | fi 134 | else 135 | JAVACMD=java 136 | which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. 137 | 138 | Please set the JAVA_HOME variable in your environment to match the 139 | location of your Java installation." 140 | fi 141 | 142 | # Increase the maximum file descriptors if we can. 143 | if ! "$cygwin" && ! "$darwin" && ! "$nonstop" ; then 144 | case $MAX_FD in #( 145 | max*) 146 | MAX_FD=$( ulimit -H -n ) || 147 | warn "Could not query maximum file descriptor limit" 148 | esac 149 | case $MAX_FD in #( 150 | '' | soft) :;; #( 151 | *) 152 | ulimit -n "$MAX_FD" || 153 | warn "Could not set maximum file descriptor limit to $MAX_FD" 154 | esac 155 | fi 156 | 157 | # Collect all arguments for the java command, stacking in reverse order: 158 | # * args from the command line 159 | # * the main class name 160 | # * -classpath 161 | # * -D...appname settings 162 | # * --module-path (only if needed) 163 | # * DEFAULT_JVM_OPTS, JAVA_OPTS, and GRADLE_OPTS environment variables. 164 | 165 | # For Cygwin or MSYS, switch paths to Windows format before running java 166 | if "$cygwin" || "$msys" ; then 167 | APP_HOME=$( cygpath --path --mixed "$APP_HOME" ) 168 | CLASSPATH=$( cygpath --path --mixed "$CLASSPATH" ) 169 | 170 | JAVACMD=$( cygpath --unix "$JAVACMD" ) 171 | 172 | # Now convert the arguments - kludge to limit ourselves to /bin/sh 173 | for arg do 174 | if 175 | case $arg in #( 176 | -*) false ;; # don't mess with options #( 177 | /?*) t=${arg#/} t=/${t%%/*} # looks like a POSIX filepath 178 | [ -e "$t" ] ;; #( 179 | *) false ;; 180 | esac 181 | then 182 | arg=$( cygpath --path --ignore --mixed "$arg" ) 183 | fi 184 | # Roll the args list around exactly as many times as the number of 185 | # args, so each arg winds up back in the position where it started, but 186 | # possibly modified. 187 | # 188 | # NB: a `for` loop captures its iteration list before it begins, so 189 | # changing the positional parameters here affects neither the number of 190 | # iterations, nor the values presented in `arg`. 191 | shift # remove old arg 192 | set -- "$@" "$arg" # push replacement arg 193 | done 194 | fi 195 | 196 | # Collect all arguments for the java command; 197 | # * $DEFAULT_JVM_OPTS, $JAVA_OPTS, and $GRADLE_OPTS can contain fragments of 198 | # shell script including quotes and variable substitutions, so put them in 199 | # double quotes to make sure that they get re-expanded; and 200 | # * put everything else in single quotes, so that it's not re-expanded. 201 | 202 | set -- \ 203 | "-Dorg.gradle.appname=$APP_BASE_NAME" \ 204 | -classpath "$CLASSPATH" \ 205 | org.gradle.wrapper.GradleWrapperMain \ 206 | "$@" 207 | 208 | # Stop when "xargs" is not available. 209 | if ! command -v xargs >/dev/null 2>&1 210 | then 211 | die "xargs is not available" 212 | fi 213 | 214 | # Use "xargs" to parse quoted args. 215 | # 216 | # With -n1 it outputs one arg per line, with the quotes and backslashes removed. 
217 | # 218 | # In Bash we could simply go: 219 | # 220 | # readarray ARGS < <( xargs -n1 <<<"$var" ) && 221 | # set -- "${ARGS[@]}" "$@" 222 | # 223 | # but POSIX shell has neither arrays nor command substitution, so instead we 224 | # post-process each arg (as a line of input to sed) to backslash-escape any 225 | # character that might be a shell metacharacter, then use eval to reverse 226 | # that process (while maintaining the separation between arguments), and wrap 227 | # the whole thing up as a single "set" statement. 228 | # 229 | # This will of course break if any of these variables contains a newline or 230 | # an unmatched quote. 231 | # 232 | 233 | eval "set -- $( 234 | printf '%s\n' "$DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS" | 235 | xargs -n1 | 236 | sed ' s~[^-[:alnum:]+,./:=@_]~\\&~g; ' | 237 | tr '\n' ' ' 238 | )" '"$@"' 239 | 240 | exec "$JAVACMD" "$@" 241 | -------------------------------------------------------------------------------- /gradlew.bat: -------------------------------------------------------------------------------- 1 | @rem 2 | @rem Copyright 2015 the original author or authors. 3 | @rem 4 | @rem Licensed under the Apache License, Version 2.0 (the "License"); 5 | @rem you may not use this file except in compliance with the License. 6 | @rem You may obtain a copy of the License at 7 | @rem 8 | @rem https://www.apache.org/licenses/LICENSE-2.0 9 | @rem 10 | @rem Unless required by applicable law or agreed to in writing, software 11 | @rem distributed under the License is distributed on an "AS IS" BASIS, 12 | @rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | @rem See the License for the specific language governing permissions and 14 | @rem limitations under the License. 15 | @rem 16 | 17 | @if "%DEBUG%"=="" @echo off 18 | @rem ########################################################################## 19 | @rem 20 | @rem Gradle startup script for Windows 21 | @rem 22 | @rem ########################################################################## 23 | 24 | @rem Set local scope for the variables with windows NT shell 25 | if "%OS%"=="Windows_NT" setlocal 26 | 27 | set DIRNAME=%~dp0 28 | if "%DIRNAME%"=="" set DIRNAME=. 29 | set APP_BASE_NAME=%~n0 30 | set APP_HOME=%DIRNAME% 31 | 32 | @rem Resolve any "." and ".." in APP_HOME to make it shorter. 33 | for %%i in ("%APP_HOME%") do set APP_HOME=%%~fi 34 | 35 | @rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. 36 | set DEFAULT_JVM_OPTS="-Xmx64m" "-Xms64m" 37 | 38 | @rem Find java.exe 39 | if defined JAVA_HOME goto findJavaFromJavaHome 40 | 41 | set JAVA_EXE=java.exe 42 | %JAVA_EXE% -version >NUL 2>&1 43 | if %ERRORLEVEL% equ 0 goto execute 44 | 45 | echo. 46 | echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. 47 | echo. 48 | echo Please set the JAVA_HOME variable in your environment to match the 49 | echo location of your Java installation. 50 | 51 | goto fail 52 | 53 | :findJavaFromJavaHome 54 | set JAVA_HOME=%JAVA_HOME:"=% 55 | set JAVA_EXE=%JAVA_HOME%/bin/java.exe 56 | 57 | if exist "%JAVA_EXE%" goto execute 58 | 59 | echo. 60 | echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME% 61 | echo. 62 | echo Please set the JAVA_HOME variable in your environment to match the 63 | echo location of your Java installation. 
64 | 65 | goto fail 66 | 67 | :execute 68 | @rem Setup the command line 69 | 70 | set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar 71 | 72 | 73 | @rem Execute Gradle 74 | "%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %* 75 | 76 | :end 77 | @rem End local scope for the variables with windows NT shell 78 | if %ERRORLEVEL% equ 0 goto mainEnd 79 | 80 | :fail 81 | rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of 82 | rem the _cmd.exe /c_ return code! 83 | set EXIT_CODE=%ERRORLEVEL% 84 | if %EXIT_CODE% equ 0 set EXIT_CODE=1 85 | if not ""=="%GRADLE_EXIT_CONSOLE%" exit %EXIT_CODE% 86 | exit /b %EXIT_CODE% 87 | 88 | :mainEnd 89 | if "%OS%"=="Windows_NT" endlocal 90 | 91 | :omega 92 | -------------------------------------------------------------------------------- /settings.gradle: -------------------------------------------------------------------------------- 1 | pluginManagement { 2 | repositories { 3 | mavenCentral() 4 | gradlePluginPortal() 5 | } 6 | } 7 | 8 | rootProject.name = 'spb' 9 | 10 | -------------------------------------------------------------------------------- /src/main/java/spb/Backup.java: -------------------------------------------------------------------------------- 1 | package spb; 2 | 3 | import picocli.CommandLine.Command; 4 | import picocli.CommandLine.Option; 5 | 6 | import java.util.concurrent.Callable; 7 | 8 | @Command(name = "backup", mixinStandardHelpOptions = true, description = "initiate backup of a folder") 9 | public class Backup implements Callable { 10 | 11 | @Option(names = {"--dry-run"}, description = "Show what an actual backup would mean", paramLabel = "dry-run") 12 | private boolean dryRun; 13 | 14 | @Override 15 | public String call() throws Exception { 16 | Impl impl = new Impl(); 17 | impl.backupFolders(dryRun); 18 | impl.shutdown(); 19 | return "success"; 20 | } 21 | } 22 | -------------------------------------------------------------------------------- /src/main/java/spb/BackupFolderSummary.java: -------------------------------------------------------------------------------- 1 | package spb; 2 | 3 | import java.util.List; 4 | 5 | /** 6 | * Result of backing up a folder. 
7 | */ 8 | public record BackupFolderSummary(String backupName, 9 | String backupFolder, 10 | List backedUpFiles, 11 | List deletedFiles) { 12 | 13 | public sealed interface BackedUpFile permits BackedUpFile.UnchangedFile, BackedUpFile.ChangedFile { 14 | String relativePath(); 15 | 16 | record UnchangedFile(String relativePath) implements BackedUpFile { 17 | 18 | } 19 | 20 | record ChangedFile(String relativePath, 21 | String sha256Base64, 22 | long fileSizeInBytes) implements BackedUpFile { 23 | 24 | } 25 | 26 | } 27 | 28 | 29 | public record DeletedFile(String relativePath) { 30 | 31 | } 32 | } 33 | 34 | -------------------------------------------------------------------------------- /src/main/java/spb/ConfigProvider.java: -------------------------------------------------------------------------------- 1 | package spb; 2 | 3 | import java.util.List; 4 | 5 | public interface ConfigProvider { 6 | 7 | 8 | byte[] getRawSecretKeyBytes(); 9 | 10 | String getBucketName(); 11 | 12 | List getFoldersBackupConfig(); 13 | 14 | int getMultiPartUploadLimitInBytes(); 15 | } 16 | -------------------------------------------------------------------------------- /src/main/java/spb/FolderToBackupConfig.java: -------------------------------------------------------------------------------- 1 | package spb; 2 | 3 | public record FolderToBackupConfig(String folder, String backupName) { 4 | 5 | } -------------------------------------------------------------------------------- /src/main/java/spb/GenerateKey.java: -------------------------------------------------------------------------------- 1 | package spb; 2 | 3 | import picocli.CommandLine; 4 | 5 | import java.security.SecureRandom; 6 | import java.util.Base64; 7 | import java.util.concurrent.Callable; 8 | 9 | @CommandLine.Command(name = "generate-key", mixinStandardHelpOptions = true, description = "generate a new random key") 10 | public class GenerateKey implements Callable { 11 | @Override 12 | public Integer call() throws Exception { 13 | byte[] newKey = new byte[32]; 14 | SecureRandom secureRandom = new SecureRandom(); 15 | secureRandom.nextBytes(newKey); 16 | // not logging, just printing 17 | String keyBase64 = Base64.getEncoder().encodeToString(newKey); 18 | System.out.println("new 256 bit key, Base64 encoded: " + keyBase64); 19 | return 0; 20 | } 21 | } 22 | -------------------------------------------------------------------------------- /src/main/java/spb/Impl.java: -------------------------------------------------------------------------------- 1 | package spb; 2 | 3 | import com.amazonaws.encryptionsdk.AwsCrypto; 4 | import com.amazonaws.encryptionsdk.CommitmentPolicy; 5 | import com.amazonaws.encryptionsdk.CryptoAlgorithm; 6 | import com.amazonaws.encryptionsdk.CryptoInputStream; 7 | import com.amazonaws.encryptionsdk.CryptoResult; 8 | import com.amazonaws.encryptionsdk.jce.JceMasterKey; 9 | import org.bouncycastle.crypto.engines.AESEngine; 10 | import org.bouncycastle.crypto.macs.CMac; 11 | import org.bouncycastle.crypto.params.KeyParameter; 12 | import org.jetbrains.annotations.Nullable; 13 | import org.slf4j.Logger; 14 | import org.slf4j.LoggerFactory; 15 | import software.amazon.awssdk.core.ResponseInputStream; 16 | import software.amazon.awssdk.core.sync.RequestBody; 17 | import software.amazon.awssdk.services.s3.S3Client; 18 | import software.amazon.awssdk.services.s3.model.ChecksumAlgorithm; 19 | import software.amazon.awssdk.services.s3.model.CommonPrefix; 20 | import 
software.amazon.awssdk.services.s3.model.CompleteMultipartUploadRequest; 21 | import software.amazon.awssdk.services.s3.model.CompleteMultipartUploadResponse; 22 | import software.amazon.awssdk.services.s3.model.CompletedMultipartUpload; 23 | import software.amazon.awssdk.services.s3.model.CompletedPart; 24 | import software.amazon.awssdk.services.s3.model.CreateMultipartUploadRequest; 25 | import software.amazon.awssdk.services.s3.model.CreateMultipartUploadResponse; 26 | import software.amazon.awssdk.services.s3.model.Delete; 27 | import software.amazon.awssdk.services.s3.model.DeleteMarkerEntry; 28 | import software.amazon.awssdk.services.s3.model.DeleteObjectsRequest; 29 | import software.amazon.awssdk.services.s3.model.DeleteObjectsResponse; 30 | import software.amazon.awssdk.services.s3.model.GetObjectRequest; 31 | import software.amazon.awssdk.services.s3.model.GetObjectResponse; 32 | import software.amazon.awssdk.services.s3.model.ListObjectVersionsRequest; 33 | import software.amazon.awssdk.services.s3.model.ListObjectsV2Request; 34 | import software.amazon.awssdk.services.s3.model.ListObjectsV2Response; 35 | import software.amazon.awssdk.services.s3.model.ObjectIdentifier; 36 | import software.amazon.awssdk.services.s3.model.ObjectVersion; 37 | import software.amazon.awssdk.services.s3.model.PutObjectRequest; 38 | import software.amazon.awssdk.services.s3.model.PutObjectResponse; 39 | import software.amazon.awssdk.services.s3.model.UploadPartRequest; 40 | import software.amazon.awssdk.services.s3.model.UploadPartResponse; 41 | import software.amazon.awssdk.services.s3.paginators.ListObjectVersionsIterable; 42 | import software.amazon.awssdk.utils.IoUtils; 43 | import spb.BackupFolderSummary.BackedUpFile; 44 | import spb.BackupFolderSummary.BackedUpFile.ChangedFile; 45 | import spb.BackupFolderSummary.BackedUpFile.UnchangedFile; 46 | import spb.BackupFolderSummary.DeletedFile; 47 | 48 | import javax.crypto.spec.SecretKeySpec; 49 | import java.io.File; 50 | import java.io.FileInputStream; 51 | import java.io.FileOutputStream; 52 | import java.io.IOException; 53 | import java.nio.ByteBuffer; 54 | import java.nio.channels.FileChannel; 55 | import java.nio.charset.StandardCharsets; 56 | import java.nio.file.Files; 57 | import java.nio.file.Path; 58 | import java.security.InvalidKeyException; 59 | import java.security.NoSuchAlgorithmException; 60 | import java.security.NoSuchProviderException; 61 | import java.time.Instant; 62 | import java.util.ArrayList; 63 | import java.util.Base64; 64 | import java.util.Collections; 65 | import java.util.Comparator; 66 | import java.util.LinkedHashMap; 67 | import java.util.LinkedHashSet; 68 | import java.util.List; 69 | import java.util.Map; 70 | import java.util.Optional; 71 | import java.util.Set; 72 | import java.util.concurrent.CompletableFuture; 73 | import java.util.concurrent.ExecutionException; 74 | import java.util.concurrent.ExecutorService; 75 | import java.util.concurrent.Executors; 76 | import java.util.concurrent.atomic.AtomicLong; 77 | import java.util.regex.Pattern; 78 | import java.util.stream.Collectors; 79 | import java.util.stream.Stream; 80 | 81 | import static spb.Impl.HistoricalFile.HistoricalBackedUpFile; 82 | import static spb.Impl.HistoricalFile.HistoricalDeletedFile; 83 | import static spb.Util.DIVIDER; 84 | import static spb.Util.bytesToHumanReadableFormat; 85 | 86 | public class Impl { 87 | 88 | private static final AwsCrypto awsCrypto = AwsCrypto.builder() 89 | 
.withCommitmentPolicy(CommitmentPolicy.RequireEncryptRequireDecrypt) 90 | .withEncryptionAlgorithm(CryptoAlgorithm.ALG_AES_256_GCM_HKDF_SHA512_COMMIT_KEY) 91 | .build(); 92 | private static final int MAX_FILES_COUNT = 10_000; 93 | 94 | private static final long HUNDRED_MB = 100 * 1024 * 1024L; 95 | 96 | // changing this key will make existing backups fail. 97 | private static final String MASTER_KEY_ID = "SpbSecretKey"; 98 | 99 | // currently we only have version 1 100 | private static final int METADATA_VERSION_1 = 1; 101 | private final ConfigProvider configFile; 102 | private final S3Client s3Client; 103 | private String bucketName; 104 | private JceMasterKey masterKey; 105 | private SecretKeySpec secretKeySpec; 106 | 107 | private final ExecutorService threadPoolExecutor = Executors.newFixedThreadPool(10); 108 | private final ExecutorService multipartUploadExecutor = Executors.newFixedThreadPool(5); 109 | 110 | private final Logger logger = LoggerFactory.getLogger("spb"); 111 | 112 | 113 | static final List filePatternsToIgnore = List.of(Pattern.compile("(.*/)?.DS_Store")); 114 | 115 | 116 | public record FileMetadata(String fileName, 117 | String originalFileSha256Base64, 118 | String objectKey, 119 | long originalFileSizeInBytes, 120 | Instant creationDate, 121 | String contentVersionId 122 | ) implements FileInfo { 123 | 124 | } 125 | 126 | interface FileInfo { 127 | 128 | String fileName(); 129 | 130 | String originalFileSha256Base64(); 131 | 132 | long originalFileSizeInBytes(); 133 | 134 | Instant creationDate(); 135 | 136 | // this includes the backup-name + nameHash + "/" 137 | String objectKey(); 138 | 139 | @Nullable String contentVersionId(); 140 | 141 | } 142 | 143 | public sealed interface HistoricalFile 144 | permits HistoricalBackedUpFile, HistoricalDeletedFile { 145 | 146 | String fileName(); 147 | 148 | boolean isLatest(); 149 | 150 | Instant creationDate(); 151 | 152 | record HistoricalBackedUpFile( 153 | String fileName, 154 | String originalFileSha256Base64, 155 | long originalFileSizeInBytes, 156 | Instant creationDate, 157 | boolean isLatest, 158 | String objectKey, 159 | String contentVersionId, 160 | String metadataVersionId 161 | ) implements HistoricalFile, FileInfo { 162 | } 163 | 164 | record HistoricalDeletedFile( 165 | String fileName, 166 | Instant creationDate, 167 | boolean isLatest 168 | ) implements HistoricalFile { 169 | } 170 | } 171 | 172 | record CountFilesResult(long count, long ignoredFiles) { 173 | } 174 | 175 | public Impl(ConfigProvider configProvider) throws IOException { 176 | this.configFile = configProvider; 177 | readConfigFile(); 178 | s3Client = S3Client.create(); 179 | } 180 | 181 | public Impl() throws IOException { 182 | this(new PropertiesConfigProvider()); 183 | } 184 | 185 | public void shutdown() { 186 | threadPoolExecutor.shutdown(); 187 | multipartUploadExecutor.shutdown(); 188 | } 189 | 190 | private void readConfigFile() throws IOException { 191 | bucketName = configFile.getBucketName(); 192 | 193 | secretKeySpec = new SecretKeySpec(configFile.getRawSecretKeyBytes(), "AES"); 194 | masterKey = JceMasterKey.getInstance(secretKeySpec, "Spb", MASTER_KEY_ID, "AES/GCM/NoPadding"); 195 | 196 | } 197 | 198 | public List backupFolders(boolean dryRun) throws IOException, ExecutionException, InterruptedException { 199 | List foldersBackupConfig = configFile.getFoldersBackupConfig(); 200 | List result = new ArrayList<>(); 201 | if (dryRun) { 202 | logger.info(DIVIDER); 203 | logger.info("DRY RUN ---- NOTHING will be actually actually 
backed up ---- DRY RUN"); 204 | logger.info(DIVIDER); 205 | } 206 | logger.info("start backup of {} folders", foldersBackupConfig.size()); 207 | for (FolderToBackupConfig folderToBackupConfig : foldersBackupConfig) { 208 | result.add(backupSingleFolder(folderToBackupConfig.folder(), folderToBackupConfig.backupName(), dryRun)); 209 | } 210 | result.forEach(backupFolderSummary -> printBackupSummary(backupFolderSummary, dryRun)); 211 | return result; 212 | } 213 | 214 | private void printBackupSummary(BackupFolderSummary backupFolderSummary, boolean dryRun) { 215 | String backupName = backupFolderSummary.backupName(); 216 | List backedUpFiles = backupFolderSummary.backedUpFiles(); 217 | int changedFilesCount = 0; 218 | int unchangedFilesCount = 0; 219 | long totalBytesUploaded = 0; 220 | for (BackedUpFile backedUpFile : backedUpFiles) { 221 | if (backedUpFile instanceof ChangedFile changedFile) { 222 | changedFilesCount++; 223 | totalBytesUploaded += changedFile.fileSizeInBytes(); 224 | } else if (backedUpFile instanceof UnchangedFile) { 225 | unchangedFilesCount++; 226 | } 227 | } 228 | 229 | List deletedFiles = backupFolderSummary.deletedFiles(); 230 | 231 | logger.info("number of backed up files (changed and unchanged): {}", backedUpFiles.size()); 232 | logger.info("number of deleted files: {}", deletedFiles.size()); 233 | if (dryRun) { 234 | logger.info(DIVIDER); 235 | logger.info("DRY RUN ---- NOTHING was actually backed up ---- DRY RUN"); 236 | logger.info("DRY RUN summary for backup '{}' from folder '{}'", backupName, backupFolderSummary.backupFolder()); 237 | } else { 238 | logger.info(DIVIDER); 239 | logger.info("Summary for backup '{}' from folder '{}'", backupName, backupFolderSummary.backupFolder()); 240 | } 241 | logger.info("total files backed up: {} made out of {} changed vs {} unchanged", backedUpFiles.size(), changedFilesCount, unchangedFilesCount); 242 | logger.info("total data uploaded {} ", bytesToHumanReadableFormat(totalBytesUploaded)); 243 | logger.info("total files deleted {}", deletedFiles.size()); 244 | logger.info(DIVIDER); 245 | 246 | if (backedUpFiles.size() == 0) { 247 | logger.debug("no files found to backup. This means the folder to backup is empty."); 248 | } else { 249 | logger.debug("details of backed up files:"); 250 | } 251 | for (BackedUpFile backedUpFile : backedUpFiles) { 252 | if (backedUpFile instanceof ChangedFile) { 253 | logger.debug("file {} was changed and backed up. ", backedUpFile.relativePath()); 254 | } else if (backedUpFile instanceof UnchangedFile) { 255 | logger.debug("file {} was not changed and not backed up. 
", backedUpFile.relativePath()); 256 | } 257 | } 258 | if (deletedFiles.size() == 0) { 259 | logger.debug("no deleted files"); 260 | } else { 261 | logger.debug("details of deleted files:"); 262 | for (DeletedFile deletedFile : deletedFiles) { 263 | logger.debug("file {} was deleted", deletedFile.relativePath()); 264 | } 265 | } 266 | if (dryRun) { 267 | logger.info(DIVIDER); 268 | logger.info("DRY RUN SUMMARY FINISHED FOR '{}'", backupName); 269 | } else { 270 | logger.info(DIVIDER); 271 | logger.info("SUMMARY FINISHED FOR '{}'", backupName); 272 | } 273 | logger.info("total files backed up: {} made out of {} changed vs {} unchanged", backedUpFiles.size(), changedFilesCount, unchangedFilesCount); 274 | logger.info("total data uploaded {} ", bytesToHumanReadableFormat(totalBytesUploaded)); 275 | logger.info("total files deleted {}", deletedFiles.size()); 276 | logger.info(DIVIDER); 277 | 278 | } 279 | 280 | 281 | private BackupFolderSummary backupSingleFolder( 282 | String folderStr, 283 | String backupName, 284 | boolean dryRun 285 | ) throws IOException, ExecutionException, InterruptedException { 286 | Path folder = Path.of(folderStr); 287 | CountFilesResult filesCount; 288 | try { 289 | filesCount = countFilesToBackup(folder); 290 | } catch (IOException e) { 291 | logger.error("error accessing files to backup ... abort backing up {}.", folder, e); 292 | throw new RuntimeException(e); 293 | } 294 | 295 | if (filesCount.count > MAX_FILES_COUNT) { 296 | logger.error("Abort ... to many files to backup"); 297 | throw new RuntimeException("To many files to backup"); 298 | } 299 | logger.info("Found {} files to backup ({} ignored files)", filesCount.count, filesCount.ignoredFiles); 300 | List alreadyBackedUpFiles = getBackedUpFiles(backupName); 301 | 302 | if (dryRun) { 303 | BackupFolderSummary backupFolderSummary = dryRunBackupSingleFolderImpl(folder, backupName, alreadyBackedUpFiles); 304 | return backupFolderSummary; 305 | 306 | } else { 307 | BackupFolderSummary backupFolderSummary = backupSingleFolderImpl(folder, backupName, alreadyBackedUpFiles); 308 | return backupFolderSummary; 309 | } 310 | } 311 | 312 | private BackupFolderSummary dryRunBackupSingleFolderImpl(Path folder, String backupName, List alreadyBackedUpFiles) throws IOException { 313 | logger.info("Start backup '{}' from folder '{}'", backupName, folder); 314 | if (!folder.toFile().isDirectory()) { 315 | throw new RuntimeException("Must be a folder " + folder); 316 | } 317 | 318 | Map fileNamesMap = alreadyBackedUpFiles 319 | .stream() 320 | .collect(Collectors.toMap(FileMetadata::fileName, fileMetadata -> fileMetadata)); 321 | 322 | List newlyBackedUpFiles = new ArrayList<>(); 323 | Set existingFiles = new LinkedHashSet<>(); 324 | 325 | try (Stream walk = Files.walk(folder)) { 326 | walk.forEach(file -> { 327 | if (file.equals(folder)) { 328 | return; 329 | } 330 | if (file.toFile().isDirectory()) { 331 | return; 332 | } 333 | if (shouldIgnoreFile(file.toString())) { 334 | logger.debug("file {} is ignored", file); 335 | return; 336 | } 337 | Path fileRelativePath = folder.relativize(file); 338 | existingFiles.add(fileRelativePath.toString()); 339 | 340 | String originalFileSha256Base64; 341 | try { 342 | long originalFileSize = Files.size(file); 343 | if (originalFileSize > HUNDRED_MB) { 344 | logger.debug("calculating sha256 for larger file {} with {}", file, bytesToHumanReadableFormat(originalFileSize)); 345 | } 346 | originalFileSha256Base64 = Util.sha256Base64ForFile(file); 347 | if (doesFileNeedBackup(folder, 
fileRelativePath, originalFileSha256Base64, fileNamesMap)) { 348 | newlyBackedUpFiles.add(new BackedUpFile.ChangedFile(fileRelativePath.toString(), originalFileSha256Base64, originalFileSize)); 349 | } else { 350 | newlyBackedUpFiles.add(new UnchangedFile(fileRelativePath.toString())); 351 | } 352 | } catch (Exception e) { 353 | logger.error("error: ", e); 354 | throw new RuntimeException(e); 355 | } 356 | }); 357 | } 358 | 359 | // files to deleted 360 | List filesToDelete = alreadyBackedUpFiles.stream() 361 | .filter(fileMetadata -> !existingFiles.contains(fileMetadata.fileName())) 362 | .map(fileMetadata -> new DeletedFile(fileMetadata.fileName)) 363 | .toList(); 364 | return new BackupFolderSummary(backupName, folder.toString(), newlyBackedUpFiles, filesToDelete); 365 | } 366 | 367 | private BackupFolderSummary backupSingleFolderImpl(Path folder, String backupName, List alreadyBackedUpFiles) throws 368 | IOException, ExecutionException, InterruptedException { 369 | logger.info("Start backup '{}' from folder '{}'", backupName, folder); 370 | if (!folder.toFile().isDirectory()) { 371 | throw new RuntimeException("Must be a folder " + folder); 372 | } 373 | 374 | Map fileNamesMap = alreadyBackedUpFiles 375 | .stream() 376 | .collect(Collectors.toMap(FileMetadata::fileName, fileMetadata -> fileMetadata)); 377 | 378 | 379 | Path tempDirectory = Files.createTempDirectory(backupName); 380 | tempDirectory.toFile().deleteOnExit(); 381 | 382 | List> futures = new ArrayList<>(); 383 | List newlyBackedUpFiles = Collections.synchronizedList(new ArrayList<>()); 384 | Set existingFiles = Collections.synchronizedSet(new LinkedHashSet<>()); 385 | try (Stream walk = Files.walk(folder)) { 386 | walk.forEach(file -> { 387 | if (file.equals(folder)) { 388 | return; 389 | } 390 | if (file.toFile().isDirectory()) { 391 | return; 392 | } 393 | if (shouldIgnoreFile(file.toString())) { 394 | logger.debug("file {} is ignored", file); 395 | return; 396 | } 397 | futures.add(CompletableFuture.runAsync((() -> { 398 | try { 399 | Path fileRelativePath = folder.relativize(file); 400 | existingFiles.add(fileRelativePath.toString()); 401 | BackedUpFile backedUpFile = encryptAndUploadFile(folder, fileRelativePath, tempDirectory, backupName, fileNamesMap); 402 | newlyBackedUpFiles.add(backedUpFile); 403 | } catch (Exception e) { 404 | logger.info("upload failed", e); 405 | throw new RuntimeException(e); 406 | } 407 | }), threadPoolExecutor)); 408 | }); 409 | 410 | } 411 | CompletableFuture.allOf(futures.toArray(new CompletableFuture[0])).get(); 412 | logger.info("finished uploading new or changed files"); 413 | 414 | List deletedFiles = deleteFiles(alreadyBackedUpFiles, existingFiles); 415 | return new BackupFolderSummary(backupName, folder.toString(), newlyBackedUpFiles, deletedFiles); 416 | } 417 | 418 | 419 | public void verifyAllBackup() throws IOException, NoSuchAlgorithmException, ExecutionException, InterruptedException { 420 | logger.info("start verifying all {} backups", configFile.getFoldersBackupConfig().size()); 421 | for (FolderToBackupConfig folderToBackupConfig : configFile.getFoldersBackupConfig()) { 422 | verifyBackup(folderToBackupConfig.backupName()); 423 | } 424 | 425 | } 426 | 427 | public void verifyBackup(String backupName) throws IOException, NoSuchAlgorithmException, ExecutionException, InterruptedException { 428 | logger.info("Start verifying backup {}", backupName); 429 | Path tempDirectory = Files.createTempDirectory(backupName); 430 | logger.debug("Created tmp directory {}", tempDirectory); 
431 | restoreFullBackup(backupName, tempDirectory); 432 | Util.deleteFolderRecursively(tempDirectory); 433 | logger.info("Backup {} successfully verified", backupName); 434 | } 435 | 436 | public void restoreFullBackup(String backupName, Path targetFolder) throws IOException, ExecutionException, InterruptedException, NoSuchAlgorithmException { 437 | List backedUpFiles = getBackedUpFiles(backupName); 438 | 439 | List> completableFutures = new ArrayList<>(); 440 | for (final FileMetadata fileMetadata : backedUpFiles) { 441 | completableFutures.add(CompletableFuture.runAsync(() -> { 442 | try { 443 | restoreFile(fileMetadata, targetFolder); 444 | } catch (Exception e) { 445 | logger.error("error restoring file {}", fileMetadata.fileName, e); 446 | throw new RuntimeException(e); 447 | } 448 | }, threadPoolExecutor)); 449 | } 450 | logger.info("Waiting for restoring of all files finished"); 451 | CompletableFuture.allOf(completableFutures.toArray(new CompletableFuture[0])).get(); 452 | 453 | } 454 | 455 | 456 | public void restoreHistoricalFile(String backupName, 457 | Path targetFolder, 458 | String fileToRestore, 459 | String metadataVersionId) throws IOException, NoSuchAlgorithmException { 460 | String fileNameHash = createFileNameHash(Path.of(fileToRestore)); 461 | FileMetadata fileMetadata = readFileMetadata(backupName + "/" + fileNameHash + "/", metadataVersionId); 462 | restoreFile(fileMetadata, targetFolder); 463 | } 464 | 465 | 466 | public void restoreFile(Path targetFolder, 467 | String backupName, 468 | String fileToRestore) throws 469 | IOException, ExecutionException, InterruptedException, NoSuchAlgorithmException { 470 | List alreadyBackedUpFiles = getBackedUpFiles(backupName); 471 | Optional backedUpFileOptional = alreadyBackedUpFiles.stream().filter(fileMetadata -> fileMetadata.fileName.equals(fileToRestore)).findFirst(); 472 | if (backedUpFileOptional.isEmpty()) { 473 | logger.info("file {} not found in backup {} ... nothing to restore", fileToRestore, backupName); 474 | return; 475 | } 476 | restoreFile(backedUpFileOptional.get(), targetFolder); 477 | } 478 | 479 | 480 | public void restoreFile(FileInfo fileInfo, 481 | Path targetFolder) throws IOException, NoSuchAlgorithmException { 482 | logger.debug("restoring file {}", fileInfo); 483 | /** 484 | * Download 485 | */ 486 | GetObjectRequest getObjectRequest = GetObjectRequest 487 | .builder() 488 | .bucket(bucketName) 489 | .key(contentObjectKey(fileInfo.objectKey())) 490 | .versionId(fileInfo.contentVersionId()) 491 | .build(); 492 | Path encryptedFiled = targetFolder.resolve(fileInfo.fileName() + ".encrypted"); 493 | encryptedFiled.toFile().getParentFile().mkdirs(); 494 | long time = System.currentTimeMillis(); 495 | logger.debug("Start downloading file {}. 
Original file size: {} bytes", fileInfo.fileName(), fileInfo.originalFileSizeInBytes()); 496 | GetObjectResponse getObjectResponse = s3Client.getObject(getObjectRequest, encryptedFiled); 497 | logger.debug("Finished downloading file {} after {}ms ", fileInfo.fileName(), System.currentTimeMillis() - time); 498 | Path decryptedFile = targetFolder.resolve(fileInfo.fileName()); 499 | decryptFile(decryptedFile, encryptedFiled.toFile()); 500 | encryptedFiled.toFile().deleteOnExit(); 501 | 502 | /** 503 | * Verify 504 | */ 505 | String sha256 = Util.sha256Base64ForFile(decryptedFile); 506 | if (sha256.equals(fileInfo.originalFileSha256Base64())) { 507 | logger.debug("Verified SHA256 successfully for restored file {}", fileInfo.fileName()); 508 | } else { 509 | logger.error("invalid SHA256: {} vs expected {}", sha256, fileInfo.originalFileSha256Base64()); 510 | throw new RuntimeException("Could not verify restored file"); 511 | } 512 | 513 | logger.info("file {} restored at {}", fileInfo.fileName(), targetFolder); 514 | } 515 | 516 | 517 | /** 518 | * all backed up files across all backups configured in the config file. 519 | * This doesn't include historical versions, only the latest ones. 520 | */ 521 | public Map> allBackedUpFiles() throws 522 | IOException, ExecutionException, InterruptedException { 523 | Map> result = new LinkedHashMap<>(); 524 | List foldersBackupConfig = configFile.getFoldersBackupConfig(); 525 | for (FolderToBackupConfig folderToBackupConfig : foldersBackupConfig) { 526 | List fileMetadata = getBackedUpFiles(folderToBackupConfig.backupName()); 527 | result.put(folderToBackupConfig.backupName(), fileMetadata); 528 | } 529 | return result; 530 | } 531 | 532 | public Map>> allBackedUpFilesIncludingHistory() throws ExecutionException, InterruptedException { 533 | Map>> result = new LinkedHashMap<>(); 534 | List foldersBackupConfig = configFile.getFoldersBackupConfig(); 535 | for (FolderToBackupConfig folderToBackupConfig : foldersBackupConfig) { 536 | Map> fileMetadata = getBackedUpFilesIncludingHistory(folderToBackupConfig.backupName()); 537 | result.put(folderToBackupConfig.backupName(), fileMetadata); 538 | } 539 | return result; 540 | 541 | } 542 | 543 | 544 | /** 545 | * The result is ordered by date, from oldest to newest. 546 | */ 547 | private Map> getBackedUpFilesIncludingHistory(String backupName) throws ExecutionException, InterruptedException { 548 | List allKeys = getAllObjectKeysInBackup(backupName, true); 549 | 550 | Map> objectKeyToVersionAndDeleteMarker = new LinkedHashMap<>(); 551 | for (CommonPrefix commonPrefix : allKeys) { 552 | ListObjectVersionsRequest req = ListObjectVersionsRequest.builder() 553 | .bucket(bucketName) 554 | .prefix(commonPrefix.prefix() + "metadata") 555 | .delimiter("/") 556 | .maxKeys(1_000) 557 | .build(); 558 | ListObjectVersionsIterable iterable = s3Client.listObjectVersionsPaginator(req); 559 | /** 560 | Unfortunately in SDK v2 versions and delete markers are parsed separately 561 | which means we need to create again one list by combining and sorting both lists. 562 | "LastModified" has a 1 seconds resolution, which means in theory we can't separate events 563 | happened in less than 1 second, which should be fine for our use case here. 
564 | See https://github.com/aws/aws-sdk-cpp/issues/1649 565 | and https://github.com/aws/aws-sdk-java-v2/issues/1620 566 | */ 567 | List versionsAndDeleteMarkers = new ArrayList<>(); 568 | iterable.stream().forEach(singleResp -> { 569 | versionsAndDeleteMarkers.addAll(singleResp.versions()); 570 | versionsAndDeleteMarkers.addAll(singleResp.deleteMarkers()); 571 | }); 572 | versionsAndDeleteMarkers.sort((o1, o2) -> { 573 | Instant i1 = null; 574 | if (o1 instanceof ObjectVersion objectVersion) { 575 | i1 = objectVersion.lastModified(); 576 | } else if (o1 instanceof DeleteMarkerEntry deleteMarkerEntry) { 577 | i1 = deleteMarkerEntry.lastModified(); 578 | } 579 | Instant i2 = null; 580 | if (o2 instanceof ObjectVersion objectVersion) { 581 | i2 = objectVersion.lastModified(); 582 | } else if (o2 instanceof DeleteMarkerEntry deleteMarkerEntry) { 583 | i2 = deleteMarkerEntry.lastModified(); 584 | } 585 | return i1.compareTo(i2); 586 | }); 587 | objectKeyToVersionAndDeleteMarker.put(commonPrefix.prefix(), versionsAndDeleteMarkers); 588 | } 589 | 590 | Map> result = Collections.synchronizedMap(new LinkedHashMap<>()); 591 | List> futures = new ArrayList<>(); 592 | for (String objectKey : objectKeyToVersionAndDeleteMarker.keySet()) { 593 | futures.add(CompletableFuture.runAsync(() -> { 594 | // This is sorted 595 | String fileName = null; 596 | List fileMetadataForOneKey = new ArrayList<>(); 597 | for (Object versionOrDeleteMarker : objectKeyToVersionAndDeleteMarker.get(objectKey)) { 598 | if (versionOrDeleteMarker instanceof ObjectVersion metadataObjectVersion) { 599 | try { 600 | FileMetadata fileMetadata = readFileMetadata(objectKey, metadataObjectVersion.versionId()); 601 | fileMetadataForOneKey.add(new HistoricalBackedUpFile( 602 | fileMetadata.fileName, 603 | fileMetadata.originalFileSha256Base64, 604 | fileMetadata.originalFileSizeInBytes, 605 | fileMetadata.creationDate, 606 | metadataObjectVersion.isLatest(), 607 | objectKey, 608 | fileMetadata.contentVersionId, 609 | metadataObjectVersion.versionId() 610 | )); 611 | if (fileName == null) { 612 | fileName = fileMetadata.fileName; 613 | } 614 | } catch (Exception e) { 615 | logger.error("error reading metadata object ", e); 616 | throw new RuntimeException(e); 617 | } 618 | } else if (versionOrDeleteMarker instanceof DeleteMarkerEntry deleteMarkerEntry) { 619 | fileMetadataForOneKey.add(new HistoricalDeletedFile( 620 | fileName, 621 | deleteMarkerEntry.lastModified(), 622 | deleteMarkerEntry.isLatest() 623 | )); 624 | } 625 | } 626 | result.put(fileName, fileMetadataForOneKey); 627 | }, threadPoolExecutor)); 628 | } 629 | CompletableFuture.allOf(futures.toArray(new CompletableFuture[0])).get(); 630 | return result; 631 | } 632 | 633 | private List getBackedUpFiles(String backupName) throws 634 | ExecutionException, InterruptedException { 635 | List allPrefixes = getAllObjectKeysInBackup(backupName, false); 636 | 637 | List result = Collections.synchronizedList(new ArrayList<>()); 638 | List> futures = new ArrayList<>(); 639 | for (CommonPrefix commonPrefix : allPrefixes) { 640 | futures.add(CompletableFuture.runAsync(() -> { 641 | try { 642 | result.add(readFileMetadata(commonPrefix.prefix(), null)); 643 | } catch (Exception e) { 644 | logger.error("error reading metadata object ", e); 645 | throw new RuntimeException(e); 646 | } 647 | }, threadPoolExecutor)); 648 | } 649 | CompletableFuture.allOf(futures.toArray(new CompletableFuture[0])).get(); 650 | return result; 651 | } 652 | 653 | private FileMetadata readFileMetadata(String 
keyWithEndingSlash, @Nullable String versionId) throws IOException { 654 | GetObjectRequest getObjectRequest = GetObjectRequest.builder() 655 | .bucket(bucketName) 656 | .key(keyWithEndingSlash + "metadata") 657 | .versionId(versionId) 658 | .build(); 659 | logger.debug("get object for metadata: {}", getObjectRequest); 660 | ResponseInputStream responseResponseInputStream = s3Client.getObject(getObjectRequest); 661 | GetObjectResponse getObjectResponse = responseResponseInputStream.response(); 662 | Instant creationDate = getObjectResponse.lastModified(); 663 | byte[] metaDataEncrypted = IoUtils.toByteArray(responseResponseInputStream); 664 | CryptoResult decrypted = awsCrypto.decryptData(masterKey, metaDataEncrypted); 665 | 666 | String metadata = new String(decrypted.getResult(), StandardCharsets.UTF_8); 667 | // we saved it as comma separated 668 | String[] metadataEntries = metadata.split(","); 669 | 670 | int metadataVersion = Integer.parseInt(metadataEntries[0]); 671 | if (metadataVersion != METADATA_VERSION_1) { 672 | logger.error("unexpected metadata version {}", metadataVersion); 673 | throw new RuntimeException("unexpected metadata version"); 674 | } 675 | String fileName = new String(Base64.getDecoder().decode(metadataEntries[1]), StandardCharsets.UTF_8); 676 | String originalFileSha256Base64 = metadataEntries[2]; 677 | long originalFileSizeInBytes = Long.parseLong(metadataEntries[3]); 678 | String contentVersionId = metadataEntries[4]; 679 | 680 | 681 | FileMetadata fileMetadata = new FileMetadata(fileName, 682 | originalFileSha256Base64, 683 | keyWithEndingSlash, 684 | originalFileSizeInBytes, 685 | creationDate, 686 | contentVersionId 687 | ); 688 | logger.debug("read metadata: {}", fileMetadata); 689 | return fileMetadata; 690 | } 691 | 692 | private List getAllObjectKeysInBackup(String backupName, boolean includeDeleted) { 693 | if (includeDeleted) { 694 | ListObjectVersionsRequest req = ListObjectVersionsRequest.builder() 695 | .bucket(bucketName) 696 | .prefix(backupName + "/") 697 | .delimiter("/") 698 | .build(); 699 | ListObjectVersionsIterable iterable = s3Client.listObjectVersionsPaginator(req); 700 | return iterable.commonPrefixes().stream().toList(); 701 | } else { 702 | ListObjectsV2Response listObjectsV2Response; 703 | String continuationToken = null; 704 | List allPrefixes = new ArrayList<>(); 705 | do { 706 | ListObjectsV2Request listObjectsV2Request = ListObjectsV2Request.builder() 707 | .bucket(bucketName) 708 | .delimiter("/") 709 | .prefix(backupName + "/") 710 | .maxKeys(1_000) 711 | .continuationToken(continuationToken) 712 | .build(); 713 | listObjectsV2Response = s3Client.listObjectsV2(listObjectsV2Request); 714 | allPrefixes.addAll(listObjectsV2Response.commonPrefixes()); 715 | continuationToken = listObjectsV2Response.nextContinuationToken(); 716 | } while (listObjectsV2Response.isTruncated()); 717 | logger.info("Total files backed up: {}", allPrefixes.size()); 718 | return allPrefixes; 719 | } 720 | } 721 | 722 | 723 | private List deleteFiles(List backedUpFiles, Set existingFiles) { 724 | List result = new ArrayList<>(); 725 | List filesToDelete = backedUpFiles.stream().filter(fileMetadata -> !existingFiles.contains(fileMetadata.fileName)).toList(); 726 | logger.info("Found {} deleted files", filesToDelete.size()); 727 | 728 | for (int i = 0; i < filesToDelete.size(); i += 500) { 729 | List keys = new ArrayList<>(); 730 | for (int j = i; j < i + 500 && j < filesToDelete.size(); j++) { 731 | logger.debug("Deleting {} ", 
filesToDelete.get(j).fileName); 732 | result.add(new DeletedFile(filesToDelete.get(j).fileName)); 733 | String contentObjectKey = contentObjectKey(filesToDelete.get(j).objectKey()); 734 | String metadataObjectKey = metadataObjectKey(filesToDelete.get(j).objectKey()); 735 | keys.add(ObjectIdentifier.builder().key(contentObjectKey).build()); 736 | keys.add(ObjectIdentifier.builder().key(metadataObjectKey).build()); 737 | } 738 | DeleteObjectsRequest deleteObjectRequest = DeleteObjectsRequest.builder() 739 | .bucket(bucketName) 740 | .delete(Delete.builder().objects(keys).build()) 741 | .build(); 742 | DeleteObjectsResponse deleteObjectsResponse = s3Client.deleteObjects(deleteObjectRequest); 743 | logger.debug("deleteObjectResponse {}", deleteObjectsResponse); 744 | } 745 | logger.info("Finished deleting {} files", filesToDelete.size()); 746 | return result; 747 | } 748 | 749 | 750 | private static CountFilesResult countFilesToBackup(Path folder) throws IOException { 751 | 752 | try (Stream walk = Files.walk(folder)) { 753 | AtomicLong ignoredFiles = new AtomicLong(); 754 | AtomicLong count = new AtomicLong(); 755 | walk.forEach(path -> { 756 | if (path.toFile().isDirectory()) { 757 | return; 758 | } 759 | if (shouldIgnoreFile(path.toString())) { 760 | ignoredFiles.getAndIncrement(); 761 | return; 762 | } 763 | count.getAndIncrement(); 764 | }); 765 | return new CountFilesResult(count.get(), ignoredFiles.get()); 766 | } 767 | } 768 | 769 | private boolean doesFileNeedBackup(Path root, 770 | Path originalFileRelative, 771 | String originalFileSha256Base64, 772 | Map fileMap 773 | ) { 774 | if (fileMap.containsKey(originalFileRelative.toString())) { 775 | logger.debug("found file {} checking if it changed", originalFileRelative); 776 | String backedUpSha256Base64 = fileMap.get(originalFileRelative.toString()).originalFileSha256Base64; 777 | if (backedUpSha256Base64.equals(originalFileSha256Base64)) { 778 | logger.debug("file {} not changed. 
Not being backed up again.", originalFileRelative); 779 | return false; 780 | } else { 781 | logger.debug("file {} changed and will be backed up.", originalFileRelative); 782 | } 783 | } else { 784 | logger.debug("file {} is new and will be backed up", originalFileRelative); 785 | } 786 | return true; 787 | 788 | 789 | } 790 | 791 | private BackedUpFile encryptAndUploadFile(Path root, 792 | Path originalFileRelative, 793 | Path tempDirectory, 794 | String backupName, 795 | Map fileMap 796 | ) throws 797 | NoSuchAlgorithmException, IOException, ExecutionException, InterruptedException, NoSuchProviderException, InvalidKeyException { 798 | 799 | Path originalFileResolved = root.resolve(originalFileRelative); 800 | logger.debug("start processing {}", originalFileRelative); 801 | String originalFileSha256Base64 = Util.sha256Base64ForFile(originalFileResolved); 802 | if (!doesFileNeedBackup(root, originalFileRelative, originalFileSha256Base64, fileMap)) { 803 | return new UnchangedFile(originalFileRelative.toString()); 804 | } 805 | String fileNameHash = createFileNameHash(originalFileRelative); 806 | Path encryptedFile = encryptFile(tempDirectory, originalFileResolved.toFile(), fileNameHash); 807 | 808 | encryptedFile.toFile().deleteOnExit(); 809 | 810 | long originalFileSize = Files.size(originalFileResolved); 811 | String contentVersionId = createContentObject(backupName, fileNameHash, originalFileRelative, originalFileResolved, encryptedFile, originalFileSize); 812 | createMetadataObject(backupName, fileNameHash, originalFileRelative, originalFileResolved, originalFileSha256Base64, contentVersionId); 813 | 814 | logger.debug("finished file {}", originalFileRelative); 815 | return new ChangedFile(originalFileRelative.toString(), originalFileSha256Base64, originalFileSize); 816 | 817 | } 818 | 819 | private Path encryptFile(Path tempDirectory, File originalFile, String fileNameHash) throws IOException { 820 | logger.debug("encrypt file {}", originalFile); 821 | Map context = Collections.singletonMap("nameHash", fileNameHash); 822 | CryptoInputStream encryptingStream = awsCrypto 823 | .createEncryptingStream(masterKey, new FileInputStream(originalFile), context); 824 | Path encryptedFile = tempDirectory.resolve(fileNameHash); 825 | FileOutputStream out = new FileOutputStream(encryptedFile.toFile()); 826 | IoUtils.copy(encryptingStream, out); 827 | encryptingStream.close(); 828 | out.close(); 829 | return encryptedFile; 830 | } 831 | 832 | private void decryptFile(Path decryptedFile, File encryptedFile) throws IOException { 833 | 834 | CryptoInputStream encryptingStream = awsCrypto 835 | .createDecryptingStream(masterKey, new FileInputStream(encryptedFile)); 836 | 837 | FileOutputStream out = new FileOutputStream(decryptedFile.toFile()); 838 | IoUtils.copy(encryptingStream, out); 839 | encryptingStream.close(); 840 | out.close(); 841 | } 842 | 843 | private String createFileNameHash(Path relativeFileName) { 844 | 845 | /** 846 | * We are using here BouncyCastle directly to calculate AESCMAC hash instead via 847 | * JCE because this works in native images. 
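* Because the CMAC is keyed with secretKeySpec, the hash is deterministic: the same relative
* path always maps to the same S3 key prefix, while the plaintext file name never appears in
* any object key. The MAC is Base64-URL encoded below so it is safe to use inside S3 keys.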
848 | */ 849 | CMac cMac = new CMac(new AESEngine()); 850 | cMac.init(new KeyParameter(secretKeySpec.getEncoded())); 851 | 852 | byte[] filenameBytes = relativeFileName.toString().getBytes(StandardCharsets.UTF_8); 853 | cMac.update(filenameBytes, 0, filenameBytes.length); 854 | byte[] keyBytes = new byte[cMac.getMacSize()]; 855 | cMac.doFinal(keyBytes, 0); 856 | return Base64.getUrlEncoder().encodeToString(keyBytes); 857 | } 858 | 859 | 860 | private void createMetadataObject(String backupName, 861 | String fileNameHash, 862 | Path originalFileRelative, 863 | Path originalFileResolved, 864 | String originalFileSha256Base64, 865 | String contentVersionId) throws IOException { 866 | PutObjectRequest putObjectRequest = PutObjectRequest.builder() 867 | .bucket(bucketName) 868 | .key(backupName + "/" + fileNameHash + "/metadata") 869 | .build(); 870 | long originalFileSizeBytes = Files.size(originalFileResolved); 871 | // comma separated list 872 | String metadata = METADATA_VERSION_1 + 873 | "," + Base64.getEncoder().encodeToString(originalFileRelative.toString().getBytes(StandardCharsets.UTF_8)) + 874 | "," + originalFileSha256Base64 + 875 | "," + originalFileSizeBytes + 876 | "," + contentVersionId; 877 | 878 | 879 | CryptoResult encryptResult = awsCrypto.encryptData(masterKey, metadata.getBytes(StandardCharsets.UTF_8)); 880 | byte[] encryptedFileName = encryptResult.getResult(); 881 | RequestBody requestBody = RequestBody.fromBytes(encryptedFileName); 882 | PutObjectResponse putObjectResponse = s3Client.putObject(putObjectRequest, requestBody); 883 | logger.debug("uploaded metadata object for {}: {}", originalFileRelative, putObjectResponse); 884 | } 885 | 886 | private String createContentObject(String backupName, 887 | String fileNameHash, 888 | Path originalFileRelative, 889 | Path originalFileResolved, 890 | Path encryptedFile, 891 | long originalFileSizeByte) throws IOException, ExecutionException, InterruptedException, NoSuchAlgorithmException { 892 | String objectKey = backupName + "/" + fileNameHash + "/content"; 893 | if (originalFileSizeByte >= configFile.getMultiPartUploadLimitInBytes()) { 894 | logger.debug("file {} is bigger than {} with {} ... 
using multipart upload", 895 | originalFileRelative, 896 | bytesToHumanReadableFormat(configFile.getMultiPartUploadLimitInBytes()), 897 | bytesToHumanReadableFormat(originalFileSizeByte)); 898 | return multipartUpload(objectKey, originalFileRelative, encryptedFile); 899 | } else { 900 | return putObject(originalFileRelative, encryptedFile, objectKey); 901 | } 902 | 903 | } 904 | 905 | private String putObject(Path originalFileRelative, Path encryptedFile, String objectKey) throws 906 | IOException, NoSuchAlgorithmException { 907 | String sha256 = Util.sha256Base64ForFile(encryptedFile); 908 | logger.info("uploading file {}", originalFileRelative); 909 | PutObjectRequest putObjectRequest = PutObjectRequest.builder() 910 | .bucket(bucketName) 911 | .key(objectKey) 912 | .checksumAlgorithm(ChecksumAlgorithm.SHA256) 913 | .checksumSHA256(sha256) 914 | .build(); 915 | PutObjectResponse putObjectResponse = s3Client.putObject(putObjectRequest, encryptedFile); 916 | logger.debug("uploaded content object for {} response {} ", originalFileRelative, putObjectResponse); 917 | return putObjectResponse.versionId(); 918 | } 919 | 920 | private String multipartUpload(String objectKey, 921 | Path originalFileRelative, 922 | Path encryptedFile 923 | ) throws 924 | IOException, ExecutionException, InterruptedException, NoSuchAlgorithmException { 925 | long encryptedFileSize = Files.size(encryptedFile); 926 | int multiPartUploadLimitInBytes = configFile.getMultiPartUploadLimitInBytes(); 927 | int partCount = (int) Math.ceil((double) encryptedFileSize / multiPartUploadLimitInBytes); 928 | logger.info("start multipart upload for {}. Expected to upload {} parts", originalFileRelative, partCount); 929 | CreateMultipartUploadRequest createMultipartUploadRequest = CreateMultipartUploadRequest.builder() 930 | .bucket(bucketName) 931 | .key(objectKey) 932 | .checksumAlgorithm(ChecksumAlgorithm.SHA256) 933 | .build(); 934 | 935 | CreateMultipartUploadResponse response = s3Client.createMultipartUpload(createMultipartUploadRequest); 936 | String uploadId = response.uploadId(); 937 | 938 | ByteBuffer buffer = ByteBuffer.allocate(multiPartUploadLimitInBytes); 939 | int partNumber = 0; 940 | List completedParts = Collections.synchronizedList(new ArrayList<>()); 941 | List> completableFutures = new ArrayList<>(); 942 | try (FileInputStream fileInputStream = new FileInputStream(encryptedFile.toFile()); 943 | FileChannel fileChannel = fileInputStream.getChannel()) { 944 | 945 | int read; 946 | while ((read = fileChannel.read(buffer)) > 0) { 947 | logger.debug("body read {} bytes for encrypted file of {}", read, originalFileRelative); 948 | buffer.flip(); 949 | UploadPartRequest uploadPartRequest = UploadPartRequest.builder() 950 | .bucket(bucketName) 951 | .key(objectKey) 952 | .uploadId(uploadId) 953 | .partNumber(++partNumber) 954 | .checksumAlgorithm(ChecksumAlgorithm.SHA256) 955 | .checksumSHA256(Util.sha256Base64(buffer)) 956 | .build(); 957 | 958 | RequestBody requestBody = RequestBody.fromByteBuffer(buffer); 959 | buffer.clear(); 960 | logger.debug("loading part {}/{} for file {} into memory finished", partNumber, partCount, originalFileRelative); 961 | int finalPartNumber = partNumber; 962 | completableFutures.add(CompletableFuture.runAsync(() -> { 963 | try { 964 | logger.debug("start uploading part {}/{} for file {}", finalPartNumber, partCount, originalFileRelative); 965 | UploadPartResponse uploadPartResponse = s3Client.uploadPart(uploadPartRequest, 966 | requestBody); 967 | 
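// Record the part number, SHA-256 checksum and ETag of the uploaded part; the collected parts
// are later sorted by part number and used to build the CompleteMultipartUploadRequest.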
completedParts.add(CompletedPart.builder().partNumber(finalPartNumber).checksumSHA256(uploadPartResponse.checksumSHA256()).eTag(uploadPartResponse.eTag()).build()); 968 | logger.debug("uploaded part {}/{} for file {}", finalPartNumber, partCount, originalFileRelative); 969 | } catch (Exception e) { 970 | logger.error("error uploading part ", e); 971 | } 972 | }, multipartUploadExecutor)); 973 | } 974 | } catch (IOException e) { 975 | logger.info("error", e); 976 | throw e; 977 | } 978 | logger.debug("waiting for uploading parts finished for {}", originalFileRelative); 979 | CompletableFuture.allOf(completableFutures.toArray(new CompletableFuture[0])).get(); 980 | logger.debug("uploading parts finished for {}", originalFileRelative); 981 | 982 | completedParts.sort(Comparator.comparingInt(CompletedPart::partNumber)); 983 | CompletedMultipartUpload completedMultipartUpload = CompletedMultipartUpload.builder() 984 | .parts(completedParts) 985 | .build(); 986 | 987 | CompleteMultipartUploadRequest completeMultipartUploadRequest = 988 | CompleteMultipartUploadRequest.builder() 989 | .bucket(bucketName) 990 | .key(objectKey) 991 | .uploadId(uploadId) 992 | .multipartUpload(completedMultipartUpload) 993 | .build(); 994 | CompleteMultipartUploadResponse completeMultipartUploadResponse = s3Client.completeMultipartUpload(completeMultipartUploadRequest); 995 | 996 | logger.info("completed multi part {} of total {}", completeMultipartUploadResponse, bytesToHumanReadableFormat(encryptedFileSize)); 997 | return completeMultipartUploadResponse.versionId(); 998 | 999 | } 1000 | 1001 | 1002 | static boolean shouldIgnoreFile(String file) { 1003 | return filePatternsToIgnore.stream().anyMatch(pattern -> pattern.matcher(file).matches()); 1004 | } 1005 | 1006 | private static String contentObjectKey(String objectKeyEndingWithSlash) { 1007 | return objectKeyEndingWithSlash + "content"; 1008 | } 1009 | 1010 | private static String metadataObjectKey(String objectKeyEndingWithSlash) { 1011 | return objectKeyEndingWithSlash + "metadata"; 1012 | } 1013 | 1014 | 1015 | } 1016 | -------------------------------------------------------------------------------- /src/main/java/spb/ListFiles.java: -------------------------------------------------------------------------------- 1 | package spb; 2 | 3 | import org.slf4j.Logger; 4 | import org.slf4j.LoggerFactory; 5 | import picocli.CommandLine; 6 | import spb.Impl.FileMetadata; 7 | 8 | import java.util.List; 9 | import java.util.Map; 10 | import java.util.concurrent.Callable; 11 | import java.util.regex.Pattern; 12 | 13 | import static spb.Util.DIVIDER; 14 | import static spb.Util.bytesToHumanReadableFormat; 15 | 16 | @CommandLine.Command(name = "list", mixinStandardHelpOptions = true, description = "list all backed up files") 17 | public class ListFiles implements Callable { 18 | 19 | private final Logger logger = LoggerFactory.getLogger("SPB"); 20 | 21 | @CommandLine.Option(names = {"--file-pattern"}, description = "A pattern to describe the file", 22 | paramLabel = "file-pattern", required = false) 23 | private String filePattern; 24 | 25 | @CommandLine.Option(names = {"--backup"}, description = "Restrict the search to this backup", 26 | paramLabel = "backup", required = false) 27 | private String backupName; 28 | 29 | @CommandLine.Option(names = {"--historical"}, description = "Includes all past versions in the search", 30 | paramLabel = "historical", required = false) 31 | private boolean historical; 32 | 33 | @Override 34 | public Integer call() throws Exception { 35 | 
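// Without --file-pattern every backed up file is listed; otherwise only files whose relative
// path matches the regular expression are printed.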
Pattern pattern = null; 36 | if (filePattern == null) { 37 | logger.info("no pattern specified ... listing all files"); 38 | } else { 39 | logger.info("using the pattern '{}' to list backed up files", filePattern); 40 | pattern = Pattern.compile(filePattern); 41 | } 42 | Impl impl = new Impl(); 43 | 44 | if (historical) { 45 | logger.info("listing files including history"); 46 | Map>> allBackedUpFiles = impl.allBackedUpFilesIncludingHistory(); 47 | logger.info("Found {} backups", allBackedUpFiles.size()); 48 | logger.info(DIVIDER); 49 | int matchedCount = 0; 50 | for (String backupName : allBackedUpFiles.keySet()) { 51 | Map> filesForOneBackup = allBackedUpFiles.get(backupName); 52 | logger.info("Listing matching files for backup '{}' with {} total files", backupName, filesForOneBackup.size()); 53 | for (String file : filesForOneBackup.keySet()) { 54 | List allFileVersions = filesForOneBackup.get(file); 55 | if (pattern != null) { 56 | if (pattern.matcher(file).matches()) { 57 | printAllVersions(file, allFileVersions); 58 | matchedCount++; 59 | } 60 | } else { 61 | printAllVersions(file, allFileVersions); 62 | matchedCount++; 63 | } 64 | } 65 | } 66 | logger.info(DIVIDER); 67 | if (filePattern != null) { 68 | logger.info("found {} matching files in {} for pattern '{}'", matchedCount, backupName, filePattern); 69 | } else { 70 | logger.info("found {} files in {}", matchedCount, backupName); 71 | } 72 | logger.info(DIVIDER); 73 | } else { 74 | Map> allBackedUpFiles = impl.allBackedUpFiles(); 75 | logger.info("Found {} backups", allBackedUpFiles.size()); 76 | for (String backupName : allBackedUpFiles.keySet()) { 77 | logger.info(DIVIDER); 78 | List fileMetadataList = allBackedUpFiles.get(backupName); 79 | logger.info("Listing matching files for backup '{}' with {} total files", backupName, fileMetadataList.size()); 80 | int matchedCount = 0; 81 | for (FileMetadata fileMetadata : fileMetadataList) { 82 | if (pattern != null) { 83 | if (pattern.matcher(fileMetadata.fileName()).matches()) { 84 | logger.info("file: " + fileMetadata.fileName()); 85 | logger.info("size: " + bytesToHumanReadableFormat(fileMetadata.originalFileSizeInBytes())); 86 | logger.info("creation date: " + fileMetadata.creationDate()); 87 | matchedCount++; 88 | } 89 | } else { 90 | logger.info("file: " + fileMetadata.fileName()); 91 | logger.info("size: " + fileMetadata.originalFileSizeInBytes() + " bytes"); 92 | logger.info("creation date: " + fileMetadata.creationDate()); 93 | matchedCount++; 94 | } 95 | } 96 | logger.info(DIVIDER); 97 | if (filePattern != null) { 98 | logger.info("found {} matching files in {} for pattern '{}'", matchedCount, backupName, filePattern); 99 | } else { 100 | logger.info("found {} files in {}", matchedCount, backupName); 101 | } 102 | logger.info(DIVIDER); 103 | } 104 | } 105 | impl.shutdown(); 106 | return 0; 107 | } 108 | 109 | private void printAllVersions(String file, List allFileVersions) { 110 | logger.info("All {} versions of file: {}", allFileVersions.size(), file); 111 | for (Impl.HistoricalFile historicFile : allFileVersions) { 112 | if (historicFile instanceof Impl.HistoricalFile.HistoricalBackedUpFile historicBackedUpFile) { 113 | logger.info("backup date:{}, ", historicFile.creationDate()); 114 | logger.info("size: {}", bytesToHumanReadableFormat(historicBackedUpFile.originalFileSizeInBytes())); 115 | logger.info("version-id: {}", historicBackedUpFile.metadataVersionId()); 116 | } else if (historicFile instanceof Impl.HistoricalFile.HistoricalDeletedFile historicDeletedFile) { 117 | 
logger.info("file was deleted on {}", historicDeletedFile.creationDate()); 118 | } 119 | } 120 | logger.info(Util.DIVIDER); 121 | } 122 | } 123 | -------------------------------------------------------------------------------- /src/main/java/spb/LogbackConfigurator.java: -------------------------------------------------------------------------------- 1 | package spb; 2 | 3 | import ch.qos.logback.classic.Level; 4 | import ch.qos.logback.classic.Logger; 5 | import ch.qos.logback.classic.LoggerContext; 6 | import ch.qos.logback.classic.layout.TTLLLayout; 7 | import ch.qos.logback.classic.spi.Configurator; 8 | import ch.qos.logback.classic.spi.ILoggingEvent; 9 | import ch.qos.logback.core.ConsoleAppender; 10 | import ch.qos.logback.core.encoder.LayoutWrappingEncoder; 11 | import ch.qos.logback.core.spi.ContextAwareBase; 12 | 13 | public class LogbackConfigurator extends ContextAwareBase implements Configurator { 14 | 15 | public LogbackConfigurator() { 16 | } 17 | 18 | public ExecutionStatus configure(LoggerContext lc) { 19 | /** 20 | * This code will be executed at graalvm build time because 21 | * the Loggers are created at build time 22 | */ 23 | addInfo("Setting up default configuration."); 24 | 25 | // same as 26 | // PatternLayout layout = new PatternLayout(); 27 | // layout.setPattern("%d{HH:mm:ss.SSS} [%thread] %-5level %logger{36} - 28 | // %msg%n"); 29 | TTLLLayout layout = new TTLLLayout(); 30 | layout.setContext(lc); 31 | layout.start(); 32 | 33 | LayoutWrappingEncoder encoder = new LayoutWrappingEncoder(); 34 | encoder.setContext(lc); 35 | encoder.setLayout(layout); 36 | 37 | ConsoleAppender consoleAppender = new ConsoleAppender(); 38 | consoleAppender.setContext(lc); 39 | consoleAppender.setName("console"); 40 | consoleAppender.setEncoder(encoder); 41 | consoleAppender.start(); 42 | 43 | Logger rootLogger = lc.getLogger(Logger.ROOT_LOGGER_NAME); 44 | rootLogger.setLevel(Level.INFO); 45 | rootLogger.addAppender(consoleAppender); 46 | 47 | // let the caller decide 48 | return ExecutionStatus.DO_NOT_INVOKE_NEXT_IF_ANY; 49 | } 50 | 51 | } 52 | -------------------------------------------------------------------------------- /src/main/java/spb/Main.java: -------------------------------------------------------------------------------- 1 | package spb; 2 | 3 | import ch.qos.logback.classic.Level; 4 | import ch.qos.logback.classic.Logger; 5 | import ch.qos.logback.classic.LoggerContext; 6 | import ch.qos.logback.classic.layout.TTLLLayout; 7 | import ch.qos.logback.classic.spi.ILoggingEvent; 8 | import ch.qos.logback.core.ConsoleAppender; 9 | import ch.qos.logback.core.FileAppender; 10 | import ch.qos.logback.core.encoder.LayoutWrappingEncoder; 11 | import ch.qos.logback.core.filter.Filter; 12 | import ch.qos.logback.core.spi.FilterReply; 13 | import org.slf4j.LoggerFactory; 14 | import picocli.CommandLine; 15 | 16 | public class Main { 17 | static org.slf4j.Logger logger = LoggerFactory.getLogger(Main.class); 18 | 19 | public static void main(String[] args) { 20 | configureLogger(); 21 | int exitCode = new CommandLine(new Spb()).execute(args); 22 | System.exit(exitCode); 23 | } 24 | 25 | private static void configureLogger() { 26 | LoggerContext lc = (LoggerContext) LoggerFactory.getILoggerFactory(); 27 | TTLLLayout layout = new TTLLLayout(); 28 | layout.setContext(lc); 29 | layout.start(); 30 | 31 | LayoutWrappingEncoder encoder = new LayoutWrappingEncoder(); 32 | encoder.setContext(lc); 33 | encoder.setLayout(layout); 34 | 35 | FileAppender fileAppender = new FileAppender<>(); 36 | 
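// spb logging (DEBUG and above) goes to ~/spb.log via this file appender, while the console
// appender created in LogbackConfigurator gets a filter below so that only INFO and above
// reach the terminal.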
fileAppender.setName("file"); 37 | fileAppender.setFile(System.getProperty("user.home") + "/spb.log"); 38 | fileAppender.setContext(lc); 39 | fileAppender.setEncoder(encoder); 40 | fileAppender.start(); 41 | 42 | Logger rootLogger = lc.getLogger(Logger.ROOT_LOGGER_NAME); 43 | rootLogger.setLevel(Level.INFO); 44 | rootLogger.addAppender(fileAppender); 45 | Logger spb = lc.getLogger("spb"); 46 | spb.setLevel(Level.DEBUG); 47 | 48 | ConsoleAppender console = (ConsoleAppender) rootLogger.getAppender("console"); 49 | Filter filter = new Filter<>() { 50 | @Override 51 | public FilterReply decide(ILoggingEvent event) { 52 | if (!isStarted()) { 53 | return FilterReply.NEUTRAL; 54 | } 55 | if (event.getLevel().isGreaterOrEqual(Level.INFO)) { 56 | return FilterReply.ACCEPT; 57 | } 58 | return FilterReply.DENY; 59 | } 60 | }; 61 | filter.start(); 62 | console.addFilter(filter); 63 | 64 | } 65 | } 66 | 67 | -------------------------------------------------------------------------------- /src/main/java/spb/PropertiesConfigProvider.java: -------------------------------------------------------------------------------- 1 | package spb; 2 | 3 | import org.slf4j.Logger; 4 | import org.slf4j.LoggerFactory; 5 | 6 | import java.io.File; 7 | import java.io.FileInputStream; 8 | import java.io.FileNotFoundException; 9 | import java.io.IOException; 10 | import java.util.ArrayList; 11 | import java.util.Base64; 12 | import java.util.LinkedHashSet; 13 | import java.util.List; 14 | import java.util.Properties; 15 | import java.util.Set; 16 | 17 | public class PropertiesConfigProvider implements ConfigProvider { 18 | 19 | static int HUNDRED_MB = 100 * 1024 * 1024; 20 | private static final String SPB_CONFIG_FILE = "spb.config"; 21 | private static final String CONFIG_BUCKET_NAME = "bucket.name"; 22 | private static final String CONFIG_SECRET_KEY = "secret.key"; 23 | private final Logger logger = LoggerFactory.getLogger("spb"); 24 | 25 | private byte[] rawSecretKeyBytes; 26 | private String bucketName; 27 | private List foldersBackupConfig; 28 | 29 | public PropertiesConfigProvider() throws IOException { 30 | readConfigFile(); 31 | } 32 | 33 | private void readConfigFile() throws IOException { 34 | Properties properties = new Properties(); 35 | FileInputStream fileInputStream; 36 | File configFile = new File(System.getProperty("user.home"), SPB_CONFIG_FILE); 37 | try { 38 | fileInputStream = new FileInputStream(configFile); 39 | } catch (FileNotFoundException e) { 40 | logger.error("~/{} not found or can't be read", SPB_CONFIG_FILE, e); 41 | throw new RuntimeException("Invalid config"); 42 | } 43 | logger.info("Start reading config file {}", configFile); 44 | properties.load(fileInputStream); 45 | bucketName = properties.getProperty(CONFIG_BUCKET_NAME); 46 | if (bucketName == null || bucketName.length() == 0) { 47 | logger.error("Invalid config: {} expected to specify the S3 bucket name", CONFIG_BUCKET_NAME); 48 | throw new RuntimeException("Invalid config"); 49 | } 50 | logger.info("Using S3 bucket '{}'", bucketName); 51 | String secretKey = properties.getProperty(CONFIG_SECRET_KEY); 52 | if (secretKey == null || secretKey.length() == 0) { 53 | logger.error("Invalid config: {} expected to specify the secret key", CONFIG_SECRET_KEY); 54 | throw new RuntimeException("Invalid config"); 55 | } 56 | try { 57 | rawSecretKeyBytes = Base64.getDecoder().decode(secretKey); 58 | } catch (IllegalArgumentException e) { 59 | logger.error("Invalid config: {} expected to be Base64 encoded", CONFIG_SECRET_KEY); 60 | throw new 
RuntimeException("Invalid config"); 61 | } 62 | if (rawSecretKeyBytes.length != 32) { 63 | logger.error("expected Base64 encoded 256 bits/32 bytes long secret key, but found {} bits/{} bytes", rawSecretKeyBytes.length * 8, rawSecretKeyBytes.length); 64 | throw new RuntimeException("invalid config"); 65 | } 66 | foldersBackupConfig = readFilesToBackupConfig(properties); 67 | logger.info("{} backup folders", foldersBackupConfig.size()); 68 | } 69 | 70 | 71 | private List readFilesToBackupConfig(Properties properties) { 72 | List result = new ArrayList<>(); 73 | Set keys = properties.keySet(); 74 | Set backupPropertyPrefixes = new LinkedHashSet<>(); 75 | for (Object o : keys) { 76 | String key = (String) o; 77 | if (key.matches("backup\\.\\d+\\..+")) { 78 | backupPropertyPrefixes.add(key.substring(0, key.indexOf(".", key.indexOf(".") + 1))); 79 | } 80 | } 81 | 82 | for (String backupPropertyPrefix : backupPropertyPrefixes) { 83 | String backupFolder = properties.getProperty(backupPropertyPrefix + ".folder"); 84 | if (backupFolder == null || backupFolder.length() == 0) { 85 | logger.error("invalid config for {}. Expected {}.folder", backupPropertyPrefix, backupFolder); 86 | continue; 87 | } 88 | String backupName = properties.getProperty(backupPropertyPrefix + ".name"); 89 | if (backupName == null || backupName.length() == 0) { 90 | logger.error("invalid config for {}. Expected {}.name", backupPropertyPrefix, backupFolder); 91 | continue; 92 | } 93 | result.add(new FolderToBackupConfig(backupFolder, backupName)); 94 | } 95 | return result; 96 | } 97 | 98 | public byte[] getRawSecretKeyBytes() { 99 | return rawSecretKeyBytes; 100 | } 101 | 102 | public String getBucketName() { 103 | return bucketName; 104 | } 105 | 106 | public List getFoldersBackupConfig() { 107 | return foldersBackupConfig; 108 | } 109 | 110 | @Override 111 | public int getMultiPartUploadLimitInBytes() { 112 | return HUNDRED_MB; 113 | } 114 | } 115 | -------------------------------------------------------------------------------- /src/main/java/spb/Restore.java: -------------------------------------------------------------------------------- 1 | package spb; 2 | 3 | import picocli.CommandLine; 4 | import picocli.CommandLine.Command; 5 | import picocli.CommandLine.Option; 6 | 7 | import java.io.File; 8 | import java.util.concurrent.Callable; 9 | 10 | @Command(name = "restore", mixinStandardHelpOptions = true, description = "restore previously backed up files") 11 | public class Restore implements Callable { 12 | 13 | @Option(names = {"--target-folder"}, description = "The folder to restore files into", 14 | paramLabel = "target-folder", required = true) 15 | private File targetFolder; 16 | 17 | @Option(names = {"--backup-name"}, description = "The name of the backup", paramLabel = "backupName", required = true) 18 | private String backupName; 19 | 20 | @Option(names = {"--file-name"}, description = "A specific file to restore", paramLabel = "file", required = false) 21 | private String file; 22 | 23 | @CommandLine.Option(names = {"--version-id"}, description = "The version of a specific file to restore. 
" + 24 | "Only allowed in combination with --file-name", 25 | paramLabel = "versionId", required = false) 26 | private String versionId; 27 | 28 | 29 | @Override 30 | public Integer call() throws Exception { 31 | Impl impl = new Impl(); 32 | if (file == null) { 33 | impl.restoreFullBackup(backupName, targetFolder.toPath()); 34 | } else { 35 | if (versionId == null) { 36 | impl.restoreFile(targetFolder.toPath(), backupName, file); 37 | } else { 38 | impl.restoreHistoricalFile(backupName, targetFolder.toPath(), file, versionId); 39 | } 40 | } 41 | impl.shutdown(); 42 | return 0; 43 | } 44 | } 45 | -------------------------------------------------------------------------------- /src/main/java/spb/Spb.java: -------------------------------------------------------------------------------- 1 | package spb; 2 | 3 | import picocli.CommandLine; 4 | 5 | import java.util.concurrent.Callable; 6 | 7 | @CommandLine.Command(name = "spb", 8 | mixinStandardHelpOptions = true, 9 | subcommands = {Backup.class, Restore.class, ListFiles.class, Verify.class, GenerateKey.class}, 10 | versionProvider = SpbVersionProvider.class, 11 | description = "Simple and secure personal backup") 12 | public class Spb implements Callable { 13 | 14 | @CommandLine.Spec 15 | CommandLine.Model.CommandSpec spec; 16 | 17 | @Override 18 | public Integer call() throws Exception { 19 | spec.commandLine().usage(System.err); 20 | return 0; 21 | } 22 | } 23 | -------------------------------------------------------------------------------- /src/main/java/spb/SpbVersionProvider.java: -------------------------------------------------------------------------------- 1 | package spb; 2 | 3 | import picocli.CommandLine; 4 | 5 | public class SpbVersionProvider implements CommandLine.IVersionProvider { 6 | @Override 7 | public String[] getVersion() throws Exception { 8 | Package aPackage = Main.class.getPackage(); 9 | String implementationVersion = aPackage.getImplementationVersion(); 10 | return new String[]{implementationVersion}; 11 | } 12 | } 13 | -------------------------------------------------------------------------------- /src/main/java/spb/Util.java: -------------------------------------------------------------------------------- 1 | package spb; 2 | 3 | import java.io.BufferedInputStream; 4 | import java.io.File; 5 | import java.io.FileInputStream; 6 | import java.io.IOException; 7 | import java.nio.ByteBuffer; 8 | import java.nio.file.Files; 9 | import java.nio.file.Path; 10 | import java.security.MessageDigest; 11 | import java.security.NoSuchAlgorithmException; 12 | import java.util.Base64; 13 | import java.util.Comparator; 14 | 15 | public class Util { 16 | 17 | private static final int TEN_MB = 10 * 1024 * 1024; 18 | public static String DIVIDER = "***************"; 19 | 20 | public static String sha256Base64ForFile(Path path) throws IOException, NoSuchAlgorithmException { 21 | FileInputStream is = new FileInputStream(path.toFile()); 22 | 23 | BufferedInputStream bis = new BufferedInputStream(is); 24 | MessageDigest messageDigest = MessageDigest.getInstance("SHA-256"); 25 | byte[] buffer = new byte[TEN_MB]; 26 | int bytesRead; 27 | while ((bytesRead = bis.read(buffer, 0, buffer.length)) != -1) { 28 | messageDigest.update(buffer, 0, bytesRead); 29 | } 30 | return Base64.getEncoder().encodeToString(messageDigest.digest()); 31 | } 32 | 33 | public static String sha256Base64(ByteBuffer input) throws NoSuchAlgorithmException { 34 | MessageDigest messageDigest = MessageDigest.getInstance("SHA-256"); 35 | messageDigest.update(input); 36 | 
input.rewind(); 37 | return Base64.getEncoder().encodeToString(messageDigest.digest()); 38 | 39 | } 40 | 41 | public static void deleteFolderRecursively(Path path) throws IOException { 42 | Files.walk(path) 43 | .sorted(Comparator.reverseOrder()) 44 | .map(Path::toFile) 45 | .forEach(File::delete); 46 | } 47 | 48 | public static String bytesToHumanReadableFormat(long bytes) { 49 | if (bytes < 0) { 50 | throw new RuntimeException("should not happen: negative file size"); 51 | } 52 | long kilobyte = 1024; 53 | long megabyte = kilobyte * 1024; 54 | long gigabyte = megabyte * 1024; 55 | long terabyte = gigabyte * 1024; 56 | 57 | if (bytes < kilobyte) { 58 | return bytes + "b"; 59 | } else if (bytes < megabyte) { 60 | return (bytes / kilobyte) + "kb"; 61 | } else if (bytes < gigabyte) { 62 | return (bytes / megabyte) + "mb"; 63 | } else if (bytes < terabyte) { 64 | return (bytes / gigabyte) + "gb"; 65 | } else { 66 | return (bytes / terabyte) + "tb"; 67 | } 68 | } 69 | 70 | } 71 | -------------------------------------------------------------------------------- /src/main/java/spb/Verify.java: -------------------------------------------------------------------------------- 1 | package spb; 2 | 3 | import picocli.CommandLine.Command; 4 | 5 | import java.util.concurrent.Callable; 6 | 7 | @Command(name = "verify", mixinStandardHelpOptions = true, description = "verify backups") 8 | public class Verify implements Callable { 9 | 10 | @Override 11 | public Integer call() throws Exception { 12 | Impl impl = new Impl(); 13 | impl.verifyAllBackup(); 14 | impl.shutdown(); 15 | return 0; 16 | } 17 | } 18 | -------------------------------------------------------------------------------- /src/main/resources/META-INF/services/ch.qos.logback.classic.spi.Configurator: -------------------------------------------------------------------------------- 1 | spb.LogbackConfigurator -------------------------------------------------------------------------------- /src/test/java/RandomUUID.java: -------------------------------------------------------------------------------- 1 | import java.util.UUID; 2 | 3 | public class RandomUUID { 4 | 5 | public static void main(String[] args) { 6 | System.out.println(UUID.randomUUID().toString()); 7 | 8 | System.out.println("test: " + (int) Math.ceil((float) 120 / 100)); 9 | } 10 | } 11 | -------------------------------------------------------------------------------- /src/test/java/spb/E2ETest.java: -------------------------------------------------------------------------------- 1 | package spb; 2 | 3 | import org.junit.jupiter.api.AfterAll; 4 | import org.junit.jupiter.api.BeforeAll; 5 | import org.junit.jupiter.api.Test; 6 | import spb.BackupFolderSummary.BackedUpFile; 7 | import spb.BackupFolderSummary.BackedUpFile.ChangedFile; 8 | import spb.BackupFolderSummary.BackedUpFile.UnchangedFile; 9 | import spb.Impl.HistoricalFile; 10 | 11 | import java.io.IOException; 12 | import java.io.OutputStream; 13 | import java.nio.ByteBuffer; 14 | import java.nio.file.Files; 15 | import java.nio.file.Path; 16 | import java.nio.file.StandardOpenOption; 17 | import java.security.InvalidKeyException; 18 | import java.security.NoSuchAlgorithmException; 19 | import java.security.NoSuchProviderException; 20 | import java.security.SecureRandom; 21 | import java.util.ArrayList; 22 | import java.util.Base64; 23 | import java.util.LinkedHashMap; 24 | import java.util.List; 25 | import java.util.Map; 26 | import java.util.Random; 27 | import java.util.concurrent.ExecutionException; 28 | 29 | import static 
java.nio.file.Files.createDirectory; 30 | import static java.nio.file.Files.createFile; 31 | import static org.assertj.core.api.Assertions.assertThat; 32 | 33 | public class E2ETest { 34 | 35 | static Path rootTestDataFolder; 36 | 37 | static final int HUNDRED_BYTE = 100; 38 | static final int ONE_MB = 1024 * 1024; 39 | static final int TEN_MB = 10 * 1024 * 1024; 40 | 41 | static ByteBuffer randomData1Mb = ByteBuffer.allocate(ONE_MB); 42 | static ByteBuffer randomData10Mb = ByteBuffer.allocate(TEN_MB); 43 | static ByteBuffer randomData100b = ByteBuffer.allocate(HUNDRED_BYTE); 44 | 45 | static byte[] secretKey; 46 | private static Path backupsOne; 47 | private static List backupsOneFiles = new ArrayList<>(); 48 | private static Map fileToSha256Base64 = new LinkedHashMap<>(); 49 | 50 | private static String bucketName; 51 | private static String file6Key; 52 | private static String file1Key; 53 | private static String largeFileKey; 54 | private static String file2Key; 55 | 56 | @BeforeAll 57 | static void init() throws IOException, NoSuchAlgorithmException { 58 | readBucketName(); 59 | rootTestDataFolder = Files.createTempDirectory("spb-e2e"); 60 | createSecretKey(); 61 | createRandomData(); 62 | createSimpleBackupsFolder(); 63 | 64 | } 65 | 66 | private static void readBucketName() { 67 | bucketName = System.getenv("BUCKET_NAME"); 68 | if (bucketName == null || bucketName.length() == 0) { 69 | System.err.println("tests requires a BUCKET_NAME env value"); 70 | throw new RuntimeException("tests requires a BUCKET_NAME env value"); 71 | } 72 | System.out.println("executing test again bucket: " + bucketName); 73 | } 74 | 75 | private static void createSecretKey() { 76 | secretKey = new byte[32]; 77 | new SecureRandom().nextBytes(secretKey); 78 | } 79 | 80 | private static void createRandomData() { 81 | Random random = new Random(); 82 | random.nextBytes(randomData1Mb.array()); 83 | random.nextBytes(randomData100b.array()); 84 | random.nextBytes(randomData10Mb.array()); 85 | } 86 | 87 | private static void createSimpleBackupsFolder() throws IOException, NoSuchAlgorithmException { 88 | /** 89 | * A couple of files in different sizes in different folders. 
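* Layout created below (sizes follow from the write helpers):
*   backups-1/file-1.txt                         100 bytes
*   backups-1/file-2.txt                          10 MB
*   backups-1/file-large.txt                      30 MB
*   backups-1/folder-1/file-4.txt                 20 MB
*   backups-1/folder-1/sub-folder-1/file-5.txt     3 MB
*   backups-1/folder-2/file-6.txt                300 bytes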
90 | */ 91 | backupsOne = rootTestDataFolder.resolve("backups-1"); 92 | createDirectory(backupsOne); 93 | Path file1 = createFile(backupsOne.resolve("file-1.txt")); 94 | backupsOneFiles.add(file1); 95 | writeFile100Bytes(file1, 1); 96 | file1Key = backupsOne.relativize(file1).toString(); 97 | fileToSha256Base64.put(file1Key, Util.sha256Base64ForFile(file1)); 98 | 99 | Path file2 = createFile(backupsOne.resolve("file-2.txt")); 100 | backupsOneFiles.add(file2); 101 | writeFileTenMB(file2, 1); 102 | file2Key = backupsOne.relativize(file2).toString(); 103 | fileToSha256Base64.put(file2Key, Util.sha256Base64ForFile(file2)); 104 | 105 | Path largeFile = createFile(backupsOne.resolve("file-large.txt")); 106 | backupsOneFiles.add(largeFile); 107 | writeFileTenMB(largeFile, 3); 108 | largeFileKey = backupsOne.relativize(largeFile).toString(); 109 | fileToSha256Base64.put(largeFileKey, Util.sha256Base64ForFile(largeFile)); 110 | 111 | Path folder1 = createDirectory(backupsOne.resolve("folder-1")); 112 | 113 | Path file4 = createFile(folder1.resolve("file-4.txt")); 114 | backupsOneFiles.add(file4); 115 | writeFileTenMB(file4, 2); 116 | fileToSha256Base64.put(backupsOne.relativize(file4).toString(), Util.sha256Base64ForFile(file4)); 117 | 118 | Path subFolder1 = createDirectory(folder1.resolve("sub-folder-1")); 119 | 120 | Path file5 = createFile(subFolder1.resolve("file-5.txt")); 121 | writeFileMB(file5, 3); 122 | backupsOneFiles.add(file5); 123 | fileToSha256Base64.put(backupsOne.relativize(file5).toString(), Util.sha256Base64ForFile(file5)); 124 | 125 | Path folder2 = createDirectory(backupsOne.resolve("folder-2")); 126 | 127 | Path file6 = createFile(folder2.resolve("file-6.txt")); 128 | backupsOneFiles.add(file6); 129 | writeFile100Bytes(file6, 3); 130 | file6Key = backupsOne.relativize(file6).toString(); 131 | fileToSha256Base64.put(file6Key, Util.sha256Base64ForFile(file6)); 132 | 133 | } 134 | 135 | private static void writeFile100Bytes(Path path, int hundredBytesCount) throws IOException { 136 | try (OutputStream outputStream = Files.newOutputStream(path)) { 137 | for (int i = 0; i < hundredBytesCount; i++) { 138 | outputStream.write(randomData100b.array(), 0, randomData100b.array().length); 139 | } 140 | } 141 | 142 | } 143 | 144 | private static void writeFileMB(Path path, int mbCount) throws IOException { 145 | try (OutputStream outputStream = Files.newOutputStream(path)) { 146 | for (int i = 0; i < mbCount; i++) { 147 | outputStream.write(randomData1Mb.array(), 0, randomData1Mb.array().length); 148 | } 149 | } 150 | 151 | } 152 | 153 | private static void writeFileTenMB(Path path, int tenMbCount) throws IOException { 154 | try (OutputStream outputStream = Files.newOutputStream(path)) { 155 | for (int i = 0; i < tenMbCount; i++) { 156 | outputStream.write(randomData10Mb.array(), 0, randomData10Mb.array().length); 157 | } 158 | } 159 | 160 | } 161 | 162 | @AfterAll 163 | static void deleteTestData() throws IOException { 164 | Util.deleteFolderRecursively(rootTestDataFolder); 165 | } 166 | 167 | 168 | @Test 169 | void backupVerifyAndChangeFolder() throws NoSuchAlgorithmException, NoSuchProviderException, InvalidKeyException, IOException, ExecutionException, InterruptedException { 170 | String backupName = createRandomBackupName(); 171 | 172 | FolderToBackupConfig folderToBackupConfig = new FolderToBackupConfig(backupsOne.toString(), backupName); 173 | 174 | TestConfigProvider testConfigProvider = new TestConfigProvider( 175 | secretKey, 176 | bucketName, 177 | List.of(folderToBackupConfig), 
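// 10 MB multipart threshold: the 10, 20 and 30 MB test files take the multipart upload path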
178 | TEN_MB); 179 | Impl impl = new Impl(testConfigProvider); 180 | List backupFolderSummaries = impl.backupFolders(false); 181 | assertThat(backupFolderSummaries).hasSize(1); 182 | BackupFolderSummary backupFolderSummary = backupFolderSummaries.get(0); 183 | assertThat(backupFolderSummary.backedUpFiles()).hasSize(6); 184 | assertThat(backupFolderSummary.deletedFiles()).isEmpty(); 185 | 186 | // all files are changed files 187 | for (BackedUpFile backedUpFile : backupFolderSummary.backedUpFiles()) { 188 | assertThat(backedUpFile).isInstanceOf(ChangedFile.class); 189 | ChangedFile changedFile = (ChangedFile) backedUpFile; 190 | String expectedSha256 = fileToSha256Base64.get(changedFile.relativePath()); 191 | assertThat(expectedSha256).isNotNull(); 192 | assertThat(changedFile.sha256Base64()).isEqualTo(expectedSha256); 193 | } 194 | impl.verifyAllBackup(); 195 | 196 | // now delete two files and change another one 197 | backupsOneFiles.get(0).toFile().delete(); 198 | backupsOneFiles.get(2).toFile().delete(); 199 | 200 | Files.writeString(backupsOneFiles.get(backupsOneFiles.size() - 1), 201 | "hello", 202 | StandardOpenOption.APPEND); 203 | 204 | List backupFolderSummaries1 = impl.backupFolders(false); 205 | assertThat(backupFolderSummaries1).hasSize(1); 206 | BackupFolderSummary backupFolderSummary1 = backupFolderSummaries1.get(0); 207 | assertThat(backupFolderSummary1.backedUpFiles()).hasSize(4); 208 | assertThat(backupFolderSummary1.backedUpFiles() 209 | .stream() 210 | .filter(backedUpFile -> backedUpFile instanceof UnchangedFile)).hasSize(3); 211 | assertThat(backupFolderSummary1.backedUpFiles() 212 | .stream() 213 | .filter(backedUpFile -> backedUpFile instanceof ChangedFile)).hasSize(1); 214 | assertThat(backupFolderSummary1.deletedFiles()).hasSize(2); 215 | 216 | impl.verifyAllBackup(); 217 | 218 | Map>> allBackedUpFilesIncludingHistory = impl.allBackedUpFilesIncludingHistory(); 219 | assertThat(allBackedUpFilesIncludingHistory).hasSize(1); 220 | assertThat(allBackedUpFilesIncludingHistory.keySet().iterator().next()).isEqualTo(backupName); 221 | 222 | Map> history = allBackedUpFilesIncludingHistory.get(backupName); 223 | assertThat(history).hasSize(6); 224 | assertThat(history.get(file1Key)).hasSize(2); 225 | assertThat(history.get(file1Key).get(0)).isInstanceOf(HistoricalFile.HistoricalBackedUpFile.class); 226 | assertThat(history.get(file1Key).get(1)).isInstanceOf(HistoricalFile.HistoricalDeletedFile.class); 227 | 228 | assertThat(history.get(file2Key)).hasSize(1); 229 | assertThat(history.get(file2Key).get(0)).isInstanceOf(HistoricalFile.HistoricalBackedUpFile.class); 230 | 231 | assertThat(history.get(largeFileKey)).hasSize(2); 232 | assertThat(history.get(largeFileKey).get(0)).isInstanceOf(HistoricalFile.HistoricalBackedUpFile.class); 233 | assertThat(history.get(largeFileKey).get(1)).isInstanceOf(HistoricalFile.HistoricalDeletedFile.class); 234 | 235 | assertThat(history.get(file6Key)).hasSize(2); 236 | assertThat(history.get(file6Key).get(0)).isInstanceOf(HistoricalFile.HistoricalBackedUpFile.class); 237 | assertThat(history.get(file6Key).get(1)).isInstanceOf(HistoricalFile.HistoricalBackedUpFile.class); 238 | 239 | /** 240 | * Restore historic versions of file-large 241 | */ 242 | Path tempDirectory = Files.createTempDirectory("spb-e2e"); 243 | impl.restoreFile((HistoricalFile.HistoricalBackedUpFile) history.get("file-large.txt").get(0), tempDirectory); 244 | 245 | Path restoredFileLarge = tempDirectory.resolve("file-large.txt"); 246 | String restoredFileSha256Base64FileLarge 
= Util.sha256Base64ForFile(restoredFileLarge); 247 | assertThat(restoredFileSha256Base64FileLarge).isEqualTo(fileToSha256Base64.get("file-large.txt")); 248 | 249 | /** 250 | * Restore historic versions of file-6 251 | */ 252 | HistoricalFile.HistoricalBackedUpFile historicBackedUpFile = (HistoricalFile.HistoricalBackedUpFile) history.get(file6Key).get(0); 253 | impl.restoreFile(historicBackedUpFile, tempDirectory); 254 | 255 | Path restoredFile6 = tempDirectory.resolve(file6Key); 256 | String restoredFileSha256Base64File6 = Util.sha256Base64ForFile(restoredFile6); 257 | assertThat(restoredFileSha256Base64File6).isEqualTo(fileToSha256Base64.get(file6Key)); 258 | 259 | 260 | } 261 | 262 | private static String createRandomBackupName() { 263 | byte[] randomBytes = new byte[10]; 264 | new Random().nextBytes(randomBytes); 265 | String suffix = Base64.getUrlEncoder().encodeToString(randomBytes); 266 | return "spb-e2e-test-" + suffix; 267 | } 268 | 269 | @Test 270 | void dryRun() throws IOException, ExecutionException, InterruptedException { 271 | String backupName = createRandomBackupName(); 272 | 273 | FolderToBackupConfig folderToBackupConfig = new FolderToBackupConfig(backupsOne.toString(), backupName); 274 | 275 | TestConfigProvider testConfigProvider = new TestConfigProvider( 276 | secretKey, 277 | bucketName, 278 | List.of(folderToBackupConfig), 279 | TEN_MB); 280 | Impl impl = new Impl(testConfigProvider); 281 | List backupFolderSummaries = impl.backupFolders(true); 282 | assertThat(backupFolderSummaries).hasSize(1); 283 | BackupFolderSummary backupFolderSummary = backupFolderSummaries.get(0); 284 | assertThat(backupFolderSummary.backedUpFiles()).hasSize(6); 285 | assertThat(backupFolderSummary.deletedFiles()).isEmpty(); 286 | 287 | // all files are changed files 288 | for (BackedUpFile backedUpFile : backupFolderSummary.backedUpFiles()) { 289 | assertThat(backedUpFile).isInstanceOf(ChangedFile.class); 290 | ChangedFile changedFile = (ChangedFile) backedUpFile; 291 | String expectedSha256 = fileToSha256Base64.get(changedFile.relativePath()); 292 | assertThat(expectedSha256).isNotNull(); 293 | assertThat(changedFile.sha256Base64()).isEqualTo(expectedSha256); 294 | } 295 | 296 | 297 | } 298 | } 299 | -------------------------------------------------------------------------------- /src/test/java/spb/ImplTest.java: -------------------------------------------------------------------------------- 1 | package spb; 2 | 3 | import org.junit.jupiter.api.Test; 4 | 5 | import static org.assertj.core.api.Assertions.assertThat; 6 | import static spb.Impl.shouldIgnoreFile; 7 | 8 | public class ImplTest { 9 | 10 | @Test 11 | void testDefaultPattern() { 12 | assertThat(shouldIgnoreFile(".DS_Store")).isTrue(); 13 | assertThat(shouldIgnoreFile("folder/.DS_Store")).isTrue(); 14 | assertThat(shouldIgnoreFile("/folder1/folder2/folder/.DS_Store")).isTrue(); 15 | 16 | assertThat(shouldIgnoreFile(".DS_store")).isFalse(); 17 | assertThat(shouldIgnoreFile("DS_Store")).isFalse(); 18 | assertThat(shouldIgnoreFile("SomethingElse.DS_Store")).isFalse(); 19 | } 20 | 21 | } 22 | -------------------------------------------------------------------------------- /src/test/java/spb/TestConfigProvider.java: -------------------------------------------------------------------------------- 1 | package spb; 2 | 3 | import java.util.List; 4 | 5 | public class TestConfigProvider implements ConfigProvider { 6 | 7 | private final byte[] rawSecretKeyBytes; 8 | private final String bucketName; 9 | private final List foldersBackupConfig; 
10 | private final int multiPartUploadLimitInBytes; 11 | 12 | public TestConfigProvider(byte[] rawSecretKeyBytes, 13 | String bucketName, 14 | List<FolderToBackupConfig> foldersBackupConfig, 15 | int multiPartUploadLimitInBytes 16 | ) { 17 | this.rawSecretKeyBytes = rawSecretKeyBytes; 18 | this.bucketName = bucketName; 19 | this.foldersBackupConfig = foldersBackupConfig; 20 | this.multiPartUploadLimitInBytes = multiPartUploadLimitInBytes; 21 | } 22 | 23 | @Override 24 | public byte[] getRawSecretKeyBytes() { 25 | return rawSecretKeyBytes; 26 | } 27 | 28 | @Override 29 | public String getBucketName() { 30 | return bucketName; 31 | } 32 | 33 | @Override 34 | public List<FolderToBackupConfig> getFoldersBackupConfig() { 35 | return foldersBackupConfig; 36 | } 37 | 38 | @Override 39 | public int getMultiPartUploadLimitInBytes() { 40 | return multiPartUploadLimitInBytes; 41 | } 42 | } 43 | --------------------------------------------------------------------------------
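# Illustrative ~/spb.config sketch (placeholder values, not part of this repository), using the
# keys read by PropertiesConfigProvider: bucket.name, secret.key (Base64 encoded 256-bit key)
# and one backup.N.folder / backup.N.name pair per folder to back up.
#
# bucket.name=my-backup-bucket
# secret.key=(Base64 encoded 32 byte / 256 bit key)
# backup.1.folder=/home/alice/Documents
# backup.1.name=documents
# backup.2.folder=/home/alice/Photos
# backup.2.name=photos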