├── .envrc ├── .github ├── stale.yml └── workflows │ ├── cicd.yml │ ├── int_test.yml │ └── publish.yml ├── .gitignore ├── Cargo.lock ├── Cargo.toml ├── Contributing.md ├── LICENSE.txt ├── README.md ├── build.rs ├── ci ├── README.md ├── docker-compose.yml ├── docspell-server │ ├── Dockerfile │ └── docspell-0.24.0-dump-h2-1.24.0-2021-07-13-2307.sql ├── docspell.conf └── dsc-config.toml ├── default.nix ├── docker ├── build.sh └── dsc.dockerfile ├── flake.lock ├── flake.nix ├── nix ├── module.nix ├── nixosConfigurations │ └── default.nix └── tests │ ├── buildvm.sh │ ├── startvm.sh │ └── test-config.nix ├── renovate.json ├── run-tests.sh ├── shell.nix ├── src ├── cli.rs ├── cli │ ├── cmd.rs │ ├── cmd │ │ ├── admin.rs │ │ ├── admin │ │ │ ├── convert_all_pdfs.rs │ │ │ ├── disable_2fa.rs │ │ │ ├── file_clone_repository.rs │ │ │ ├── file_integrity_check.rs │ │ │ ├── generate_previews.rs │ │ │ ├── recreate_index.rs │ │ │ └── reset_password.rs │ │ ├── bookmark.rs │ │ ├── bookmark │ │ │ └── get.rs │ │ ├── cleanup.rs │ │ ├── download.rs │ │ ├── export.rs │ │ ├── file_exists.rs │ │ ├── generate_completions.rs │ │ ├── geninvite.rs │ │ ├── item.rs │ │ ├── item │ │ │ ├── fields.rs │ │ │ ├── get.rs │ │ │ └── tags.rs │ │ ├── login.rs │ │ ├── logout.rs │ │ ├── open_item.rs │ │ ├── register.rs │ │ ├── search.rs │ │ ├── search_summary.rs │ │ ├── source.rs │ │ ├── source │ │ │ └── list.rs │ │ ├── upload.rs │ │ ├── version.rs │ │ ├── view.rs │ │ └── watch.rs │ ├── opts.rs │ ├── sink.rs │ └── table.rs ├── config.rs ├── error.rs ├── http.rs ├── http │ ├── payload.rs │ ├── proxy.rs │ ├── session.rs │ └── util.rs ├── lib.rs ├── main.rs ├── util.rs └── util │ ├── digest.rs │ ├── dupes.rs │ ├── file.rs │ └── pass.rs └── tests ├── common └── mod.rs ├── integration.rs └── login.rs /.envrc: -------------------------------------------------------------------------------- 1 | use flake 2 | -------------------------------------------------------------------------------- /.github/stale.yml: 
-------------------------------------------------------------------------------- 1 | # Number of days of inactivity before an issue becomes stale 2 | daysUntilStale: 30 3 | # Number of days of inactivity before a stale issue is closed 4 | daysUntilClose: 7 5 | onlyLabels: 6 | - question 7 | # Label to use when marking an issue as stale 8 | staleLabel: stale 9 | # Comment to post when marking an issue as stale. Set to `false` to disable 10 | markComment: > 11 | This issue has been automatically marked as stale because it has not 12 | had recent activity. It will be closed if no further activity 13 | occurs. This only applies to 'question' issues. Always feel free to 14 | reopen or create new issues. Thank you! 15 | # Comment to post when closing a stale issue. Set to `false` to disable 16 | closeComment: false 17 | -------------------------------------------------------------------------------- /.github/workflows/cicd.yml: -------------------------------------------------------------------------------- 1 | name: CICD 2 | on: 3 | pull_request: 4 | jobs: 5 | cicd: 6 | runs-on: ubuntu-latest 7 | strategy: 8 | fail-fast: true 9 | matrix: 10 | build: 11 | - aarch64 12 | - aarch64-musl 13 | - armv7 14 | - i686 15 | - amd64-musl 16 | include: 17 | - build: aarch64 18 | os: ubuntu-latest 19 | target: aarch64-unknown-linux-gnu 20 | use-cross: true 21 | features: "--no-default-features --features rustls" 22 | - build: aarch64-musl 23 | os: ubuntu-latest 24 | target: aarch64-unknown-linux-musl 25 | use-cross: true 26 | features: "--no-default-features --features rustls" 27 | - build: armv7 28 | os: ubuntu-latest 29 | target: armv7-unknown-linux-gnueabihf 30 | use-cross: true 31 | features: "--no-default-features --features rustls" 32 | - build: i686 33 | os: ubuntu-latest 34 | target: i686-unknown-linux-gnu 35 | use-cross: true 36 | features: "--no-default-features --features rustls" 37 | - build: amd64-musl 38 | os: ubuntu-latest 39 | target: x86_64-unknown-linux-musl 40 | 
use-cross: true 41 | features: "--no-default-features --features rustls" 42 | steps: 43 | - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # v4 44 | with: 45 | fetch-depth: 0 46 | 47 | - name: Install prerequisites 48 | shell: bash 49 | run: | 50 | case ${{ matrix.target }} in 51 | arm-unknown-linux-gnueabihf) sudo apt-get -y update ; sudo apt-get -y install gcc-arm-linux-gnueabihf ;; 52 | armv7-unknown-linux-gnueabihf) sudo apt-get -y update ; sudo apt-get -y install gcc-arm-linux-gnueabihf ;; 53 | aarch64-unknown-linux-gnu) sudo apt-get -y update ; sudo apt-get -y install gcc-aarch64-linux-gnu ;; 54 | aarch64-unknown-linux-musl) sudo apt-get -y update; sudo apt-get -y install gcc-aarch64-linux-gnu ;; 55 | esac 56 | 57 | - name: Extract crate information 58 | shell: bash 59 | run: | 60 | echo "PROJECT_NAME=$(sed -n 's/^name = "\(.*\)"/\1/p' Cargo.toml | head -n1)" >> $GITHUB_ENV 61 | echo "PROJECT_MAINTAINER=$(sed -n 's/^authors = \["\(.*\)"\]/\1/p' Cargo.toml)" >> $GITHUB_ENV 62 | echo "PROJECT_HOMEPAGE=$(sed -n 's/^homepage = "\(.*\)"/\1/p' Cargo.toml)" >> $GITHUB_ENV 63 | PROJECT_VERSION="$(sed -n 's/^version = "\(.*\)"/\1/p' Cargo.toml | head -n1)" 64 | echo "PROJECT_VERSION=$PROJECT_VERSION" >> $GITHUB_ENV 65 | if [[ $PROJECT_VERSION == *-pre ]]; then 66 | echo "VERSION_SUFFIX=nightly" >> $GITHUB_ENV 67 | else 68 | echo "VERSION_SUFFIX=$PROJECT_VERSION" >> $GITHUB_ENV 69 | fi 70 | 71 | - name: Install Rust toolchain 72 | uses: actions-rs/toolchain@v1 73 | with: 74 | toolchain: stable 75 | target: ${{ matrix.target }} 76 | override: true 77 | default: true 78 | 79 | - name: Show version information (Rust, cargo, GCC) 80 | shell: bash 81 | run: | 82 | gcc --version || true 83 | rustup -V 84 | rustup toolchain list 85 | rustup default 86 | cargo -V 87 | rustc -V 88 | 89 | - name: Check format 90 | run: cargo fmt -- --check 91 | 92 | - name: Build 93 | uses: actions-rs/cargo@v1 94 | with: 95 | use-cross: ${{ matrix.use-cross }} 96 | command: 
build 97 | args: ${{ matrix.features }} --release --target=${{ matrix.target }} 98 | 99 | cicd-win: 100 | runs-on: windows-latest 101 | 102 | steps: 103 | - name: Checkout 104 | uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # v4 105 | with: 106 | fetch-depth: 0 107 | 108 | - name: Install Rust toolchain 109 | uses: actions-rs/toolchain@v1 110 | with: 111 | toolchain: stable 112 | override: true 113 | default: true 114 | 115 | - name: Extract crate information 116 | shell: bash 117 | run: | 118 | echo "PROJECT_NAME=$(sed -n 's/^name = "\(.*\)"/\1/p' Cargo.toml | head -n1)" >> $GITHUB_ENV 119 | PROJECT_VERSION="$(sed -n 's/^version = "\(.*\)"/\1/p' Cargo.toml | head -n1)" 120 | echo "PROJECT_VERSION=$PROJECT_VERSION" >> $GITHUB_ENV 121 | if [[ $PROJECT_VERSION == *-pre ]]; then 122 | echo "VERSION_SUFFIX=nightly" >> $GITHUB_ENV 123 | else 124 | echo "VERSION_SUFFIX=$PROJECT_VERSION" >> $GITHUB_ENV 125 | fi 126 | 127 | - name: Build 128 | uses: actions-rs/cargo@v1 129 | with: 130 | command: build 131 | args: ${{ matrix.features }} --release 132 | 133 | cicd-mac: 134 | runs-on: macos-latest 135 | 136 | steps: 137 | - name: Checkout 138 | uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # v4 139 | with: 140 | fetch-depth: 0 141 | 142 | - name: Install Rust toolchain 143 | uses: actions-rs/toolchain@v1 144 | with: 145 | toolchain: stable 146 | target: x86_64-apple-darwin 147 | override: true 148 | default: true 149 | 150 | - name: Extract crate information 151 | shell: bash 152 | run: | 153 | echo "PROJECT_NAME=$(sed -n 's/^name = "\(.*\)"/\1/p' Cargo.toml | head -n1)" >> $GITHUB_ENV 154 | PROJECT_VERSION="$(sed -n 's/^version = "\(.*\)"/\1/p' Cargo.toml | head -n1)" 155 | echo "PROJECT_VERSION=$PROJECT_VERSION" >> $GITHUB_ENV 156 | if [[ $PROJECT_VERSION == *-pre ]]; then 157 | echo "VERSION_SUFFIX=nightly" >> $GITHUB_ENV 158 | else 159 | echo "VERSION_SUFFIX=$PROJECT_VERSION" >> $GITHUB_ENV 160 | fi 161 | 162 | - name: Build 163 | uses: 
actions-rs/cargo@v1 164 | with: 165 | command: build 166 | args: ${{ matrix.features }} --release 167 | -------------------------------------------------------------------------------- /.github/workflows/int_test.yml: -------------------------------------------------------------------------------- 1 | name: Integration-Tests 2 | on: 3 | pull_request: 4 | push: 5 | branches: 6 | - master 7 | 8 | jobs: 9 | integration-tests: 10 | runs-on: ubuntu-latest 11 | strategy: 12 | fail-fast: true 13 | steps: 14 | - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # v4 15 | with: 16 | fetch-depth: 0 17 | 18 | - name: Extract crate information 19 | shell: bash 20 | run: | 21 | echo "PROJECT_NAME=$(sed -n 's/^name = "\(.*\)"/\1/p' Cargo.toml | head -n1)" >> $GITHUB_ENV 22 | echo "PROJECT_VERSION=$(sed -n 's/^version = "\(.*\)"/\1/p' Cargo.toml | head -n1)" >> $GITHUB_ENV 23 | 24 | - name: Install Rust toolchain 25 | uses: actions-rs/toolchain@v1 26 | with: 27 | toolchain: stable 28 | override: true 29 | default: true 30 | 31 | - run: rustup component add clippy 32 | 33 | - name: Show version information (Rust, cargo, GCC) 34 | shell: bash 35 | run: | 36 | gcc --version || true 37 | rustup -V 38 | rustup toolchain list 39 | rustup default 40 | cargo -V 41 | rustc -V 42 | 43 | - name: Run clippy 44 | uses: actions-rs/clippy-check@v1 45 | with: 46 | token: ${{ secrets.GITHUB_TOKEN }} 47 | args: --all-features 48 | 49 | - name: Tests 50 | run: ./run-tests.sh 51 | -------------------------------------------------------------------------------- /.github/workflows/publish.yml: -------------------------------------------------------------------------------- 1 | name: Publish 2 | on: 3 | push: 4 | tags: 5 | - 'v*' 6 | branches: 7 | - master 8 | jobs: 9 | build: 10 | runs-on: ubuntu-latest 11 | strategy: 12 | fail-fast: true 13 | matrix: 14 | build: 15 | - aarch64 16 | - aarch64-musl 17 | - armv7 18 | - i686 19 | - amd64-musl 20 | include: 21 | - build: aarch64 22 | os: 
ubuntu-latest 23 | target: aarch64-unknown-linux-gnu 24 | use-cross: true 25 | features: "--no-default-features --features rustls" 26 | - build: aarch64-musl 27 | os: ubuntu-latest 28 | target: aarch64-unknown-linux-musl 29 | use-cross: true 30 | features: "--no-default-features --features rustls" 31 | - build: armv7 32 | os: ubuntu-latest 33 | target: armv7-unknown-linux-gnueabihf 34 | use-cross: true 35 | features: "--no-default-features --features rustls" 36 | - build: i686 37 | os: ubuntu-latest 38 | target: i686-unknown-linux-gnu 39 | use-cross: true 40 | features: "--no-default-features --features rustls" 41 | - build: amd64-musl 42 | os: ubuntu-latest 43 | target: x86_64-unknown-linux-musl 44 | use-cross: true 45 | features: "--no-default-features --features rustls" 46 | steps: 47 | - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # v4 48 | with: 49 | fetch-depth: 0 50 | 51 | - name: Install prerequisites 52 | shell: bash 53 | run: | 54 | case ${{ matrix.target }} in 55 | arm-unknown-linux-gnueabihf) sudo apt-get -y update ; sudo apt-get -y install gcc-arm-linux-gnueabihf ;; 56 | armv7-unknown-linux-gnueabihf) sudo apt-get -y update ; sudo apt-get -y install gcc-arm-linux-gnueabihf ;; 57 | aarch64-unknown-linux-gnu) sudo apt-get -y update ; sudo apt-get -y install gcc-aarch64-linux-gnu ;; 58 | aarch64-unknown-linux-musl) sudo apt-get -y update ; sudo apt-get -y install gcc-aarch64-linux-gnu ;; 59 | esac 60 | 61 | - name: Extract crate information 62 | shell: bash 63 | run: | 64 | echo "PROJECT_NAME=$(sed -n 's/^name = "\(.*\)"/\1/p' Cargo.toml | head -n1)" >> $GITHUB_ENV 65 | echo "PROJECT_MAINTAINER=$(sed -n 's/^authors = \["\(.*\)"\]/\1/p' Cargo.toml)" >> $GITHUB_ENV 66 | echo "PROJECT_HOMEPAGE=$(sed -n 's/^homepage = "\(.*\)"/\1/p' Cargo.toml)" >> $GITHUB_ENV 67 | PROJECT_VERSION="$(sed -n 's/^version = "\(.*\)"/\1/p' Cargo.toml | head -n1)" 68 | echo "PROJECT_VERSION=$PROJECT_VERSION" >> $GITHUB_ENV 69 | if [[ $PROJECT_VERSION == *-pre 
]]; then 70 | echo "VERSION_SUFFIX=nightly" >> $GITHUB_ENV 71 | else 72 | echo "VERSION_SUFFIX=$PROJECT_VERSION" >> $GITHUB_ENV 73 | fi 74 | 75 | - name: Install Rust toolchain 76 | uses: actions-rs/toolchain@v1 77 | with: 78 | toolchain: stable 79 | target: ${{ matrix.target }} 80 | override: true 81 | default: true 82 | 83 | - name: Show version information (Rust, cargo, GCC) 84 | shell: bash 85 | run: | 86 | gcc --version || true 87 | rustup -V 88 | rustup toolchain list 89 | rustup default 90 | cargo -V 91 | rustc -V 92 | - name: Build 93 | uses: actions-rs/cargo@v1 94 | with: 95 | use-cross: ${{ matrix.use-cross }} 96 | command: build 97 | args: ${{ matrix.features }} --release --target=${{ matrix.target }} 98 | 99 | - name: Strip debug information from executable 100 | id: strip 101 | shell: bash 102 | run: | 103 | # Figure out what strip tool to use if any 104 | STRIP="strip" 105 | case ${{ matrix.target }} in 106 | arm-unknown-linux-gnueabihf) STRIP="arm-linux-gnueabihf-strip" ;; 107 | armv7-unknown-linux-gnueabihf) STRIP="arm-linux-gnueabihf-strip" ;; 108 | aarch64-unknown-linux-gnu) STRIP="aarch64-linux-gnu-strip" ;; 109 | aarch64-unknown-linux-musl) STRIP="aarch64-linux-gnu-strip" ;; 110 | *-pc-windows-msvc) STRIP="" ;; 111 | esac; 112 | # Setup paths 113 | BIN_DIR="_cicd/stripped-release-bin/" 114 | mkdir -p "${BIN_DIR}" 115 | BIN_NAME="${{ env.PROJECT_NAME }}" 116 | BIN_PATH="${BIN_DIR}/${BIN_NAME}_${{ matrix.build }}-${{ env.VERSION_SUFFIX }}" 117 | # Copy the release build binary to the result location 118 | cp "target/${{ matrix.target }}/release/${BIN_NAME}" "${BIN_PATH}" 119 | # Also strip if possible 120 | if [ -n "${STRIP}" ]; then 121 | "${STRIP}" "${BIN_PATH}" 122 | fi 123 | # Let subsequent steps know where to find the (stripped) bin 124 | echo ::set-output name=BIN_PATH::${BIN_PATH} 125 | 126 | - name: Publish Release 127 | id: publish 128 | uses: softprops/action-gh-release@v2 129 | if: startsWith(github.ref, 'refs/tags/') 130 | env: 131 | 
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 132 | with: 133 | files: | 134 | ${{ steps.strip.outputs.BIN_PATH }} 135 | 136 | - name: Publish Pre-Release 137 | id: publish-pre 138 | if: ${{ github.ref }} == 'refs/heads/master' 139 | env: 140 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 141 | uses: "ncipollo/release-action@v1" 142 | with: 143 | token: "${{ secrets.GITHUB_TOKEN }}" 144 | prerelease: true 145 | allowUpdates: true 146 | tag: "nightly" 147 | commit: "master" 148 | name: "dsc nightly" 149 | replacesArtifacts: true 150 | artifacts: ${{ steps.strip.outputs.BIN_PATH }} 151 | 152 | build-win: 153 | runs-on: windows-latest 154 | 155 | steps: 156 | - name: Checkout 157 | uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # v4 158 | with: 159 | fetch-depth: 0 160 | 161 | - name: Install Rust toolchain 162 | uses: actions-rs/toolchain@v1 163 | with: 164 | toolchain: stable 165 | override: true 166 | default: true 167 | 168 | - name: Extract crate information 169 | shell: bash 170 | run: | 171 | echo "PROJECT_NAME=$(sed -n 's/^name = "\(.*\)"/\1/p' Cargo.toml | head -n1)" >> $GITHUB_ENV 172 | PROJECT_VERSION="$(sed -n 's/^version = "\(.*\)"/\1/p' Cargo.toml | head -n1)" 173 | echo "PROJECT_VERSION=$PROJECT_VERSION" >> $GITHUB_ENV 174 | if [[ $PROJECT_VERSION == *-pre ]]; then 175 | echo "VERSION_SUFFIX=nightly" >> $GITHUB_ENV 176 | else 177 | echo "VERSION_SUFFIX=$PROJECT_VERSION" >> $GITHUB_ENV 178 | fi 179 | 180 | - name: Build 181 | uses: actions-rs/cargo@v1 182 | with: 183 | command: build 184 | args: ${{ matrix.features }} --release 185 | 186 | - name: Rename binary 187 | run: | 188 | mv target/release/${{ env.PROJECT_NAME }}.exe target/release/${{ env.PROJECT_NAME }}-${{ env.VERSION_SUFFIX }}.exe 189 | 190 | - name: Publish Release 191 | id: publish 192 | uses: softprops/action-gh-release@v2 193 | if: startsWith(github.ref, 'refs/tags/') 194 | env: 195 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 196 | with: 197 | files: | 198 | target/release/${{ 
env.PROJECT_NAME }}-${{ env.VERSION_SUFFIX }}.exe 199 | 200 | - name: Publish Pre-Release 201 | id: publish-pre 202 | if: ${{ github.ref }} == 'refs/heads/master' 203 | env: 204 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 205 | uses: "ncipollo/release-action@v1" 206 | with: 207 | token: "${{ secrets.GITHUB_TOKEN }}" 208 | prerelease: true 209 | allowUpdates: true 210 | tag: "nightly" 211 | commit: "master" 212 | name: "dsc nightly" 213 | replacesArtifacts: true 214 | artifacts: target/release/${{ env.PROJECT_NAME }}-${{ env.VERSION_SUFFIX }}.exe 215 | 216 | build-mac: 217 | runs-on: macos-latest 218 | 219 | steps: 220 | - name: Checkout 221 | uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # v4 222 | with: 223 | fetch-depth: 0 224 | 225 | - name: Install Rust toolchain 226 | uses: actions-rs/toolchain@v1 227 | with: 228 | toolchain: stable 229 | target: x86_64-apple-darwin 230 | override: true 231 | default: true 232 | 233 | - name: Extract crate information 234 | shell: bash 235 | run: | 236 | echo "PROJECT_NAME=$(sed -n 's/^name = "\(.*\)"/\1/p' Cargo.toml | head -n1)" >> $GITHUB_ENV 237 | PROJECT_VERSION="$(sed -n 's/^version = "\(.*\)"/\1/p' Cargo.toml | head -n1)" 238 | echo "PROJECT_VERSION=$PROJECT_VERSION" >> $GITHUB_ENV 239 | if [[ $PROJECT_VERSION == *-pre ]]; then 240 | echo "VERSION_SUFFIX=nightly" >> $GITHUB_ENV 241 | else 242 | echo "VERSION_SUFFIX=$PROJECT_VERSION" >> $GITHUB_ENV 243 | fi 244 | 245 | - name: Build 246 | uses: actions-rs/cargo@v1 247 | with: 248 | command: build 249 | args: ${{ matrix.features }} --release 250 | 251 | - name: Rename and strip binary 252 | run: | 253 | strip target/release/${{ env.PROJECT_NAME }} 254 | mv target/release/${{ env.PROJECT_NAME }} target/release/${{ env.PROJECT_NAME }}_darwin-${{ env.VERSION_SUFFIX }} 255 | 256 | - name: Publish Release 257 | id: publish 258 | uses: softprops/action-gh-release@v2 259 | if: startsWith(github.ref, 'refs/tags/') 260 | env: 261 | GITHUB_TOKEN: ${{ 
secrets.GITHUB_TOKEN }} 262 | with: 263 | prerelease: ${{ github.ref }} == "refs/heads/master" 264 | files: | 265 | target/release/${{ env.PROJECT_NAME }}_darwin-${{ env.VERSION_SUFFIX }} 266 | 267 | - name: Publish Pre-Release 268 | id: publish-pre 269 | if: ${{ github.ref }} == 'refs/heads/master' 270 | env: 271 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 272 | uses: "ncipollo/release-action@v1" 273 | with: 274 | token: "${{ secrets.GITHUB_TOKEN }}" 275 | prerelease: true 276 | allowUpdates: true 277 | tag: "nightly" 278 | commit: "master" 279 | name: "dsc nightly" 280 | replacesArtifacts: true 281 | artifacts: target/release/${{ env.PROJECT_NAME }}_darwin-${{ env.VERSION_SUFFIX }} 282 | 283 | docker: 284 | needs: [ build, build-win, build-mac ] 285 | runs-on: ubuntu-latest 286 | steps: 287 | - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 288 | with: 289 | fetch-depth: 0 290 | - name: Extract crate information 291 | shell: bash 292 | run: | 293 | echo "PROJECT_NAME=$(sed -n 's/^name = "\(.*\)"/\1/p' Cargo.toml | head -n1)" >> $GITHUB_ENV 294 | echo "PROJECT_VERSION=$(sed -n 's/^version = "\(.*\)"/\1/p' Cargo.toml | head -n1)" >> $GITHUB_ENV 295 | - name: Set up QEMU 296 | uses: docker/setup-qemu-action@v3 297 | - name: Set up Docker Buildx 298 | id: buildx 299 | uses: docker/setup-buildx-action@v3 300 | - name: Log in to Docker Hub 301 | uses: docker/login-action@v3 302 | with: 303 | username: ${{ secrets.DOCKER_USERNAME }} 304 | password: ${{ secrets.DOCKER_PASSWORD }} 305 | - name: Build and push Docker Images (${{ env.PROJECT_VERSION }}) 306 | run: ./docker/build.sh ${{ env.PROJECT_VERSION }} --push 307 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | /target 2 | result 3 | /local 4 | *.qcow2 5 | .direnv/ 6 | .vscode 7 | -------------------------------------------------------------------------------- 
/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "dsc" 3 | version = "0.12.0-pre" 4 | authors = ["eikek "] 5 | edition = "2021" 6 | description = "A command line interface to Docspell." 7 | homepage = "https://docspell.org" 8 | license = "GPLv3" 9 | repository = "https://github.com/docspell/dsc" 10 | build = "build.rs" 11 | keywords = [ "docspell", "cli" ] 12 | categories = [ "command-line-utilities" ] 13 | readme = "README.md" 14 | 15 | # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html 16 | 17 | [dependencies] 18 | chrono = "0.4.38" 19 | clap = { version = "4.5.4", features = ["derive", "wrap_help"] } 20 | clap_complete = "4.5.2" 21 | console = {version = "0.15.8"} 22 | csv = "1.3" 23 | dialoguer = { version = "0.11" } 24 | dirs = { version = "5.0.1" } 25 | env_logger = { version = "0.11.3" } 26 | glob = "0.3.1" 27 | hex = "0.4.3" 28 | log = { version = "0.4.21" } 29 | notify = "4.0.17" 30 | openssl = { version = "0.10.64", optional = true } 31 | percent-encoding = { version = "2.3.1" } 32 | prettytable-rs = { version = "0.10" } 33 | reqwest = { version = "0.12.4", default-features = false, features = ["json", "blocking", "multipart"] } 34 | rsotp = "0.1.0" 35 | serde = { version = "1.0.200", features = ["derive"] } 36 | serde-lexpr = "0.1.3" 37 | serde_json = "1.0.116" 38 | sha2 = { version = "0.10.8" } 39 | snafu = { version = "0.8.2" } 40 | toml = { version = "0.8.12" } 41 | webbrowser = { version = "0.8.15" } 42 | zip = { version = "0.6.6" } 43 | pathdiff = "0.2.1" 44 | sanitize-filename = "0.6" 45 | 46 | [target.'cfg(unix)'.dependencies] 47 | file-locker = { version = "1.1.2" } 48 | 49 | [target.'cfg(macos)'.dependencies] 50 | file-locker = { version = "1.1.2" } 51 | 52 | 53 | [features] 54 | default = ["reqwest/default-tls"] 55 | rustls = ["reqwest/rustls-tls"] 56 | vendored-openssl = ["openssl/vendored"] 57 | 58 | [dev-dependencies] 59 | 
assert_cmd = "2.0.14" 60 | predicates = "3.1.0" 61 | 62 | [build-dependencies] 63 | vergen = "7.5.1" 64 | #anyhow = "1" -------------------------------------------------------------------------------- /Contributing.md: -------------------------------------------------------------------------------- 1 | # Contributions 2 | 3 | Thank you very much for your interest! 4 | 5 | Contributions are much appreciated, be it in code, issues or general 6 | feedback. You can use the [issue 7 | tracker](https://github.com/docspell/dsc/issues) or the [gitter 8 | chat](https://gitter.im/eikek/docspell) which can be accessed via 9 | [matrix](https://matrix.to/#/#eikek_docspell:gitter.im), too. 10 | 11 | 12 | ## Code Contributions 13 | 14 | This project is written in Rust based on a bunch of nice crates. The 15 | code should be auto-formatted using `rustfmt`. 16 | 17 | There are some tests, that can be run using the `run-tests.sh` script 18 | in the source root. These are also run on CI. The CI pipeline builds 19 | this app for/on different platforms. This happens when a PR is 20 | created on each change. 
21 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | [![Integration-Tests](https://github.com/docspell/dsc/actions/workflows/int_test.yml/badge.svg)](https://github.com/docspell/dsc/actions/workflows/int_test.yml) 2 | [![License](https://img.shields.io/github/license/docspell/dsc.svg?style=flat&color=steelblue)](https://github.com/docspell/dsc/blob/master/LICENSE.txt) 3 | [![Chat](https://img.shields.io/gitter/room/eikek/docspell?style=flat&color=steelblue&logo=gitter)](https://gitter.im/eikek/docspell) 4 | [![Docker Pulls](https://img.shields.io/docker/pulls/docspell/dsc?color=steelblue&style=flat&logo=docker)](https://hub.docker.com/u/docspell/dsc) 5 | 6 | # docspell cli 7 | 8 | This is a command line interface to Docspell, a personal document 9 | management system. 10 | 11 | The CLI is developed independently with the docspell server. Commands 12 | support the current (at the time of writing) version of docspell. When 13 | the server upgrades its api, the cli is adopted accordingly. 14 | 15 | This CLI is meant to be used by humans and other programs. The default 16 | output format is `tabular` which prints a table to stdout. This same 17 | table can be formatted to CSV by using the format option `csv`. These 18 | two formats may omit some details from the server responses for 19 | readability reasons. When using this cli from other programs, or when 20 | you're looking for some detail, the formats `json` and `lisp` are 21 | recommended. They contain every detail in a well structured form. 22 | 23 | ## State 24 | 25 | This CLI is beta … probably forever. I'm using it a lot and hope that 26 | it will be improved over time. 27 | 28 | The goal is to eventually have all the [REST 29 | endpoints](https://docspell.org/openapi/docspell-openapi.html) covered 30 | here, or at least those that are frequently used. 
31 | 32 | ## Usage 33 | 34 | There are binaries provided at the [release 35 | page](https://github.com/docspell/dsc/releases/latest) that you can 36 | download. Or you can build it as described below. 37 | 38 | Run `dsc help` to see a command overview and global options. These 39 | options apply to (almost) all commands. Additionally, each command has 40 | its own set of options and arguments. A command has its help available 41 | via `dsc [subcommand] --help`. 42 | 43 | 44 | ## Config File 45 | 46 | The config file is read from the OS defined location, or it can be 47 | specfified explicitly either via an environment variable `DSC_CONFIG` 48 | or as an option. You can run `dsc write-config-file` to create a 49 | default config file in the standard location. The default location on 50 | linux systems is `~/.config/dsc/config.toml`. 51 | 52 | The config file looks like this (also, look in the `ci/` folder for 53 | another and always up to date example): 54 | 55 | ``` toml 56 | docspell_url = "http://localhost:7880" 57 | default_format = "Tabular" 58 | # admin_secret = "test123" 59 | # default_source_id = "" 60 | # pass_entry = "my/pass/entry" 61 | # pass_otp_secret = "key:totp" #or "my/pass/totp_entry" 62 | # default_account = "demo" 63 | pdf_viewer = ["zathura", "{}"] 64 | # proxy = myproxy.com 65 | # proxy_user = me 66 | # proxy_password = superword 67 | # extra_certificate = /path/to/trust.pem #PEM or DER 68 | # accept_invalid_certificates = false 69 | ``` 70 | 71 | The `pdf_viewer` is used with the `view` command to display the PDF 72 | file. It must be a list where the first element is the program to run 73 | and subsequent elements are its arguments. For each argument, any `{}` 74 | is replaced by the path to the file. 75 | 76 | 77 | ## Authentication 78 | 79 | The `login` command can be used to initially login to docspell server. 80 | 81 | It accepts a username and password. It also supports the 82 | [pass](https://www.passwordstore.org/) password manager. 
The user name 83 | can be fixed in the config file as well as the entry to use with 84 | [pass](https://www.passwordstore.org/). This means you can then just 85 | run `dsc login` without any arguments. The retrieved session token is 86 | stored on your file system next to the config file. Subsequent 87 | commands can use the session token. Once it is expired, you need to 88 | call `dsc login` again. 89 | 90 | When TOTP is enabled, it is also possible to store the secret in 91 | [pass](https://www.passwordstore.org/) and specify the entry to this 92 | in the config file at `pass_otp_secret`. Dsc can then calculate the 93 | otp accordingly. If the value starts with `key:` then the renaming 94 | part is used to lookup such a line in the main entry (defined via 95 | `pass_entry`). If not prefixed with `key:` a separate pass entry is 96 | looked up. 97 | 98 | For commands `file-exists` and `upload` it is possible to use a source 99 | id or the integration endpoint instead of being authenticated. 100 | 101 | 102 | ## Building 103 | 104 | Install [nix](https://nixos.org/download.html#nix-quick-install) and 105 | run `nix-shell` or `nix develop` in the source root. This installs 106 | required rust tools. Alternatively, the rust tool chain can be setup 107 | with [rustup](https://rustup.rs/). Currently, dsc requires rust >= 108 | 1.54.0. 109 | 110 | Building the binary for your platform (The second line strips the 111 | binary of debug symbols): 112 | 113 | ``` bash 114 | > cargo build --release 115 | > strip target/release/dsc 116 | ``` 117 | 118 | This requires the openssl libraries installed on your system. 
119 | 120 | To build against a statically linked rustls library instead, use: 121 | ``` bash 122 | > cargo build --release --no-default-features --features rustls 123 | ``` 124 | 125 | To include a statically linked openssl, build it via: 126 | ``` bash 127 | > cargo build --release --no-default-features --features vendored-openssl 128 | ``` 129 | 130 | 131 | ## Shell Integration 132 | 133 | The [library for parsing command line arguments](https://clap.rs/) has 134 | a nice feature that generates completions for various shells. This has 135 | been build into the `dsc` tool. For example, in order to have 136 | completions in [fish](https://fishshell.com/), run: 137 | 138 | ``` fish 139 | $ dsc generate-completions --shell fish | source 140 | ``` 141 | 142 | … and enjoy tab completion :wink: 143 | 144 | Run `dsc generate-completions --help` to see what other shells are 145 | supported. 146 | 147 | 148 | ## Nix Package 149 | 150 | The `nix/dsc.nix` contains a nix expression to build this package. It 151 | can be build using [flake enabled](https://nixos.wiki/wiki/Flakes) nix: 152 | 153 | ``` bash 154 | nix build 155 | ``` 156 | 157 | Or ran as 158 | 159 | ``` bash 160 | nix run github:docspell/dsc 161 | ``` 162 | 163 | The repository provides a package which can be included in your system 164 | flake by adding: 165 | 166 | ``` nix 167 | { 168 | inputs.dsc-flake.url = "github:docspell/dsc"; 169 | ... 170 | outputs = 171 | ... 172 | environment.systemPackages = [ dsc-flake.packages.${system}.default ] 173 | ... 174 | } 175 | ``` 176 | 177 | And a module for dsc watch. An example of its usage is in nixosConfigurations 178 | output of the flake. 
179 | 180 | 181 | ## Examples 182 | 183 | Reset the password of an account: 184 | ``` bash 185 | > dsc admin reset-password user32 186 | ┌─────────┬──────────────┬──────────────────┐ 187 | │ success │ new password │ message │ 188 | │ true │ 9rRVrhq19jz │ Password updated │ 189 | └─────────┴──────────────┴──────────────────┘ 190 | ``` 191 | 192 | 193 | Recreate the full text index: 194 | ``` bash 195 | > dsc admin recreate-index 196 | ┌─────────┬─────────────────────────────────────┐ 197 | │ success │ message │ 198 | │ true │ Full-text index will be re-created. │ 199 | └─────────┴─────────────────────────────────────┘ 200 | ``` 201 | 202 | Search some documents: 203 | ``` bash 204 | > dsc search 'date>2020-08-01 corr:acme*' 205 | ┌──────────┬─────────────────────────┬───────────┬────────────┬─────┬───────────────┬─────────────┬────────┬──────────────────────────────┬────────┐ 206 | │ id │ name │ state │ date │ due │ correspondent │ concerning │ folder │ tags │ fields │ 207 | │ 7xoiE4Xd │ DOC-20191223-155729.jpg │ created │ 2020-09-08 │ │ Acme │ │ │ Invoice │ │ 208 | │ BV2po65m │ DOC-20200808-154204.jpg │ confirmed │ 2020-08-08 │ │ Acme │ │ │ Receipt, Tax │ │ 209 | │ 8GA2ewgE │ DOC-20200807-115654.jpg │ created │ 2020-08-07 │ │ Acme │ │ │ Paper, Receipt │ │ 210 | │ FTUnhZ3A │ DOC-20200804-132305.jpg │ confirmed │ 2020-08-04 │ │ Acme │ │ │ Receipt, Tax │ │ 211 | │ 6MKV6SEQ │ DOC-20191223-155707.jpg │ confirmed │ 2020-08-03 │ │ Acme │ Derek Jeter │ │ Important, Information, Todo │ │ 212 | └──────────┴─────────────────────────┴───────────┴────────────┴─────┴───────────────┴─────────────┴────────┴──────────────────────────────┴────────┘ 213 | ``` 214 | 215 | Use JSON: 216 | ``` bash 217 | > dsc -f json search 'date>2020-08-01 corr:acme*' | jq | head -n20 218 | { 219 | "groups": [ 220 | { 221 | "name": "2020-09", 222 | "items": [ 223 | { 224 | "id": "7xoiE4XdwgD-FTGjD91MptP-yrnKpLrJTfg-Eb2S3BCSd38", 225 | "name": "DOC-20191223-155729.jpg", 226 | "state": "created", 227 | 
"date": 1599566400000, 228 | "due_date": null, 229 | "source": "android", 230 | "direction": "incoming", 231 | "corr_org": { 232 | "id": "GDceAkgrk8m-kjBWUmcuLTV-Zrzp85ByXpX-hq5SS4Yp3Pg", 233 | "name": "Acme" 234 | }, 235 | "corr_person": null, 236 | "conc_person": null, 237 | "conc_equip": null, 238 | ``` 239 | 240 | Upload some files: 241 | ``` bash 242 | > dsc up README.* 243 | File already in Docspell: README.md 244 | Adding to request: README.txt 245 | Sending request … 246 | ┌─────────┬──────────────────┐ 247 | │ success │ message │ 248 | ├─────────┼──────────────────┤ 249 | │ true │ Files submitted. │ 250 | └─────────┴──────────────────┘ 251 | ``` 252 | 253 | 254 | ## Making a release 255 | 256 | 1. Set version in `Cargo.toml` 257 | 2. Run `cargo update` to update `Cargo.lock` 258 | 3. Run `nix-build` and fix hashes 259 | 4. Commit + Tag 260 | 5. push tag to github 261 | 262 | The release is being built by github actions as well as the docker 263 | images. 264 | -------------------------------------------------------------------------------- /build.rs: -------------------------------------------------------------------------------- 1 | use std::path::PathBuf; 2 | use vergen::{vergen, Config}; 3 | 4 | fn main() { 5 | // Generate the default 'cargo:' instruction output 6 | let mut vergen_cfg = Config::default(); 7 | let dot_git = PathBuf::from(".git"); 8 | *vergen_cfg.git_mut().enabled_mut() = dot_git.exists(); 9 | vergen(vergen_cfg).unwrap(); 10 | 11 | if !dot_git.exists() { 12 | println!("cargo:rustc-env=VERGEN_GIT_SHA=unknown"); 13 | } 14 | } 15 | -------------------------------------------------------------------------------- /ci/README.md: -------------------------------------------------------------------------------- 1 | # Integration Tests 2 | 3 | For the integration tests, a docspell server (only the restserver) is 4 | started via docker-compose. To start with some data, a new image is 5 | created containing an existing H2 database. 
6 | 7 | The tests can be run using the script `run-tests.sh` in the source 8 | root. 9 | 10 | To play with the tests interactively, run docspell via docker-compose: 11 | 12 | ``` bash 13 | docker-compose -f docker-compose.yml up -d 14 | ``` 15 | 16 | This builds the image and starts docspell. Then run `dsc` using the 17 | config file from this directory: 18 | 19 | ``` bash 20 | dsc -c dsc-config.toml source list 21 | ``` 22 | 23 | The server can be stopped via: 24 | ``` bash 25 | docker-compose -f docker-compose.yml down 26 | ``` 27 | -------------------------------------------------------------------------------- /ci/docker-compose.yml: -------------------------------------------------------------------------------- 1 | version: '3.8' 2 | services: 3 | 4 | restserver: 5 | container_name: docspell-restserver 6 | image: ci_restserver:latest 7 | command: 8 | - /opt/docspell.conf 9 | restart: unless-stopped 10 | ports: 11 | - "7779:7779" 12 | volumes: 13 | - ./docspell.conf:/opt/docspell.conf 14 | build: 15 | context: docspell-server 16 | dockerfile: Dockerfile 17 | -------------------------------------------------------------------------------- /ci/docspell-server/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM docspell/restserver:nightly 2 | 3 | COPY ./docspell-0.24.0-dump-h2-1.24.0-2021-07-13-2307.sql /opt/h2-dump.sql 4 | RUN java -cp /opt/docspell-restserver/lib/com.h2database.h2* \ 5 | -Dfile.encoding=UTF-8 org.h2.tools.RunScript \ 6 | -url "jdbc:h2:///var/docspell/db;MODE=PostgreSQL;DATABASE_TO_LOWER=TRUE" \ 7 | -user "sa" -password "" \ 8 | -script /opt/h2-dump.sql 9 | -------------------------------------------------------------------------------- /ci/docspell.conf: -------------------------------------------------------------------------------- 1 | db_url = "jdbc:h2:///var/docspell/db;MODE=PostgreSQL;DATABASE_TO_LOWER=TRUE" 2 | 3 | docspell.server { 4 | backend.jdbc.url = ${db_url} 5 | 6 | 
auth.session-valid = "10 minutes" 7 | 8 | bind { 9 | address = "0.0.0.0" 10 | port = 7779 11 | } 12 | 13 | integration-endpoint { 14 | enabled = true 15 | http-header { 16 | enabled = true 17 | header-value = "test123" 18 | } 19 | } 20 | 21 | admin-endpoint.secret = "admin123" 22 | } 23 | -------------------------------------------------------------------------------- /ci/dsc-config.toml: -------------------------------------------------------------------------------- 1 | docspell_url = "http://localhost:7779" 2 | default_format = "Json" 3 | admin_secret = "admin123" 4 | default_account = "demo" 5 | pdf_viewer = ["zathura", "{}"] 6 | -------------------------------------------------------------------------------- /default.nix: -------------------------------------------------------------------------------- 1 | (import 2 | ( 3 | let 4 | lock = builtins.fromJSON (builtins.readFile ./flake.lock); 5 | in 6 | fetchTarball { 7 | url = "https://github.com/edolstra/flake-compat/archive/${lock.nodes.flake-compat.locked.rev}.tar.gz"; 8 | sha256 = lock.nodes.flake-compat.locked.narHash; 9 | } 10 | ) 11 | { 12 | src = ./.; 13 | }).defaultNix 14 | -------------------------------------------------------------------------------- /docker/build.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | if [ -z "$1" ]; then 4 | echo "Please specify a version" 5 | exit 1 6 | fi 7 | 8 | version="$1" 9 | if [[ $version == v* ]]; then 10 | version="${version:1}" 11 | fi 12 | 13 | push="" 14 | if [ -z "$2" ] || [ "$2" == "--push" ]; then 15 | push="$2" 16 | if [ ! -z "$push" ]; then 17 | echo "Running with $push !" 18 | fi 19 | else 20 | echo "Don't understand second argument: $2" 21 | exit 1 22 | fi 23 | 24 | if ! docker buildx version > /dev/null; then 25 | echo "The docker buildx command is required." 
26 | echo "See: https://github.com/docker/buildx#binary-release" 27 | exit 1 28 | fi 29 | 30 | set -e 31 | cd "$(dirname "$0")" 32 | 33 | trap "{ docker buildx rm dsc-builder; }" EXIT 34 | 35 | platforms="linux/amd64,linux/aarch64,linux/arm/v7" 36 | docker buildx create --name dsc-builder --use 37 | 38 | if [[ $version == *pre* ]]; then 39 | echo ">>>> Building nightly images for $version <<<<<" 40 | echo "============ Building dsc ============" 41 | docker buildx build \ 42 | --platform="$platforms" $push \ 43 | --build-arg version=nightly \ 44 | --tag docspell/dsc:nightly \ 45 | -f dsc.dockerfile . 46 | else 47 | echo ">>>> Building release images for $version <<<<<" 48 | echo "============ Building dsc ============" 49 | docker buildx build \ 50 | --platform="$platforms" $push \ 51 | --build-arg version=$version \ 52 | --tag docspell/dsc:v$version \ 53 | --tag docspell/dsc:latest \ 54 | -f dsc.dockerfile . 55 | 56 | fi 57 | -------------------------------------------------------------------------------- /docker/dsc.dockerfile: -------------------------------------------------------------------------------- 1 | FROM alpine:latest 2 | 3 | ARG version= 4 | ARG dsc_url= 5 | ARG TARGETPLATFORM 6 | 7 | WORKDIR /opt 8 | RUN apk add --no-cache curl jq 9 | RUN binary=""; release=""; \ 10 | if [ "$TARGETPLATFORM" = "linux/amd64" ]; then binary="dsc_amd64-musl-$version"; fi; \ 11 | if [ "$TARGETPLATFORM" = "linux/arm/v7" ]; then binary="dsc_armv7-$version"; fi; \ 12 | if [ "$TARGETPLATFORM" = "linux/aarch64" ]; then binary="dsc_aarch64-musl-$version"; fi; \ 13 | if [ "$TARGETPLATFORM" = "linux/arm64" ]; then binary="dsc_aarch64-musl-$version"; fi; \ 14 | if [[ $version == "nightly" ]]; then release="nightly"; else release="v$version"; fi; \ 15 | echo "Downloading ${dsc_url:-https://github.com/docspell/dsc/releases/download/$release/$binary} ..." 
&& \ 16 | curl --fail -o dsc -L ${dsc_url:-https://github.com/docspell/dsc/releases/download/$release/$binary} && \ 17 | mv dsc /usr/local/bin/ && \ 18 | chmod 755 /usr/local/bin/dsc 19 | -------------------------------------------------------------------------------- /flake.lock: -------------------------------------------------------------------------------- 1 | { 2 | "nodes": { 3 | "devshell-tools": { 4 | "inputs": { 5 | "flake-utils": "flake-utils", 6 | "nixpkgs": "nixpkgs" 7 | }, 8 | "locked": { 9 | "lastModified": 1737104970, 10 | "narHash": "sha256-WbKxxro4ZlVql5yDJpQMWc8SXsmzAQbEVo9RBnuYs8U=", 11 | "owner": "eikek", 12 | "repo": "devshell-tools", 13 | "rev": "8dbbd035cff334476c9d46ddbb316c1f2ab7df3a", 14 | "type": "github" 15 | }, 16 | "original": { 17 | "owner": "eikek", 18 | "repo": "devshell-tools", 19 | "type": "github" 20 | } 21 | }, 22 | "docspell-flake": { 23 | "inputs": { 24 | "devshell-tools": "devshell-tools", 25 | "flake-utils": "flake-utils_2", 26 | "nixpkgs": "nixpkgs_2" 27 | }, 28 | "locked": { 29 | "lastModified": 1740551141, 30 | "narHash": "sha256-LmKCaZ/H9xnjk11t9bcGXjtQ4w1tKrdrYm+eRay4kAE=", 31 | "owner": "eikek", 32 | "repo": "docspell", 33 | "rev": "ba1b86fdef7d91d66c97e960088871847def175e", 34 | "type": "github" 35 | }, 36 | "original": { 37 | "owner": "eikek", 38 | "repo": "docspell", 39 | "type": "github" 40 | } 41 | }, 42 | "flake-parts": { 43 | "inputs": { 44 | "nixpkgs-lib": "nixpkgs-lib" 45 | }, 46 | "locked": { 47 | "lastModified": 1738453229, 48 | "narHash": "sha256-7H9XgNiGLKN1G1CgRh0vUL4AheZSYzPm+zmZ7vxbJdo=", 49 | "owner": "hercules-ci", 50 | "repo": "flake-parts", 51 | "rev": "32ea77a06711b758da0ad9bd6a844c5740a87abd", 52 | "type": "github" 53 | }, 54 | "original": { 55 | "id": "flake-parts", 56 | "type": "indirect" 57 | } 58 | }, 59 | "flake-utils": { 60 | "inputs": { 61 | "systems": "systems" 62 | }, 63 | "locked": { 64 | "lastModified": 1731533236, 65 | "narHash": "sha256-l0KFg5HjrsfsO/JpG+r7fRrqm12kzFHyUHqHCVpMMbI=", 66 
| "owner": "numtide", 67 | "repo": "flake-utils", 68 | "rev": "11707dc2f618dd54ca8739b309ec4fc024de578b", 69 | "type": "github" 70 | }, 71 | "original": { 72 | "owner": "numtide", 73 | "repo": "flake-utils", 74 | "type": "github" 75 | } 76 | }, 77 | "flake-utils_2": { 78 | "inputs": { 79 | "systems": "systems_2" 80 | }, 81 | "locked": { 82 | "lastModified": 1731533236, 83 | "narHash": "sha256-l0KFg5HjrsfsO/JpG+r7fRrqm12kzFHyUHqHCVpMMbI=", 84 | "owner": "numtide", 85 | "repo": "flake-utils", 86 | "rev": "11707dc2f618dd54ca8739b309ec4fc024de578b", 87 | "type": "github" 88 | }, 89 | "original": { 90 | "owner": "numtide", 91 | "repo": "flake-utils", 92 | "type": "github" 93 | } 94 | }, 95 | "naersk": { 96 | "inputs": { 97 | "nixpkgs": [ 98 | "nixpkgs" 99 | ] 100 | }, 101 | "locked": { 102 | "lastModified": 1739824009, 103 | "narHash": "sha256-fcNrCMUWVLMG3gKC5M9CBqVOAnJtyRvGPxptQFl5mVg=", 104 | "owner": "nix-community", 105 | "repo": "naersk", 106 | "rev": "e5130d37369bfa600144c2424270c96f0ef0e11d", 107 | "type": "github" 108 | }, 109 | "original": { 110 | "owner": "nix-community", 111 | "ref": "master", 112 | "repo": "naersk", 113 | "type": "github" 114 | } 115 | }, 116 | "nixpkgs": { 117 | "locked": { 118 | "lastModified": 1736867362, 119 | "narHash": "sha256-i/UJ5I7HoqmFMwZEH6vAvBxOrjjOJNU739lnZnhUln8=", 120 | "owner": "NixOS", 121 | "repo": "nixpkgs", 122 | "rev": "9c6b49aeac36e2ed73a8c472f1546f6d9cf1addc", 123 | "type": "github" 124 | }, 125 | "original": { 126 | "owner": "NixOS", 127 | "ref": "nixos-24.11", 128 | "repo": "nixpkgs", 129 | "type": "github" 130 | } 131 | }, 132 | "nixpkgs-lib": { 133 | "locked": { 134 | "lastModified": 1738452942, 135 | "narHash": "sha256-vJzFZGaCpnmo7I6i416HaBLpC+hvcURh/BQwROcGIp8=", 136 | "type": "tarball", 137 | "url": "https://github.com/NixOS/nixpkgs/archive/072a6db25e947df2f31aab9eccd0ab75d5b2da11.tar.gz" 138 | }, 139 | "original": { 140 | "type": "tarball", 141 | "url": 
"https://github.com/NixOS/nixpkgs/archive/072a6db25e947df2f31aab9eccd0ab75d5b2da11.tar.gz" 142 | } 143 | }, 144 | "nixpkgs_2": { 145 | "locked": { 146 | "lastModified": 1738435198, 147 | "narHash": "sha256-5+Hmo4nbqw8FrW85FlNm4IIrRnZ7bn0cmXlScNsNRLo=", 148 | "owner": "NixOS", 149 | "repo": "nixpkgs", 150 | "rev": "f6687779bf4c396250831aa5a32cbfeb85bb07a3", 151 | "type": "github" 152 | }, 153 | "original": { 154 | "owner": "NixOS", 155 | "ref": "nixos-24.11", 156 | "repo": "nixpkgs", 157 | "type": "github" 158 | } 159 | }, 160 | "nixpkgs_3": { 161 | "locked": { 162 | "lastModified": 1740560979, 163 | "narHash": "sha256-Vr3Qi346M+8CjedtbyUevIGDZW8LcA1fTG0ugPY/Hic=", 164 | "owner": "NixOS", 165 | "repo": "nixpkgs", 166 | "rev": "5135c59491985879812717f4c9fea69604e7f26f", 167 | "type": "github" 168 | }, 169 | "original": { 170 | "owner": "NixOS", 171 | "ref": "nixos-unstable", 172 | "repo": "nixpkgs", 173 | "type": "github" 174 | } 175 | }, 176 | "root": { 177 | "inputs": { 178 | "docspell-flake": "docspell-flake", 179 | "flake-parts": "flake-parts", 180 | "naersk": "naersk", 181 | "nixpkgs": "nixpkgs_3" 182 | } 183 | }, 184 | "systems": { 185 | "locked": { 186 | "lastModified": 1681028828, 187 | "narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=", 188 | "owner": "nix-systems", 189 | "repo": "default", 190 | "rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e", 191 | "type": "github" 192 | }, 193 | "original": { 194 | "owner": "nix-systems", 195 | "repo": "default", 196 | "type": "github" 197 | } 198 | }, 199 | "systems_2": { 200 | "locked": { 201 | "lastModified": 1681028828, 202 | "narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=", 203 | "owner": "nix-systems", 204 | "repo": "default", 205 | "rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e", 206 | "type": "github" 207 | }, 208 | "original": { 209 | "owner": "nix-systems", 210 | "repo": "default", 211 | "type": "github" 212 | } 213 | } 214 | }, 215 | "root": "root", 216 | "version": 7 217 | } 
218 | -------------------------------------------------------------------------------- /flake.nix: -------------------------------------------------------------------------------- 1 | { 2 | description = "A command line interface to Docspell"; 3 | 4 | inputs = { 5 | nixpkgs.url = "github:NixOS/nixpkgs/nixos-unstable"; 6 | naersk.url = "github:nix-community/naersk/master"; 7 | naersk.inputs.nixpkgs.follows = "nixpkgs"; 8 | docspell-flake = { url = "github:eikek/docspell"; }; 9 | }; 10 | 11 | outputs = inputs@{ flake-parts, self, ... }: 12 | flake-parts.lib.mkFlake { inherit inputs; } { 13 | imports = [ inputs.flake-parts.flakeModules.easyOverlay ]; 14 | systems = [ 15 | "aarch64-linux" 16 | "aarch64-darwin" 17 | "x86_64-darwin" 18 | "x86_64-linux" 19 | ]; # List taken from flake-utils 20 | perSystem = { config, self', inputs', pkgs, system, ... }: 21 | let naersk-lib = pkgs.callPackage inputs.naersk { }; 22 | in rec { 23 | packages = rec { 24 | default = naersk-lib.buildPackage { 25 | root = ./.; 26 | meta = with pkgs.lib; { 27 | description = "A command line interface to Docspell"; 28 | homepage = "https://github.com/docspell/dsc"; 29 | license = with licenses; [ gpl3 ]; 30 | maintainers = with maintainers; [ eikek ]; 31 | }; 32 | nativeBuildInputs = with pkgs; [ pkg-config ]; 33 | buildInputs = with pkgs; [ openssl installShellFiles ]; 34 | postInstall = '' 35 | for shell in fish zsh bash; do 36 | $out/bin/dsc generate-completions --shell $shell > dsc.$shell 37 | installShellCompletion --$shell dsc.$shell 38 | done 39 | ''; 40 | }; 41 | dsc = default; 42 | }; 43 | apps.default = { 44 | type = "app"; 45 | program = "${packages.default}/bin/dsc"; 46 | }; 47 | devShells.default = with pkgs; 48 | mkShell { 49 | buildInputs = [ cargo rustc openssl ]; 50 | nativeBuildInputs = with pkgs; [ pkg-config ]; 51 | RUST_SRC_PATH = rustPlatform.rustLibSrc; 52 | }; 53 | overlayAttrs = { inherit (config.packages) dsc; }; 54 | formatter = pkgs.nixpkgs-fmt; 55 | }; 56 | flake = { 57 
| # The usual flake attributes can be defined here, including system- 58 | # agnostic ones like nixosModule and system-enumerating ones, although 59 | # those are more easily expressed in perSystem. 60 | nixosModules = rec { 61 | default = dsc-watch; 62 | dsc-watch = import ./nix/module.nix; 63 | }; 64 | nixosConfigurations.dev-vm = let 65 | system = "x86_64-linux"; 66 | pkgs = import inputs.nixpkgs { 67 | inherit system; 68 | overlays = 69 | [ self.overlays.default inputs.docspell-flake.overlays.default ]; 70 | }; 71 | in inputs.nixpkgs.lib.nixosSystem { 72 | inherit pkgs system; 73 | modules = [ 74 | inputs.docspell-flake.nixosModules.default 75 | self.nixosModules.dsc-watch 76 | ./nix/nixosConfigurations 77 | ]; 78 | }; 79 | }; 80 | }; 81 | } 82 | -------------------------------------------------------------------------------- /nix/module.nix: -------------------------------------------------------------------------------- 1 | { config, lib, pkgs, ... }: 2 | 3 | with lib; 4 | let cfg = config.services.dsc-watch; 5 | in { 6 | 7 | ## interface 8 | options = { 9 | services.dsc-watch = { 10 | enable = mkOption { 11 | default = false; 12 | description = "Whether to enable dsc watch directory service."; 13 | }; 14 | 15 | package = mkOption { 16 | default = pkgs.dsc; 17 | description = "Package that provides the dsc binary."; 18 | type = types.package; 19 | }; 20 | 21 | docspell-url = mkOption { 22 | type = types.nullOr types.str; 23 | default = null; 24 | example = "http://localhost:7880"; 25 | description = "The base url to the docspell server."; 26 | }; 27 | 28 | configFile = mkOption { 29 | type = types.nullOr types.path; 30 | default = null; 31 | example = "./docspell-conf.toml"; 32 | description = "Config file that can be used to group together multiple options."; 33 | }; 34 | 35 | watchDirs = mkOption { 36 | type = types.listOf types.str; 37 | description = "The directories to watch for new files."; 38 | }; 39 | 40 | recursive = mkOption { 41 | type = 
types.bool; 42 | default = true; 43 | description = "Whether to watch directories recursively."; 44 | }; 45 | 46 | verbose = mkOption { 47 | type = types.bool; 48 | default = false; 49 | description = "Run in verbose mode"; 50 | }; 51 | 52 | delete-files = mkOption { 53 | type = types.bool; 54 | default = false; 55 | description = "Whether to delete successfully uploaded files."; 56 | }; 57 | 58 | include-filter = mkOption { 59 | type = types.nullOr types.str; 60 | default = null; 61 | description = "A filter for files to include when watching"; 62 | }; 63 | 64 | exclude-filter = mkOption { 65 | type = types.nullOr types.str; 66 | default = ".*"; 67 | description = "A filter for files to exclude when watching"; 68 | }; 69 | 70 | integration-endpoint = mkOption { 71 | type = types.submodule ({ 72 | options = { 73 | enabled = mkOption { 74 | type = types.bool; 75 | default = false; 76 | description = "Whether to upload to the integration endpoint."; 77 | }; 78 | header-file = mkOption { 79 | type = types.nullOr types.path; 80 | default = null; 81 | description = 82 | "A file containing the `header:value` pair for the integration endpoint."; 83 | }; 84 | basic-file = mkOption { 85 | type = types.nullOr types.path; 86 | default = null; 87 | description = 88 | "A file containing the `user:password` pair for the integration endpoint."; 89 | }; 90 | header = mkOption { 91 | type = types.nullOr types.str; 92 | default = null; 93 | description = '' 94 | The `header:value` string matching the configured header-name 95 | and value for the integration endpoint. 96 | ''; 97 | }; 98 | basic = mkOption { 99 | type = types.nullOr types.str; 100 | default = null; 101 | description = '' 102 | The `user:password` string matching the configured user and password 103 | for the integration endpoint. Since both are separated by a colon, the 104 | user name may not contain a colon (the password can). 
105 | ''; 106 | }; 107 | }; 108 | }); 109 | default = { 110 | enabled = false; 111 | header = null; 112 | basic = null; 113 | }; 114 | description = "Settings for using the integration endpoint."; 115 | }; 116 | source-id = mkOption { 117 | type = types.nullOr types.str; 118 | default = null; 119 | example = "abced-12345-abcde-12345"; 120 | description = '' 121 | A source id to use for uploading. This is used when the 122 | integration endpoint setting is disabled. 123 | ''; 124 | }; 125 | }; 126 | }; 127 | 128 | ## implementation 129 | config = mkIf config.services.dsc-watch.enable { 130 | 131 | systemd.user.services.dsc-watch = let 132 | argmap = [ 133 | { 134 | when = cfg.recursive; 135 | opt = [ "-r" ]; 136 | } 137 | { 138 | when = cfg.delete-files; 139 | opt = [ "--delete" ]; 140 | } 141 | { 142 | when = cfg.integration-endpoint.enabled; 143 | opt = [ "-i" ]; 144 | } 145 | { 146 | when = cfg.integration-endpoint.header-file != null; 147 | opt = [ "--header-file" "'${cfg.integration-endpoint.header-file}'" ]; 148 | } 149 | { 150 | when = cfg.integration-endpoint.basic-file != null; 151 | opt = [ "--basic-file" "'${cfg.integration-endpoint.basic-file}'" ]; 152 | } 153 | { 154 | when = cfg.integration-endpoint.header != null; 155 | opt = [ "--header" "'${cfg.integration-endpoint.header}'" ]; 156 | } 157 | { 158 | when = cfg.integration-endpoint.basic != null; 159 | opt = [ "--basic" "'${cfg.integration-endpoint.basic}'" ]; 160 | } 161 | { 162 | when = cfg.include-filter != null; 163 | opt = [ "--matches" "'${toString cfg.include-filter}'" ]; 164 | } 165 | { 166 | when = cfg.exclude-filter != null; 167 | opt = [ "--not-matches" "'${toString cfg.exclude-filter}'" ]; 168 | } 169 | { 170 | when = cfg.source-id != null; 171 | opt = [ "--source" "'${cfg.source-id}'" ]; 172 | } 173 | ]; 174 | 175 | globalmap = [ 176 | { 177 | when = cfg.verbose; 178 | opt = [ "-vv" ]; 179 | } 180 | { 181 | when = cfg.docspell-url != null; 182 | opt = [ "-d" "'${cfg.docspell-url}'" ]; 
183 | } 184 | { 185 | when = cfg.configFile != null; 186 | opt = [ "-c" "'${cfg.configFile}'" ]; 187 | } 188 | ]; 189 | 190 | to_args = m: builtins.concatLists (builtins.map (a: a.opt) 191 | (builtins.filter (a: a.when) m)); 192 | 193 | argv = builtins.concatStringsSep " " (to_args argmap); 194 | globv = builtins.concatStringsSep " " (to_args globalmap); 195 | dirs = builtins.concatStringsSep " " (builtins.map (d: "'${d}'") cfg.watchDirs); 196 | 197 | cmd = "${cfg.package}/bin/dsc ${globv} watch ${argv} ${dirs}"; 198 | in { 199 | description = "Docspell Watch Directory"; 200 | after = [ "networking.target" ]; 201 | wants = [ "networking.target" ]; 202 | wantedBy = [ "default.target" ]; 203 | serviceConfig = { 204 | Restart = "on-failure"; 205 | RestartSec = 5; 206 | }; 207 | path = [ ]; 208 | 209 | script = ''echo "Running for user: $(whoami)" && ${cmd}''; 210 | }; 211 | }; 212 | } 213 | -------------------------------------------------------------------------------- /nix/nixosConfigurations/default.nix: -------------------------------------------------------------------------------- 1 | { modulesPath, pkgs, lib, config, ... 
}: 2 | let 3 | full-text-search = { 4 | enabled = true; 5 | backend = "postgresql"; 6 | postgresql = { pg-config = { "german" = "my-germam"; }; }; 7 | }; 8 | watchDir = "/docspell-watch"; 9 | integrationHeaderValue = "test123"; 10 | in { 11 | # Common development config 12 | imports = [ (modulesPath + "/virtualisation/qemu-vm.nix") ]; 13 | services.openssh = { 14 | enable = true; 15 | settings.PermitRootLogin = "yes"; 16 | }; 17 | i18n = { defaultLocale = "en_US.UTF-8"; }; 18 | console.keyMap = "us"; 19 | 20 | services.xserver = { enable = false; }; 21 | 22 | networking = { 23 | hostName = "docspelltest"; 24 | firewall.allowedTCPPorts = [ 7880 ]; 25 | }; 26 | users.users.root.password = "root"; 27 | 28 | # Otherwise oomkiller kills docspell 29 | virtualisation.memorySize = 4096; 30 | 31 | virtualisation.forwardPorts = [ 32 | # SSH 33 | { 34 | from = "host"; 35 | host.port = 64022; 36 | guest.port = 22; 37 | } 38 | # Docspell 39 | { 40 | from = "host"; 41 | host.port = 64080; 42 | guest.port = 7880; 43 | } 44 | ]; 45 | system.stateVersion = "23.11"; 46 | # This slows down the build of a vm 47 | documentation.enable = false; 48 | 49 | # Add dsc to the environment 50 | environment.systemPackages = [ pkgs.dsc ]; 51 | # configure dsc-watch 52 | systemd.tmpfiles.rules = [ 53 | "d ${watchDir} 1777 root root 10d" # directory to watch 54 | ]; 55 | 56 | services.dsc-watch = { 57 | enable = true; 58 | docspell-url = "http://localhost:7880"; 59 | exclude-filter = null; 60 | watchDirs = [ 61 | watchDir # Note, dsc expects files to be in a subdirectory corresponding to a collective. 
There is no way to declaratively create a collective as of the time of writing 62 | ]; 63 | integration-endpoint = let 64 | headerFile = pkgs.writeText "int-header-file" '' 65 | Docspell-Integration:${integrationHeaderValue} 66 | ''; 67 | in { 68 | enabled = true; 69 | header-file = headerFile; 70 | }; 71 | }; 72 | 73 | # Docspell service configuration and its requirements 74 | services.docspell-joex = { 75 | enable = true; 76 | bind.address = "0.0.0.0"; 77 | base-url = "http://localhost:7878"; 78 | jvmArgs = [ "-J-Xmx1536M" ]; 79 | inherit full-text-search; 80 | }; 81 | services.docspell-restserver = { 82 | enable = true; 83 | bind.address = "0.0.0.0"; 84 | openid = lib.mkForce [ ]; 85 | backend = { 86 | addons.enabled = true; 87 | signup = { mode = "open"; }; 88 | }; 89 | integration-endpoint = { 90 | enabled = true; 91 | http-header = { 92 | enabled = true; 93 | header-value = integrationHeaderValue; 94 | }; 95 | }; 96 | inherit full-text-search; 97 | extraConfig = { 98 | files = { 99 | default-store = "database"; 100 | stores = { minio = { enabled = true; }; }; 101 | }; 102 | }; 103 | }; 104 | } 105 | -------------------------------------------------------------------------------- /nix/tests/buildvm.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | if [ "$1" = "-f" ]; then 4 | echo "Deleting current state image..." 
5 | rm *.qcow2 6 | fi 7 | 8 | nixos-rebuild build-vm \ 9 | -I nixos-config=./test-config.nix \ 10 | -I nixpkgs=https://github.com/NixOS/nixpkgs/archive/nixos-21.05.tar.gz 11 | -------------------------------------------------------------------------------- /nix/tests/startvm.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | set -e 4 | 5 | export QEMU_OPTS="-m 2048" 6 | export QEMU_NET_OPTS "hostfwd=tcp::7880-:7880" 7 | ./result/bin/run-dsctest-vm 8 | -------------------------------------------------------------------------------- /nix/tests/test-config.nix: -------------------------------------------------------------------------------- 1 | { config, pkgs, ... }: 2 | let 3 | dsc = import ../release.nix; 4 | docspellsrc = builtins.fetchTarball 5 | "https://github.com/eikek/docspell/archive/master.tar.gz"; 6 | docspell = import "${docspellsrc}/nix/release.nix"; 7 | in { 8 | imports = [ ../module.nix ] ++ docspell.modules; 9 | 10 | i18n = { defaultLocale = "en_US.UTF-8"; }; 11 | console.keyMap = "de"; 12 | 13 | users.users.root = { password = "root"; }; 14 | 15 | nixpkgs = { 16 | config = { 17 | packageOverrides = pkgs: 18 | let 19 | callPackage = pkgs.lib.callPackageWith (custom // pkgs); 20 | custom = { 21 | dsc = callPackage dsc { }; 22 | docspell = callPackage docspell.currentPkg { }; 23 | }; 24 | in custom; 25 | }; 26 | }; 27 | 28 | services.dsc-watch = { 29 | enable = true; 30 | verbose = false; 31 | delete-files = true; 32 | docspell-url = "http://localhost:7880"; 33 | integration-endpoint = { 34 | enabled = true; 35 | header = "Docspell-Integration:test123"; 36 | }; 37 | watchDirs = [ "/tmp/docs" ]; 38 | }; 39 | 40 | services.docspell-restserver = { 41 | enable = true; 42 | bind.address = "0.0.0.0"; 43 | integration-endpoint = { 44 | enabled = true; 45 | http-header = { 46 | enabled = true; 47 | header-value = "test123"; 48 | }; 49 | }; 50 | full-text-search = { enabled = false; }; 51 | }; 52 | 
53 | environment.systemPackages = [ pkgs.jq pkgs.telnet pkgs.htop pkgs.dsc ]; 54 | 55 | services.xserver = { enable = false; }; 56 | 57 | networking = { 58 | hostName = "dsctest"; 59 | firewall.allowedTCPPorts = [ 7880 ]; 60 | }; 61 | 62 | system.activationScripts = { 63 | initUploadDir = '' 64 | mkdir -p ${ 65 | builtins.concatStringsSep " " config.services.dsc-watch.watchDirs 66 | } 67 | 68 | ''; 69 | }; 70 | system.stateVersion = "21.05"; 71 | } 72 | -------------------------------------------------------------------------------- /renovate.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": [ 3 | "config:base" 4 | ], 5 | "lockFileMaintenance": { 6 | "enabled": true, 7 | "automerge": true 8 | }, 9 | "packageRules": [ 10 | { 11 | "matchDepTypes": [ 12 | "devDependencies" 13 | ], 14 | "matchPackagePatterns": [ 15 | "lint", 16 | "prettier" 17 | ], 18 | "automerge": true 19 | }, 20 | { 21 | "matchUpdateTypes": [ 22 | "minor", 23 | "patch" 24 | ], 25 | "automerge": true 26 | } 27 | ] 28 | } 29 | -------------------------------------------------------------------------------- /run-tests.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | set -e 3 | 4 | base=$(dirname "$(readlink -f "$0")") 5 | 6 | start_docker() { 7 | cd $base/ci 8 | docker-compose -f docker-compose.yml up -d 9 | sleep 5 10 | } 11 | 12 | stop_docker() { 13 | cd $base/ci 14 | docker-compose -f docker-compose.yml down 15 | docker-compose -f docker-compose.yml kill 16 | } 17 | 18 | trap "{ stop_docker ; }" EXIT 19 | 20 | start_docker 21 | 22 | cargo test --doc 23 | cargo test --test login 24 | cargo test 25 | -------------------------------------------------------------------------------- /shell.nix: -------------------------------------------------------------------------------- 1 | (import 2 | ( 3 | let 4 | lock = builtins.fromJSON (builtins.readFile ./flake.lock); 5 | in 6 | fetchTarball 
{ 7 | url = "https://github.com/edolstra/flake-compat/archive/${lock.nodes.flake-compat.locked.rev}.tar.gz"; 8 | sha256 = lock.nodes.flake-compat.locked.narHash; 9 | } 10 | ) 11 | { 12 | src = ./.; 13 | }).shellNix 14 | -------------------------------------------------------------------------------- /src/cli.rs: -------------------------------------------------------------------------------- 1 | //! Defines the command line interface. 2 | 3 | pub mod cmd; 4 | pub mod opts; 5 | pub mod sink; 6 | pub mod table; 7 | 8 | use crate::config::DsConfig; 9 | use clap::CommandFactory; 10 | 11 | use self::cmd::{Cmd, CmdError, Context}; 12 | use self::opts::{MainOpts, SubCommand}; 13 | 14 | /// Given the config and arguments, runs the corresponding command. 15 | pub fn execute_cmd(cfg: DsConfig, opts: MainOpts) -> Result<(), CmdError> { 16 | let ctx = Context::new(&opts.common_opts, &cfg)?; 17 | 18 | log::info!("Running command: {:?}", opts.subcmd); 19 | match &opts.subcmd { 20 | SubCommand::WriteDefaultConfig => { 21 | let cfg_file = DsConfig::write_default_file()?; 22 | eprintln!("Wrote config to {:}", cfg_file.display()); 23 | } 24 | SubCommand::GenerateCompletions(input) => { 25 | let mut app = MainOpts::command(); 26 | input.print_completions(&mut app); 27 | } 28 | SubCommand::Bookmark(input) => input.exec(&ctx)?, 29 | SubCommand::Item(input) => input.exec(&ctx)?, 30 | SubCommand::Watch(input) => input.exec(&ctx)?, 31 | SubCommand::Version(input) => input.exec(&ctx)?, 32 | SubCommand::Login(input) => input.exec(&ctx)?, 33 | SubCommand::Logout(input) => input.exec(&ctx)?, 34 | SubCommand::Search(input) => input.exec(&ctx)?, 35 | SubCommand::SearchSummary(input) => input.exec(&ctx)?, 36 | SubCommand::Source(input) => input.exec(&ctx)?, 37 | SubCommand::Admin(input) => input.exec(&ctx)?, 38 | SubCommand::FileExists(input) => input.exec(&ctx)?, 39 | SubCommand::GenInvite(input) => input.exec(&ctx)?, 40 | SubCommand::Register(input) => input.exec(&ctx)?, 41 | 
SubCommand::OpenItem(input) => input.exec(&ctx)?, 42 | SubCommand::Upload(input) => input.exec(&ctx)?, 43 | SubCommand::Download(input) => input.exec(&ctx)?, 44 | SubCommand::View(input) => input.exec(&ctx)?, 45 | SubCommand::Cleanup(input) => input.exec(&ctx)?, 46 | SubCommand::Export(input) => input.exec(&ctx)?, 47 | }; 48 | Ok(()) 49 | } 50 | -------------------------------------------------------------------------------- /src/cli/cmd.rs: -------------------------------------------------------------------------------- 1 | //! Defines all commands of the cli. 2 | //! 3 | //! A command is defined by the trait [`Cmd`]. Besides the type it is 4 | //! defined on, it expects a [`Context`] argument which contains the 5 | //! configuration file, the common options and an instance of the 6 | //! [`crate::http::Client`]. 7 | //! 8 | //! Each command defines its inputs via [clap](https://clap.rs) and 9 | //! implements for this type the `Cmd` trait. Each input type is 10 | //! referenced in the subcommand enum. 11 | 12 | pub mod admin; 13 | pub mod bookmark; 14 | pub mod cleanup; 15 | pub mod download; 16 | pub mod export; 17 | pub mod file_exists; 18 | pub mod generate_completions; 19 | pub mod geninvite; 20 | pub mod item; 21 | pub mod login; 22 | pub mod logout; 23 | pub mod open_item; 24 | pub mod register; 25 | pub mod search; 26 | pub mod search_summary; 27 | pub mod source; 28 | pub mod upload; 29 | pub mod version; 30 | pub mod view; 31 | pub mod watch; 32 | 33 | use std::path::PathBuf; 34 | use std::str::FromStr; 35 | 36 | use super::opts::Format; 37 | use super::sink::{Error as SinkError, Sink}; 38 | use crate::cli; 39 | use crate::cli::opts::CommonOpts; 40 | use crate::config::{ConfigError, DsConfig}; 41 | use crate::http::proxy::ProxySetting; 42 | use crate::http::{self, Client}; 43 | use serde::Serialize; 44 | use snafu::{ResultExt, Snafu}; 45 | 46 | /// A command for the cli. 47 | /// 48 | /// The [`Context`] argument is defined for all commands. 
/// A command for the cli.
///
/// The [`Context`] argument is defined for all commands.
pub trait Cmd {
    // Error type specific to each command implementation.
    type CmdError;

    fn exec<'a>(&self, args: &'a Context) -> Result<(), Self::CmdError>;
}

/// An environment for running a command.
///
/// It has by default access to the configuration and the common
/// options. The http client is also provided.
pub struct Context<'a> {
    // Common options given on the command line.
    pub opts: &'a CommonOpts,
    // The loaded configuration file.
    pub cfg: &'a DsConfig,
    // Pre-configured http client (base url, proxy, TLS settings).
    pub client: Client,
}

impl Context<'_> {
    /// Creates a context, building the http client from the merged
    /// cli/config settings. Fails when the client cannot be
    /// constructed (wrapped as [`CmdError::ContextCreate`]).
    pub fn new<'a>(opts: &'a CommonOpts, cfg: &'a DsConfig) -> Result<Context<'a>, CmdError> {
        let client = Client::new(
            docspell_url(opts, cfg),
            proxy_settings(opts, cfg),
            &extra_certificate(opts, cfg),
            accept_invalid_certs(opts, cfg),
        )
        .context(ContextCreateSnafu)?;
        Ok(Context { opts, cfg, client })
    }

    // The effective Docspell base url (see `docspell_url` for precedence).
    fn base_url(&self) -> String {
        docspell_url(self.opts, self.cfg)
    }

    // Serializes `value` in the selected output format via `Sink`.
    fn write_result<A: Serialize>(&self, value: A) -> Result<(), SinkError> {
        let fmt = self.format();
        Sink::write_value(fmt, &value)
    }

    // Output format: cli option wins over the configured default.
    fn format(&self) -> Format {
        self.opts.format.unwrap_or(self.cfg.default_format)
    }
}

/// Resolves the Docspell base url with precedence:
/// cli argument > `DSC_DOCSPELL_URL` env var > config file.
fn docspell_url(opts: &CommonOpts, cfg: &DsConfig) -> String {
    match &opts.docspell_url {
        Some(u) => {
            log::debug!("Use docspell url from arguments: {}", u);
            u.clone()
        }
        None => match std::env::var(DSC_DOCSPELL_URL).ok() {
            Some(u) => {
                log::debug!("Use docspell url from env: {}", u);
                u
            }
            None => {
                log::debug!("Use docspell url from config: {}", cfg.docspell_url);
                cfg.docspell_url.clone()
            }
        },
    }
}

// Invalid TLS certificates are accepted when either the cli flag or
// the config option (default: false) says so.
fn accept_invalid_certs(opts: &CommonOpts, cfg: &DsConfig) -> bool {
    opts.accept_invalid_certificates || cfg.accept_invalid_certificates.unwrap_or(false)
}
118 | } 119 | 120 | fn proxy_settings(opts: &CommonOpts, cfg: &DsConfig) -> ProxySetting { 121 | let user = opts.proxy_user.clone().or_else(|| cfg.proxy_user.clone()); 122 | let pass = opts 123 | .proxy_password 124 | .clone() 125 | .or_else(|| cfg.proxy_password.clone()); 126 | let prx = opts.proxy.clone().or_else(|| match &cfg.proxy { 127 | None => None, 128 | Some(str) => cli::opts::ProxySetting::from_str(str).ok(), 129 | }); 130 | 131 | log::debug!("Using proxy: {:?} @ {:?}", user, prx); 132 | CommonOpts::to_proxy_setting(&prx, user, pass) 133 | } 134 | 135 | #[derive(Debug, Snafu)] 136 | pub enum CmdError { 137 | #[snafu(display("Bookmark - {}", source))] 138 | Bookmark { source: bookmark::Error }, 139 | 140 | #[snafu(display("ContextCreate - {}", source))] 141 | ContextCreate { source: http::Error }, 142 | 143 | #[snafu(display("Export - {}", source))] 144 | Export { source: export::Error }, 145 | 146 | #[snafu(display("Watch - {}", source))] 147 | Watch { source: watch::Error }, 148 | 149 | #[snafu(display("Upload - {}", source))] 150 | Upload { source: upload::Error }, 151 | 152 | #[snafu(display("Admin - {}", source))] 153 | Admin { source: admin::Error }, 154 | 155 | #[snafu(display("Cleanup - {}", source))] 156 | Cleanup { source: cleanup::Error }, 157 | 158 | #[snafu(display("Download - {}", source))] 159 | Download { source: download::Error }, 160 | 161 | #[snafu(display("FileExists - {}", source))] 162 | FileExists { source: file_exists::Error }, 163 | 164 | #[snafu(display("GenInvite - {}", source))] 165 | GenInvite { source: geninvite::Error }, 166 | 167 | #[snafu(display("Item - {}", source))] 168 | Item { source: item::Error }, 169 | 170 | #[snafu(display("Login - {}", source))] 171 | Login { source: login::Error }, 172 | 173 | #[snafu(display("Logout - {}", source))] 174 | Logout { source: logout::Error }, 175 | 176 | #[snafu(display("OpenItem - {}", source))] 177 | OpenItem { source: open_item::Error }, 178 | 179 | #[snafu(display("Register - 
{}", source))] 180 | Register { source: register::Error }, 181 | 182 | #[snafu(display("Search - {}", source))] 183 | Search { source: search::Error }, 184 | 185 | #[snafu(display("SearchSummary - {}", source))] 186 | SearchSummary { source: search_summary::Error }, 187 | 188 | #[snafu(display("Source - {}", source))] 189 | Source { source: source::Error }, 190 | 191 | #[snafu(display("Version - {}", source))] 192 | Version { source: version::Error }, 193 | 194 | #[snafu(display("View - {}", source))] 195 | View { source: view::Error }, 196 | 197 | #[snafu(display("WriteConfig - {}", source))] 198 | WriteConfig { source: ConfigError }, 199 | 200 | #[snafu(display("{}", source))] 201 | WriteSink { source: SinkError }, 202 | } 203 | 204 | impl From for CmdError { 205 | fn from(source: bookmark::Error) -> Self { 206 | CmdError::Bookmark { source } 207 | } 208 | } 209 | 210 | impl From for CmdError { 211 | fn from(source: open_item::Error) -> Self { 212 | CmdError::OpenItem { source } 213 | } 214 | } 215 | impl From for CmdError { 216 | fn from(source: ConfigError) -> Self { 217 | CmdError::WriteConfig { source } 218 | } 219 | } 220 | impl From for CmdError { 221 | fn from(source: version::Error) -> Self { 222 | CmdError::Version { source } 223 | } 224 | } 225 | impl From for CmdError { 226 | fn from(source: login::Error) -> Self { 227 | CmdError::Login { source } 228 | } 229 | } 230 | impl From for CmdError { 231 | fn from(source: logout::Error) -> Self { 232 | CmdError::Logout { source } 233 | } 234 | } 235 | impl From for CmdError { 236 | fn from(source: search::Error) -> Self { 237 | CmdError::Search { source } 238 | } 239 | } 240 | impl From for CmdError { 241 | fn from(source: file_exists::Error) -> Self { 242 | CmdError::FileExists { source } 243 | } 244 | } 245 | impl From for CmdError { 246 | fn from(source: geninvite::Error) -> Self { 247 | CmdError::GenInvite { source } 248 | } 249 | } 250 | impl From for CmdError { 251 | fn from(source: register::Error) -> 
Self { 252 | CmdError::Register { source } 253 | } 254 | } 255 | impl From for CmdError { 256 | fn from(source: search_summary::Error) -> Self { 257 | CmdError::SearchSummary { source } 258 | } 259 | } 260 | impl From for CmdError { 261 | fn from(source: source::Error) -> Self { 262 | CmdError::Source { source } 263 | } 264 | } 265 | impl From for CmdError { 266 | fn from(source: item::Error) -> Self { 267 | CmdError::Item { source } 268 | } 269 | } 270 | impl From for CmdError { 271 | fn from(source: admin::Error) -> Self { 272 | CmdError::Admin { source } 273 | } 274 | } 275 | impl From for CmdError { 276 | fn from(source: download::Error) -> Self { 277 | CmdError::Download { source } 278 | } 279 | } 280 | impl From for CmdError { 281 | fn from(source: view::Error) -> Self { 282 | CmdError::View { source } 283 | } 284 | } 285 | impl From for CmdError { 286 | fn from(source: cleanup::Error) -> Self { 287 | CmdError::Cleanup { source } 288 | } 289 | } 290 | impl From for CmdError { 291 | fn from(source: upload::Error) -> Self { 292 | CmdError::Upload { source } 293 | } 294 | } 295 | impl From for CmdError { 296 | fn from(source: watch::Error) -> Self { 297 | CmdError::Watch { source } 298 | } 299 | } 300 | impl From for CmdError { 301 | fn from(source: export::Error) -> Self { 302 | CmdError::Export { source } 303 | } 304 | } 305 | 306 | const DSC_DOCSPELL_URL: &str = "DSC_DOCSPELL_URL"; 307 | -------------------------------------------------------------------------------- /src/cli/cmd/admin.rs: -------------------------------------------------------------------------------- 1 | pub mod convert_all_pdfs; 2 | pub mod disable_2fa; 3 | pub mod file_clone_repository; 4 | pub mod file_integrity_check; 5 | pub mod generate_previews; 6 | pub mod recreate_index; 7 | pub mod reset_password; 8 | 9 | use clap::Parser; 10 | use snafu::{ResultExt, Snafu}; 11 | 12 | use super::{Cmd, Context}; 13 | 14 | /// Admin commands. 
///
/// These commands require the admin secret from the server config
/// file.
#[derive(Parser, std::fmt::Debug)]
pub struct Input {
    /// This secret is required to access them. If not given here, it
    /// is taken from the config file.
    #[arg(short, long)]
    pub admin_secret: Option<String>,

    #[clap(subcommand)]
    pub subcmd: AdminCommand,
}

// Errors of the admin command; one variant per subcommand, each
// wrapping the subcommand's own error type.
#[derive(Debug, Snafu)]
pub enum Error {
    GeneratePreview {
        source: generate_previews::Error,
    },
    RecreateIndex {
        source: recreate_index::Error,
    },
    ResetPassword {
        source: reset_password::Error,
    },
    ConvertAllPdfs {
        source: convert_all_pdfs::Error,
    },
    Disable2FA {
        source: disable_2fa::Error,
    },
    CloneFileRepo {
        source: file_clone_repository::Error,
    },
    FileIntegrityCheck {
        source: file_integrity_check::Error,
    },
}

// The available admin subcommands. Note: clap derives the cli name
// from the variant name; Disable2FA needs an explicit one because the
// default kebab-case rendering would differ.
#[derive(Parser, Debug)]
pub enum AdminCommand {
    #[command(version)]
    GeneratePreviews(generate_previews::Input),

    #[command(version)]
    RecreateIndex(recreate_index::Input),

    #[command(version)]
    ResetPassword(reset_password::Input),

    #[command(version)]
    ConvertAllPdfs(convert_all_pdfs::Input),

    #[command(name = "disable-2fa")]
    #[command(version)]
    Disable2FA(disable_2fa::Input),

    #[command(version)]
    CloneFileRepository(file_clone_repository::Input),

    #[command(version)]
    FileIntegrityCheck(file_integrity_check::Input),
}
ctx).context(ResetPasswordSnafu), 89 | AdminCommand::ConvertAllPdfs(input) => { 90 | input.exec(self, ctx).context(ConvertAllPdfsSnafu) 91 | } 92 | AdminCommand::Disable2FA(input) => input.exec(self, ctx).context(Disable2FASnafu), 93 | AdminCommand::CloneFileRepository(input) => { 94 | input.exec(self, ctx).context(CloneFileRepoSnafu) 95 | } 96 | AdminCommand::FileIntegrityCheck(input) => { 97 | input.exec(self, ctx).context(FileIntegrityCheckSnafu) 98 | } 99 | } 100 | } 101 | } 102 | 103 | pub trait AdminCmd { 104 | type CmdError; 105 | 106 | fn exec<'a>(&self, admin_opts: &'a Input, args: &'a Context) -> Result<(), Self::CmdError>; 107 | } 108 | 109 | fn get_secret(opts: &Input, ctx: &Context) -> Option { 110 | let secret = opts 111 | .admin_secret 112 | .as_ref() 113 | .or(ctx.cfg.admin_secret.as_ref()) 114 | .map(String::clone); 115 | 116 | if secret.is_some() && ctx.opts.verbose > 2 { 117 | log::debug!("Using secret: {:?}", secret); 118 | } 119 | 120 | secret 121 | } 122 | -------------------------------------------------------------------------------- /src/cli/cmd/admin/convert_all_pdfs.rs: -------------------------------------------------------------------------------- 1 | use clap::Parser; 2 | use snafu::{ResultExt, Snafu}; 3 | 4 | use super::AdminCmd; 5 | use super::Context; 6 | use crate::cli::sink::Error as SinkError; 7 | use crate::http::Error as HttpError; 8 | 9 | /// Submits a task to convert all pdfs via the configured tool (by 10 | /// default ocrmypdf). 
#[derive(Parser, std::fmt::Debug)]
pub struct Input {}

impl AdminCmd for Input {
    type CmdError = Error;

    // Requires the admin secret (cli flag or config file); submits the
    // convert-all-pdfs task and prints the server's response.
    fn exec(&self, admin_opts: &super::Input, ctx: &Context) -> Result<(), Error> {
        let secret = super::get_secret(admin_opts, ctx).ok_or(Error::NoAdminSecret)?;
        let result = ctx
            .client
            .admin_convert_all_pdfs(secret)
            .context(HttpClientSnafu)?;
        ctx.write_result(result).context(WriteResultSnafu)?;
        Ok(())
    }
}

/// Errors of the convert-all-pdfs admin command.
#[derive(Debug, Snafu)]
pub enum Error {
    #[snafu(display("An http error occurred: {}", source))]
    HttpClient { source: HttpError },

    #[snafu(display("Error writing data: {}", source))]
    WriteResult { source: SinkError },

    #[snafu(display("No admin secret provided"))]
    NoAdminSecret,
}
#[snafu(display("Error writing data: {}", source))] 33 | WriteResult { source: SinkError }, 34 | 35 | #[snafu(display("No admin secret provided"))] 36 | NoAdminSecret, 37 | } 38 | 39 | pub fn disable_2fa( 40 | input: &Input, 41 | admin_opts: &super::Input, 42 | ctx: &Context, 43 | ) -> Result { 44 | let secret = super::get_secret(admin_opts, ctx).ok_or(Error::NoAdminSecret)?; 45 | let account = Account { 46 | account: input.account.clone(), 47 | }; 48 | ctx.client 49 | .admin_reset_otp(secret, &account) 50 | .context(HttpClientSnafu) 51 | } 52 | -------------------------------------------------------------------------------- /src/cli/cmd/admin/file_clone_repository.rs: -------------------------------------------------------------------------------- 1 | use clap::Parser; 2 | use snafu::{ResultExt, Snafu}; 3 | 4 | use super::AdminCmd; 5 | use super::Context; 6 | use crate::cli::sink::Error as SinkError; 7 | use crate::http::payload::FileCloneRequest; 8 | use crate::http::Error as HttpError; 9 | 10 | /// Submits a task to clone the default file repository to a different 11 | /// one. 
#[derive(Parser, std::fmt::Debug)]
pub struct Input {
    // Target repositories to clone the default file repository into
    // (maps to `FileCloneRequest::target_repositories`).
    #[arg(long, short)]
    pub target: Vec<String>,
}

impl AdminCmd for Input {
    type CmdError = Error;

    // Requires the admin secret; submits the clone task and prints the
    // server's response.
    fn exec(&self, admin_opts: &super::Input, ctx: &Context) -> Result<(), Error> {
        let secret = super::get_secret(admin_opts, ctx).ok_or(Error::NoAdminSecret)?;
        let req = FileCloneRequest {
            target_repositories: self.target.clone(),
        };
        log::info!("Sending task to clone file repository to: {:?}", req);
        let result = ctx
            .client
            .admin_files_clone_repository(secret, &req)
            .context(HttpClientSnafu)?;
        ctx.write_result(result).context(WriteResultSnafu)?;
        Ok(())
    }
}

/// Errors of the clone-file-repository admin command.
#[derive(Debug, Snafu)]
pub enum Error {
    #[snafu(display("An http error occurred: {}", source))]
    HttpClient { source: HttpError },

    #[snafu(display("Error writing data: {}", source))]
    WriteResult { source: SinkError },

    #[snafu(display("No admin secret provided"))]
    NoAdminSecret,
}
13 | #[derive(Parser, std::fmt::Debug)] 14 | pub struct Input { 15 | #[arg(long, short)] 16 | pub collective: Option, 17 | } 18 | 19 | impl AdminCmd for Input { 20 | type CmdError = Error; 21 | 22 | fn exec(&self, admin_opts: &super::Input, ctx: &Context) -> Result<(), Error> { 23 | let secret = super::get_secret(admin_opts, ctx).ok_or(Error::NoAdminSecret)?; 24 | let req = FileIntegrityCheckRequest { 25 | collective: self.collective.clone(), 26 | }; 27 | log::info!("Sending task to clone file repository to: {:?}", req); 28 | let result = ctx 29 | .client 30 | .admin_files_integrity_check(secret, &req) 31 | .context(HttpClientSnafu)?; 32 | ctx.write_result(result).context(WriteResultSnafu)?; 33 | Ok(()) 34 | } 35 | } 36 | 37 | #[derive(Debug, Snafu)] 38 | pub enum Error { 39 | #[snafu(display("An http error occurred: {}", source))] 40 | HttpClient { source: HttpError }, 41 | 42 | #[snafu(display("Error writing data: {}", source))] 43 | WriteResult { source: SinkError }, 44 | 45 | #[snafu(display("No admin secret provided"))] 46 | NoAdminSecret, 47 | } 48 | -------------------------------------------------------------------------------- /src/cli/cmd/admin/generate_previews.rs: -------------------------------------------------------------------------------- 1 | use clap::Parser; 2 | use snafu::{ResultExt, Snafu}; 3 | 4 | use super::AdminCmd; 5 | use super::Context; 6 | use crate::cli::sink::Error as SinkError; 7 | use crate::http::payload::BasicResult; 8 | use crate::http::Error as HttpError; 9 | 10 | /// Submits a task to generate preview images of all files. 
#[derive(Parser, std::fmt::Debug)]
pub struct Input {}

impl AdminCmd for Input {
    type CmdError = Error;

    // Delegates to the free `generate_previews` function and prints
    // the server's response.
    fn exec(&self, admin_opts: &super::Input, ctx: &Context) -> Result<(), Error> {
        let result = generate_previews(admin_opts, ctx)?;

        ctx.write_result(result).context(WriteResultSnafu)?;
        Ok(())
    }
}

/// Errors of the generate-previews admin command.
#[derive(Debug, Snafu)]
pub enum Error {
    #[snafu(display("An http error occurred: {}", source))]
    HttpClient { source: HttpError },

    #[snafu(display("Error writing data: {}", source))]
    WriteResult { source: SinkError },

    #[snafu(display("No admin secret provided"))]
    NoAdminSecret,
}

/// Submits the task to generate preview images of all files.
///
/// Fails with [`Error::NoAdminSecret`] when no admin secret is
/// available from cli or config.
pub fn generate_previews(admin_opts: &super::Input, ctx: &Context) -> Result<BasicResult, Error> {
    let secret = super::get_secret(admin_opts, ctx).ok_or(Error::NoAdminSecret)?;
    ctx.client
        .admin_generate_previews(secret)
        .context(HttpClientSnafu)
}
#[derive(Parser, std::fmt::Debug)]
pub struct Input {}

impl AdminCmd for Input {
    type CmdError = Error;

    // Delegates to the free `recreate_index` function and prints the
    // server's response.
    fn exec(&self, admin_opts: &super::Input, ctx: &Context) -> Result<(), Error> {
        let result = recreate_index(admin_opts, ctx)?;
        ctx.write_result(result).context(WriteResultSnafu)?;
        Ok(())
    }
}

/// Errors of the recreate-index admin command.
#[derive(Debug, Snafu)]
pub enum Error {
    #[snafu(display("An http error occurred: {}", source))]
    HttpClient { source: HttpError },

    #[snafu(display("Error writing data: {}", source))]
    WriteResult { source: SinkError },

    #[snafu(display("No admin secret provided"))]
    NoAdminSecret,
}

/// Submits the task to re-create the entire fulltext search index.
///
/// Fails with [`Error::NoAdminSecret`] when no admin secret is
/// available from cli or config.
pub fn recreate_index(admin_opts: &super::Input, ctx: &Context) -> Result<BasicResult, Error> {
    let secret = super::get_secret(admin_opts, ctx).ok_or(Error::NoAdminSecret)?;
    ctx.client
        .admin_recreate_index(secret)
        .context(HttpClientSnafu)
}
#[derive(Parser, std::fmt::Debug)]
pub struct Input {
    // Account whose password should be reset.
    #[arg(long, short, value_hint = ValueHint::Username)]
    pub account: String,
}

impl AdminCmd for Input {
    type CmdError = Error;

    // Delegates to the free `reset_password` function and prints the
    // server's response (which includes the new password).
    fn exec(&self, admin_opts: &super::Input, ctx: &Context) -> Result<(), Error> {
        let result = reset_password(self, admin_opts, ctx)?;
        ctx.write_result(result).context(WriteResultSnafu)?;
        Ok(())
    }
}

/// Errors of the reset-password admin command.
#[derive(Debug, Snafu)]
pub enum Error {
    #[snafu(display("An http error occurred: {}", source))]
    HttpClient { source: HttpError },

    #[snafu(display("Error writing data: {}", source))]
    WriteResult { source: SinkError },

    #[snafu(display("No admin secret provided"))]
    NoAdminSecret,
}

/// Resets the password of `input.account` via the admin endpoint.
///
/// Fails with [`Error::NoAdminSecret`] when no admin secret is
/// available from cli or config.
pub fn reset_password(
    input: &Input,
    admin_opts: &super::Input,
    ctx: &Context,
) -> Result<ResetPasswordResp, Error> {
    let secret = super::get_secret(admin_opts, ctx).ok_or(Error::NoAdminSecret)?;
    let account = Account {
        account: input.account.clone(),
    };
    ctx.client
        .admin_reset_password(secret, &account)
        .context(HttpClientSnafu)
}
#[derive(Parser, std::fmt::Debug)]
pub struct Input {
    #[command(subcommand)]
    pub subcmd: BookmarkCommand,
}

// The available bookmark subcommands.
#[derive(Parser, Debug)]
pub enum BookmarkCommand {
    #[command(version)]
    Get(get::Input),
}

/// Errors of the bookmark command, one variant per subcommand.
#[derive(Debug, Snafu)]
pub enum Error {
    Get { source: get::Error },
}

impl Cmd for Input {
    type CmdError = Error;

    // Dispatches to the selected subcommand, wrapping its error.
    fn exec(&self, ctx: &Context) -> Result<(), Error> {
        match &self.subcmd {
            BookmarkCommand::Get(input) => input.exec(ctx).context(GetSnafu),
        }
    }
}
#[derive(Parser, Debug)]
pub struct Input {}

/// Errors of the bookmark-get command.
#[derive(Debug, Snafu)]
pub enum Error {
    #[snafu(display("An http error occurred: {}", source))]
    HttpClient { source: HttpError },

    #[snafu(display("Error writing data: {}", source))]
    WriteResult { source: SinkError },

    // NOTE(review): this variant is never constructed in this module;
    // it looks carried over from another command — confirm before
    // removing (callers may match on it).
    #[snafu(display("The item was not found"))]
    ItemNotFound,
}

impl Cmd for Input {
    type CmdError = Error;

    // Fetches all bookmarks of the current session and writes them in
    // the selected output format.
    fn exec(&self, ctx: &Context) -> Result<(), Error> {
        let bookmarks = get_bookmarks(ctx)?;
        let bmlist = BookmarkList { bookmarks };
        ctx.write_result(bmlist).context(WriteResultSnafu)?;
        Ok(())
    }
}

// Queries the server for all bookmarks using the current session.
fn get_bookmarks(ctx: &Context) -> Result<Vec<Bookmark>, Error> {
    ctx.client
        .get_bookmarks(&ctx.opts.session)
        .context(HttpClientSnafu)
}
#[derive(Parser, Debug)]
pub struct Input {
    // How to authenticate / which endpoint to use (session, source id
    // or the integration endpoint).
    #[clap(flatten)]
    pub endpoint: EndpointOpts,

    // What to do with files that exist in Docspell: move or delete.
    #[clap(flatten)]
    pub action: FileAction,

    /// Each file is printed.
    #[arg(long)]
    pub dry_run: bool,

    /// One or more files/directories to check. Directories are
    /// traversed recursively.
    #[arg(required = true, num_args = 1)]
    pub files: Vec<PathBuf>,
}
Use --move or --delete."))] 62 | NoAction, 63 | 64 | #[snafu(display("Cannot get credentials: {}", source))] 65 | CredentialsRead { source: FileAuthError }, 66 | 67 | #[snafu(display("The target '{}' is not a directory", path.display()))] 68 | TargetNotDirectory { path: PathBuf }, 69 | 70 | #[snafu(display("Calculating digest of file {} failed: {}", path.display(), source))] 71 | DigestFail { 72 | source: std::io::Error, 73 | path: PathBuf, 74 | }, 75 | } 76 | 77 | impl Cmd for Input { 78 | type CmdError = Error; 79 | 80 | fn exec(&self, ctx: &Context) -> Result<(), Error> { 81 | check_args(self)?; 82 | let result = cleanup(self, ctx)?; 83 | ctx.write_result(BasicResult { 84 | success: true, 85 | message: format!("Cleaned up files: {}", result), 86 | }) 87 | .context(WriteResultSnafu)?; 88 | Ok(()) 89 | } 90 | } 91 | 92 | fn check_args(args: &Input) -> Result<(), Error> { 93 | match &args.action.move_to { 94 | Some(path) => { 95 | if path.is_dir() { 96 | Ok(()) 97 | } else { 98 | Err(Error::TargetNotDirectory { path: path.clone() }) 99 | } 100 | } 101 | None => { 102 | if args.action.delete { 103 | Ok(()) 104 | } else { 105 | Err(Error::NoAction) 106 | } 107 | } 108 | } 109 | } 110 | 111 | fn cleanup(args: &Input, ctx: &Context) -> Result { 112 | let mut counter = 0; 113 | for file in &args.files { 114 | if file.is_dir() { 115 | let pattern = file.join("**/*").display().to_string(); 116 | for child in glob::glob(&pattern).context(PatternSnafu)? 
{ 117 | let cf = child.context(GlobSnafu)?; 118 | if cf.is_file() { 119 | counter += cleanup_and_report(&cf, Some(file), args, ctx)?; 120 | } 121 | } 122 | } else { 123 | counter += cleanup_and_report(file, None, args, ctx)?; 124 | } 125 | } 126 | Ok(counter) 127 | } 128 | 129 | fn cleanup_and_report( 130 | file: &Path, 131 | root: Option<&PathBuf>, 132 | args: &Input, 133 | ctx: &Context, 134 | ) -> Result { 135 | eprintln!("Check file: {}", file.display()); 136 | let exists = check_file_exists(file, root, &args.endpoint, ctx)?; 137 | log::debug!("Checking file: {} (exists: {})", file.display(), exists); 138 | if exists { 139 | eprint!(" - exists: "); 140 | if !args.dry_run { 141 | let res = args.action.execute(file, root).context(FileActionSnafu)?; 142 | log::debug!("Action executed: {:?}", res); 143 | match res { 144 | FileActionResult::Deleted(_p) => { 145 | eprintln!("deleted."); 146 | return Ok(1); 147 | } 148 | FileActionResult::Moved(_p) => { 149 | eprintln!("moved."); 150 | return Ok(1); 151 | } 152 | FileActionResult::Nothing => { 153 | log::error!("No file action defined. 
/// Checks whether `path` is already present in Docspell by sending
/// its SHA-256 digest to the file-exists endpoint.
///
/// `root` (when given) is the traversal root; it is passed on so the
/// collective can be guessed from the first subdirectory when the
/// integration endpoint is used.
fn check_file_exists(
    path: &Path,
    root: Option<&PathBuf>,
    opts: &EndpointOpts,
    ctx: &Context,
) -> Result<bool, Error> {
    let dirs: Vec<PathBuf> = match root {
        Some(d) => vec![d.clone()],
        None => vec![],
    };

    // The closure supplies the collective guess lazily; it is only
    // consulted when the endpoint options require one.
    let fauth = opts
        .to_file_auth(ctx, &|| {
            file::collective_from_subdir(path, &dirs).unwrap_or(None)
        })
        .context(CredentialsReadSnafu)?;

    let hash = digest::digest_file_sha256(path).context(DigestFailSnafu { path })?;
    let result = ctx
        .client
        .file_exists(hash, &fauth)
        .context(HttpClientSnafu)?;

    Ok(result.exists)
}
See 26 | query: String, 27 | 28 | #[clap(flatten)] 29 | pub search_mode: SearchMode, 30 | 31 | /// Limit the number of results. 32 | #[arg(short, long, default_value = "60")] 33 | limit: u32, 34 | 35 | /// Skip the first n results. 36 | #[arg(short, long, default_value = "0")] 37 | offset: u32, 38 | 39 | /// Whether to overwrite already existing files. By default the 40 | /// download is skipped if there is already a file with the target 41 | /// name present. When using `--zip` this will remove an existing 42 | /// zip file before downloading. 43 | #[arg(long)] 44 | overwrite: bool, 45 | 46 | /// Download the original file instead of the converted PDF. 47 | #[arg(long, group = "kind")] 48 | original: bool, 49 | 50 | /// Download the original archive file to the attachment if 51 | /// available. Since often multiple files map to a single archive, 52 | /// the option `--dupes skip` can be used here. 53 | #[arg(long, group = "kind")] 54 | archive: bool, 55 | 56 | /// Creates a single zip file containing all files (flat). If this 57 | /// is enabled, the `target` option is expected to be the target 58 | /// zip file and not a directory. 59 | #[arg(long)] 60 | zip: bool, 61 | 62 | /// What to do when multiple files map to the same name. Can be 63 | /// one of: skip, rename. For rename, the target file is renamed 64 | /// by appending a number suffix. 65 | #[arg(long, value_enum, default_value = "rename")] 66 | dupes: DupeMode, 67 | 68 | /// Download everything into this directory. If not given, the 69 | /// current working directory is used. If `--zip` is used, this is 70 | /// the zip file to create. 
71 | #[arg(short, long)] 72 | target: Option, 73 | } 74 | impl Input { 75 | fn download_type(&self) -> &'static str { 76 | if self.original { 77 | "original" 78 | } else if self.archive { 79 | "archive" 80 | } else { 81 | "attachment" 82 | } 83 | } 84 | } 85 | 86 | #[derive(ValueEnum, Clone, Debug, PartialEq, Eq)] 87 | pub enum DupeMode { 88 | Skip, 89 | Rename, 90 | } 91 | 92 | #[derive(Debug, Snafu)] 93 | pub enum Error { 94 | #[snafu(display("An http error occurred: {}", source))] 95 | HttpClient { source: HttpError }, 96 | 97 | #[snafu(display("Error creating a file. {}", source))] 98 | CreateFile { source: std::io::Error }, 99 | 100 | #[snafu(display("Error creating zip file. {}", source))] 101 | Zip { source: zip::result::ZipError }, 102 | 103 | #[snafu(display("{}", given))] 104 | InvalidDupeMode { given: String }, 105 | 106 | #[snafu(display("Not a directory: {}", path.display()))] 107 | NotADirectory { path: PathBuf }, 108 | 109 | #[snafu(display("Not a file: {}", path.display()))] 110 | NotAFile { path: PathBuf }, 111 | } 112 | 113 | impl Cmd for Input { 114 | type CmdError = Error; 115 | 116 | fn exec(&self, ctx: &Context) -> Result<(), Error> { 117 | check_args(self)?; 118 | let req = SearchReq { 119 | offset: self.offset, 120 | limit: self.limit, 121 | with_details: true, 122 | query: self.query.clone(), 123 | search_mode: self.search_mode.to_mode(), 124 | }; 125 | let attachs = ctx 126 | .client 127 | .download_search(&ctx.opts.session, &req) 128 | .context(HttpClientSnafu)?; 129 | 130 | if attachs.is_empty() { 131 | println!("The search result is empty."); 132 | Ok(()) 133 | } else { 134 | match self.zip { 135 | true => { 136 | let zip_file = self 137 | .target 138 | .clone() 139 | .unwrap_or_else(|| PathBuf::from("docspell-files.zip")); 140 | if let Some(parent) = zip_file.parent() { 141 | if !parent.exists() { 142 | std::fs::create_dir_all(&parent).context(CreateFileSnafu)?; 143 | } 144 | } 145 | println!( 146 | "Zipping {}", 147 | action_msg(self, 
attachs.len(), zip_file.display()) 148 | ); 149 | 150 | download_zip(attachs, self, ctx, &zip_file) 151 | } 152 | false => { 153 | let parent = self 154 | .target 155 | .clone() 156 | .unwrap_or(std::env::current_dir().context(CreateFileSnafu)?); 157 | 158 | if !parent.exists() { 159 | std::fs::create_dir_all(&parent).context(CreateFileSnafu)?; 160 | } 161 | println!( 162 | "Downloading {}", 163 | action_msg(self, attachs.len(), parent.display()) 164 | ); 165 | 166 | download_flat(attachs, self, ctx, &parent) 167 | } 168 | } 169 | } 170 | } 171 | } 172 | 173 | fn download_flat( 174 | attachs: Downloads, 175 | opts: &Input, 176 | ctx: &Context, 177 | parent: &Path, 178 | ) -> Result<(), Error> { 179 | let mut dupes = Dupes::new(); 180 | for dref in attachs { 181 | let dlopt = if opts.original { 182 | dref.get_original(&ctx.client, &ctx.opts.session) 183 | } else if opts.archive { 184 | dref.get_archive(&ctx.client, &ctx.opts.session) 185 | } else { 186 | dref.get(&ctx.client, &ctx.opts.session) 187 | } 188 | .context(HttpClientSnafu)?; 189 | 190 | if let Some(mut dl) = dlopt { 191 | let org_name = dl.get_filename().unwrap_or(dref.name); 192 | let (fname, duplicate) = dupes.use_name(&org_name); 193 | let path = parent.join(&fname); 194 | if path.exists() && !opts.overwrite { 195 | println!("File exists: {}. 
Skipping.", path.display()); 196 | } else if duplicate && opts.dupes == DupeMode::Skip { 197 | println!("Skipping already downloaded file {}", org_name); 198 | } else { 199 | println!("Downloading {} …", &fname); 200 | let file = std::fs::File::create(path).context(CreateFileSnafu)?; 201 | let mut writer = std::io::BufWriter::new(file); 202 | dl.copy_to(&mut writer).context(HttpClientSnafu)?; 203 | } 204 | } else { 205 | println!( 206 | "No {} file for attachment {}", 207 | opts.download_type(), 208 | dref.name 209 | ); 210 | } 211 | } 212 | Ok(()) 213 | } 214 | 215 | fn download_zip( 216 | attachs: Downloads, 217 | opts: &Input, 218 | ctx: &Context, 219 | zip_file: &Path, 220 | ) -> Result<(), Error> { 221 | if zip_file.exists() && !opts.overwrite { 222 | println!("Zip file already exists! {}", zip_file.display()); 223 | } else { 224 | if zip_file.exists() { 225 | std::fs::remove_file(zip_file).context(CreateFileSnafu)?; 226 | } 227 | let zip = std::fs::File::create(zip_file).context(CreateFileSnafu)?; 228 | let mut zw = zip::ZipWriter::new(zip); 229 | let mut dupes = Dupes::new(); 230 | for dref in attachs { 231 | let dlopt = if opts.original { 232 | dref.get_original(&ctx.client, &ctx.opts.session) 233 | } else if opts.archive { 234 | dref.get_archive(&ctx.client, &ctx.opts.session) 235 | } else { 236 | dref.get(&ctx.client, &ctx.opts.session) 237 | } 238 | .context(HttpClientSnafu)?; 239 | 240 | if let Some(mut dl) = dlopt { 241 | let org_name = dl.get_filename().unwrap_or(dref.name); 242 | let (fname, duplicate) = dupes.use_name(&org_name); 243 | if duplicate && opts.dupes == DupeMode::Skip { 244 | println!("Skipping already downloaded file {}", org_name); 245 | } else { 246 | zw.start_file(&fname, zip::write::FileOptions::default()) 247 | .context(ZipSnafu)?; 248 | println!("Downloading {} …", &fname); 249 | dl.copy_to(&mut zw).context(HttpClientSnafu)?; 250 | } 251 | } else { 252 | println!( 253 | "No {} file for attachment {}", 254 | opts.download_type(), 
255 | dref.name 256 | ); 257 | } 258 | } 259 | zw.finish().context(ZipSnafu)?; 260 | 261 | if dupes.is_empty() { 262 | match std::fs::remove_file(zip_file) { 263 | Ok(_) => log::info!("Empty zip file deleted."), 264 | Err(e) => log::warn!("Empty zip file could not be deleted! {}", e), 265 | } 266 | } 267 | } 268 | Ok(()) 269 | } 270 | 271 | fn check_args(args: &Input) -> Result<(), Error> { 272 | match &args.target { 273 | Some(path) => { 274 | if args.zip && path.exists() && path.is_dir() { 275 | Err(Error::NotAFile { path: path.clone() }) 276 | } else if !args.zip && !path.is_dir() && path.exists() { 277 | Err(Error::NotADirectory { path: path.clone() }) 278 | } else { 279 | Ok(()) 280 | } 281 | } 282 | None => Ok(()), 283 | } 284 | } 285 | 286 | fn action_msg(opts: &Input, len: usize, target: Display) -> String { 287 | if opts.original { 288 | format!("original files of {} attachments into {}", len, target) 289 | } else if opts.archive { 290 | format!("archives of {} attachments into {}", len, target) 291 | } else { 292 | format!("{} attachments into {}", len, target) 293 | } 294 | } 295 | -------------------------------------------------------------------------------- /src/cli/cmd/export.rs: -------------------------------------------------------------------------------- 1 | use clap::{ArgGroup, Parser, ValueEnum}; 2 | use snafu::{ResultExt, Snafu}; 3 | use std::path::{Path, PathBuf}; 4 | 5 | use super::{Cmd, Context}; 6 | use crate::cli::opts::Format; 7 | use crate::cli::sink::Error as SinkError; 8 | use crate::cli::table::format_date_by; 9 | use crate::http::payload::{Item, SearchMode, SearchReq}; 10 | use crate::http::{Downloads, Error as HttpError}; 11 | use crate::util::file; 12 | 13 | #[derive(ValueEnum, Clone, Copy, Debug)] 14 | pub enum LinkNaming { 15 | /// Name links to items after the items' id. 16 | Id, 17 | /// Name links to items after the items' sanitized name.
18 | Name, 19 | } 20 | impl Default for LinkNaming { 21 | fn default() -> Self { 22 | LinkNaming::Id 23 | } 24 | } 25 | 26 | /// Exports data for a query. 27 | /// 28 | /// Searches for documents via a query and downloads all associated 29 | /// files and metadata. It downloads the original file and not the 30 | /// converted one. 31 | /// 32 | /// Use the `search-summary` command with the same query to get an 33 | /// idea how much is being downloaded. 34 | /// 35 | /// This commands creates a specific directory structure in the 36 | /// `target` directory. All files are stored below the `items` 37 | /// subdirectory. In there the first two letters of the item id are 38 | /// used to create another subdirectory. Then the complete item id is 39 | /// used for another subdirectory. In the last one, a file 40 | /// `metadata.json` is created that contains all the metadata to the 41 | /// item (tags, correspondents, etc). The attachments are all stored 42 | /// in the `files` subdirectory. 43 | /// 44 | /// The `--*-links` options can be used to create a symlink tree based 45 | /// on some metadata, like tags, correspondents or item date. 46 | #[derive(Parser, std::fmt::Debug)] 47 | #[command(group = ArgGroup::new("kind"))] 48 | pub struct Input { 49 | /// Limit the number of results. 50 | #[arg(short, long, default_value = "100")] 51 | limit: u32, 52 | 53 | /// Skip the first n results. 54 | #[arg(short, long, default_value = "0")] 55 | offset: u32, 56 | 57 | /// If `true`, all entries are exported. That is, the `offset` is 58 | /// incremented until all entries have been exported. 59 | #[arg(short, long)] 60 | all: bool, 61 | 62 | /// Overwrite already existing files. By default the download is 63 | /// skipped if there is already a file with the same name present. 64 | #[arg(long)] 65 | overwrite: bool, 66 | 67 | /// Specify after which of an items' property the links to it 68 | /// should be named. 
(Defaults to id) 69 | #[arg(long, value_enum)] 70 | link_naming: Option, 71 | 72 | /// Creates symlinks by item date. This may not work on some file 73 | /// systems. 74 | #[arg(long)] 75 | date_links: bool, 76 | 77 | /// Create symlinks by tag. This may not work on some file 78 | /// systems. 79 | #[arg(long)] 80 | tag_links: bool, 81 | 82 | /// Create symlinks by folder. This may not work on some 83 | /// file systems. 84 | #[arg(long)] 85 | folder_links: bool, 86 | 87 | /// Create symlinks by correspondent. This may not work on some 88 | /// file systems. 89 | #[arg(long)] 90 | correspondent_links: bool, 91 | 92 | /// If your Folder-names contain a custom delimiter used to represent 93 | /// flat hierarchy (e.g. "Financial/Invoices"), the delimiter you set 94 | /// with this option is used to split the Folder name into a path, which 95 | /// is then created on the file-system when using the folder-links export. 96 | #[arg(long)] 97 | folder_delimiter: Option, 98 | 99 | /// Download everything into this directory. 100 | #[arg(short, long)] 101 | target: PathBuf, 102 | 103 | /// The optional query string. If not given everything is 104 | /// exported. 
See 105 | query: Option, 106 | } 107 | 108 | #[derive(Debug, Snafu)] 109 | pub enum Error { 110 | #[snafu(display("An http error occurred: {}", source))] 111 | HttpClient { source: HttpError }, 112 | 113 | #[snafu(display("Error writing data: {}", source))] 114 | WriteResult { source: SinkError }, 115 | 116 | #[snafu(display("Error creating json: {}", source))] 117 | Json { source: serde_json::Error }, 118 | 119 | #[snafu(display("Error creating a file: {}", source))] 120 | CreateFile { source: std::io::Error }, 121 | 122 | #[snafu(display("Error deleting a file: {}", source))] 123 | DeleteFile { source: std::io::Error }, 124 | 125 | #[snafu(display("Error creating a symlink: {}", source))] 126 | Symlink { source: std::io::Error }, 127 | 128 | #[snafu(display("Not a directory: {}", path.display()))] 129 | NotADirectory { path: PathBuf }, 130 | } 131 | 132 | impl Cmd for Input { 133 | type CmdError = Error; 134 | 135 | fn exec(&self, ctx: &Context) -> Result<(), Error> { 136 | let mut req = SearchReq { 137 | offset: self.offset, 138 | limit: self.limit, 139 | with_details: true, 140 | query: self.query.clone().unwrap_or_else(|| "".into()), 141 | search_mode: SearchMode::Normal, 142 | }; 143 | 144 | let mut counter = 0; 145 | loop { 146 | let next = export(&req, self, ctx)?; 147 | counter += next; 148 | if self.all && next >= self.limit as usize { 149 | req.offset += req.limit; 150 | } else { 151 | break; 152 | } 153 | } 154 | eprintln!("Exported {} items.", counter); 155 | Ok(()) 156 | } 157 | } 158 | 159 | fn export(req: &SearchReq, opts: &Input, ctx: &Context) -> Result { 160 | let results = ctx 161 | .client 162 | .search(&ctx.opts.session, req) 163 | .context(HttpClientSnafu)?; 164 | let mut item_counter = 0; 165 | let items = opts.target.join("items"); 166 | let by_date = opts.target.join("by_date"); 167 | let by_tag = opts.target.join("by_tag"); 168 | let by_folder = opts.target.join("by_folder"); 169 | let by_corr = opts.target.join("by_correspondent"); 170 | 
for g in results.groups { 171 | for item in g.items { 172 | item_counter += 1; 173 | let item_dir = items.join(&item.id[0..2]).join(&item.id); 174 | export_item(&item, opts.overwrite, &item_dir, ctx)?; 175 | 176 | if opts.date_links { 177 | let link_dir = by_date.join(format_date_by(item.date, "%Y-%m")); 178 | make_links(&item, opts, &item_dir, &link_dir)?; 179 | } 180 | if opts.correspondent_links { 181 | let corr_opt = item.corr_org.as_ref().or(item.corr_person.as_ref()); 182 | if let Some(corr) = corr_opt { 183 | let link_dir = by_corr.join(file::safe_filename(&corr.name)); 184 | make_links(&item, opts, &item_dir, &link_dir)?; 185 | } 186 | } 187 | if opts.tag_links { 188 | for tag in &item.tags { 189 | let link_dir = by_tag.join(file::safe_filename(&tag.name)); 190 | make_links(&item, opts, &item_dir, &link_dir)?; 191 | } 192 | } 193 | if opts.folder_links { 194 | let folder_opt = item 195 | .folder 196 | .as_ref() 197 | .map(|f| file::safe_filepath(&f.name, &opts.folder_delimiter)); 198 | if let Some(folder_name) = folder_opt { 199 | let link_dir = by_folder.join(folder_name); 200 | make_links(&item, opts, &item_dir, &link_dir)?; 201 | } 202 | } 203 | export_message(item, ctx)?; 204 | } 205 | } 206 | Ok(item_counter) 207 | } 208 | 209 | fn export_message(item: Item, ctx: &Context) -> Result<(), Error> { 210 | match ctx.format() { 211 | Format::Tabular => eprintln!("Exported item: {}", item.name), 212 | Format::Csv => eprintln!("Exported item: {}", item.name), 213 | _ => ctx.write_result(item).context(WriteResultSnafu)?, 214 | } 215 | 216 | Ok(()) 217 | } 218 | 219 | fn export_item(item: &Item, overwrite: bool, item_dir: &Path, ctx: &Context) -> Result<(), Error> { 220 | log::debug!("Exporting item {}/{}", item.id, item.name); 221 | let meta_file = item_dir.join("metadata.json"); 222 | if meta_file.exists() && overwrite { 223 | log::debug!( 224 | "Remove existing meta file {}, due to overwrite=true", 225 | meta_file.display() 226 | ); 227 | 
std::fs::remove_file(&meta_file).context(DeleteFileSnafu)?; 228 | } 229 | if !item_dir.exists() { 230 | std::fs::create_dir_all(&item_dir).context(CreateFileSnafu)?; 231 | } 232 | if !&meta_file.exists() { 233 | let file = std::fs::File::create(&meta_file).context(CreateFileSnafu)?; 234 | let fw = std::io::BufWriter::new(file); 235 | serde_json::to_writer_pretty(fw, item).context(JsonSnafu)?; 236 | } else { 237 | log::debug!("Skip existing meta file: {}", meta_file.display()); 238 | } 239 | 240 | let file_dir = item_dir.join("files"); 241 | if !file_dir.exists() { 242 | std::fs::create_dir_all(&file_dir).context(CreateFileSnafu)?; 243 | } 244 | let dl = Downloads::from_item(item); 245 | for attach in dl { 246 | log::debug!("Saving attachment: {}/{}", attach.id, attach.name); 247 | let orig = attach 248 | .get_original(&ctx.client, &ctx.opts.session) 249 | .context(HttpClientSnafu)?; 250 | if let Some(mut orig_file) = orig { 251 | let file_name = orig_file.get_filename().unwrap_or(attach.name); 252 | let file_path = file_dir.join(file_name); 253 | if file_path.exists() && overwrite { 254 | log::debug!( 255 | "Removing existing {}, due to overwrite=true", 256 | file_path.display() 257 | ); 258 | /* fix: was `&meta_file`, which deleted the item's metadata file instead of the attachment; the stale attachment then still existed, so the `!file_path.exists()` check below skipped the re-download and `--overwrite` never actually replaced the file */ std::fs::remove_file(&file_path).context(DeleteFileSnafu)?; 259 | } 260 | if !file_path.exists() { 261 | let file = std::fs::File::create(&file_path).context(CreateFileSnafu)?; 262 | let mut fw = std::io::BufWriter::new(file); 263 | orig_file.copy_to(&mut fw).context(HttpClientSnafu)?; 264 | } else { 265 | log::debug!("Skipping existing file {}", file_path.display()); 266 | } 267 | } 268 | } 269 | Ok(()) 270 | } 271 | 272 | fn make_links( 273 | item: &Item, 274 | opts: &Input, 275 | link_target: &Path, 276 | link_name_path: &Path, 277 | ) -> Result<(), Error> { 278 | if !link_name_path.exists() { 279 | std::fs::create_dir_all(&link_name_path).context(CreateFileSnafu)?; 280 | } 281 | let link_filename = match opts.link_naming.unwrap_or_default() { 282 | LinkNaming::Id =>
item.id.clone(), 283 | LinkNaming::Name => file::safe_filename(&item.name), 284 | }; 285 | 286 | let rel_link_target = pathdiff::diff_paths(&link_target, &link_name_path).unwrap(); 287 | // Append the item's id as link name on the link's path. 288 | let mut link_name = link_name_path.join(&link_filename); 289 | let mut collision_counter = 1; 290 | let create_link = loop { 291 | // Use read_link() instead of exists(), because the latter traverses links and instead 292 | // checks whether the link-target exists. 293 | let link_data = link_name.read_link(); 294 | match link_data { 295 | // A link with this name already exists. 296 | Ok(link_data) => { 297 | if link_data == rel_link_target { 298 | // This link is pointing to the item we want to create a link for 299 | // skip depending on whether the "overwrite" property is set 300 | break opts.overwrite; 301 | } else { 302 | // this is simply a name collision (same name, different document). 303 | // append a number to the name, to remove conflict, then try again 304 | log::debug!("Found name collision for: \"{}\"", link_name.display()); 305 | link_name = 306 | link_name_path.join(format!("{} ({})", link_filename, collision_counter)); 307 | collision_counter += 1; 308 | } 309 | } 310 | // Link does not yet exist, all good, we can have a go 311 | _ => { 312 | break true; 313 | } 314 | } 315 | }; 316 | 317 | if create_link { 318 | file::symlink(rel_link_target, link_name).context(SymlinkSnafu)?; 319 | } else { 320 | log::debug!("Skip existing link: {}", link_target.display()); 321 | } 322 | Ok(()) 323 | } 324 | -------------------------------------------------------------------------------- /src/cli/cmd/file_exists.rs: -------------------------------------------------------------------------------- 1 | use clap::{Parser, ValueHint}; 2 | use snafu::{ResultExt, Snafu}; 3 | use std::path::{Path, PathBuf}; 4 | 5 | use crate::cli::opts::{EndpointOpts, FileAuthError}; 6 | use crate::cli::sink::Error as SinkError; 7 | use 
crate::http::payload::CheckFileResult; 8 | use crate::http::Error as HttpError; 9 | use crate::util::digest; 10 | 11 | use super::{Cmd, Context}; 12 | 13 | /// Checks if the given files exist in docspell. 14 | /// 15 | /// To check a file, an authenticated user is required, a source id or 16 | /// the secret to the integration endpoint. The latter allows to check 17 | /// across collectives. 18 | #[derive(Parser, Debug)] 19 | pub struct Input { 20 | #[clap(flatten)] 21 | pub endpoint: EndpointOpts, 22 | 23 | /// One or more files to check 24 | #[arg(required = true, num_args = 1, value_hint = ValueHint::FilePath)] 25 | pub files: Vec, 26 | } 27 | 28 | #[derive(Debug, Snafu)] 29 | pub enum Error { 30 | #[snafu(display("Cannot get credentials: {}", source))] 31 | CredentialsRead { source: FileAuthError }, 32 | 33 | #[snafu(display("Calculating digest of file {} failed: {}", path.display(), source))] 34 | DigestFail { 35 | source: std::io::Error, 36 | path: PathBuf, 37 | }, 38 | 39 | #[snafu(display("An http error occurred: {}", source))] 40 | HttpClient { source: HttpError }, 41 | 42 | #[snafu(display("Error writing data: {}", source))] 43 | WriteResult { source: SinkError }, 44 | } 45 | 46 | impl Cmd for Input { 47 | type CmdError = Error; 48 | 49 | fn exec(&self, ctx: &Context) -> Result<(), Error> { 50 | /* fix: was `self.files.capacity()` — the caller-provided Vec's allocation detail (>= len, arbitrary); `len()` is the actual upper bound on result count */ let mut results = Vec::with_capacity(self.files.len()); 51 | for file in &self.files { 52 | if file.is_file() { 53 | let result = check_file(file, &self.endpoint, ctx)?; 54 | results.push(result); 55 | } else { 56 | log::debug!("Ignoring directory: {}", file.display()); 57 | } 58 | } 59 | ctx.write_result(results).context(WriteResultSnafu)?; 60 | Ok(()) 61 | } 62 | } 63 | 64 | pub fn check_file( 65 | file: &Path, 66 | opts: &EndpointOpts, 67 | ctx: &Context, 68 | ) -> Result { 69 | let fa = opts 70 | .to_file_auth(ctx, &|| None) 71 | .context(CredentialsReadSnafu)?; 72 | let hash = digest::digest_file_sha256(file).context(DigestFailSnafu { path: file })?; 73
| let mut result = ctx.client.file_exists(hash, &fa).context(HttpClientSnafu)?; 74 | result.file = file.canonicalize().ok().map(|p| p.display().to_string()); 75 | Ok(result) 76 | } 77 | -------------------------------------------------------------------------------- /src/cli/cmd/generate_completions.rs: -------------------------------------------------------------------------------- 1 | use clap::{Command, Parser, ValueEnum}; 2 | use clap_complete::{generate, Generator, Shell}; 3 | 4 | /// Generates completions for some shells. 5 | /// 6 | #[derive(Parser, std::fmt::Debug)] 7 | pub struct Input { 8 | #[arg(long, value_enum)] 9 | pub shell: GeneratorChoice, 10 | } 11 | 12 | #[derive(ValueEnum, Clone, Debug, PartialEq)] 13 | pub enum GeneratorChoice { 14 | Bash, 15 | Elvish, 16 | Fish, 17 | #[value(name = "powershell")] 18 | PowerShell, 19 | Zsh, 20 | } 21 | 22 | impl Input { 23 | pub fn print_completions(&self, app: &mut Command) { 24 | match &self.shell { 25 | GeneratorChoice::Bash => generate_completions(Shell::Bash, app), 26 | GeneratorChoice::Elvish => generate_completions(Shell::Elvish, app), 27 | GeneratorChoice::Fish => generate_completions(Shell::Fish, app), 28 | GeneratorChoice::PowerShell => generate_completions(Shell::PowerShell, app), 29 | GeneratorChoice::Zsh => generate_completions(Shell::Zsh, app), 30 | } 31 | } 32 | } 33 | 34 | fn generate_completions(gen: G, app: &mut Command) { 35 | generate(gen, app, "dsc", &mut std::io::stdout()); 36 | } 37 | -------------------------------------------------------------------------------- /src/cli/cmd/geninvite.rs: -------------------------------------------------------------------------------- 1 | use clap::Parser; 2 | use snafu::{ResultExt, Snafu}; 3 | 4 | use super::{Cmd, Context}; 5 | use crate::cli::sink::Error as SinkError; 6 | use crate::http::payload::GenInvite; 7 | use crate::http::Error as HttpError; 8 | 9 | /// Generates a new invitation key. 
10 | /// 11 | /// The password can be found in the config file of the Docspell 12 | /// server. 13 | #[derive(Parser, Debug)] 14 | pub struct Input { 15 | #[arg(long, short)] 16 | password: String, 17 | } 18 | 19 | impl Cmd for Input { 20 | type CmdError = Error; 21 | 22 | fn exec(&self, ctx: &Context) -> Result<(), Error> { 23 | let req = GenInvite { 24 | password: self.password.clone(), 25 | }; 26 | let result = ctx.client.gen_invite(&req).context(HttpClientSnafu)?; 27 | ctx.write_result(result).context(WriteResultSnafu)?; 28 | Ok(()) 29 | } 30 | } 31 | 32 | #[derive(Debug, Snafu)] 33 | pub enum Error { 34 | #[snafu(display("An http error occurred: {}", source))] 35 | HttpClient { source: HttpError }, 36 | 37 | #[snafu(display("Error writing data: {}", source))] 38 | WriteResult { source: SinkError }, 39 | } 40 | -------------------------------------------------------------------------------- /src/cli/cmd/item.rs: -------------------------------------------------------------------------------- 1 | pub mod fields; 2 | pub mod get; 3 | pub mod tags; 4 | 5 | use clap::Parser; 6 | use snafu::{ResultExt, Snafu}; 7 | 8 | use super::{Cmd, Context}; 9 | 10 | /// Manage items. 
11 | #[derive(Parser, std::fmt::Debug)] 12 | pub struct Input { 13 | #[command(subcommand)] 14 | pub subcmd: ItemCommand, 15 | } 16 | 17 | #[derive(Parser, Debug)] 18 | pub enum ItemCommand { 19 | #[command(version)] 20 | Get(get::Input), 21 | 22 | #[command(version)] 23 | Tags(tags::Input), 24 | 25 | #[command(version)] 26 | Fields(fields::Input), 27 | } 28 | 29 | #[derive(Debug, Snafu)] 30 | pub enum Error { 31 | Get { source: get::Error }, 32 | Tags { source: tags::Error }, 33 | Fields { source: fields::Error }, 34 | } 35 | 36 | impl Cmd for Input { 37 | type CmdError = Error; 38 | 39 | fn exec(&self, ctx: &Context) -> Result<(), Error> { 40 | match &self.subcmd { 41 | ItemCommand::Get(input) => input.exec(ctx).context(GetSnafu), 42 | ItemCommand::Tags(input) => input.exec(ctx).context(TagsSnafu), 43 | ItemCommand::Fields(input) => input.exec(ctx).context(FieldsSnafu), 44 | } 45 | } 46 | } 47 | -------------------------------------------------------------------------------- /src/cli/cmd/item/fields.rs: -------------------------------------------------------------------------------- 1 | use clap::{ArgGroup, Parser}; 2 | use snafu::{ResultExt, Snafu}; 3 | 4 | use super::{Cmd, Context}; 5 | use crate::cli::sink::Error as SinkError; 6 | use crate::http::payload::{BasicResult, CustomFieldValue}; 7 | use crate::http::Error as HttpError; 8 | 9 | /// Set or remove field values for an item. 10 | #[derive(Parser, Debug)] 11 | #[command(group = ArgGroup::new("action"))] 12 | pub struct Input { 13 | /// The item id (can be abbreviated to a prefix) 14 | #[arg(long)] 15 | pub id: String, 16 | 17 | /// Set the value of the field. 18 | #[arg(long, group = "action")] 19 | pub set: Option, 20 | 21 | /// Remove the field from the item. 22 | #[arg(long, group = "action")] 23 | pub remove: bool, 24 | 25 | /// The field name. 
26 | #[arg(long)] 27 | pub name: String, 28 | } 29 | 30 | impl Input { 31 | fn to_action(&self) -> Result { 32 | if self.remove { 33 | Ok(Action::Remove) 34 | } else { 35 | match &self.set { 36 | Some(v) => Ok(Action::Set(v.clone())), 37 | None => Err(Error::NoAction), 38 | } 39 | } 40 | } 41 | } 42 | 43 | enum Action { 44 | Set(String), 45 | Remove, 46 | } 47 | 48 | #[derive(Debug, Snafu)] 49 | pub enum Error { 50 | #[snafu(display("An http error occurred: {}", source))] 51 | HttpClient { source: HttpError }, 52 | 53 | #[snafu(display("Error writing data: {}", source))] 54 | WriteResult { source: SinkError }, 55 | 56 | #[snafu(display("No action given"))] 57 | NoAction, 58 | } 59 | 60 | impl Cmd for Input { 61 | type CmdError = Error; 62 | 63 | fn exec(&self, ctx: &Context) -> Result<(), Error> { 64 | let result = match self.to_action()? { 65 | Action::Set(value) => set_field(&self.name, value, &self.id, ctx)?, 66 | Action::Remove => remove_field(self, ctx)?, 67 | }; 68 | ctx.write_result(result).context(WriteResultSnafu)?; 69 | Ok(()) 70 | } 71 | } 72 | 73 | fn set_field(name: &str, value: String, id: &str, ctx: &Context) -> Result { 74 | let fvalue = CustomFieldValue { 75 | field: name.to_string(), 76 | value, 77 | }; 78 | ctx.client 79 | .set_field(&ctx.opts.session, id, &fvalue) 80 | .context(HttpClientSnafu) 81 | } 82 | 83 | fn remove_field(opts: &Input, ctx: &Context) -> Result { 84 | ctx.client 85 | .remove_field(&ctx.opts.session, &opts.id, &opts.name) 86 | .context(HttpClientSnafu) 87 | } 88 | -------------------------------------------------------------------------------- /src/cli/cmd/item/get.rs: -------------------------------------------------------------------------------- 1 | use clap::Parser; 2 | use snafu::{ResultExt, Snafu}; 3 | 4 | use super::{Cmd, Context}; 5 | use crate::cli::sink::Error as SinkError; 6 | use crate::http::payload::ItemDetail; 7 | use crate::http::Error as HttpError; 8 | 9 | /// Gets details about one item. 
10 | #[derive(Parser, Debug)] 11 | pub struct Input { 12 | /// The item id (can be abbreviated to a prefix) 13 | pub id: String, 14 | } 15 | 16 | #[derive(Debug, Snafu)] 17 | pub enum Error { 18 | #[snafu(display("An http error occurred: {}", source))] 19 | HttpClient { source: HttpError }, 20 | 21 | #[snafu(display("Error writing data: {}", source))] 22 | WriteResult { source: SinkError }, 23 | 24 | #[snafu(display("The item was not found"))] 25 | ItemNotFound, 26 | } 27 | 28 | impl Cmd for Input { 29 | type CmdError = Error; 30 | 31 | fn exec(&self, ctx: &Context) -> Result<(), Error> { 32 | let item = get_item(self.id.as_str(), ctx)?; 33 | ctx.write_result(item).context(WriteResultSnafu)?; 34 | Ok(()) 35 | } 36 | } 37 | 38 | fn get_item(id: &str, ctx: &Context) -> Result { 39 | let result = ctx 40 | .client 41 | .get_item(&ctx.opts.session, id) 42 | .context(HttpClientSnafu)?; 43 | 44 | result.ok_or(Error::ItemNotFound) 45 | } 46 | -------------------------------------------------------------------------------- /src/cli/cmd/item/tags.rs: -------------------------------------------------------------------------------- 1 | use clap::{ArgGroup, Parser}; 2 | use snafu::{ResultExt, Snafu}; 3 | 4 | use super::{Cmd, Context}; 5 | use crate::cli::sink::Error as SinkError; 6 | use crate::http::payload::{BasicResult, StringList}; 7 | use crate::http::Error as HttpError; 8 | 9 | /// Add or remove tags for an item. 10 | #[derive(Parser, Debug)] 11 | #[command(group = ArgGroup::new("action"))] 12 | pub struct Input { 13 | /// The item id (can be abbreviated to a prefix) 14 | #[arg(long)] 15 | pub id: String, 16 | 17 | /// Add the given tags. 18 | #[arg(long, group = "action")] 19 | pub add: bool, 20 | 21 | /// Remove the given tags. 22 | #[arg(long, group = "action")] 23 | pub remove: bool, 24 | 25 | /// Replace all item tags with the given ones. 26 | #[arg(long, group = "action")] 27 | pub replace: bool, 28 | 29 | /// A list of tags. Can be ids or names. 
30 | #[arg(required = true, num_args = 1)] 31 | pub tags: Vec, 32 | } 33 | 34 | impl Input { 35 | fn to_action(&self) -> Result { 36 | if self.remove { 37 | Ok(Action::Remove) 38 | } else if self.replace { 39 | Ok(Action::Replace) 40 | } else if self.add { 41 | Ok(Action::Add) 42 | } else { 43 | Err(Error::NoAction) 44 | } 45 | } 46 | } 47 | 48 | enum Action { 49 | Add, 50 | Remove, 51 | Replace, 52 | } 53 | 54 | #[derive(Debug, Snafu)] 55 | pub enum Error { 56 | #[snafu(display("An http error occurred: {}", source))] 57 | HttpClient { source: HttpError }, 58 | 59 | #[snafu(display("Error writing data: {}", source))] 60 | WriteResult { source: SinkError }, 61 | 62 | #[snafu(display("No action given"))] 63 | NoAction, 64 | } 65 | 66 | impl Cmd for Input { 67 | type CmdError = Error; 68 | 69 | fn exec(&self, ctx: &Context) -> Result<(), Error> { 70 | let result = match self.to_action()? { 71 | Action::Add => add_tags(self, ctx)?, 72 | Action::Replace => replace_tags(self, ctx)?, 73 | Action::Remove => remove_tags(self, ctx)?, 74 | }; 75 | ctx.write_result(result).context(WriteResultSnafu)?; 76 | Ok(()) 77 | } 78 | } 79 | 80 | fn add_tags(opts: &Input, ctx: &Context) -> Result { 81 | let tags = StringList { 82 | items: opts.tags.clone(), 83 | }; 84 | ctx.client 85 | .link_tags(&ctx.opts.session, &opts.id, &tags) 86 | .context(HttpClientSnafu) 87 | } 88 | 89 | fn replace_tags(opts: &Input, ctx: &Context) -> Result { 90 | let tags = StringList { 91 | items: opts.tags.clone(), 92 | }; 93 | ctx.client 94 | .set_tags(&ctx.opts.session, &opts.id, &tags) 95 | .context(HttpClientSnafu) 96 | } 97 | 98 | fn remove_tags(opts: &Input, ctx: &Context) -> Result { 99 | let tags = StringList { 100 | items: opts.tags.clone(), 101 | }; 102 | ctx.client 103 | .remove_tags(&ctx.opts.session, &opts.id, &tags) 104 | .context(HttpClientSnafu) 105 | } 106 | -------------------------------------------------------------------------------- /src/cli/cmd/login.rs: 
-------------------------------------------------------------------------------- 1 | use super::{Cmd, Context}; 2 | use crate::cli::sink::Error as SinkError; 3 | use crate::http::payload::{AuthRequest, AuthResp}; 4 | use crate::http::Error as HttpError; 5 | 6 | use crate::util::pass; 7 | 8 | use clap::{ArgGroup, Parser, ValueHint}; 9 | use rsotp::TOTP; 10 | use snafu::{ResultExt, Snafu}; 11 | use std::io::Write; 12 | 13 | /// Performs a login given user credentials. 14 | /// 15 | /// The returned token is stored on disk in a session file and used 16 | /// for subsequent calls to secured api endpoints. If the token is 17 | /// near to expire, it is refreshed and the session file is updated. 18 | /// 19 | /// It is also possible to specify a session token instead. When a 20 | /// session token is given via options or env variable, the session 21 | /// file is not updated (no filesystem access occurs). 22 | #[derive(Parser, Debug, PartialEq)] 23 | #[command(group = ArgGroup::new("pass"))] 24 | pub struct Input { 25 | /// The account name. If not given here, it is looked up in the 26 | /// config file. 27 | #[arg(long, short, value_hint = ValueHint::Username)] 28 | user: Option, 29 | 30 | /// The password used for authentication in plain text. An 31 | /// environment variable DSC_PASSWORD can also be used. 32 | #[arg(long, group = "pass")] 33 | password: Option, 34 | 35 | /// An entry for the pass password manager. If this is given, the 36 | /// `password` option is ignored. 37 | #[arg(long, group = "pass")] 38 | pass_entry: Option, 39 | 40 | /// An entry for the pass password manager that contains the TOTP 41 | /// secret, so dsc can obtain the TOTP code automatically. If 42 | /// prefixed with `key:` the remaining part is looked up in the 43 | /// other `pass_entry` instead.
44 | #[arg(long)] 45 | pass_otp: Option, 46 | } 47 | 48 | #[derive(Debug, Snafu)] 49 | pub enum Error { 50 | #[snafu(display("An http error occurred: {}", source))] 51 | HttpClient { source: HttpError }, 52 | 53 | #[snafu(display("Error received from server: {}", source))] 54 | ReadResponse { source: reqwest::Error }, 55 | 56 | #[snafu(display("Retrieving password using pass failed: {}", source))] 57 | PassEntry { source: std::io::Error }, 58 | 59 | #[snafu(display("No pass entry given, but required"))] 60 | NoPassEntry, 61 | 62 | #[snafu(display("No password provided"))] 63 | NoPassword, 64 | 65 | #[snafu(display("No account name provided"))] 66 | NoAccount, 67 | 68 | #[snafu(display("Login failed"))] 69 | LoginFailed, 70 | 71 | #[snafu(display("Invalid password (non-unicode) in environment variable"))] 72 | InvalidPasswordEnv, 73 | 74 | #[snafu(display("Error writing data: {}", source))] 75 | WriteResult { source: SinkError }, 76 | } 77 | 78 | impl Cmd for Input { 79 | type CmdError = Error; 80 | fn exec(&self, ctx: &Context) -> Result<(), Error> { 81 | let mut result = login(self, ctx)?; 82 | if result.require_second_factor { 83 | log::info!("Account has two-factor auth enabled. Sending otp now."); 84 | result = login_otp(self, ctx)?; 85 | } 86 | 87 | ctx.write_result(result).context(WriteResultSnafu)?; 88 | Ok(()) 89 | } 90 | } 91 | 92 | pub fn login(opts: &Input, ctx: &Context) -> Result { 93 | let body = AuthRequest { 94 | account: get_account(opts, ctx)?, 95 | password: get_password(opts, ctx)?, 96 | remember_me: false, 97 | }; 98 | ctx.client.login(&body).context(HttpClientSnafu) 99 | } 100 | 101 | pub fn login_otp(opts: &Input, ctx: &Context) -> Result { 102 | let otp = get_otp(opts, ctx)?; 103 | ctx.client.login_otp(&otp).context(HttpClientSnafu) 104 | } 105 | 106 | /// Get the OTP code in this order: 107 | /// 108 | /// * Check options or the config for a otp pass entry. 
Obtain the 109 | /// secret and calculate the current OTP 110 | /// * Ask the user for the OTP 111 | fn get_otp(opts: &Input, ctx: &Context) -> Result { 112 | let totp_entry = opts 113 | .pass_otp 114 | .clone() 115 | .or_else(|| ctx.cfg.pass_otp_secret.clone()); 116 | 117 | match totp_entry { 118 | None => { 119 | print!("Authentication code: "); 120 | std::io::stdout().flush().context(PassEntrySnafu)?; 121 | let mut otp: String = String::new(); 122 | std::io::stdin() 123 | .read_line(&mut otp) 124 | .context(PassEntrySnafu)?; 125 | Ok(otp.trim().to_string()) 126 | } 127 | Some(name) => { 128 | log::debug!("Looking up TOTP secret via: {}", name); 129 | if let Some(secret) = name.strip_prefix("key:") { 130 | log::debug!("Looking up a line in {:?}", ctx.cfg.pass_entry); 131 | let pentry = ctx.cfg.pass_entry.clone().ok_or(Error::NoPassEntry)?; 132 | let otp_secret = pass::pass_key(&pentry, secret).context(PassEntrySnafu)?; 133 | let otp = TOTP::new(otp_secret).now(); 134 | Ok(otp.trim().to_string()) 135 | } else { 136 | log::debug!("Retrieve totp secret from separate entry"); 137 | let otp_secret = pass::pass_password(&name).context(PassEntrySnafu)?; 138 | let otp = TOTP::new(otp_secret).now(); 139 | Ok(otp.trim().to_string()) 140 | } 141 | } 142 | } 143 | } 144 | 145 | /// Get the password in this order: 146 | /// * Check options for password or pass_entry 147 | /// * Check environment variable DSC_PASSWORD 148 | /// * Check config file 149 | fn get_password(opts: &Input, ctx: &Context) -> Result { 150 | if let Some(pe) = &opts.pass_entry { 151 | log::debug!("Using given pass entry"); 152 | pass::pass_password(pe).context(PassEntrySnafu) 153 | } else if let Some(pw) = &opts.password { 154 | log::debug!("Using given plain password"); 155 | Ok(pw.clone()) 156 | } else { 157 | match std::env::var_os(DSC_PASSWORD) { 158 | Some(pw) => { 159 | log::debug!("Using password from environment variable."); 160 | pw.into_string().map_err(|_os| Error::InvalidPasswordEnv) 161 | } 
162 | None => match &ctx.cfg.pass_entry { 163 | Some(pe) => { 164 | log::debug!("Using pass_entry from config file."); 165 | pass::pass_password(pe).context(PassEntrySnafu) 166 | } 167 | None => Err(Error::NoPassword), 168 | }, 169 | } 170 | } 171 | } 172 | 173 | fn get_account(opts: &Input, ctx: &Context) -> Result { 174 | let acc = match &opts.user { 175 | Some(u) => Ok(u.clone()), 176 | None => ctx.cfg.default_account.clone().ok_or(Error::NoAccount), 177 | }; 178 | log::debug!("Using account: {:?}", &acc); 179 | acc 180 | } 181 | 182 | const DSC_PASSWORD: &str = "DSC_PASSWORD"; 183 | -------------------------------------------------------------------------------- /src/cli/cmd/logout.rs: -------------------------------------------------------------------------------- 1 | use clap::Parser; 2 | use snafu::{ResultExt, Snafu}; 3 | 4 | use super::{Cmd, Context}; 5 | use crate::cli::sink::Error as SinkError; 6 | use crate::http::payload::BasicResult; 7 | use crate::http::Error as HttpError; 8 | 9 | /// Removes the credentials file 10 | #[derive(Parser, Debug)] 11 | pub struct Input {} 12 | 13 | #[derive(Debug, Snafu)] 14 | pub enum Error { 15 | #[snafu(display("An http error occurred: {}", source))] 16 | HttpClient { source: HttpError }, 17 | 18 | #[snafu(display("Error writing data: {}", source))] 19 | WriteResult { source: SinkError }, 20 | } 21 | 22 | impl Cmd for Input { 23 | type CmdError = Error; 24 | 25 | fn exec(&self, ctx: &Context) -> Result<(), Error> { 26 | ctx.client.logout().context(HttpClientSnafu)?; 27 | let message = BasicResult { 28 | success: true, 29 | message: "Session deleted.".into(), 30 | }; 31 | ctx.write_result(message).context(WriteResultSnafu)?; 32 | Ok(()) 33 | } 34 | } 35 | -------------------------------------------------------------------------------- /src/cli/cmd/open_item.rs: -------------------------------------------------------------------------------- 1 | use clap::Parser; 2 | use prettytable::{row, Table}; 3 | use 
serde::{Deserialize, Serialize}; 4 | use snafu::{ResultExt, Snafu}; 5 | use std::path::{Path, PathBuf}; 6 | use webbrowser; 7 | 8 | use crate::cli::opts::EndpointOpts; 9 | use crate::cli::sink::{Error as SinkError, Sink}; 10 | use crate::cli::table; 11 | use crate::cli::{self, cmd}; 12 | use crate::http::payload::CheckFileResult; 13 | use crate::http::Error as HttpError; 14 | use crate::util::digest; 15 | 16 | use super::{Cmd, Context}; 17 | 18 | /// Open the item to a file, or given via the item id with the default 19 | /// browser. 20 | #[derive(Parser, std::fmt::Debug)] 21 | pub struct Input { 22 | #[clap(flatten)] 23 | pub endpoint: EndpointOpts, 24 | 25 | /// Do not open the item in the browser, only print the url. 26 | #[arg(long)] 27 | pub print_only: bool, 28 | 29 | /// A file or an item id. 30 | pub file_or_item: String, 31 | } 32 | 33 | #[derive(Debug, Snafu)] 34 | pub enum Error { 35 | #[snafu(display("An http error occurred: {}", source))] 36 | HttpClient { source: HttpError }, 37 | 38 | #[snafu(display("Collective must be present when using integration endpoint."))] 39 | NoCollective, 40 | 41 | #[snafu(display("Calculating digest of file {} failed: {}", path.display(), source))] 42 | DigestFail { 43 | source: std::io::Error, 44 | path: PathBuf, 45 | }, 46 | 47 | #[snafu(display("Error writing data: {}", source))] 48 | WriteResult { source: SinkError }, 49 | 50 | #[snafu(display("Error opening browser: {}", source))] 51 | Webbrowser { source: std::io::Error }, 52 | 53 | #[snafu(display("Cannot get credentials: {}", source))] 54 | CredentialsRead { source: cli::opts::FileAuthError }, 55 | } 56 | 57 | #[derive(Debug, Serialize, Deserialize)] 58 | pub struct CmdResult { 59 | pub success: bool, 60 | pub has_more: bool, 61 | pub item_id: Option, 62 | pub url: Option, 63 | } 64 | impl table::AsTable for CmdResult { 65 | fn to_table(&self) -> Table { 66 | let mut table = table::mk_table(); 67 | table.set_titles(row![bFg => "success", "item id", "url"]); 68 | 
table.add_row(row![ 69 | self.success, 70 | self.item_id.clone().unwrap_or_else(|| String::from("-")), 71 | self.url.clone().unwrap_or_else(|| String::from("-")), 72 | ]); 73 | table 74 | } 75 | } 76 | impl Sink for CmdResult {} 77 | 78 | impl CmdResult { 79 | pub fn new(item_id: String, more: bool, ctx: &Context) -> CmdResult { 80 | CmdResult { 81 | success: true, 82 | has_more: more, 83 | url: Some(create_url(ctx, &item_id)), 84 | item_id: Some(item_id), 85 | } 86 | } 87 | pub fn none() -> CmdResult { 88 | CmdResult { 89 | success: false, 90 | has_more: false, 91 | item_id: None, 92 | url: None, 93 | } 94 | } 95 | } 96 | 97 | impl Cmd for Input { 98 | type CmdError = Error; 99 | 100 | fn exec(&self, ctx: &Context) -> Result<(), Error> { 101 | let path = Path::new(&self.file_or_item); 102 | let result = if path.is_file() { 103 | let result = item_from_file(path, &self.endpoint, ctx)?; 104 | let mut iter = result.items.into_iter(); 105 | match iter.next() { 106 | None => { 107 | log::info!("No items found for file {}", path.display()); 108 | CmdResult::none() 109 | } 110 | Some(item) => { 111 | let more = iter.next().is_some(); 112 | if more { 113 | log::info!( 114 | "More than one item for file {}. 
Using first.", 115 | path.display() 116 | ); 117 | } 118 | CmdResult::new(item.id, more, ctx) 119 | } 120 | } 121 | } else { 122 | // interpret it as id 123 | CmdResult::new(self.file_or_item.clone(), false, ctx) 124 | }; 125 | 126 | let the_url = &result.url.clone(); 127 | ctx.write_result(result).context(WriteResultSnafu)?; 128 | 129 | if let Some(url) = &the_url { 130 | if !self.print_only { 131 | webbrowser::open(url).context(WebbrowserSnafu)?; 132 | } 133 | } 134 | Ok(()) 135 | } 136 | } 137 | 138 | fn create_url(ctx: &Context, item_id: &str) -> String { 139 | let base_url = cmd::docspell_url(ctx.opts, ctx.cfg); 140 | format!("{}/app/item/{}", base_url, item_id) 141 | } 142 | 143 | fn item_from_file( 144 | file: &Path, 145 | opts: &EndpointOpts, 146 | ctx: &Context, 147 | ) -> Result { 148 | let fa = opts 149 | .to_file_auth(ctx, &|| None) 150 | .context(CredentialsReadSnafu)?; 151 | let hash = digest::digest_file_sha256(file).context(DigestFailSnafu { path: file })?; 152 | let mut result = ctx.client.file_exists(hash, &fa).context(HttpClientSnafu)?; 153 | result.file = file.canonicalize().ok().map(|p| p.display().to_string()); 154 | Ok(result) 155 | } 156 | -------------------------------------------------------------------------------- /src/cli/cmd/register.rs: -------------------------------------------------------------------------------- 1 | use clap::{Parser, ValueHint}; 2 | use snafu::{ResultExt, Snafu}; 3 | 4 | use super::{Cmd, Context}; 5 | use crate::cli::sink::Error as SinkError; 6 | use crate::http::payload::Registration; 7 | use crate::http::Error as HttpError; 8 | 9 | /// Register a new account at Docspell. 10 | #[derive(Parser, Debug)] 11 | pub struct Input { 12 | /// The collective name to use. If unsure, use the same as login. 13 | #[arg(long, short, value_hint = ValueHint::Username)] 14 | pub collective_name: String, 15 | 16 | /// The user name. This name together with the collective name 17 | /// must be unique. 
// (continuation of `src/cli/cmd/register.rs`)
    #[arg(long, short, value_hint = ValueHint::Username)]
    pub login: String,

    /// The password for the account.
    #[arg(long, short)]
    pub password: String,

    /// If signup requires an invitation key, it can be specified
    /// here.
    #[arg(long, short)]
    pub invite: Option<String>,
}

impl Cmd for Input {
    type CmdError = Error;

    /// Send the registration request and print the server response.
    fn exec(&self, ctx: &Context) -> Result<(), Error> {
        let body = Registration {
            collective_name: self.collective_name.clone(),
            login: self.login.clone(),
            password: self.password.clone(),
            invite: self.invite.clone(),
        };

        let result = ctx.client.register(&body).context(HttpClientSnafu)?;
        ctx.write_result(result).context(WriteResultSnafu)?;
        Ok(())
    }
}

#[derive(Debug, Snafu)]
pub enum Error {
    #[snafu(display("An http error occurred: {}", source))]
    HttpClient { source: HttpError },

    #[snafu(display("Error writing data: {}", source))]
    WriteResult { source: SinkError },
}

// ---------------------------------------------------------------------------
// src/cli/cmd/search.rs
// ---------------------------------------------------------------------------
use clap::{ArgAction, Parser};
use snafu::{ResultExt, Snafu};

use super::{Cmd, Context};
use crate::cli::opts::SearchMode;
use crate::cli::sink::Error as SinkError;
use crate::http::payload::{SearchReq, SearchResult};
use crate::http::Error as HttpError;

/// Searches for documents and prints the results.
///
/// Documents are searched via a query. The query syntax is described
/// here: <https://docspell.org/docs/query/>
#[derive(Parser, std::fmt::Debug)]
pub struct Input {
    /// The query string. See <https://docspell.org/docs/query/>
    pub query: String,

    #[clap(flatten)]
    pub search_mode: SearchMode,

    /// Do not fetch details to each item in the result
    #[arg(long = "no-details", action = ArgAction::SetFalse)]
    pub with_details: bool,

    /// Limit the number of results.
    #[arg(short, long, default_value = "20")]
    pub limit: u32,

    /// Skip the first n results.
    #[arg(short, long, default_value = "0")]
    pub offset: u32,
}

#[derive(Debug, Snafu)]
pub enum Error {
    #[snafu(display("An http error occurred: {}", source))]
    HttpClient { source: HttpError },

    #[snafu(display("Error writing data: {}", source))]
    WriteResult { source: SinkError },
}

impl Cmd for Input {
    type CmdError = Error;

    /// Run the search and print the result in the requested format.
    fn exec(&self, ctx: &Context) -> Result<(), Error> {
        let found = search(self, ctx)?;
        ctx.write_result(found).context(WriteResultSnafu)?;
        Ok(())
    }
}

/// Build the search request from the options and execute it.
fn search(opts: &Input, ctx: &Context) -> Result<SearchResult, Error> {
    let request = SearchReq {
        query: opts.query.clone(),
        offset: opts.offset,
        limit: opts.limit,
        with_details: opts.with_details,
        search_mode: opts.search_mode.to_mode(),
    };

    ctx.client
        .search(&ctx.opts.session, &request)
        .context(HttpClientSnafu)
}

// ---------------------------------------------------------------------------
// src/cli/cmd/search_summary.rs
// ---------------------------------------------------------------------------
use clap::Parser;
use snafu::{ResultExt, Snafu};

use super::{Cmd, Context};
use crate::cli::sink::Error as SinkError;
use crate::http::Error as HttpError;

/// Performs a search and prints a summary of the results.
#[derive(Parser, std::fmt::Debug)]
pub struct Input {
    /// The query string. See <https://docspell.org/docs/query/>
    query: String,
}

#[derive(Debug, Snafu)]
pub enum Error {
    #[snafu(display("An http error occurred: {}", source))]
    HttpClient { source: HttpError },

    #[snafu(display("Error writing data: {}", source))]
    WriteResult { source: SinkError },
}

impl Cmd for Input {
    type CmdError = Error;

    /// Query the summary endpoint and print the response.
    fn exec(&self, ctx: &Context) -> Result<(), Error> {
        let summary = ctx
            .client
            .summary(&ctx.opts.session, &self.query)
            .context(HttpClientSnafu)?;
        ctx.write_result(summary).context(WriteResultSnafu)?;
        Ok(())
    }
}

// ---------------------------------------------------------------------------
// src/cli/cmd/source.rs
// ---------------------------------------------------------------------------
pub mod list;

use clap::Parser;
use snafu::{ResultExt, Snafu};

use super::{Cmd, Context};

/// Manage source urls for uploading files.
#[derive(Parser, std::fmt::Debug)]
pub struct Input {
    #[clap(subcommand)]
    pub subcmd: SourceCommand,
}

#[derive(Parser, Debug)]
pub enum SourceCommand {
    #[command(version)]
    List(list::Input),
}

#[derive(Debug, Snafu)]
pub enum Error {
    List { source: list::Error },
}

impl Cmd for Input {
    type CmdError = Error;

    /// Dispatch to the selected subcommand.
    fn exec(&self, args: &Context) -> Result<(), Error> {
        match &self.subcmd {
            SourceCommand::List(input) => input.exec(args).context(ListSnafu),
        }
    }
}

// --- beginning of `src/cli/cmd/source/list.rs` (continues in the next chunk) ---
use clap::Parser;
use snafu::{ResultExt, Snafu};

use super::{Cmd, Context};
use crate::cli::sink::Error as SinkError;
use crate::http::payload::SourceAndTags;
use crate::http::Error as HttpError;
all sources for your collective 10 | #[derive(Parser, std::fmt::Debug)] 11 | pub struct Input { 12 | /// Filter sources that start by the given name 13 | #[arg(long)] 14 | pub name: Option, 15 | 16 | /// Filter sources that start by the given id 17 | #[arg(long)] 18 | pub id: Option, 19 | } 20 | 21 | impl Input { 22 | fn matches(&self, s: &SourceAndTags) -> bool { 23 | match (&self.name, &self.id) { 24 | (Some(n), Some(i)) => s.source.abbrev.starts_with(n) && s.source.id.starts_with(i), 25 | (None, Some(i)) => s.source.id.starts_with(i), 26 | (Some(n), None) => s.source.abbrev.starts_with(n), 27 | (None, None) => true, 28 | } 29 | } 30 | } 31 | 32 | #[derive(Debug, Snafu)] 33 | pub enum Error { 34 | #[snafu(display("An http error occurred: {}", source))] 35 | HttpClient { source: HttpError }, 36 | 37 | #[snafu(display("Error writing data: {}", source))] 38 | WriteResult { source: SinkError }, 39 | } 40 | 41 | impl Cmd for Input { 42 | type CmdError = Error; 43 | 44 | fn exec(&self, ctx: &Context) -> Result<(), Error> { 45 | let items = ctx 46 | .client 47 | .list_sources(&ctx.opts.session) 48 | .map(|r| r.items) 49 | .context(HttpClientSnafu)?; 50 | let result = filter_sources(self, items); 51 | ctx.write_result(result).context(WriteResultSnafu)?; 52 | Ok(()) 53 | } 54 | } 55 | 56 | fn filter_sources(args: &Input, sources: Vec) -> Vec { 57 | if args.name.is_none() && args.id.is_none() { 58 | sources 59 | } else { 60 | sources.into_iter().filter(|s| args.matches(s)).collect() 61 | } 62 | } 63 | -------------------------------------------------------------------------------- /src/cli/cmd/version.rs: -------------------------------------------------------------------------------- 1 | use crate::cli::sink::Sink; 2 | use crate::cli::table::AsTable; 3 | use crate::http::payload::{BuildInfo, VersionInfo}; 4 | use crate::http::Error as HttpError; 5 | use clap::Parser; 6 | use prettytable::{row, Table}; 7 | use serde::Serialize; 8 | use snafu::{ResultExt, Snafu}; 9 | 10 | 
use super::{Cmd, Context}; 11 | use crate::cli::sink::Error as SinkError; 12 | 13 | /// Prints version about server and client. 14 | /// 15 | /// Queries the server for its version information and prints more 16 | /// version details about this client. 17 | #[derive(Parser, Debug, PartialEq)] 18 | pub struct Input {} 19 | 20 | #[derive(Debug, Snafu)] 21 | pub enum Error { 22 | #[snafu(display("An http error occurred: {}", source))] 23 | HttpClient { source: HttpError }, 24 | 25 | #[snafu(display("Error writing data: {}", source))] 26 | WriteResult { source: SinkError }, 27 | } 28 | 29 | impl Cmd for Input { 30 | type CmdError = Error; 31 | 32 | fn exec(&self, ctx: &Context) -> Result<(), Error> { 33 | let result = ctx.client.version().context(HttpClientSnafu)?; 34 | let vinfo = AllVersion::default(result, ctx.base_url()); 35 | ctx.write_result(vinfo).context(WriteResultSnafu)?; 36 | Ok(()) 37 | } 38 | } 39 | 40 | #[derive(Debug, Serialize)] 41 | pub struct AllVersion { 42 | pub client: BuildInfo, 43 | pub server: VersionInfo, 44 | pub docspell_url: String, 45 | } 46 | impl AllVersion { 47 | pub fn default(server: VersionInfo, docspell_url: String) -> AllVersion { 48 | AllVersion { 49 | client: BuildInfo::default(), 50 | server, 51 | docspell_url, 52 | } 53 | } 54 | } 55 | 56 | impl AsTable for AllVersion { 57 | fn to_table(&self) -> Table { 58 | let mut table = Table::new(); 59 | table.set_format(*prettytable::format::consts::FORMAT_CLEAN); 60 | let mut ct = self.client.to_table(); 61 | ct.set_titles(row!["Client (dsc)", ""]); 62 | let mut st = self.server.to_table(); 63 | st.set_titles(row!["Docspell Server", self.docspell_url]); 64 | table.add_row(row![st]); 65 | table.add_row(row![ct]); 66 | table 67 | } 68 | } 69 | impl Sink for AllVersion {} 70 | -------------------------------------------------------------------------------- /src/cli/cmd/view.rs: -------------------------------------------------------------------------------- 1 | use clap::{ArgGroup, Parser}; 
2 | use dialoguer::Confirm; 3 | use snafu::{ResultExt, Snafu}; 4 | use std::{ 5 | path::{Path, PathBuf}, 6 | process::Command, 7 | }; 8 | 9 | use super::{Cmd, Context}; 10 | use crate::cli::opts::SearchMode; 11 | use crate::http::payload::SearchReq; 12 | use crate::http::DownloadRef; 13 | use crate::http::Error as HttpError; 14 | 15 | /// View pdf files. 16 | /// 17 | /// Searches for documents via a query and downloads one at a time to 18 | /// feed it into a viewer program. The prorgam can be defined in the 19 | /// config file. 20 | /// 21 | /// Use the `search-summary` command with the same query to get an 22 | /// idea how much is being downloaded. This is an interactive command. 23 | #[derive(Parser, std::fmt::Debug)] 24 | #[command(group = ArgGroup::new("kind"))] 25 | pub struct Input { 26 | /// The query string. See 27 | query: String, 28 | 29 | #[clap(flatten)] 30 | pub search_mode: SearchMode, 31 | 32 | /// Limit the number of results. 33 | #[arg(short, long, default_value = "60")] 34 | limit: u32, 35 | 36 | /// Skip the first n results. 37 | #[arg(short, long, default_value = "0")] 38 | offset: u32, 39 | 40 | /// Ask whether to keep viewing between each file. 41 | #[arg(long, short)] 42 | stop: bool, 43 | } 44 | 45 | #[derive(Debug, Snafu)] 46 | pub enum Error { 47 | #[snafu(display("An http error occurred: {}", source))] 48 | HttpClient { source: HttpError }, 49 | 50 | #[snafu(display("Error creating a file. 
{}", source))] 51 | CreateFile { source: std::io::Error }, 52 | 53 | #[snafu(display("Error executing command: {}", source))] 54 | Exec { source: std::io::Error }, 55 | 56 | #[snafu(display("No pdf viewer defined in the config file"))] 57 | NoPdfViewer, 58 | 59 | #[snafu(display("Interaction with terminal failed: {}", source))] 60 | Interact { source: dialoguer::Error }, 61 | } 62 | 63 | impl Cmd for Input { 64 | type CmdError = Error; 65 | 66 | fn exec(&self, ctx: &Context) -> Result<(), Error> { 67 | let parent = std::env::temp_dir().join("dsc-view"); 68 | 69 | if !parent.exists() { 70 | std::fs::create_dir_all(&parent).context(CreateFileSnafu)?; 71 | } 72 | 73 | view_all(self, ctx, &parent) 74 | } 75 | } 76 | 77 | pub fn view_all(opts: &Input, ctx: &Context, parent: &Path) -> Result<(), Error> { 78 | let req = SearchReq { 79 | query: opts.query.clone(), 80 | offset: opts.offset, 81 | limit: opts.limit, 82 | with_details: true, 83 | search_mode: opts.search_mode.to_mode(), 84 | }; 85 | let result = ctx 86 | .client 87 | .download_search(&ctx.opts.session, &req) 88 | .context(HttpClientSnafu)?; 89 | 90 | let mut confirm = false; 91 | for dref in result { 92 | if confirm { 93 | if is_stop_viewing(opts)? { 94 | return Ok(()); 95 | } 96 | } else { 97 | confirm = true; 98 | } 99 | 100 | let file = download(&dref, ctx, parent)?; 101 | if let Some(f) = file { 102 | let tool = &ctx.cfg.pdf_viewer.get(0).ok_or(Error::NoPdfViewer)?; 103 | let tool_args: Vec = ctx 104 | .cfg 105 | .pdf_viewer 106 | .iter() 107 | .skip(1) 108 | .map(|s| s.replace("{}", f.display().to_string().as_str())) 109 | .collect(); 110 | log::info!( 111 | "Run: {} {}", 112 | tool, 113 | tool_args 114 | .iter() 115 | .map(|s| format!("'{}'", s)) 116 | .collect::>() 117 | .join(" ") 118 | ); 119 | Command::new(tool) 120 | .args(tool_args) 121 | .output() 122 | .context(ExecSnafu)?; 123 | } else { 124 | eprintln!( 125 | "Skip attachment: {}/{}. 
There was no file!", 126 | dref.id, dref.name 127 | ); 128 | } 129 | } 130 | 131 | Ok(()) 132 | } 133 | 134 | fn is_stop_viewing(opts: &Input) -> Result { 135 | if opts.stop { 136 | if let Some(answer) = Confirm::new() 137 | .with_prompt("Keep viewing?") 138 | .interact_opt() 139 | .context(InteractSnafu)? 140 | { 141 | return Ok(!answer); 142 | } 143 | } 144 | Ok(false) 145 | } 146 | 147 | fn download(attach: &DownloadRef, ctx: &Context, parent: &Path) -> Result, Error> { 148 | let dlopt = attach 149 | .get(&ctx.client, &ctx.opts.session) 150 | .context(HttpClientSnafu)?; 151 | 152 | let path = parent.join("view.pdf"); 153 | 154 | if let Some(mut dl) = dlopt { 155 | if path.exists() { 156 | std::fs::remove_file(&path).context(CreateFileSnafu)?; 157 | } 158 | 159 | let file = std::fs::File::create(&path).context(CreateFileSnafu)?; 160 | let mut writer = std::io::BufWriter::new(file); 161 | dl.copy_to(&mut writer).context(HttpClientSnafu)?; 162 | Ok(Some(path)) 163 | } else { 164 | Ok(None) 165 | } 166 | } 167 | -------------------------------------------------------------------------------- /src/cli/cmd/watch.rs: -------------------------------------------------------------------------------- 1 | use clap::{Parser, ValueHint}; 2 | use notify::{DebouncedEvent, RecursiveMode, Watcher}; 3 | use snafu::{ResultExt, Snafu}; 4 | use std::{path::Path, sync::mpsc}; 5 | use std::{path::PathBuf, time::Duration}; 6 | 7 | use super::{upload, Cmd, Context}; 8 | use crate::http::payload::BasicResult; 9 | use crate::{ 10 | cli::opts::{EndpointOpts, FileAction, UploadMeta}, 11 | util::file::CollectiveSubdirErr, 12 | }; 13 | 14 | use crate::util::file; 15 | 16 | /// Watches a directory and uploads files to docspell. 17 | /// 18 | /// It accepts the same authentication options as the `upload` 19 | /// command. 
20 | /// 21 | /// When using the integration endpoint and a collective is not 22 | /// specified, it will be guessed from the first subdirectory of the 23 | /// directory that is specified. 24 | /// 25 | /// On some filesystems, this command may not work (e.g. networking 26 | /// file systems like NFS or SAMBA). You may use the `upload` command 27 | /// then in combination with the `--poll` option. 28 | #[derive(Parser, Debug)] 29 | pub struct Input { 30 | /// Wether to watch directories recursively or not. 31 | #[arg(long, short)] 32 | pub recursive: bool, 33 | 34 | /// A delay in seconds after which the event is acted upon. 35 | #[arg(long = "delay", default_value = "6")] 36 | pub delay_secs: u64, 37 | 38 | #[clap(flatten)] 39 | pub upload: UploadMeta, 40 | 41 | #[clap(flatten)] 42 | pub action: FileAction, 43 | 44 | /// A glob pattern for matching against each file. Note that 45 | /// usually you can just use the shells expansion mechanism. 46 | #[arg(long, short, default_value = "**/*")] 47 | pub matches: String, 48 | 49 | /// A glob pattern that excludes files to upload. If `--matches` 50 | /// is also specified, both must evaluate to true. 51 | #[arg(long, short)] 52 | pub not_matches: Option, 53 | 54 | /// Don't upload anything, but print what would be uploaded. 55 | #[arg(long)] 56 | pub dry_run: bool, 57 | 58 | #[clap(flatten)] 59 | pub endpoint: EndpointOpts, 60 | 61 | /// The directories to watch for changes. 
62 | #[arg(value_hint = ValueHint::DirPath)] 63 | pub dirs: Vec, 64 | } 65 | 66 | #[derive(Debug, Snafu)] 67 | pub enum Error { 68 | #[snafu(display("Uploading failed: {}", source))] 69 | Upload { source: upload::Error }, 70 | 71 | #[snafu(display("Error creating hash for '{}': {}", path.display(), source))] 72 | DigestFile { 73 | source: std::io::Error, 74 | path: PathBuf, 75 | }, 76 | 77 | #[snafu(display("Not a directory: {}", path.display()))] 78 | NotADirectory { path: PathBuf }, 79 | 80 | #[snafu(display("Error while watching: {}", source))] 81 | Watch { source: notify::Error }, 82 | 83 | #[snafu(display("Error consuming event: {}", source))] 84 | Event { source: mpsc::RecvError }, 85 | 86 | #[snafu(display("Error finding collective: {}", source))] 87 | FindCollective { source: CollectiveSubdirErr }, 88 | 89 | #[snafu(display("Could not find a collective for {}", path.display()))] 90 | NoCollective { path: PathBuf }, 91 | } 92 | 93 | impl Cmd for Input { 94 | type CmdError = Error; 95 | 96 | fn exec(&self, ctx: &Context) -> Result<(), Error> { 97 | watch_directories(self, ctx)?; 98 | Ok(()) 99 | } 100 | } 101 | 102 | pub fn watch_directories(opts: &Input, ctx: &Context) -> Result<(), Error> { 103 | check_is_dir(&opts.dirs)?; 104 | let mode = if opts.recursive { 105 | RecursiveMode::Recursive 106 | } else { 107 | RecursiveMode::NonRecursive 108 | }; 109 | let (tx, rx) = mpsc::channel(); 110 | 111 | let mut watcher = 112 | notify::watcher(tx, Duration::from_secs(opts.delay_secs)).context(WatchSnafu)?; 113 | for dir in &opts.dirs { 114 | eprintln!("Watching directory ({:?}): {}", mode, dir.display()); 115 | watcher.watch(dir, mode).context(WatchSnafu)?; 116 | } 117 | eprintln!("Press Ctrl-C to quit."); 118 | loop { 119 | match rx.recv() { 120 | Ok(event) => event_act(event, opts, ctx)?, 121 | Err(e) => return Err(Error::Event { source: e }), 122 | } 123 | } 124 | } 125 | 126 | fn check_is_dir(dirs: &[PathBuf]) -> Result<(), Error> { 127 | for path in dirs { 128 
| if !path.is_dir() { 129 | return Err(Error::NotADirectory { path: path.clone() }); 130 | } 131 | } 132 | Ok(()) 133 | } 134 | 135 | fn event_act(event: DebouncedEvent, opts: &Input, ctx: &Context) -> Result<(), Error> { 136 | log::info!("Event: {:?}", event); 137 | match event { 138 | DebouncedEvent::Create(path) => upload_and_report(path, opts, ctx)?, 139 | DebouncedEvent::Write(path) => upload_and_report(path, opts, ctx)?, 140 | DebouncedEvent::Chmod(path) => upload_and_report(path, opts, ctx)?, 141 | DebouncedEvent::Error(err, path_opt) => { 142 | log::error!("Debounce event error for path {:?}: {}", path_opt, err); 143 | return Err(Error::Watch { source: err }); 144 | } 145 | _ => (), 146 | } 147 | Ok(()) 148 | } 149 | 150 | fn upload_and_report(path: PathBuf, opts: &Input, ctx: &Context) -> Result<(), Error> { 151 | if path.is_dir() { 152 | log::debug!( 153 | "Skip event triggered on a directory and not a file: {:?}", 154 | path 155 | ); 156 | } else { 157 | eprintln!("------------------------------------------------------------------------------"); 158 | eprintln!("Got: {}", path.display()); 159 | let result = upload_file(path, opts, ctx)?; 160 | if result.success { 161 | if opts.dry_run { 162 | eprintln!("Dry run. Would upload now."); 163 | } else { 164 | eprintln!("Server: {}", result.message); 165 | } 166 | } else { 167 | log::error!("Error from uploading: {}", result.message); 168 | eprintln!("Sevrer Error: {}", result.message); 169 | } 170 | } 171 | Ok(()) 172 | } 173 | 174 | fn upload_file(path: PathBuf, opts: &Input, ctx: &Context) -> Result { 175 | let mut ep = opts.endpoint.clone(); 176 | if let Some(cid) = find_collective(&path, &opts.dirs, &opts.endpoint)? 
{ 177 | ep.collective = Some(cid); 178 | } 179 | 180 | let data = &upload::Input { 181 | endpoint: ep, 182 | multiple: true, 183 | action: opts.action.clone(), 184 | upload: opts.upload.clone(), 185 | matches: opts.matches.clone(), 186 | not_matches: opts.not_matches.clone(), 187 | traverse: false, 188 | poll: None, 189 | dry_run: opts.dry_run, 190 | files: vec![path], 191 | }; 192 | upload::upload_files(data, ctx).context(UploadSnafu) 193 | } 194 | 195 | pub fn find_collective( 196 | path: &Path, 197 | dirs: &[PathBuf], 198 | opts: &EndpointOpts, 199 | ) -> Result, Error> { 200 | if opts.integration && opts.collective.is_none() { 201 | let cid = file::collective_from_subdir(path, dirs).context(FindCollectiveSnafu)?; 202 | if cid.is_none() { 203 | Err(Error::NoCollective { 204 | path: path.to_path_buf(), 205 | }) 206 | } else { 207 | Ok(cid) 208 | } 209 | } else { 210 | Ok(None) 211 | } 212 | } 213 | -------------------------------------------------------------------------------- /src/cli/sink.rs: -------------------------------------------------------------------------------- 1 | //! A [`Sink`] is a common way to output datastructures for a command. 2 | //! 3 | //! A command may format its data based on the common `--format` 4 | //! option. Data types that should be presented in this way can 5 | //! implement the [`Sink`] trait so commands can easily output them. 6 | //! 7 | //! If a type implements serdes Serialize trait and the 8 | //! [`super::table::AsTable`] trait, a Sink is implemented for free. 9 | 10 | use super::opts::Format; 11 | use crate::cli::table::AsTable; 12 | use serde::Serialize; 13 | use snafu::Snafu; 14 | use std::convert::From; 15 | 16 | /// Defines different outputs for a data type given via a [`Format`] 17 | /// argument. 
18 | /// 19 | /// The formats `json` and `lisp` are handled via 20 | /// [serde](https://serde.rs), the formats `tabular` and `csv` are 21 | /// handled by [prettytable](https://crates.io/crates/prettytable-rs) 22 | /// (and the csv crate). 23 | pub trait Sink 24 | where 25 | Self: Serialize + AsTable, 26 | { 27 | fn write_value(format: Format, value: &Self) -> Result<(), Error> { 28 | match format { 29 | Format::Json => { 30 | serde_json::to_writer(std::io::stdout(), &value)?; 31 | Ok(()) 32 | } 33 | Format::Lisp => { 34 | serde_lexpr::to_writer(std::io::stdout(), &value)?; 35 | Ok(()) 36 | } 37 | Format::Elisp => { 38 | let opts = serde_lexpr::print::Options::elisp(); 39 | serde_lexpr::to_writer_custom(std::io::stdout(), &value, opts)?; 40 | Ok(()) 41 | } 42 | Format::Csv => Self::write_csv(value), 43 | Format::Tabular => Self::write_tabular(value), 44 | } 45 | } 46 | 47 | fn write_tabular(value: &Self) -> Result<(), Error> { 48 | let table = value.to_table(); 49 | table.printstd(); 50 | Ok(()) 51 | } 52 | 53 | fn write_csv(value: &Self) -> Result<(), Error> { 54 | let table = value.to_table(); 55 | table.to_csv(std::io::stdout())?; 56 | Ok(()) 57 | } 58 | } 59 | 60 | /// Possible errors when serializing data. 
61 | #[derive(Debug, Snafu)] 62 | pub enum Error { 63 | #[snafu(display("Error serializing to JSON"))] 64 | Json { source: serde_json::Error }, 65 | 66 | #[snafu(display("Error serializing to Lisp"))] 67 | Lisp { source: serde_lexpr::Error }, 68 | 69 | #[snafu(display("Error serializing to CSV"))] 70 | Csv { source: csv::Error }, 71 | } 72 | impl From for Error { 73 | fn from(e: csv::Error) -> Error { 74 | Error::Csv { source: e } 75 | } 76 | } 77 | impl From for Error { 78 | fn from(e: serde_json::Error) -> Error { 79 | Error::Json { source: e } 80 | } 81 | } 82 | impl From for Error { 83 | fn from(e: serde_lexpr::Error) -> Error { 84 | Error::Lisp { source: e } 85 | } 86 | } 87 | -------------------------------------------------------------------------------- /src/config.rs: -------------------------------------------------------------------------------- 1 | //! Module for reading the configuration file. 2 | 3 | use crate::cli::opts::Format; 4 | use serde::{Deserialize, Serialize}; 5 | use snafu::{ResultExt, Snafu}; 6 | use std::default; 7 | use std::path::{Path, PathBuf}; 8 | 9 | /// Defines the contents of the configuration file. 10 | #[derive(Serialize, Deserialize, Debug)] 11 | pub struct DsConfig { 12 | /// The base-url to the docspell server. 13 | pub docspell_url: String, 14 | 15 | /// The format to use for the output. 16 | pub default_format: Format, 17 | 18 | /// The admin secret used to authenticate for admin commands. 19 | pub admin_secret: Option, 20 | 21 | /// A source id to use when uploading. 22 | pub default_source_id: Option, 23 | 24 | /// A entry for the pass password manager, specifying the password 25 | /// to docspell in its first line. 26 | pub pass_entry: Option, 27 | 28 | /// A pass entry to specify the totp secret in its first line. If 29 | /// prefixed with `key:`, it will be looked up in the entry 30 | /// defined by `pass_entry` for a line that starts with `key:`. 
31 | pub pass_otp_secret: Option, 32 | 33 | /// The account to use for login. 34 | pub default_account: Option, 35 | 36 | /// A command used to view pdf files. 37 | pub pdf_viewer: Vec, 38 | 39 | /// A proxy server used for all connections. 40 | pub proxy: Option, 41 | 42 | /// The user to authenticate at the proxy server. 43 | pub proxy_user: Option, 44 | 45 | /// The password to authenticate at the proxy server. 46 | pub proxy_password: Option, 47 | 48 | /// A DER or PEM file to specify an external trust store. 49 | pub extra_certificate: Option, 50 | 51 | /// Whether to accept invalid certificates. 52 | pub accept_invalid_certificates: Option, 53 | } 54 | 55 | /// Error states when reading and writing the config file. 56 | #[derive(Debug, Snafu)] 57 | pub enum ConfigError { 58 | #[snafu(display("Unable to read config file {}: {}", path.display(), source))] 59 | ReadFile { 60 | source: std::io::Error, 61 | path: PathBuf, 62 | }, 63 | #[snafu(display("Unable to create default config file {}: {}", path.display(), source))] 64 | CreateDefault { 65 | source: std::io::Error, 66 | path: PathBuf, 67 | }, 68 | #[snafu(display("Unable to parse file {}: {}", path.display(), source))] 69 | ParseFile { 70 | source: toml::de::Error, 71 | path: PathBuf, 72 | }, 73 | #[snafu(display("The config file could not be serialized"))] 74 | WriteFile { 75 | source: toml::ser::Error, 76 | path: PathBuf, 77 | }, 78 | #[snafu(display("The config directory could not be found"))] 79 | NoConfigDir, 80 | } 81 | 82 | impl default::Default for DsConfig { 83 | fn default() -> Self { 84 | Self { 85 | docspell_url: "http://localhost:7880".into(), 86 | default_format: Format::Tabular, 87 | admin_secret: None, 88 | default_source_id: None, 89 | pass_entry: None, 90 | pass_otp_secret: None, 91 | default_account: None, 92 | pdf_viewer: vec!["zathura".into(), "{}".into()], 93 | proxy: None, 94 | proxy_user: None, 95 | proxy_password: None, 96 | extra_certificate: None, 97 | accept_invalid_certificates: 
None, 98 | } 99 | } 100 | } 101 | 102 | impl DsConfig { 103 | /// Reads the configuration file. 104 | /// 105 | /// If the argument provides a config file, this is read. If not, 106 | /// the env variable `DSC_CONFIG` is used to lookup the 107 | /// configuration file. If this env variable is not set, the 108 | /// default location is used (which is ~/.config/dsc/config.toml` 109 | /// on linuxes). 110 | pub fn read(file: Option<&PathBuf>) -> Result { 111 | if let Some(cfg_file) = &file { 112 | log::debug!( 113 | "Looking for {} in {}", 114 | cfg_file.to_path_buf().display(), 115 | std::env::current_dir() 116 | .map(|p| p.display().to_string()) 117 | .unwrap_or_else(|_| "unknown directory".into()) 118 | ); 119 | let given_path = cfg_file.as_path().canonicalize().context(ReadFileSnafu { 120 | path: cfg_file.as_path().to_path_buf(), 121 | })?; 122 | log::debug!("Load config from: {:}", given_path.display()); 123 | load_from(&given_path) 124 | } else { 125 | match std::env::var(DSC_CONFIG).ok() { 126 | Some(cfg_file) => { 127 | log::debug!("Loading config file given by env variable"); 128 | Self::read(Some(&PathBuf::from(cfg_file))) 129 | } 130 | None => { 131 | let mut dir = config_dir()?; 132 | dir.push("dsc"); 133 | dir.push("config.toml"); 134 | if dir.exists() { 135 | log::debug!("Load config from: {:}", dir.display()); 136 | load_from(&dir) 137 | } else { 138 | log::debug!("No config file present; using default config"); 139 | Ok(DsConfig::default()) 140 | } 141 | } 142 | } 143 | } 144 | } 145 | 146 | /// Write the default configuration to the default config file. 147 | /// The file must not yet exist. 148 | pub fn write_default_file() -> Result { 149 | DsConfig::default().write_default() 150 | } 151 | 152 | /// Write this configuration to the default location. If the 153 | /// already file exists, a error is returned. 
154 | pub fn write_default(&self) -> Result { 155 | let mut dir = config_dir()?; 156 | dir.push("dsc"); 157 | dir.push("config.toml"); 158 | if dir.exists() { 159 | log::info!("The default config file already exists. Not writing it!"); 160 | Err(ConfigError::CreateDefault { 161 | source: std::io::Error::new( 162 | std::io::ErrorKind::AlreadyExists, 163 | "The config file already exists!", 164 | ), 165 | path: dir, 166 | }) 167 | } else { 168 | log::debug!("Writing config file: {:}", dir.display()); 169 | write_to(self, &dir)?; 170 | Ok(dir) 171 | } 172 | } 173 | } 174 | 175 | fn load_from(file: &Path) -> Result { 176 | let cnt = std::fs::read_to_string(file).map_err(|e| ConfigError::ReadFile { 177 | source: e, 178 | path: file.to_path_buf(), 179 | }); 180 | cnt.and_then(|c| { 181 | toml::from_str(&c).map_err(|e| ConfigError::ParseFile { 182 | source: e, 183 | path: file.to_path_buf(), 184 | }) 185 | }) 186 | } 187 | 188 | fn config_dir() -> Result { 189 | match dirs::config_dir() { 190 | Some(dir) => Ok(dir), 191 | None => Err(ConfigError::NoConfigDir), 192 | } 193 | } 194 | 195 | fn write_to(cfg: &DsConfig, file: &Path) -> Result<(), ConfigError> { 196 | if !file.exists() { 197 | if let Some(dir) = file.parent() { 198 | std::fs::create_dir_all(dir).map_err(|e| ConfigError::CreateDefault { 199 | source: e, 200 | path: file.to_path_buf(), 201 | })?; 202 | } 203 | } 204 | let cnt = toml::to_string(cfg).map_err(|e| ConfigError::WriteFile { 205 | source: e, 206 | path: file.to_path_buf(), 207 | }); 208 | 209 | cnt.and_then(|c| { 210 | std::fs::write(&file, &c).map_err(|e| ConfigError::CreateDefault { 211 | source: e, 212 | path: file.to_path_buf(), 213 | }) 214 | }) 215 | } 216 | 217 | const DSC_CONFIG: &str = "DSC_CONFIG"; 218 | -------------------------------------------------------------------------------- /src/error.rs: -------------------------------------------------------------------------------- 1 | //! Global error types. 
2 | 3 | use crate::cli::cmd; 4 | use crate::config; 5 | use snafu::Snafu; 6 | 7 | #[derive(Debug, Snafu)] 8 | pub enum Error { 9 | #[snafu(display("{}", source))] 10 | Cmd { source: cmd::CmdError }, 11 | 12 | #[snafu(display("Configuration error: {}", source))] 13 | Config { source: config::ConfigError }, 14 | } 15 | 16 | impl From for Error { 17 | fn from(e: config::ConfigError) -> Error { 18 | Error::Config { source: e } 19 | } 20 | } 21 | 22 | impl From for Error { 23 | fn from(e: cmd::CmdError) -> Error { 24 | Error::Cmd { source: e } 25 | } 26 | } 27 | 28 | pub type Result = std::result::Result; 29 | -------------------------------------------------------------------------------- /src/http/proxy.rs: -------------------------------------------------------------------------------- 1 | use reqwest::blocking::ClientBuilder; 2 | use reqwest::{Proxy, Result}; 3 | 4 | pub enum ProxySetting { 5 | System, 6 | None, 7 | Custom { 8 | url: String, 9 | user: Option, 10 | password: Option, 11 | }, 12 | } 13 | 14 | impl ProxySetting { 15 | pub fn set(&self, builder: ClientBuilder) -> Result { 16 | match self { 17 | ProxySetting::System => { 18 | log::debug!("Using system proxy (no changes to client)"); 19 | Ok(builder) 20 | } 21 | ProxySetting::None => { 22 | log::info!("Setting no_proxy"); 23 | Ok(builder.no_proxy()) 24 | } 25 | ProxySetting::Custom { 26 | url, 27 | user, 28 | password, 29 | } => { 30 | log::info!("Using proxy: {:?}", url); 31 | let mut p = Proxy::all(url)?; 32 | if let Some(login) = user { 33 | log::debug!("Use proxy auth: {:?}/***", login); 34 | p = p.basic_auth(login.as_str(), password.as_ref().unwrap_or(&"".into())); 35 | } 36 | Ok(builder.proxy(p)) 37 | } 38 | } 39 | } 40 | } 41 | -------------------------------------------------------------------------------- /src/http/session.rs: -------------------------------------------------------------------------------- 1 | //! Provides helpers to handle docspell "sessions". 2 | //! 3 | //! 
Docspell returns an authentication token for a login via account + 4 | //! password. This token must be used for all secured endpoints. 5 | //! 6 | //! This token is stored on disk and also refreshed if it is almost 7 | //! expired. 8 | //! 9 | //! This is for internal use only. 10 | 11 | use snafu::{ResultExt, Snafu}; 12 | use std::{ 13 | path::{Path, PathBuf}, 14 | time::Duration, 15 | }; 16 | 17 | use super::payload::AuthResp; 18 | use super::Client; 19 | 20 | const TOKEN_FILENAME: &str = "dsc-token.json"; 21 | const DSC_SESSION: &str = "DSC_SESSION"; 22 | 23 | #[derive(Debug, Snafu)] 24 | pub enum Error { 25 | #[snafu(display("Error storing session file at {}: {}", path.display(), source))] 26 | StoreSessionFile { 27 | source: std::io::Error, 28 | path: PathBuf, 29 | }, 30 | 31 | #[snafu(display("Error reading session file at {}: {}", path.display(), source))] 32 | ReadSessionFile { 33 | source: std::io::Error, 34 | path: PathBuf, 35 | }, 36 | 37 | #[snafu(display("No session file found. Please use the `login` command first."))] 38 | NoSessionFile, 39 | 40 | #[snafu(display("Error storing session file at {}: {}", path.display(), source))] 41 | DeleteSessionFile { 42 | source: std::io::Error, 43 | path: PathBuf, 44 | }, 45 | 46 | #[snafu(display("You are not logged in"))] 47 | NotLoggedIn, 48 | 49 | #[snafu(display("Invalid authentication token: {}", token))] 50 | InvalidAuthToken { token: String }, 51 | 52 | #[snafu(display("Error serializing auth response: {}", source))] 53 | SerializeSession { source: serde_json::Error }, 54 | 55 | #[snafu(display("Error refreshing session. Use the `login` command. 
{}", mesg))] 56 | RefreshSession { mesg: String }, 57 | } 58 | 59 | pub fn store_session(resp: &AuthResp) -> Result<(), Error> { 60 | match dirs::config_dir() { 61 | Some(mut dir) => { 62 | dir.push("dsc"); 63 | dir.push(TOKEN_FILENAME); 64 | if !dir.exists() { 65 | log::debug!("Creating directory to store config at {:?}", dir.parent()); 66 | std::fs::create_dir_all(dir.parent().unwrap()) 67 | .context(StoreSessionFileSnafu { path: dir.clone() })?; 68 | } 69 | write_token_file(resp, &dir) 70 | } 71 | None => Err(Error::NoSessionFile), 72 | } 73 | } 74 | 75 | /// Loads the session token from the session file created by the 76 | /// `login` command. 77 | pub fn session_token_from_file() -> Result { 78 | let file = get_token_file().map_err(|_err| Error::NotLoggedIn)?; 79 | let resp = read_token_file(&file)?; 80 | get_token(&resp) 81 | } 82 | 83 | /// Loads the session token from defined places. Uses in this order: 84 | /// the option `--session`, the env variable `DSC_SESSION` or the 85 | /// sesion file created by the `login` command. 86 | /// 87 | /// If a session token can be loaded, it is checked for expiry and 88 | /// refreshed if deemed necessary. 89 | pub fn session_token(token: &Option, client: &Client) -> Result { 90 | let given_token = token.clone().or_else(get_token_from_env); 91 | let no_token = given_token.is_none(); 92 | let (token, valid) = match given_token { 93 | Some(token) => { 94 | log::debug!("Using auth token as given via option or env variable"); 95 | Ok((token, None)) 96 | } 97 | None => { 98 | let file = get_token_file().map_err(|_err| Error::NotLoggedIn)?; 99 | let resp = read_token_file(&file)?; 100 | let token = get_token(&resp)?; 101 | Ok((token, Some(resp.valid_ms))) 102 | } 103 | }?; 104 | 105 | let created = extract_creation_time(&token)?; 106 | if near_expiry(created, valid) { 107 | log::info!("Token is nearly expired. 
Trying to refresh"); 108 | let resp = client 109 | .session_login(&token) 110 | .map_err(|err| Error::RefreshSession { 111 | mesg: err.to_string(), 112 | })?; 113 | if no_token { 114 | store_session(&resp)?; 115 | } else { 116 | log::debug!("Not storing new session, since it was given as argument"); 117 | } 118 | get_token(&resp) 119 | } else { 120 | Ok(token) 121 | } 122 | } 123 | 124 | pub fn drop_session() -> Result<(), Error> { 125 | let path = get_token_file()?; 126 | if path.exists() { 127 | std::fs::remove_file(&path).context(DeleteSessionFileSnafu { path })?; 128 | } 129 | Ok(()) 130 | } 131 | 132 | // --- helper 133 | 134 | fn near_expiry(created: u64, valid: Option) -> bool { 135 | let now = std::time::SystemTime::now() 136 | .duration_since(std::time::SystemTime::UNIX_EPOCH) 137 | .unwrap(); 138 | let created_ms = Duration::from_millis(created); 139 | 140 | let diff = match now.checked_sub(created_ms) { 141 | Some(d) => d, 142 | None => { 143 | log::warn!( 144 | "Cannot calc '{:?} - {:?}'. 
Going with a default.", 145 | now, 146 | created_ms, 147 | ); 148 | Duration::from_secs(180) 149 | } 150 | }; 151 | 152 | match valid { 153 | Some(valid_ms) => { 154 | let threshold = Duration::from_millis(((valid_ms as f64) * 0.8) as u64); 155 | log::debug!("Token age: {:?} Threshold: {:?}", diff, threshold); 156 | diff.gt(&threshold) 157 | } 158 | None => { 159 | log::debug!("Token age: {:?}", diff); 160 | diff.gt(&Duration::from_secs(180)) 161 | } 162 | } 163 | } 164 | 165 | fn extract_creation_time(token: &str) -> Result { 166 | match token.split('-').next() { 167 | Some(ms) => ms.parse().map_err(|_e| Error::InvalidAuthToken { 168 | token: token.to_string(), 169 | }), 170 | None => Err(Error::InvalidAuthToken { 171 | token: token.to_string(), 172 | }), 173 | } 174 | } 175 | 176 | fn get_token_from_env() -> Option { 177 | std::env::var_os(DSC_SESSION) 178 | .filter(|s| !s.is_empty()) 179 | .and_then(|s| s.into_string().ok()) 180 | } 181 | 182 | fn get_token_file() -> Result { 183 | match dirs::config_dir() { 184 | Some(mut dir) => { 185 | dir.push("dsc"); 186 | dir.push(TOKEN_FILENAME); 187 | Ok(dir) 188 | } 189 | None => Err(Error::NoSessionFile), 190 | } 191 | } 192 | 193 | fn read_token_file(path: &Path) -> Result { 194 | if path.exists() { 195 | acquire_lock(path, false)?; 196 | 197 | let cnt = std::fs::read_to_string(&path).context(ReadSessionFileSnafu { path })?; 198 | let resp: AuthResp = serde_json::from_str(&cnt).context(SerializeSessionSnafu)?; 199 | Ok(resp) 200 | } else { 201 | Err(Error::NoSessionFile) 202 | } 203 | } 204 | 205 | fn write_token_file(resp: &AuthResp, path: &Path) -> Result<(), Error> { 206 | let flock = acquire_lock(path, true); 207 | match flock { 208 | Ok(_fl) => { 209 | log::debug!("Storing session to {}", path.display()); 210 | let cnt = serde_json::to_string(resp).context(SerializeSessionSnafu)?; 211 | std::fs::write(path, &cnt).context(StoreSessionFileSnafu { path }) 212 | } 213 | Err(err) => { 214 | log::debug!( 215 | "Could 
not obtain write lock to store session in file: {}", 216 | err 217 | ); 218 | Ok(()) 219 | } 220 | } 221 | } 222 | 223 | fn get_token(resp: &AuthResp) -> Result { 224 | match &resp.token { 225 | Some(t) => Ok(t.clone()), 226 | None => Err(Error::NotLoggedIn), 227 | } 228 | } 229 | 230 | // --- file lock 231 | 232 | #[cfg(windows)] 233 | fn acquire_lock(path: &Path, write: bool) -> Result<(), Error> { 234 | Ok(()) 235 | } 236 | 237 | #[cfg(unix)] 238 | fn acquire_lock(path: &Path, write: bool) -> Result<(), Error> { 239 | if write { 240 | file_locker::FileLock::new(path) 241 | .blocking(false) 242 | .writeable(true) 243 | .lock() 244 | .map(|_fl| ()) 245 | .context(StoreSessionFileSnafu { path }) 246 | } else { 247 | file_locker::FileLock::new(path) 248 | .blocking(true) 249 | .writeable(false) 250 | .lock() 251 | .map(|_fl| ()) 252 | .context(ReadSessionFileSnafu { path }) 253 | } 254 | } 255 | 256 | // --- tests 257 | 258 | #[cfg(test)] 259 | mod tests { 260 | use super::*; 261 | 262 | #[test] 263 | fn unit_extract_creation_time() { 264 | let token = 265 | "1626345633653-ZGVtby9kZW1v-$2a$10$63d9R5xyDMYusXNdPdfKYO-e0jDd0o2KgBdrHv3PN+qTM+cFPM="; 266 | assert_eq!(extract_creation_time(token).unwrap(), 1626345633653); 267 | } 268 | } 269 | -------------------------------------------------------------------------------- /src/http/util.rs: -------------------------------------------------------------------------------- 1 | pub const DOCSPELL_AUTH: &str = "X-Docspell-Auth"; 2 | pub const DOCSPELL_ADMIN: &str = "Docspell-Admin-Secret"; 3 | 4 | use percent_encoding::percent_decode; 5 | 6 | // Couldn't find a library for parsing the header properly ¯\_(ツ)_/¯ 7 | 8 | /// Extracts the filename from a Content-Disposition header It prefers 9 | /// 'filename*' values over 'filename' should both be present. 
10 | pub fn filename_from_header(header_value: &str) -> Option { 11 | log::debug!("file header value: {}", header_value); 12 | let mut all: Vec<(u32, String)> = header_value 13 | .split(';') 14 | .map(|e| e.trim()) 15 | .filter_map(decode_name) 16 | .collect(); 17 | 18 | all.sort_by(|(a, _), (b, _)| a.partial_cmp(b).unwrap()); 19 | 20 | let name = all.into_iter().next().map(|(_, e)| e); 21 | 22 | log::debug!("Return file name: {:?}", name); 23 | name 24 | } 25 | 26 | fn decode_name(v: &str) -> Option<(u32, String)> { 27 | from_percent_encoded(v).or_else(|| from_basic_name(v).map(|(n, s)| (n, s.to_string()))) 28 | } 29 | 30 | fn from_basic_name(v: &str) -> Option<(u32, &str)> { 31 | v.find("filename=") 32 | .map(|index| &v[9 + index..]) 33 | .map(|rest| rest.trim_matches('"')) 34 | .map(|s| (1, s)) 35 | } 36 | 37 | fn from_percent_encoded(v: &str) -> Option<(u32, String)> { 38 | v.find("filename*=") 39 | .map(|index| &v[10 + index..]) 40 | .and_then(|rest| rest.split_once("''")) 41 | .and_then(|(_, name)| percent_decode(name.as_bytes()).decode_utf8().ok()) 42 | .map(|s| (0, s.to_string())) 43 | } 44 | 45 | #[cfg(test)] 46 | mod tests { 47 | use super::*; 48 | 49 | #[test] 50 | fn unit_filename_from_header() { 51 | assert_eq!( 52 | filename_from_header("inline; filename=\"test.jpg\""), 53 | Some("test.jpg".into()) 54 | ); 55 | 56 | assert_eq!( 57 | filename_from_header("inline; filename=\"XXXXXXX_XXXX_Unterj?hrige Entgeltaufstellung_vom_XX.XX.XXXX_XXXXXXXXXXXXXX.pdf\"; filename*=UTF-8''XXXXXXX_XXXX_Unterj%C3%A4hrige%20Entgeltaufstellung_vom_XX.XX.XXXX_XXXXXXXXXXXXXX.pdf"), 58 | Some("XXXXXXX_XXXX_Unterjährige Entgeltaufstellung_vom_XX.XX.XXXX_XXXXXXXXXXXXXX.pdf".into()) 59 | ); 60 | } 61 | } 62 | -------------------------------------------------------------------------------- /src/lib.rs: -------------------------------------------------------------------------------- 1 | //! Provides a library and command line interface to Docspell. 2 | //! 
3 | 4 | pub mod cli; 5 | pub mod config; 6 | pub mod error; 7 | pub mod http; 8 | mod util; 9 | 10 | pub use cli::execute_cmd; 11 | 12 | use clap::Parser; 13 | use cli::opts::MainOpts; 14 | use config::DsConfig; 15 | use error::Result; 16 | use std::path::PathBuf; 17 | 18 | /// Reads the program arguments into the `MainOpts` data structure. 19 | pub fn read_args() -> MainOpts { 20 | log::debug!("Parsing command line options…"); 21 | let m = MainOpts::parse(); 22 | 23 | log::debug!("Parsed options: {:?}", m); 24 | m 25 | } 26 | 27 | /// Reads the config file. 28 | /// 29 | /// If the file is not given, it is searched in the default location. 30 | /// If the file is given, it is used without a fallback. 31 | pub fn read_config(file: &Option) -> Result { 32 | let f = DsConfig::read(file.as_ref())?; 33 | log::debug!("Config: {:?}", f); 34 | Ok(f) 35 | } 36 | 37 | /// The main method: reads arguments and config file and executes the 38 | /// corresponding command. 39 | pub fn execute() -> Result<()> { 40 | let opts = read_args(); 41 | let cfg = read_config(&opts.config)?; 42 | execute_cmd(cfg, opts)?; 43 | Ok(()) 44 | } 45 | -------------------------------------------------------------------------------- /src/main.rs: -------------------------------------------------------------------------------- 1 | use dsc::error::{Error, Result}; 2 | use std::env; 3 | use std::process; 4 | 5 | const LOG_LEVEL: &str = "RUST_LOG"; 6 | 7 | fn main() { 8 | let error_style = console::Style::new().red().bright(); 9 | let result = execute(); 10 | if let Err(err) = result { 11 | eprintln!("{}", error_style.apply_to(&err)); 12 | process::exit(exit_code(&err)); 13 | } 14 | } 15 | 16 | fn execute() -> Result<()> { 17 | let opts = dsc::read_args(); 18 | let remove_env = match opts.common_opts.verbose { 19 | 1 => set_log_level("info"), 20 | n => { 21 | if n > 1 { 22 | set_log_level("debug") 23 | } else { 24 | false 25 | } 26 | } 27 | }; 28 | env_logger::init(); 29 | 30 | let cfg = 
dsc::read_config(&opts.config)?; 31 | let result = dsc::execute_cmd(cfg, opts); 32 | if remove_env { 33 | env::remove_var(LOG_LEVEL); 34 | } 35 | result?; 36 | Ok(()) 37 | } 38 | 39 | fn set_log_level(level: &str) -> bool { 40 | let current = env::var_os(LOG_LEVEL); 41 | if current.is_none() { 42 | env::set_var(LOG_LEVEL, level); 43 | true 44 | } else { 45 | false 46 | } 47 | } 48 | 49 | fn exit_code(err: &Error) -> i32 { 50 | match err { 51 | Error::Config { source: _ } => 1, 52 | Error::Cmd { source: _ } => 2, 53 | } 54 | } 55 | -------------------------------------------------------------------------------- /src/util.rs: -------------------------------------------------------------------------------- 1 | pub mod digest; 2 | pub mod dupes; 3 | pub mod file; 4 | pub mod pass; 5 | -------------------------------------------------------------------------------- /src/util/digest.rs: -------------------------------------------------------------------------------- 1 | use sha2::{Digest, Sha256}; 2 | use std::{io, path::Path}; 3 | 4 | const BUFFER_SIZE: usize = 1024; 5 | 6 | pub fn digest_file_sha256(file: &Path) -> Result { 7 | digest_file::(file) 8 | } 9 | 10 | pub fn digest_file(file: &Path) -> Result { 11 | log::debug!("Calculating hash for file {}", file.display()); 12 | std::fs::File::open(file).and_then(|mut f| digest::(&mut f)) 13 | } 14 | 15 | /// Compute digest value for given `Reader` and return it as hex string 16 | pub fn digest(reader: &mut R) -> Result { 17 | let mut sh = D::default(); 18 | let mut buffer = [0u8; BUFFER_SIZE]; 19 | loop { 20 | let n = match reader.read(&mut buffer) { 21 | Ok(n) => n, 22 | Err(_) => return Err(io::Error::new(io::ErrorKind::Other, "Could not read file")), 23 | }; 24 | sh.update(&buffer[..n]); 25 | if n == 0 || n < BUFFER_SIZE { 26 | break; 27 | } 28 | } 29 | Ok(hex::encode(&sh.finalize())) 30 | } 31 | -------------------------------------------------------------------------------- /src/util/dupes.rs: 
-------------------------------------------------------------------------------- 1 | use super::file; 2 | 3 | use std::collections::HashMap; 4 | 5 | pub struct Dupes { 6 | names: HashMap, 7 | } 8 | 9 | impl Dupes { 10 | pub fn new() -> Dupes { 11 | Dupes { 12 | names: HashMap::new(), 13 | } 14 | } 15 | 16 | pub fn use_name(&mut self, name: &str) -> (String, bool) { 17 | let fname = name.to_string(); 18 | match self.names.get(&fname) { 19 | Some(count) => { 20 | let next_name = file::splice_name(name, count); 21 | let next_count = count + 1; 22 | self.names.insert(fname.clone(), next_count); 23 | (next_name, true) 24 | } 25 | None => { 26 | self.names.insert(fname.clone(), 1); 27 | (fname, false) 28 | } 29 | } 30 | } 31 | 32 | pub fn is_empty(&self) -> bool { 33 | self.names.is_empty() 34 | } 35 | } 36 | 37 | #[cfg(test)] 38 | mod tests { 39 | 40 | use super::*; 41 | 42 | #[test] 43 | fn unit_dupes_add() { 44 | let mut dupes = Dupes::new(); 45 | assert_eq!(dupes.use_name("test.png"), ("test.png".into(), false)); 46 | assert_eq!(dupes.use_name("test.png"), ("test_1.png".into(), true)); 47 | assert_eq!(dupes.use_name("test.png"), ("test_2.png".into(), true)); 48 | assert_eq!(dupes.use_name("test.png"), ("test_3.png".into(), true)); 49 | assert_eq!(dupes.use_name("test.jpg"), ("test.jpg".into(), false)); 50 | } 51 | } 52 | -------------------------------------------------------------------------------- /src/util/file.rs: -------------------------------------------------------------------------------- 1 | use std::path::{Path, PathBuf}; 2 | 3 | use crate::cli::opts::FileAction; 4 | use snafu::{ResultExt, Snafu}; 5 | use std::io; 6 | 7 | /// Puts `suffix` in the filename before the extension. 
8 | pub fn splice_name(fname: &str, suffix: &i32) -> String { 9 | let p = PathBuf::from(fname); 10 | 11 | match p.extension() { 12 | Some(ext) => { 13 | let mut base = fname.trim_end_matches(ext.to_str().unwrap()).chars(); 14 | base.next_back(); 15 | format!("{}_{}.{}", base.as_str(), suffix, ext.to_str().unwrap()) 16 | } 17 | None => format!("{}_{}", fname, suffix), 18 | } 19 | } 20 | 21 | fn delete_parent_if_empty(file: &Path, root: Option<&PathBuf>) -> Result<(), std::io::Error> { 22 | match (root, file.parent()) { 23 | (Some(r), Some(p)) => { 24 | if p != r && std::fs::read_dir(p)?.next().is_none() { 25 | std::fs::remove_dir(p) 26 | } else { 27 | Ok(()) 28 | } 29 | } 30 | _ => Ok(()), 31 | } 32 | } 33 | 34 | #[derive(Debug, Snafu)] 35 | pub enum CollectiveSubdirErr { 36 | #[snafu(display("The collective could not be deduced for {}. Make sure there is a subdirectory with the name of the collective below {}", file.display(), dir.display()))] 37 | NoSubdir { file: PathBuf, dir: PathBuf }, 38 | 39 | #[snafu(display("Could not strip path prefix: {}", source))] 40 | StripPrefix { source: std::path::StripPrefixError }, 41 | } 42 | pub fn collective_from_subdir( 43 | path: &Path, 44 | roots: &[PathBuf], 45 | ) -> Result, CollectiveSubdirErr> { 46 | let file = path.canonicalize().unwrap(); 47 | for dir in roots { 48 | let can_dir = dir.canonicalize().unwrap(); 49 | log::debug!("Check prefix {} -> {}", can_dir.display(), file.display()); 50 | if file.starts_with(&can_dir) { 51 | let rest = file.strip_prefix(&can_dir).context(StripPrefixSnafu)?; 52 | let mut components = rest.components(); 53 | let coll = components.next(); 54 | match components.next() { 55 | Some(_) => { 56 | log::debug!("Found collective: {:?}", &coll); 57 | return Ok(coll 58 | .and_then(|s| s.as_os_str().to_str()) 59 | .map(|s| s.to_string())); 60 | } 61 | None => { 62 | return Err(CollectiveSubdirErr::NoSubdir { 63 | file: file.clone(), 64 | dir: can_dir.clone(), 65 | }) 66 | } 67 | } 68 | } 69 | } 70 
// NOTE(review): this chunk is part of a concatenated repository dump; the
// original line structure and every `<...>` span (generic type parameters)
// were destroyed during extraction. The code below is reconstructed into
// valid Rust; type parameters were inferred from usage — confirm against VCS.

// ---------------------------------------------------------------------------
// src/util/file.rs (tail — the function closed just below starts before this
// view and is left untouched)
// ---------------------------------------------------------------------------
    Ok(None)
}

/// Sanitizes `name` so it is safe to use as a single file-name component.
/// Illegal characters are replaced with `-` instead of being dropped.
pub fn safe_filename(name: &str) -> String {
    sanitize_filename::sanitize_with_options(
        name,
        sanitize_filename::Options {
            replacement: "-",
            ..Default::default()
        },
    )
}

/// Sanitizes a (possibly delimited) path.
///
/// With a `path_delimiter`, `name` is split on it, each segment is sanitized
/// separately and the segments are re-joined with `/`. Without a delimiter
/// the whole name is treated as one component.
pub fn safe_filepath(name: &str, path_delimiter: &Option<String>) -> String {
    match path_delimiter {
        Some(delimiter) => name
            .split(delimiter.as_str())
            .map(safe_filename)
            .collect::<Vec<_>>()
            .join("/"),
        // A single segment needs no join step.
        None => safe_filename(name),
    }
}

/// Creates a symbolic link `link` pointing at `original`.
/// Windows has distinct file/dir symlinks; the directory variant is used here.
#[cfg(windows)]
pub fn symlink<P: AsRef<Path>, Q: AsRef<Path>>(original: P, link: Q) -> io::Result<()> {
    std::os::windows::fs::symlink_dir(original, link)
}

/// Creates a symbolic link `link` pointing at `original`.
#[cfg(unix)]
pub fn symlink<P: AsRef<Path>, Q: AsRef<Path>>(original: P, link: Q) -> io::Result<()> {
    std::os::unix::fs::symlink(original, link)
}

/// Outcome of executing a `FileAction` on a file.
#[derive(Debug, Clone)]
pub enum FileActionResult {
    /// The file was deleted; carries the path it had.
    Deleted(PathBuf),
    /// The file was moved; carries its new path.
    Moved(PathBuf),
    /// Neither `move_to` nor `delete` was configured.
    Nothing,
}

impl FileAction {
    /// Applies this action to `file`. A configured `move_to` target takes
    /// precedence over `delete`; with neither set, nothing happens.
    pub fn execute(
        &self,
        file: &Path,
        root: Option<&PathBuf>,
    ) -> Result<FileActionResult, std::io::Error> {
        match &self.move_to {
            Some(target) => Self::move_file(file, root, target).map(FileActionResult::Moved),
            None if self.delete => Self::delete_file(file, root)
                .map(|_| FileActionResult::Deleted(file.to_path_buf())),
            None => Ok(FileActionResult::Nothing),
        }
    }

    /// Moves `file` into `target`. With a `root`, the path of `file` relative
    /// to `root` is preserved below `target`; otherwise only the file name is
    /// used. Empty parent directories below `root` are cleaned up afterwards.
    fn move_file(
        file: &Path,
        root: Option<&PathBuf>,
        target: &Path,
    ) -> Result<PathBuf, std::io::Error> {
        let target_file = match root {
            Some(r) => {
                // FIX(review): the original `.unwrap()` panicked when `file`
                // was not located below `root`; surface an io::Error instead.
                let part = file
                    .strip_prefix(r)
                    .map_err(|e| std::io::Error::new(std::io::ErrorKind::InvalidInput, e))?;
                target.join(part)
            }
            // FIX(review): the original `.unwrap()` panicked on paths ending
            // in `..` (no file name component).
            None => match file.file_name() {
                Some(name) => target.join(name),
                None => {
                    return Err(std::io::Error::new(
                        std::io::ErrorKind::InvalidInput,
                        format!("Path has no file name: {}", file.display()),
                    ))
                }
            },
        };
        log::debug!(
            "Move file '{}' -> '{}'",
            file.display(),
            target_file.display()
        );
        if let Some(parent) = target_file.parent() {
            if !parent.exists() {
                std::fs::create_dir_all(parent)?;
            }
        }
        std::fs::rename(file, &target_file)?;
        // delete the parent when below root. if no root given, don't delete parent
        delete_parent_if_empty(file, root)?;
        Ok(target_file)
    }

    /// Deletes `file` and then removes its parent directory if that leaves it
    /// empty below `root`.
    fn delete_file(file: &Path, root: Option<&PathBuf>) -> Result<(), std::io::Error> {
        log::debug!("Deleting file: {}", file.display());
        std::fs::remove_file(file)?;
        delete_parent_if_empty(file, root)?;
        Ok(())
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn unit_splice_name() {
        assert_eq!(splice_name("abc.pdf", &1), "abc_1.pdf");
        assert_eq!(splice_name("abc", &1), "abc_1");
        assert_eq!(splice_name("stuff.tar.gz", &2), "stuff.tar_2.gz");
    }
}

// ---------------------------------------------------------------------------
// src/util/pass.rs — thin wrapper around the external `pass` password manager
// ---------------------------------------------------------------------------
use std::io;
use std::process::Command;

/// Returns the password of the given `pass` entry: by convention the first
/// line of `pass show <entry>`.
pub fn pass_password(entry: &str) -> Result<String, io::Error> {
    let content = pass_exec(entry)?;
    content
        .lines()
        .next()
        .map(String::from)
        .ok_or_else(|| io_err(&format!("No password found for entry: {}", entry)))
}

/// Returns the (trimmed) value of the first `key:` line of the given `pass`
/// entry, e.g. `user: hugo` for key `user`.
pub fn pass_key(entry: &str, key: &str) -> Result<String, io::Error> {
    let content = pass_exec(entry)?;
    let name = format!("{}:", key);
    // FIX(review): replaces the original `starts_with` + manual byte-slice
    // (`&line[key.len() + 1..]`) with the equivalent, panic-free strip_prefix.
    content
        .lines()
        .find_map(|l| l.strip_prefix(&name))
        .map(|v| v.trim().to_string())
        .ok_or_else(|| io_err(&format!("No line found for key: {:?}", key)))
}

/// Runs `pass show <entry>` and returns its stdout decoded as UTF-8; a
/// non-zero exit status or invalid UTF-8 becomes an `io::Error`.
fn pass_exec(entry: &str) -> Result<String, io::Error> {
    log::debug!("Running external command `pass show {}`", entry);
    let output = Command::new("pass").arg("show").arg(entry).output()?;
    if output.status.success() {
        String::from_utf8(output.stdout).map_err(|_| io_err("Error decoding bytes using utf8!"))
    } else {
        let msg = String::from_utf8(output.stderr);
        log::warn!("pass exited with error {}: {:?}", output.status, msg);
        Err(io_err(&format!(
            "Pass failed with an error ({}): {}",
            output.status,
            msg.unwrap_or_else(|_| "no output".into())
        )))
    }
}

/// Shorthand for an `io::Error` of kind `Other` with the given message.
fn io_err(msg: &str) -> io::Error {
    io::Error::new(io::ErrorKind::Other, msg)
}

// ---------------------------------------------------------------------------
// tests/common/mod.rs — shared scaffolding for the integration tests
// ---------------------------------------------------------------------------
use assert_cmd::cargo::CargoError;
use assert_cmd::prelude::*;
use std::{io, process::Command};

/// Error type aggregating everything the integration tests can fail with.
#[derive(Debug)]
pub enum Error {
    Cargo(CargoError),
    IO(io::Error),
    Json(serde_json::Error),
}
impl From<CargoError> for Error {
    fn from(e: CargoError) -> Self {
        Error::Cargo(e)
    }
}
impl From<io::Error> for Error {
    fn from(e: io::Error) -> Self {
        Error::IO(e)
    }
}
impl From<serde_json::Error> for Error {
    fn from(e: serde_json::Error) -> Self {
        Error::Json(e)
    }
}

pub type Result<T> = std::result::Result<T, Error>;

/// Creates a `dsc` command pre-configured with the CI config file.
pub fn mk_cmd() -> Result<Command> {
    let mut cmd = Command::cargo_bin("dsc")?;
    cmd.arg("-c").arg("./ci/dsc-config.toml");
    Ok(cmd)
}

// ---------------------------------------------------------------------------
// tests/integration.rs (first part) — exercises the CLI against the docspell
// test server started by ci/docker-compose.yml
// ---------------------------------------------------------------------------
mod common;

use crate::common::{mk_cmd, Result};
use assert_cmd::prelude::*;
use dsc::http::payload::{BasicResult, ItemDetail, SearchResult, SourceAndTags, Summary};
use std::fs;
use std::{io::Write, path::Path, process::Command};

// Item ids present in the seeded test database (ci/docspell-server dump).
const ITEM_ID1: &str = "2wKtSUVt3Kj-mAmexmm1jFe-BU6aY6PN4vo-5cpaDD2EyRm";
const ITEM_ID2: &str = "J4wAkg3jxt5-7QaYXD1WTmF-gq4kGaS89RP-DnPyUwa77fK";

/// Builds a `BasicResult` with the given fields.
fn basic_result(success: bool, msg: &str) -> BasicResult {
    BasicResult {
        success,
        message: msg.into(),
    }
}

/// Builds the JSON string the CLI prints for a `BasicResult`.
fn basic_result_json(success: bool, msg: &str) -> String {
    serde_json::to_string(&basic_result(success, msg)).unwrap()
}

#[test]
fn dsc_help() -> Result<()> {
    // Deliberately bypasses mk_cmd: --help must work without a config file.
    let mut cmd = Command::cargo_bin("dsc")?;
    let assert = cmd.arg("--help").assert();
    assert.success().stderr("");
    Ok(())
}

#[test]
fn remote_version() -> Result<()> {
    let mut cmd = mk_cmd()?;
    let assert = cmd.arg("version").assert();
    assert.success().stderr("");
    Ok(())
}

#[test]
fn remote_register() -> Result<()> {
    let mut cmd = mk_cmd()?;
    let out = cmd
        .arg("register")
        .args(&["-c", "demo2", "-l", "demo2", "--password", "test"])
        .output()?;

    let out: BasicResult = serde_json::from_slice(out.stdout.as_slice())?;
    assert!(out.success);
    Ok(())
}

#[test]
fn remote_upload_web() -> Result<()> {
    let mut cmd = mk_cmd()?;
    let assert = cmd.arg("upload").arg("README.md").assert();
    assert
        .success()
        .stdout(basic_result_json(true, "Files submitted."));
    Ok(())
}

#[test]
fn remote_upload_single_delete() -> Result<()> {
    // Create a scratch file, upload it with --delete, then verify removal.
    let testname = "hello.txt";
    let testpath = Path::new(testname);
    let mut testfile = std::fs::File::create(&testpath)?;
    writeln!(&mut testfile, "hello world!")?;

    assert!(testpath.exists());

    let mut cmd = mk_cmd()?;
    let assert = cmd.arg("upload").arg("--delete").arg("hello.txt").assert();
    assert
        .success()
        .stdout(basic_result_json(true, "Files submitted."));

    assert!(!testpath.exists());
    Ok(())
}

#[test]
fn remote_upload_int_endpoint() -> Result<()> {
    let mut cmd = mk_cmd()?;
    let assert = cmd
        .arg("upload")
        .args(&[
            "-c",
            "demo",
            "-i",
            "--header",
            "Docspell-Integration:test123",
        ])
        .arg("README.md")
        .assert();
    assert
        .success()
        .stdout(basic_result_json(true, "Files submitted."));
    Ok(())
}

#[test]
fn remote_upload_int_endpoint_guess_collective() -> Result<()> {
    // Layout target/test_remote_upload/demo/README.md so --traverse can
    // derive the collective name ("demo") from the directory structure.
    let base = std::path::Path::new("target/test_remote_upload");
    if base.exists() {
        fs::remove_dir_all(base)?;
    }

    let demo = base.join("demo");
    // FIX(review): the original cloned `demo` just to pass it by value.
    std::fs::create_dir_all(&demo)?;
    std::fs::copy("README.md", demo.join("README.md"))?;

    let mut cmd = mk_cmd()?;
    let assert = cmd
        .arg("upload")
        .args(&[
            "-i",
            "--header",
            "Docspell-Integration:test123",
            "--traverse",
        ])
        .arg("target/test_remote_upload")
        .assert();
    assert
        .success()
        .stdout(basic_result_json(true, "Uploaded 1"));
    Ok(())
}

#[test]
fn remote_source_list() -> Result<()> {
    let mut cmd = mk_cmd()?;
    let out = cmd.arg("source").arg("list").output()?;

    let out: Vec<SourceAndTags> = serde_json::from_slice(out.stdout.as_slice())?;
    assert_eq!(out.len(), 1);
    assert_eq!(
        out[0].source.id,
        "FcVZWHAgfFD-MdYCm3qWTyX-a7hcbVhsgKG-FG9DwArw9eQ"
    );
    Ok(())
}

#[test]
fn remote_source_list_filter_id() -> Result<()> {
    // --id matches by prefix, so the short "FcVZ" finds the seeded source.
    let mut cmd = mk_cmd()?;
    let out = cmd
        .arg("source")
        .arg("list")
        .arg("--id")
        .arg("FcVZ")
        .output()?;

    let out: Vec<SourceAndTags> = serde_json::from_slice(out.stdout.as_slice())?;
    assert_eq!(out.len(), 1);
    assert_eq!(
        out[0].source.id,
        "FcVZWHAgfFD-MdYCm3qWTyX-a7hcbVhsgKG-FG9DwArw9eQ"
    );
    Ok(())
}

#[test]
fn remote_source_list_filter_id_neg() -> Result<()> {
    let mut cmd = mk_cmd()?;
    let out = cmd
        .arg("source")
        .arg("list")
        .arg("--id")
        .arg("xyz")
        .output()?;

    let out: Vec<SourceAndTags> = serde_json::from_slice(out.stdout.as_slice())?;
    assert_eq!(out.len(), 0);
    Ok(())
}

#[test]
fn remote_source_list_filter_name() -> Result<()> {
    let mut cmd = mk_cmd()?;
    let out = cmd
        .arg("source")
        .arg("list")
        .arg("--name")
        .arg("test")
        .output()?;

    let out: Vec<SourceAndTags> = serde_json::from_slice(out.stdout.as_slice())?;
    assert_eq!(out.len(), 1);
    assert_eq!(
        out[0].source.id,
        "FcVZWHAgfFD-MdYCm3qWTyX-a7hcbVhsgKG-FG9DwArw9eQ"
    );
    Ok(())
}

#[test]
fn remote_source_list_filter_name_neg() -> Result<()> {
    let mut cmd = mk_cmd()?;
    let out = cmd
        .arg("source")
        .arg("list")
        .arg("--name")
        .arg("xyz")
        .output()?;

    let out: Vec<SourceAndTags> = serde_json::from_slice(out.stdout.as_slice())?;
    assert_eq!(out.len(), 0);
    Ok(())
}

#[test]
fn remote_search_1() -> Result<()> {
    let mut cmd = mk_cmd()?;
    let out = cmd.arg("search").arg("name:*").output()?;

    let out: SearchResult = serde_json::from_slice(out.stdout.as_slice())?;
    assert_eq!(out.groups.len(), 2);
    assert_eq!(out.groups[0].name, "2019-09");
    assert_eq!(out.groups[1].name, "2016-01");
    Ok(())
}

#[test]
fn remote_search_2() -> Result<()> {
    let mut cmd = mk_cmd()?;
    let out = cmd.arg("search").arg("corr:pancake*").output()?;

    let res: SearchResult = serde_json::from_slice(out.stdout.as_slice())?;
    assert_eq!(res.groups.len(), 1);
    assert_eq!(res.groups[0].name, "2019-09");
    Ok(())
}

#[test]
fn remote_upload_source() -> Result<()> {
    let mut cmd = mk_cmd()?;
    let assert = cmd
        .arg("upload")
        .arg("--source")
        .arg("FcVZWHAgfFD-MdYCm3qWTyX-a7hcbVhsgKG-FG9DwArw9eQ")
        .arg("README.md")
        .assert();
    assert.success();
    Ok(())
}
247 | 248 | #[test] 249 | fn remote_search_summary() -> Result<()> { 250 | let mut cmd = mk_cmd()?; 251 | let out = cmd.arg("search-summary").arg("name:*").output()?; 252 | 253 | let res: Summary = serde_json::from_slice(out.stdout.as_slice())?; 254 | 255 | assert_eq!(res.count, 2); 256 | assert_eq!(res.tag_cloud.items.len(), 5); 257 | assert_eq!(res.tag_category_cloud.items.len(), 2); 258 | assert_eq!(res.field_stats.len(), 2); 259 | Ok(()) 260 | } 261 | 262 | #[test] 263 | fn remote_download() -> Result<()> { 264 | let mut cmd = mk_cmd()?; 265 | let out = cmd 266 | .arg("download") 267 | .arg("--target") 268 | .arg("files_test") 269 | .arg("date Result<()> { 282 | let mut cmd = mk_cmd()?; 283 | let out = cmd 284 | .arg("download") 285 | .arg("--target") 286 | .arg("zip_test/test.zip") 287 | .arg("--zip") 288 | .arg("date Result<()> { 301 | let mut cmd = mk_cmd()?; 302 | let out = cmd.arg("admin").arg("convert-all-pdfs").assert(); 303 | 304 | out.success() 305 | .stderr("") 306 | .stdout(basic_result_json(true, "Convert all PDFs task submitted")); 307 | Ok(()) 308 | } 309 | 310 | #[test] 311 | fn remote_item_get() -> Result<()> { 312 | let mut cmd = mk_cmd()?; 313 | let out = cmd.arg("item").arg("get").arg(&ITEM_ID2[0..7]).output()?; 314 | let item: ItemDetail = serde_json::from_slice(out.stdout.as_slice())?; 315 | out.assert().success().stderr(""); 316 | 317 | assert_eq!(item.name, "wordpress-pdf-invoice-plugin-sample.pdf"); 318 | 319 | let tag_names: Vec = item.tags.into_iter().map(|t| t.name).collect(); 320 | assert_eq!(tag_names, vec!["Invoice", "Todo"]); 321 | Ok(()) 322 | } 323 | 324 | #[test] 325 | fn remote_item_tags_add() -> Result<()> { 326 | let mut cmd = mk_cmd()?; 327 | let out = cmd 328 | .arg("item") 329 | .arg("tags") 330 | .arg("--id") 331 | .arg(ITEM_ID1) 332 | .arg("--add") 333 | .arg("todo") 334 | .assert(); 335 | 336 | out.success() 337 | .stderr("") 338 | .stdout(basic_result_json(true, "Tags linked")); 339 | 340 | remote_item_tags_remove()?; 
341 | Ok(()) 342 | } 343 | fn remote_item_tags_remove() -> Result<()> { 344 | let mut cmd = mk_cmd()?; 345 | let out = cmd 346 | .arg("item") 347 | .arg("tags") 348 | .arg("--id") 349 | .arg(ITEM_ID1) 350 | .arg("--remove") 351 | .arg("todo") 352 | .assert(); 353 | 354 | out.success() 355 | .stderr("") 356 | .stdout(basic_result_json(true, "Tags removed")); 357 | Ok(()) 358 | } 359 | 360 | #[test] 361 | fn remote_item_tags_replace() -> Result<()> { 362 | let mut cmd = mk_cmd()?; 363 | let out = cmd 364 | .arg("item") 365 | .arg("tags") 366 | .arg("--id") 367 | .arg(ITEM_ID1) 368 | .arg("--replace") 369 | .arg("invitation") 370 | .assert(); 371 | 372 | out.success() 373 | .stderr("") 374 | .stdout(basic_result_json(true, "Tags updated")); 375 | Ok(()) 376 | } 377 | 378 | #[test] 379 | fn remote_item_fields_set() -> Result<()> { 380 | let mut cmd = mk_cmd()?; 381 | let out = cmd 382 | .arg("item") 383 | .arg("fields") 384 | .arg("--id") 385 | .arg(&ITEM_ID1[0..7]) 386 | .arg("--name") 387 | .arg("eur") 388 | .arg("--set") 389 | .arg("12.50") 390 | .assert(); 391 | 392 | out.success().stderr("").stdout(basic_result_json( 393 | true, 394 | "Custom field value set successfully.", 395 | )); 396 | 397 | remote_item_fields_remove()?; 398 | Ok(()) 399 | } 400 | fn remote_item_fields_remove() -> Result<()> { 401 | let mut cmd = mk_cmd()?; 402 | let out = cmd 403 | .arg("item") 404 | .arg("fields") 405 | .arg("--id") 406 | .arg(ITEM_ID1) 407 | .arg("--name") 408 | .arg("eur") 409 | .arg("--remove") 410 | .assert(); 411 | 412 | out.success() 413 | .stderr("") 414 | .stdout(basic_result_json(true, "Custom field value removed.")); 415 | Ok(()) 416 | } 417 | 418 | #[test] 419 | fn remote_item_fields_set_bad_value() -> Result<()> { 420 | let mut cmd = mk_cmd()?; 421 | let out = cmd 422 | .arg("item") 423 | .arg("fields") 424 | .arg("--id") 425 | .arg(ITEM_ID1) 426 | .arg("--name") 427 | .arg("eur") 428 | .arg("--set") 429 | .arg("xyz") 430 | .assert(); 431 | 432 | 
out.success().stderr("").stdout(basic_result_json( 433 | false, 434 | "The value is invalid: Could not parse decimal value from: xyz", 435 | )); 436 | Ok(()) 437 | } 438 | -------------------------------------------------------------------------------- /tests/login.rs: -------------------------------------------------------------------------------- 1 | mod common; 2 | use crate::common::{mk_cmd, Result}; 3 | use assert_cmd::prelude::*; 4 | 5 | #[test] 6 | fn remote_login() -> Result<()> { 7 | let mut cmd = mk_cmd()?; 8 | let assert = cmd.arg("login").args(&["--password", "test"]).assert(); 9 | assert.success().stderr(""); 10 | Ok(()) 11 | } 12 | --------------------------------------------------------------------------------