├── .github └── workflows │ ├── create-release.yml │ ├── docs.yml │ ├── main.yml │ ├── publish-docker.yml │ └── publish-to-pypi.yml ├── .gitignore ├── .isort.cfg ├── CHANGELOG.md ├── Dockerfile ├── LICENSE ├── README.md ├── config.example.toml ├── docker-entrypoint.sh ├── docs ├── cli.md ├── index.md └── usage.md ├── mkdocs.yml ├── pyinstaller ├── at2.py └── autotorrent.spec ├── pyproject.toml ├── pytest.ini ├── setup.cfg ├── src └── autotorrent │ ├── __init__.py │ ├── __main__.py │ ├── __version__.py │ ├── db.py │ ├── exceptions.py │ ├── indexer.py │ ├── matcher.py │ ├── rw_cache.py │ └── utils.py └── tests ├── __init__.py ├── fixtures.py ├── test_cli.py ├── test_rm_clients.py ├── test_scan_add_torrents.py ├── test_scan_ls_clients.py ├── test_utils_hash.py └── testfiles ├── My-Bluray.torrent ├── My-Bluray └── BDMV │ ├── BACKUP │ ├── MovieObject.bdmv │ ├── PLAYLIST │ │ └── 00000.mpls │ └── index.bdmv │ ├── MovieObject.bdmv │ ├── PLAYLIST │ └── 00000.mpls │ ├── STREAM │ └── 00000.m2ts │ └── index.bdmv ├── My-DVD.torrent ├── My-DVD └── VIDEO_TS │ ├── VIDEO_TS.BUP │ ├── VIDEO_TS.IFO │ ├── VTS_01_0.BUP │ ├── VTS_01_0.IFO │ ├── VTS_01_0.VOB │ └── VTS_01_1.VOB ├── Some-CD-Release.torrent ├── Some-CD-Release ├── CD1 │ ├── somestuff-1.r00 │ ├── somestuff-1.r01 │ ├── somestuff-1.r02 │ ├── somestuff-1.r03 │ ├── somestuff-1.r04 │ ├── somestuff-1.r05 │ ├── somestuff-1.r06 │ ├── somestuff-1.rar │ └── somestuff-1.sfv ├── CD2 │ ├── somestuff-2.r00 │ ├── somestuff-2.r01 │ ├── somestuff-2.r02 │ ├── somestuff-2.r03 │ ├── somestuff-2.r04 │ ├── somestuff-2.r05 │ ├── somestuff-2.r06 │ ├── somestuff-2.r07 │ ├── somestuff-2.rar │ └── somestuff-2.sfv ├── Sample │ └── some-rls.mkv ├── Subs │ ├── somestuff-subs.r00 │ ├── somestuff-subs.rar │ └── somestuff-subs.sfv └── crap.nfo ├── Some-Release [test].torrent ├── Some-Release.torrent ├── Some-Release ├── Sample │ └── some-rls.mkv ├── Subs │ ├── some-subs.rar │ └── some-subs.sfv ├── some-rls.nfo ├── some-rls.r00 ├── some-rls.r01 ├── 
some-rls.r02 ├── some-rls.r03 ├── some-rls.r04 ├── some-rls.r05 ├── some-rls.r06 ├── some-rls.rar └── some-rls.sfv ├── file_a.txt ├── file_b.txt ├── file_c.txt ├── folder-does-not-exist.torrent ├── test-unsplitable-normal.torrent ├── test.torrent └── test_single.torrent /.github/workflows/create-release.yml: -------------------------------------------------------------------------------- 1 | name: Build artifacts and create release 2 | 3 | permissions: 4 | contents: write 5 | 6 | on: 7 | push: 8 | tags: 9 | - 1.* 10 | 11 | workflow_dispatch: 12 | 13 | jobs: 14 | build-pyinstaller: 15 | runs-on: ${{ matrix.os }} 16 | strategy: 17 | fail-fast: false 18 | matrix: 19 | os: ['windows-latest', 'ubuntu-latest', 'macos-latest'] 20 | 21 | steps: 22 | - uses: actions/checkout@v4 23 | - uses: actions/setup-python@v5 24 | with: 25 | python-version: '3.11' 26 | 27 | - run: pip install . pyinstaller 28 | - run: pyinstaller pyinstaller/autotorrent.spec 29 | - run: cd dist && tar -czvf ../pyinstaller-macos.tar.gz * 30 | if: ${{ matrix.os == 'macos-latest' }} 31 | - run: cd dist && tar -czvf ../pyinstaller-linux.tar.gz * 32 | if: ${{ matrix.os == 'ubuntu-latest' }} 33 | - run: cd dist && 7z a ../pyinstaller-windows.zip * 34 | if: ${{ matrix.os == 'windows-latest' }} 35 | - uses: actions/upload-artifact@v4 36 | with: 37 | name: pyinstaller-${{ matrix.os }} 38 | path: pyinstaller-*.* 39 | 40 | build-shiv: 41 | runs-on: ubuntu-latest 42 | 43 | steps: 44 | - uses: actions/checkout@v4 45 | - uses: actions/setup-python@v5 46 | with: 47 | python-version: '3.11' 48 | 49 | - run: pip install shiv 50 | - run: mkdir dist 51 | - run: shiv -c at2 -o dist/at2.pyz autotorrent2 52 | - uses: actions/upload-artifact@v4 53 | with: 54 | name: zipapp-shiv 55 | path: dist/at2.pyz 56 | 57 | create-release: 58 | runs-on: ubuntu-latest 59 | needs: [build-pyinstaller, build-shiv] 60 | steps: 61 | - name: Download 62 | uses: actions/download-artifact@v4 63 | with: 64 | merge-multiple: true 65 | - name: 
Display structure of downloaded files 66 | run: ls -R 67 | - name: Release 68 | uses: softprops/action-gh-release@v1 69 | with: 70 | body: TODO 71 | files: | 72 | at2.pyz 73 | pyinstaller-linux.tar.gz 74 | pyinstaller-macos.tar.gz 75 | pyinstaller-windows.zip 76 | env: 77 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} -------------------------------------------------------------------------------- /.github/workflows/docs.yml: -------------------------------------------------------------------------------- 1 | # This is a basic workflow to help you get started with Actions 2 | 3 | name: Docs 4 | 5 | # Controls when the workflow will run 6 | on: 7 | # Triggers the workflow on push or pull request events but only for the master branch 8 | push: 9 | tags: 10 | - 1.* 11 | 12 | # Allows you to run this workflow manually from the Actions tab 13 | workflow_dispatch: 14 | 15 | # A workflow run is made up of one or more jobs that can run sequentially or in parallel 16 | jobs: 17 | # This workflow contains a single job called "build" 18 | deploy: 19 | # The type of runner that the job will run on 20 | runs-on: ubuntu-latest 21 | 22 | permissions: 23 | contents: write 24 | 25 | # Steps represent a sequence of tasks that will be executed as part of the job 26 | steps: 27 | # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it 28 | - uses: actions/checkout@v4 29 | 30 | - name: Setup Python 31 | uses: actions/setup-python@v5 32 | with: 33 | python-version: "3.12" 34 | 35 | - name: Install project 36 | run: | 37 | python -m pip install .[docs] 38 | 39 | - name: Deploy docs 40 | run: mkdocs gh-deploy --force 41 | 42 | -------------------------------------------------------------------------------- /.github/workflows/main.yml: -------------------------------------------------------------------------------- 1 | # This is a basic workflow to help you get started with Actions 2 | 3 | name: Tests 4 | 5 | # Controls when the workflow will run 6 | on: 7 | # Triggers 
the workflow on push or pull request events but only for the master branch 8 | push: 9 | branches: [ master ] 10 | pull_request: 11 | 12 | # Allows you to run this workflow manually from the Actions tab 13 | workflow_dispatch: 14 | 15 | # A workflow run is made up of one or more jobs that can run sequentially or in parallel 16 | jobs: 17 | # This workflow contains a single job called "build" 18 | build: 19 | # The type of runner that the job will run on 20 | runs-on: ubuntu-latest 21 | 22 | # Steps represent a sequence of tasks that will be executed as part of the job 23 | steps: 24 | # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it 25 | - uses: actions/checkout@v4 26 | 27 | - name: Setup Python 28 | uses: actions/setup-python@v5 29 | with: 30 | python-version: "3.12" 31 | 32 | - name: Install project 33 | run: | 34 | python -m pip install .[test] 35 | 36 | - name: Run tests 37 | run: python -m pytest . 38 | -------------------------------------------------------------------------------- /.github/workflows/publish-docker.yml: -------------------------------------------------------------------------------- 1 | name: Publish docker image 2 | 3 | permissions: 4 | contents: read 5 | packages: write 6 | 7 | env: 8 | REGISTRY: ghcr.io 9 | IMAGE_NAME: ${{ github.repository }} 10 | 11 | on: 12 | # Triggers the workflow on push or pull request events but only for the master branch 13 | push: 14 | tags: 15 | - 1.* 16 | branches: 17 | - master 18 | 19 | 20 | # Allows you to run this workflow manually from the Actions tab 21 | workflow_dispatch: 22 | 23 | jobs: 24 | build-and-push-image: 25 | runs-on: ubuntu-latest 26 | 27 | permissions: 28 | contents: read 29 | packages: write 30 | 31 | steps: 32 | - name: Checkout repository 33 | uses: actions/checkout@v4 34 | - name: Log in to the Container registry 35 | uses: docker/login-action@65b78e6e13532edd9afa3aa52ac7964289d1a9c1 36 | with: 37 | registry: ${{ env.REGISTRY }} 38 | username: ${{ 
github.actor }} 39 | password: ${{ secrets.GITHUB_TOKEN }} 40 | 41 | - name: Extract metadata (tags, labels) for Docker 42 | id: meta 43 | uses: docker/metadata-action@9ec57ed1fcdbf14dcef7dfbe97b2010124a938b7 44 | with: 45 | images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }} 46 | 47 | - name: Build and push Docker image 48 | uses: docker/build-push-action@f2a1d5e99d037542a71f64918e516c093c6f3fc4 49 | with: 50 | context: . 51 | push: true 52 | tags: ${{ steps.meta.outputs.tags }} 53 | labels: ${{ steps.meta.outputs.labels }} -------------------------------------------------------------------------------- /.github/workflows/publish-to-pypi.yml: -------------------------------------------------------------------------------- 1 | name: Publish to pypi 2 | on: 3 | # Triggers the workflow on push or pull request events but only for the master branch 4 | push: 5 | tags: 6 | - 1.* 7 | 8 | # Allows you to run this workflow manually from the Actions tab 9 | workflow_dispatch: 10 | 11 | jobs: 12 | build-and-publish: 13 | name: Build and publish Python 14 | runs-on: ubuntu-latest 15 | environment: 16 | name: autotorrent2 17 | url: https://pypi.org/p/autotorrent2 18 | permissions: 19 | id-token: write 20 | contents: read 21 | steps: 22 | - uses: actions/checkout@v4 23 | - uses: actions/setup-python@v5 24 | with: 25 | python-version: '3.11' 26 | - name: Install pypa/build 27 | run: >- 28 | python -m 29 | pip install 30 | build 31 | - name: Build packages 32 | run: >- 33 | python -m 34 | build 35 | --sdist 36 | --wheel 37 | --outdir dist/ 38 | - name: Publish distribution 39 | uses: pypa/gh-action-pypi-publish@release/v1 40 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | *.mo 2 | *.egg-info 3 | *.egg 4 | *.EGG 5 | *.EGG-INFO 6 | bin 7 | build 8 | develop-eggs 9 | downloads 10 | eggs 11 | fake-eggs 12 | parts 13 | dist 14 | .installed.cfg 15 | 
.mr.developer.cfg 16 | .hg 17 | .bzr 18 | .svn 19 | *.pyc 20 | *.pyo 21 | *.tmp* 22 | dropin.cache 23 | _trial_temp 24 | *.komodoproject 25 | docs/_build* 26 | .env* 27 | autotorrent.conf 28 | .coverage 29 | autotorrent.db -------------------------------------------------------------------------------- /.isort.cfg: -------------------------------------------------------------------------------- 1 | [settings] 2 | multi_line_output=3 3 | include_trailing_comma=True 4 | force_grid_wrap=0 5 | use_parentheses=True 6 | line_length=88 7 | -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | # Changelog 2 | 3 | ## [Unreleased] - 4 | 5 | ### Add 6 | 7 | - Way to find torrents in a client not seeded from specified paths 8 | - Hardlink (inode) support for several commands and features 9 | - Query support for several commands that interact with already seeding torrents 10 | 11 | ### Bugfix 12 | 13 | - It is now possible to scan single files (again?) 
#56 14 | 15 | ## [1.3.0] - 2024-02-17 16 | 17 | ### Add 18 | 19 | - Dockerfile and docker support #51 (thanks kannibalox) 20 | 21 | ### Change 22 | 23 | - Moved to os.scanfile for more efficient disk scanning #45 (thanks kannibalox) 24 | - Using prefix tree when inserting into sqlite for more efficiency #41 (thanks kannibalox) 25 | 26 | ### Bugfix 27 | 28 | - reflink never actually worked #40 (Thanks undrog) 29 | - Using prefix tree when inserting into sqlite for more efficiency #41 (thanks kannibalox) 30 | 31 | ## [1.2.3] - 2022-08-13 32 | 33 | ### Add 34 | 35 | - Possible to disable metadata in store path #32 36 | 37 | ### Change 38 | 39 | - Exception logger on failed torrent parse #36 40 | - Bumped libtc version to qbittorrent tag support version #33 41 | 42 | ### Bugfix 43 | 44 | - Scan stalls when an exception occures #34 45 | - Making sure all trackers are bytes, fixing #48 46 | 47 | ## [1.2.2] - 2022-08-06 48 | 49 | ### Bugfix 50 | 51 | - Announce-list was not parsed correctly #31 52 | 53 | ## [1.2.1] - 2022-07-30 54 | 55 | ### Add 56 | 57 | - Support for more custom variables used in store_path, both from torrent and cli #28 58 | 59 | ### Change 60 | 61 | - Verifying store_path has at least one variable as it otherwise will use weird paths #12 62 | 63 | ### Bugfix 64 | 65 | - Catching permission error, not-a-directory error on add related to store_path #12 66 | 67 | ## [1.2.0] - 2022-06-18 68 | 69 | ### Add 70 | 71 | - Torrents can be added in stopped state via CLI flag #20 72 | 73 | ### Change 74 | 75 | - Unsplitable algorithm improved to support more use-cases 76 | - Renamed OK to Added to streamline messages #23 77 | - More information shared on failed to add exception #21 78 | 79 | ### Bugfix 80 | 81 | - Symlinks are now not resolved when adding to client (updated libtc) #17 82 | 83 | ## [1.1.0] - 2022-06-07 84 | ### Add 85 | 86 | - Option to ignore directory patterns during scan #18 87 | 88 | ### Change 89 | 90 | - fast_resume is now set to false 
default everywhere as it can cause problems #15 91 | - Disk scan now threaded with a pipeline design #14 92 | 93 | ## [1.0.3] - 2022-06-04 94 | ### Added 95 | 96 | - Option to ignore file patterns during scan #7 97 | 98 | ### Bugfix 99 | 100 | - Bumped libtc version to resolve qBittorrent issues #9 101 | 102 | ## [1.0.2] - 2022-06-01 103 | ### Added 104 | 105 | - Default config where possible 106 | - Updated libtc version to support labels 107 | 108 | ## [1.0.1] - 2022-05-30 109 | ### Bugfix 110 | 111 | - Made `same_paths` config option optional #4 112 | - Fixed problem with torrents that might make add crash #2 113 | (empty path segments because of encoding) 114 | - Hardlinks now working and compatible with Python 3.7 incl. tests for it. 115 | 116 | ## [1.0.0] - 2022-05-29 117 | ### Added 118 | 119 | - Initial release 120 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | FROM python:3.11 2 | 3 | RUN python3 -m venv /app/autotorrent 4 | 5 | ENV PATH=/app/autotorrent/bin/:$PATH 6 | ENV HOME=/app/autotorrent 7 | WORKDIR /app/autotorrent 8 | 9 | RUN curl -sSL https://github.com/bcicen/tinycron/releases/download/v0.4/tinycron-0.4-linux-amd64 > /usr/local/bin/tinycron && chmod +x /usr/local/bin/tinycron 10 | 11 | COPY ./docker-entrypoint.sh /opt/docker-entrypoint.sh 12 | ENTRYPOINT ["/opt/docker-entrypoint.sh"] 13 | COPY . 
/tmp/autotorrent/ 14 | RUN pip install --no-cache-dir /tmp/autotorrent/ && rm -r /tmp/autotorrent/ 15 | 16 | ## Uncomment to install from pypi 17 | # RUN /app/autotorrent/bin/pip install autotorrent2 18 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Copyright (C) 2022 Anders Jensen 2 | 3 | Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: 4 | 5 | The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 6 | 7 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Autotorrent2 2 | 3 | ![Test result](https://github.com/JohnDoee/autotorrent2/actions/workflows/main.yml/badge.svg?branch=master) 4 | 5 | Match torrents and data, remove torrents based on data, cleanup your disk for unseeded files. 6 | Autotorrent2 does everything you currently miss in your flow. 
7 | 8 | ## Supported 9 | 10 | * Torrent clients: rtorrent, Deluge, Transmission and qBittorrent 11 | * OS: Any, but only tested on linux 12 | * Python: 3.7+ are the only tested versions, might work with lower 3.x. 13 | 14 | ## Quickstart guide 15 | 16 | Install Autotorrent2 17 | 18 | ```bash 19 | python3 -m venv ~/.autotorrent # Create virtual environment where we install autotorrent2 20 | ~/.autotorrent/bin/pip install autotorrent2 # Actually install autotorrent2 21 | 22 | # Optional, add at2 to your commandline 23 | echo "alias at2=~/.autotorrent/bin/at2" >> ~/.bashrc 24 | source ~/.bashrc 25 | ``` 26 | 27 | The first time Autotorrent2 is run without a known config file, it will create a base config file. 28 | 29 | ```bash 30 | at2 check-config 31 | ``` 32 | 33 | The default location is ~/.config/autotorrent/config.toml - edit it to match your setup. 34 | See the example configuration file for setting description [found here](config.example.toml). 35 | 36 | Test the connections and see if it can connect to all your configured clients. 37 | 38 | ```bash 39 | at2 test-connection 40 | ``` 41 | 42 | Now you are ready to actually use it, check out the [Usage page for fun things to do](https://johndoee.github.io/autotorrent2/usage/) and [CLI page for featurelist](https://johndoee.github.io/autotorrent2/cli/) 43 | 44 | ## Note about running autotorrent2 in a script 45 | 46 | It can be fun to run scripts automatically and see cross-seeding just happen. 47 | Autotorrent2 is not really designed for multiple instances at once so it is recommended to use a lock to prevent this. 48 | 49 | Instead of just `at2` then use e.g. `flock ~/.autotorrent.lock -c 'at2'` which prevents multiple instances of Autotorrent2 at once. 50 | 51 | ## Note about Docker 52 | 53 | If you use Autotorrent2 in a docker container or with a torrent client running in docker then the recommendation is to align the paths. 
54 | If your torrent data is located in /mnt/data outside docker then you should map it similarly inside the docker container and you will save yourself from a lot of headaches. 55 | 56 | Personally I recommend mounting your data source as read-only because bittorrent clients are write-happy that might corrupt your data. 57 | 58 | ## Docker usage 59 | 60 | There is a docker image published automatically now. 61 | 62 | Basic usage: 63 | 64 | ```bash 65 | docker run -ti --rm -v ${PWD}/autotorrent.db:autotorrent.db -v ${PWD}/config.toml:config.toml ghcr.io/johndoee/autotorrent2:master check-config 66 | ``` 67 | 68 | Cron usage, check config every 5 minute. 69 | 70 | ```bash 71 | docker run -ti --rm -v ${PWD}/autotorrent.db:autotorrent.db -v ${PWD}/config.toml:config.toml ghcr.io/johndoee/autotorrent2:master cron '*/5 * * * *' check-config 72 | ``` 73 | 74 | ## Todo 75 | 76 | Assortment of stuff that is planned. 77 | 78 | - [ ] When Autotorrent2 is working on a task, e.g. copying a file to cache, then it might look like as it is stalled. An indicator should be added. 79 | - [ ] Client normalization indexing, e.g. index based on how transmission and qbittorrent handles problematic filenames 80 | - [ ] Torrent discovery for a torrent site. 81 | 82 | ## Known bugs 83 | 84 | Assortment of stuff I am not sure I can do much about. 85 | 86 | - [ ] Transmission (3.x) does not parse all emojis correctly and will return the wrong filename. [This is fixed in Transmission 4.0.2.](https://github.com/transmission/transmission/pull/5096) 87 | 88 | ## License 89 | 90 | MIT -------------------------------------------------------------------------------- /config.example.toml: -------------------------------------------------------------------------------- 1 | # Autotorrent specific settings. 2 | [autotorrent] 3 | 4 | # Path to the internally used sqlite3 database. 5 | # The path is relative to the config.toml if it is relative. 
6 | database_path = "./autotorrent.db" 7 | 8 | # Link type to use. 9 | # Choices: 10 | # soft - use soft links. 11 | # hard - use hard links, can only be used on same device. 12 | # reflink - use reflink, not supported on all filesystems. 13 | link_type = "soft" 14 | 15 | # List of files to always verify hash on, can be used for files 16 | # that might have the same size but often vary. 17 | # It is case-sensitive to some extent, see https://docs.python.org/3/library/fnmatch.html for syntax and description 18 | always_verify_hash = [ 19 | "*.nfo", 20 | "*.sfv", 21 | "*.diz", 22 | ] 23 | 24 | # List of paths to scan when running at2 scan 25 | paths = [ 26 | "/mnt/sd1/anime/", 27 | "/mnt/sd1/tv/", 28 | "/mnt/sd*/random/" 29 | ] 30 | 31 | # Paths that are the same but mounted different places, useful for e.g. rar2fs. 32 | # There is no need to include both paths in the path config. 33 | same_paths = [ 34 | ["/mnt/sd1/tv/", "/mnt/sd1/rar2fs/"] 35 | ] 36 | 37 | ## The add_limit settings will choose the smallest one, e.g. 38 | ## 1GB torrent will be 5% which is 53MB 39 | ## 10GB torrent will be 128MB because 5% is 530MB 40 | 41 | # Maximum number of bytes missing when matching data and not doing an exact match 42 | add_limit_size = 128_000_000 43 | 44 | # Maximum number of percent missing when matching data and not doing an exact match 45 | add_limit_percent = 5 46 | 47 | # Where to store links, supports the following variables: 48 | # client - the key found in the [clients] section, e.g. 'the-transmission' for [clients.the-transmission] 49 | # torrent_name - torrent filename, e.g. 
/path/to/a.good.file.torrent is a.good.file 50 | # torrent_source - source key from torrent, will throw an error if it does not exist in torrent 51 | # tracker_domain - base domain for the tracker 52 | store_path = "/mnt/store_path/{client}/{torrent_name}" 53 | 54 | # Skip various metadata store in the store_folder when creating links 55 | # Disabling this makes it impossible for autotorrent to trace back what a link folder contains 56 | skip_store_metadata = false 57 | 58 | # Cache files that are touched, useful with e.g. if the source is a read-only filesystem. 59 | # Torrent clients need write access to the source data if pieces are missing that overlap with existing files. 60 | cache_touched_files = false 61 | 62 | # Set owner and group for a path after links are created 63 | # ONLY UNIX 64 | # rw_file_cache_chown = "1000:1000" 65 | 66 | ## Config settings for the RW cache if enabled 67 | 68 | # TTL in seconds for files in the cache, i.e. it'll be deleted from the cache after X time 69 | # and the links to files in the cache are replaced with links to the original file 70 | rw_file_cache_ttl = 86400 71 | 72 | # Path to the cache 73 | rw_file_cache_path = "/mnt/store_path/cache" 74 | 75 | # Tell client to fast-resume, not supported in all clients or in all situations. 76 | # WARNING: setting fast_resume to true can cause errors and problems. 77 | fast_resume = false 78 | 79 | # List of fnmatch patterns to ignore when scanning local data and matching against torrent. 80 | # The patterns are only used doing "at2 scan" and "at add". They are only matched against the filename. 81 | # It is case-sensitive to some extend, see https://docs.python.org/3/library/fnmatch.html for syntax and description 82 | ignore_file_patterns = [ 83 | "*.png" 84 | ] 85 | 86 | # List of fnmatch patterns to ignore when scanning local data and matching against torrent. 87 | # The patterns are only used doing "at2 scan" and "at add". They are only matched against the directory name. 
88 | # Unlike file matching, it is NOT case-sensitive, see https://docs.python.org/3/library/fnmatch.html for syntax and description 89 | ignore_directory_patterns = [ 90 | ".*" 91 | ] 92 | 93 | # List of clients 94 | # See https://github.com/JohnDoee/libtc#config-file-syntax for syntax (or infer it from reading the examples) 95 | [clients] 96 | 97 | [clients.deluge] 98 | display_name = "A Deluge" 99 | client_type = "deluge" 100 | host = "127.0.0.1" 101 | port = 58846 102 | username = "localclient" 103 | password = "secretpassword" 104 | session_path = "~/.config/deluge/" 105 | label = "labelthis" 106 | 107 | [clients.the-transmission] 108 | display_name = "Some transmission" 109 | client_type = "transmission" 110 | url = "http://127.0.0.1:9091/transmission/rpc" 111 | session_path = "~/.config/transmission-daemon/" 112 | 113 | [clients.another-transmission] 114 | display_name = "Horse transmission" 115 | client_type = "transmission" 116 | url = "http://127.0.0.1:9092/transmission/rpc" 117 | session_path = "~/.config/transmission-daemon2/" 118 | 119 | [clients.rtorrent] 120 | display_name = "rtorrent" 121 | client_type = "rtorrent" 122 | url = "scgi://127.0.0.1:5000" 123 | session_path = "~/.rtorrent/" 124 | label = "testlabel" 125 | 126 | [clients.another-q-bittorrent] 127 | display_name = "qBittorrent 1" 128 | client_type = "qbittorrent" 129 | url = "http://localhost:8080/" 130 | username = "admin" 131 | password = "adminadmin" 132 | session_path = "~/.config/qbittorrent/" 133 | label = "testlabel" 134 | 135 | [clients.deluge-url] 136 | display_name = "Deluge url" 137 | client_url = "deluge://localclient:da39a3ee5e6b4b0d3255bfef95601890afd80709@127.0.0.1:58846?session_path=%7E/.config/deluge" -------------------------------------------------------------------------------- /docker-entrypoint.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | if [[ -n "$PUID" && -n "$PGID" ]]; then 3 | groupadd -g "$PGID" 
autotorrent 4 | useradd -u "$PUID" -g "$PGID" -M -d /app/autotorrent autotorrent 5 | if [[ "$1" == "cron" ]]; then 6 | SCHEDULE="$2" 7 | shift 2 8 | echo -e '#!/bin/bash\nat2 '"$*" > /var/tmp/cron.sh 9 | chmod +x /var/tmp/cron.sh 10 | echo /usr/local/bin/tinycron "$SCHEDULE" /var/tmp/cron.sh | su autotorrent 11 | else 12 | echo at2 "$@" | su autotorrent 13 | fi 14 | else 15 | if [[ "$1" == "cron" ]]; then 16 | SCHEDULE="$2" 17 | shift 2 18 | echo -e '#!/bin/bash\nat2 '"$*" > /var/tmp/cron.sh 19 | chmod +x /var/tmp/cron.sh 20 | /usr/local/bin/tinycron "$SCHEDULE" /var/tmp/cron.sh 21 | else 22 | at2 "$@" 23 | fi 24 | fi 25 | -------------------------------------------------------------------------------- /docs/cli.md: -------------------------------------------------------------------------------- 1 | # CLI Reference 2 | 3 | This page provides documentation for the commandline. 4 | 5 | ::: mkdocs-click 6 | :module: autotorrent.__main__ 7 | :command: cli 8 | :prog_name: at2 9 | :depth: 1 10 | 11 | 12 | 13 | -------------------------------------------------------------------------------- /docs/index.md: -------------------------------------------------------------------------------- 1 | # Autotorrent2 2 | 3 | Autotorrent2 is the successor to Autotorrent. The original autotorrent was made to match data on disk with data 4 | in torrents even when slight differences or alterations occured. Autotorrent2 can do that too. 5 | 6 | The purpose of Autotorrent2 is to extend to the whole pipeline of a torrent lifecycle from discovery to removal. 7 | Actions that often are cumbersome and timeconsuming to do by hand. 
8 | 9 | 10 | ## Feature comparison 11 | 12 | | Feature | Autotorrent | Autotorrent2 | 13 | |-----------------------------------------|-------------|--------------| 14 | | Torrent to data match | Yes | Yes | 15 | | Torrent removal | No | Yes | 16 | | Handle read-only files being written to | No | Yes | 17 | | Discover how much is seeded in a folder | No | Yes | 18 | | Handle broken encoding (utf-8) | No | Yes | 19 | | Multi-client support | Partial | Yes | 20 | | Different torrent clients support | Yes | Yes | 21 | | (Conditional) hash verification | No | Yes | 22 | | Torrent discovery | No | On TODO | 23 | 24 | ## Installation 25 | 26 | Check out the Github README. 27 | 28 | ## Links 29 | 30 | [Github](https://github.com/JohnDoee/autotorrent2) -------------------------------------------------------------------------------- /docs/usage.md: -------------------------------------------------------------------------------- 1 | # Usage 2 | 3 | A number of common use-cases and how to handle them are described here. For the exhaustive feature list check out the CLI. 4 | 5 | ## Torrent add 6 | 7 | ###### Commands: 8 | - at2 add 9 | - at2 scan 10 | - at2 cleanup-cache 11 | 12 | ###### Config fields: 13 | - link_type 14 | - always_verify_hash 15 | - paths 16 | - same_paths 17 | - add_limit_size 18 | - add_limit_percent 19 | - store_path 20 | - cache_touched_files 21 | - rw_file_cache_ttl 22 | - rw_file_cache_path 23 | 24 | Match torrents with data on your disk, where every torrent starts its life. First we have to `at2 scan` to discover files Autotorrent2 can match against. 25 | Our ubuntu isos are now indexed and we can add them to a torrent client. The client we are using is called transmission-ubuntu. 26 | 27 | `at2 add transmission-ubuntu ubuntu-20.04.torrent` - it turns out the torrent is a little bit different as it has an .nfo file and transmission will need to write part of a piece to ubuntu-20.04.iso. 
28 | The file is read-only because it is owned by a different user and therefore it is cached locally. It can be pointed back to the original file AFTER transmission is done writing to the cached file. 29 | 30 | Autotorrent2 supports caching files which can be enabled and disabled with the `cache_touched_files` setting. 31 | 32 | The time is now `rw_file_cache_ttl` seconds later and we want to cleanup the cache, i.e. re-link files with the original file instead of having multiple copies of the same file indefinitely. Run `at2 cleanup-cache` and the file is gone from the cache. 33 | 34 | ## Torrent reseed 35 | 36 | ###### Commands: 37 | - at2 add 38 | - at2 scan 39 | 40 | ###### Config fields: 41 | - always_verify_hash 42 | - paths 43 | - same_paths 44 | 45 | New computer, new seedbox, new something. You want to reseed the old torrents and you have all the torrents and all the data available. Edit `paths` to the correct paths, run `at2 scan` to index your data. 46 | 47 | With the data indexed you are ready to add the torrents with `at2 add -e *.torrent` - the `-e` option is the exact match mode, aka. reseed mode. 48 | 49 | ## Find seeded and unseeded files 50 | 51 | ###### Commands: 52 | - at2 ls 53 | - at2 find-unseeded 54 | - at2 scan-clients 55 | 56 | For one reason or another, you have removed torrents from your client but not deleted the files on disk. This can be for multiple reasons, e.g. the files might be in use for cross-seeding purposes. 57 | 58 | First we scan the clients with `at2 scan-clients` so we have a local index of all the seeded files. It takes the filelist from the clients and saves it. 59 | 60 | Now we can do `at2 ls` to see what is seeded in the current folder. While it is interesting to see how much is seeded the practical purpose is to find the exact files not seeded. 61 | 62 | This can be done with `at2 find-unseeded /mnt/data/torrent-data/` which will spit out the paths not seeded. 
63 | 64 | A common trick is to use the -e option and rm like: `at2 find-unseeded -e /mnt/data/torrent-data/ | xargs rm -r --` WARNING: make sure the clients are recently scanned and the output without the rm part looks correct as this command just deletes files. 65 | 66 | 67 | ## Torrent removal 68 | 69 | ###### Commands: 70 | - at2 rm 71 | - at2 scan-clients 72 | 73 | The normal process for removing data is to do it from the torrent client, these commands can help you do it the other way and remove from multiple clients at once. 74 | 75 | Like with ls we need to `at2 scan-clients` first to have an up-to-date local list of seeded files. Run `at2 rm /path/to/torrent` to remove everything seeded in that path directly or indirectly, i.e. linked files too. No torrents left hanging. 76 | 77 | ## Test configuration 78 | 79 | ###### Commands: 80 | - at2 check-config 81 | - at2 test-connection 82 | 83 | Sometimes you want to check if what you are doing is correct and working. 84 | The two commands listed can test this easily. 85 | 86 | ## Torrent discovery 87 | 88 | Command: TODO 89 | 90 | This feature is not implemented yet but you can look at some of the other tools while you wait. 91 | 92 | With the original autotorrent you would have to go out and find the potential torrents you would want to seed. This works for some flows but people just want stuff found and seeded. 93 | 94 | There has been a huge resurgence in this exact field with tools like [mmgoodnow/cross-seed](https://github.com/mmgoodnow/cross-seed), [BC44/Cross-Seed-AutoDL](https://github.com/BC44/Cross-Seed-AutoDL), [boban-bmw/cross-seedarr](https://github.com/boban-bmw/cross-seedarr), [ccf-2012/seedcross](https://github.com/ccf-2012/seedcross) and (my own) [JohnDoee/flexget-cross-seed](https://github.com/JohnDoee/flexget-cross-seed). 95 | 96 | The listed tools have an air of impreciseness around them, if files cannot be easily discovered via external search tools. 
-------------------------------------------------------------------------------- /mkdocs.yml: -------------------------------------------------------------------------------- 1 | site_name: Autotorrent2 2 | theme: readthedocs 3 | 4 | nav: 5 | - Home: index.md 6 | - Usage: usage.md 7 | - CLI Reference: cli.md 8 | 9 | markdown_extensions: 10 | - attr_list 11 | - mkdocs-click -------------------------------------------------------------------------------- /pyinstaller/at2.py: -------------------------------------------------------------------------------- 1 | from autotorrent.__main__ import cli 2 | 3 | if __name__ == "__main__": 4 | cli() 5 | -------------------------------------------------------------------------------- /pyinstaller/autotorrent.spec: -------------------------------------------------------------------------------- 1 | # -*- mode: python ; coding: utf-8 -*- 2 | from PyInstaller.utils.hooks import collect_data_files 3 | 4 | datas = [] 5 | datas += collect_data_files('publicsuffixlist') 6 | 7 | 8 | block_cipher = None 9 | 10 | 11 | a = Analysis( 12 | ['at2.py'], 13 | pathex=[], 14 | binaries=[], 15 | datas=datas, 16 | hiddenimports=[], 17 | hookspath=[], 18 | hooksconfig={}, 19 | runtime_hooks=[], 20 | excludes=[], 21 | win_no_prefer_redirects=False, 22 | win_private_assemblies=False, 23 | cipher=block_cipher, 24 | noarchive=False, 25 | ) 26 | pyz = PYZ(a.pure, a.zipped_data, cipher=block_cipher) 27 | 28 | exe = EXE( 29 | pyz, 30 | a.scripts, 31 | [], 32 | exclude_binaries=True, 33 | name='at2', 34 | debug=False, 35 | bootloader_ignore_signals=False, 36 | strip=False, 37 | upx=True, 38 | console=True, 39 | disable_windowed_traceback=False, 40 | argv_emulation=False, 41 | target_arch=None, 42 | codesign_identity=None, 43 | entitlements_file=None, 44 | ) 45 | coll = COLLECT( 46 | exe, 47 | a.binaries, 48 | a.zipfiles, 49 | a.datas, 50 | strip=False, 51 | upx=True, 52 | upx_exclude=[], 53 | name='autotorrent', 54 | ) 55 | 
-------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [build-system] 2 | requires = [ 3 | "setuptools>=42", 4 | "wheel" 5 | ] 6 | build-backend = "setuptools.build_meta" -------------------------------------------------------------------------------- /pytest.ini: -------------------------------------------------------------------------------- 1 | [pytest] 2 | log_cli = true 3 | log_cli_level = 10 4 | -------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | [metadata] 2 | name = autotorrent2 3 | version = attr: autotorrent.__version__.__version__ 4 | author = John 5 | description = Torrent lifecycle management tool 6 | long_description = file: README.md 7 | long_description_content_type = text/markdown 8 | url = https://github.com/johndoee/autotorrent2 9 | classifiers = 10 | Programming Language :: Python :: 3 11 | License :: OSI Approved :: MIT License 12 | Operating System :: OS Independent 13 | 14 | [options] 15 | package_dir = 16 | = src 17 | packages = find: 18 | python_requires = >=3.7 19 | install_requires = 20 | libtc >=1.3.4,<2 21 | toml >=0.10.1,<0.10.99 22 | appdirs >=1.4.4,<2 23 | click >=8.0.0,<9 24 | chardet >=4.0.0,<5 25 | 26 | [options.extras_require] 27 | test = 28 | pytest >=6.0.1 29 | docs = 30 | mkdocs ==1.3.0 31 | mkdocs-click ==0.7.0 32 | 33 | [options.packages.find] 34 | where = src 35 | 36 | [options.entry_points] 37 | console_scripts = 38 | at2 = autotorrent.__main__:cli -------------------------------------------------------------------------------- /src/autotorrent/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/JohnDoee/autotorrent2/dcbdf2899ddb326fef9d4845b6250bf828ea2029/src/autotorrent/__init__.py 
-------------------------------------------------------------------------------- /src/autotorrent/__version__.py: -------------------------------------------------------------------------------- 1 | __version__ = "1.3.0" 2 | -------------------------------------------------------------------------------- /src/autotorrent/db.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import os 3 | import sqlite3 4 | from collections import namedtuple 5 | from pathlib import Path 6 | 7 | from .utils import decode_str, normalize_filename 8 | 9 | logger = logging.getLogger(__name__) 10 | 11 | SeededFile = namedtuple( 12 | "SeededFile", ["name", "path", "download_path", "infohash", "client", "size"] 13 | ) 14 | 15 | InsertTorrentFile = namedtuple( 16 | "InsertTorrentFile", 17 | ["infohash", "name", "download_path", "paths"], 18 | ) 19 | 20 | 21 | class SearchedFile( 22 | namedtuple( 23 | "SearchedFile", ["name", "path", "size", "normalized_name", "unsplitable_root"] 24 | ) 25 | ): 26 | def to_full_path(self): 27 | return self.path / self.name 28 | 29 | 30 | class Database: 31 | _insert_counter = 0 32 | 33 | def __init__(self, path, utf8_compat_mode=False): 34 | self.db = sqlite3.connect(path) 35 | self.utf8_compat_mode = utf8_compat_mode 36 | self.create_tables() 37 | 38 | def create_tables(self): 39 | c = self.db.cursor() 40 | c.execute( 41 | """CREATE TABLE IF NOT EXISTS files ( 42 | name varchar NOT NULL, 43 | path varchar NOT NULL, 44 | size integer NOT NULL, 45 | normalized_name varchar NOT NULL, 46 | unsplitable_root varchar, 47 | UNIQUE(name, path) 48 | )""" 49 | ) 50 | c.execute( 51 | """CREATE INDEX IF NOT EXISTS idx_normalized_name ON files(normalized_name)""" 52 | ) 53 | c.execute("""CREATE INDEX IF NOT EXISTS idx_size ON files(size)""") 54 | c.execute( 55 | """CREATE TABLE IF NOT EXISTS client_torrents ( 56 | id INTEGER PRIMARY KEY AUTOINCREMENT, 57 | name varchar NOT NULL, 58 | download_path varchar NOT NULL, 
59 | infohash varchar NOT NULL, 60 | client varchar NOT NULL, 61 | UNIQUE(infohash, client) 62 | )""" 63 | ) 64 | c.execute( 65 | """CREATE TABLE IF NOT EXISTS client_torrentfiles ( 66 | torrent_id integer NOT NULL, 67 | path varchar NOT NULL, 68 | size integer NOT NULL, 69 | UNIQUE(path, torrent_id) 70 | )""" 71 | ) 72 | try: 73 | c.execute("""ALTER TABLE client_torrentfiles ADD COLUMN inode INTEGER""") 74 | except sqlite3.OperationalError: 75 | pass 76 | c.execute( 77 | """CREATE INDEX IF NOT EXISTS client_torrentfiles_inode ON client_torrentfiles (inode)""" 78 | ) 79 | self.db.commit() 80 | 81 | def commit(self): 82 | self.db.commit() 83 | 84 | def insert_file_paths(self, iterable): 85 | """Take an interable that generates a tuple with the three 86 | fields defined in `create_insert` and normalize them for 87 | insertion into the DB""" 88 | 89 | def create_insert(args): 90 | path, size, unsplitable_root = args 91 | unsplitable_root = str(unsplitable_root) 92 | decoded_path = decode_str(os.fsencode(path), try_fix=self.utf8_compat_mode) 93 | if decoded_path is None: 94 | return None 95 | name_path, name = os.path.split(decoded_path) 96 | normalized_name = normalize_filename(name) 97 | logger.debug( 98 | f"Inserting name: {name!r} name_path: {name_path!r} size: {size} normalized_name: {normalized_name!r} unsplitable_root {unsplitable_root!r}" 99 | ) 100 | return (name, name_path, size, normalized_name, unsplitable_root) 101 | 102 | c = self.db.cursor() 103 | try: 104 | c.executemany( 105 | "INSERT OR IGNORE INTO files (name, path, size, normalized_name, unsplitable_root) VALUES (?, ?, ?, ?, ?)", 106 | [row for row in map(create_insert, iterable) if row is not None], 107 | ) 108 | finally: 109 | c.close() 110 | 111 | def truncate_files(self): 112 | c = self.db.cursor() 113 | try: 114 | c.execute("DELETE FROM files") 115 | finally: 116 | c.close() 117 | 118 | def search_file( 119 | self, 120 | filename=None, 121 | size=None, 122 | path=None, 123 | 
normalized_filename=False, 124 | path_postfix=None, 125 | is_unsplitable=None, 126 | unsplitable_root=None, 127 | ): 128 | assert ( 129 | filename is not None 130 | or size is not None 131 | or path is not None 132 | or normalized_filename is not None 133 | ), "must specify at least one argument" 134 | assert ( 135 | unsplitable_root is None or is_unsplitable is None 136 | ), "must specify only unsplitable_root or is_unsplitable, not both" 137 | c = self.db.cursor() 138 | query, args = [], [] 139 | if normalized_filename: 140 | query.append("normalized_name = ?") 141 | args.append(normalize_filename(normalized_filename)) 142 | 143 | if filename: 144 | query.append("name = ?") 145 | args.append(filename) 146 | 147 | if size is not None: 148 | query.append("size = ?") 149 | args.append(size) 150 | 151 | if path is not None: 152 | query.append("path = ?") 153 | args.append(str(path)) 154 | 155 | if path_postfix: 156 | path_postfix = str(path_postfix).lstrip(os.sep) 157 | if path_postfix != ".": 158 | query.append("path LIKE ?") 159 | args.append(f"%{os.sep}{path_postfix}") 160 | 161 | if is_unsplitable is not None: 162 | if is_unsplitable: 163 | query.append("unsplitable_root IS NOT NULL") 164 | else: 165 | query.append("unsplitable_root IS NULL") 166 | 167 | if unsplitable_root is not None: 168 | query.append("unsplitable_root = ?") 169 | args.append(str(unsplitable_root)) 170 | 171 | query = ( 172 | "SELECT name, path, size, normalized_name, unsplitable_root FROM files WHERE " 173 | + " AND ".join(query) 174 | ) 175 | logger.debug(f"Doing query: {query!r} with args: {args!r}") 176 | return [ 177 | SearchedFile(name, Path(path), size, normalized_name, unsplitable_root) 178 | for (name, path, size, normalized_name, unsplitable_root) in c.execute( 179 | query, args 180 | ).fetchall() 181 | ] 182 | 183 | def get_torrent_file_info(self, client, infohash): 184 | c = self.db.cursor() 185 | torrents = c.execute( 186 | "SELECT name, download_path FROM client_torrents WHERE 
client = ? AND infohash = ?", 187 | ( 188 | client, 189 | infohash, 190 | ), 191 | ).fetchall() 192 | if not torrents: 193 | return None, None 194 | name, download_path = torrents[0] 195 | return name, Path(download_path) 196 | 197 | def insert_torrent_files_paths(self, client, insert_torrent_files): 198 | c = self.db.cursor() 199 | 200 | self.remove_torrent_files( 201 | client, [itf.infohash for itf in insert_torrent_files] 202 | ) 203 | self.commit() 204 | 205 | c.executemany( 206 | "INSERT OR IGNORE INTO client_torrents (name, download_path, infohash, client) VALUES (?, ?, ?, ?)", 207 | [ 208 | ( 209 | itf.name, 210 | decode_str(itf.download_path, try_fix=self.utf8_compat_mode), 211 | itf.infohash, 212 | client, 213 | ) 214 | for itf in insert_torrent_files 215 | ], 216 | ) 217 | self.commit() 218 | 219 | infohash_id_mapping = dict( 220 | c.execute( 221 | f"SELECT infohash, id FROM client_torrents WHERE client = ? AND infohash IN ({','.join(['?'] * len(insert_torrent_files))})", 222 | (client, *[itf.infohash for itf in insert_torrent_files]), 223 | ).fetchall() 224 | ) 225 | 226 | for itf in insert_torrent_files: 227 | insert_args = [] 228 | for path, size, inode in itf.paths: 229 | path = decode_str(path, try_fix=self.utf8_compat_mode) 230 | if path is None: 231 | continue 232 | 233 | insert_args.append( 234 | (infohash_id_mapping[itf.infohash], path, size, inode) 235 | ) 236 | 237 | c.executemany( 238 | "INSERT OR IGNORE INTO client_torrentfiles (torrent_id, path, size, inode) VALUES (?, ?, ?, ?)", 239 | insert_args, 240 | ) 241 | self.commit() 242 | 243 | def truncate_torrent_files(self, client=None): 244 | c = self.db.cursor() 245 | if client: 246 | c.execute( 247 | "DELETE FROM client_torrentfiles WHERE torrent_id IN (SELECT id FROM client_torrents WHERE client = ?)", 248 | (client,), 249 | ) 250 | c.execute("DELETE FROM client_torrents WHERE client = ?", (client,)) 251 | else: 252 | c.execute("DELETE FROM client_torrentfiles") 253 | c.execute("DELETE FROM 
client_torrents") 254 | self.db.commit() 255 | 256 | def remove_torrent_files(self, client, infohashes): 257 | c = self.db.cursor() 258 | for (id_,) in c.execute( 259 | f"SELECT id FROM client_torrents WHERE client = ? AND infohash IN ({','.join(['?'] * len(infohashes))})", 260 | (client, *infohashes), 261 | ): 262 | c.execute("DELETE FROM client_torrents WHERE id = ?", (id_,)) 263 | c.execute("DELETE FROM client_torrentfiles WHERE torrent_id = ?", (id_,)) 264 | self.db.commit() 265 | 266 | def remove_non_existing_infohashes(self, client, infohashes): 267 | c = self.db.cursor() 268 | self.remove_torrent_files( 269 | client, 270 | [ 271 | infohash 272 | for infohash, in c.execute( 273 | f"SELECT infohash FROM client_torrents WHERE client = ? AND infohash NOT IN ({','.join(['?'] * len(infohashes))})", 274 | (client, *infohashes), 275 | ).fetchall() 276 | ], 277 | ) 278 | 279 | def get_seeded_paths(self, paths, inodes): 280 | c = self.db.cursor() 281 | c.execute( 282 | f"""SELECT client_torrentfiles.torrent_id, name, download_path, infohash, client, path, size FROM client_torrentfiles 283 | LEFT JOIN client_torrents ON client_torrents.id = client_torrentfiles.torrent_id 284 | WHERE path IN ({','.join(['?'] * len(paths))})""", 285 | [decode_str(p, try_fix=self.utf8_compat_mode) for p in paths], 286 | ) 287 | 288 | seeded_files = [] 289 | indirect_seeded_files = [] 290 | seen_files = set() 291 | 292 | for ( 293 | torrent_id, 294 | name, 295 | download_path, 296 | infohash, 297 | client, 298 | path, 299 | size, 300 | ) in c.fetchall(): 301 | seeded_files.append( 302 | SeededFile(name, Path(path), download_path, infohash, client, size) 303 | ) 304 | seen_files.add((torrent_id, client, path)) 305 | 306 | if inodes: 307 | c.execute( 308 | f"""SELECT client_torrentfiles.torrent_id, inode, name, download_path, infohash, client, path, size FROM client_torrentfiles 309 | LEFT JOIN client_torrents ON client_torrents.id = client_torrentfiles.torrent_id 310 | WHERE inode IN 
({','.join(['?'] * len(inodes))})""", 311 | list(inodes.keys()), 312 | ) 313 | for ( 314 | torrent_id, 315 | inode, 316 | name, 317 | download_path, 318 | infohash, 319 | client, 320 | path, 321 | size, 322 | ) in c.fetchall(): 323 | if (torrent_id, client, path) in seen_files: 324 | continue 325 | seen_files.add((torrent_id, client, path)) 326 | full_path = Path(path) 327 | if not full_path.is_file(): 328 | continue 329 | stat = full_path.stat() 330 | for p, dev in inodes[inode]: 331 | if dev == stat.st_dev: 332 | indirect_seeded_files.append( 333 | SeededFile(name, p, download_path, infohash, client, size) 334 | ) 335 | break 336 | return seeded_files, indirect_seeded_files 337 | 338 | def get_seeded_infohashes(self, client): 339 | c = self.db.cursor() 340 | c.execute( 341 | f"""SELECT infohash, name, sum(size), count(*) 342 | FROM client_torrents 343 | LEFT JOIN client_torrentfiles ON client_torrents.id = client_torrentfiles.torrent_id 344 | AND client_torrentfiles.path LIKE (client_torrents.download_path || '%') 345 | WHERE client = ? 
346 | GROUP BY infohash, name""", 347 | (client,), 348 | ) 349 | 350 | return [ 351 | (infohash, name, size, count) for (infohash, size, count) in c.fetchall() 352 | ] 353 | -------------------------------------------------------------------------------- /src/autotorrent/exceptions.py: -------------------------------------------------------------------------------- 1 | class FailedToParseTorrentException(Exception): 2 | """A torrent was not possible to parse for some reason""" 3 | 4 | 5 | class FailedToCreateLinkException(Exception): 6 | """Failed to create links""" 7 | -------------------------------------------------------------------------------- /src/autotorrent/indexer.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import os 3 | from concurrent.futures import ThreadPoolExecutor 4 | from enum import Enum 5 | from fnmatch import fnmatch 6 | from pathlib import Path 7 | from queue import Empty, SimpleQueue 8 | 9 | from .db import InsertTorrentFile 10 | from .utils import get_root_of_unsplitable, is_unsplitable 11 | 12 | logger = logging.getLogger(__name__) 13 | 14 | INSERT_QUEUE_MAX_SIZE = 1000 15 | 16 | SCAN_PATH_QUEUE_TIMEOUT_SECONDS = 10 17 | 18 | 19 | class PathTrieNode: 20 | __slots__ = ("children", "is_file", "is_unsplitable", "size") 21 | 22 | def __init__(self): 23 | self.children = {} 24 | self.is_file = False # In a typical string-based trie, this would mark the end of the string 25 | self.is_unsplitable = False 26 | self.size = None 27 | 28 | 29 | class PathTrie: 30 | def __init__(self): 31 | self.root = PathTrieNode() 32 | 33 | def insert_path(self, path, size): 34 | current = self.root 35 | for segment in path.parts: 36 | ch = segment 37 | node = current.children.get(ch) 38 | if node is None: 39 | node = PathTrieNode() 40 | current.children.update({ch: node}) 41 | current = node 42 | current.is_file = True 43 | current.size = size 44 | 45 | def mark_unsplitable(self, path): 46 | current = 
self.root 47 | for segment in path.parts: 48 | current = current.children.get(segment) 49 | current.is_unsplitable = True 50 | 51 | def walk(self, func): 52 | """Recursively walk the entire tree, applying `func` to all end 53 | nodes (which will always be a files)""" 54 | return self._walk_node(self.root, func, "", None) 55 | 56 | def _walk_node(self, node, func, current_path, unsplitable_root): 57 | """Provides the recursivity needed to actually walk the tree""" 58 | directories = [] 59 | files = [] 60 | for name, child in node.children.items(): 61 | if child.is_file: 62 | files.append((name, child)) 63 | else: 64 | directories.append((name, child)) 65 | # Descend directory-first to ensure unsplittable roots are applied properly 66 | for name, child in directories: 67 | unsplitable_root_for_children = unsplitable_root 68 | new_path = Path(current_path, name) 69 | if child.is_unsplitable and unsplitable_root is None: 70 | unsplitable_root_for_children = new_path 71 | yield from self._walk_node( 72 | child, func, new_path, unsplitable_root_for_children 73 | ) 74 | for name, child in files: 75 | yield func(child, Path(current_path, name), unsplitable_root) 76 | 77 | 78 | class IndexAction(Enum): 79 | ADD = 1 80 | MARK_UNSPLITABLE = 2 81 | FINISHED = 3 82 | 83 | 84 | class Indexer: 85 | def __init__( 86 | self, 87 | db, 88 | ignore_file_patterns=None, 89 | ignore_directory_patterns=None, 90 | include_inodes=False, 91 | ): 92 | self.db = db 93 | self.ignore_file_patterns = ignore_file_patterns or [] 94 | self.ignore_directory_patterns = ignore_directory_patterns or [] 95 | self.include_inodes = include_inodes 96 | 97 | def scan_paths(self, paths, full_scan=True): 98 | paths = [Path(p) for p in paths] 99 | path_tree = PathTrie() 100 | queue = SimpleQueue() 101 | futures = {} 102 | 103 | with ThreadPoolExecutor(max_workers=len(paths) + 1) as executor: 104 | for path in paths: 105 | logger.info(f"Indexing path {path}") 106 | futures[str(path)] = executor.submit( 107 | 
self._scan_path_thread, path, queue, root_thread=True 108 | ) 109 | 110 | while len(futures): 111 | action, args = None, () 112 | try: 113 | action, args = queue.get(timeout=SCAN_PATH_QUEUE_TIMEOUT_SECONDS) 114 | except Empty: 115 | logger.debug( 116 | "No action received from queue in %d seconds, checking threads", 117 | SCAN_PATH_QUEUE_TIMEOUT_SECONDS, 118 | ) 119 | for path in list(futures): 120 | future = futures[path] 121 | if future.done(): 122 | if future.exception() is not None: 123 | logger.error( 124 | f"Thread for path {path} encountered an exception: {future.exception()}" 125 | ) 126 | del futures[path] 127 | if action == IndexAction.ADD: 128 | path_tree.insert_path(*args) 129 | elif action == IndexAction.MARK_UNSPLITABLE: 130 | path_tree.mark_unsplitable(*args) 131 | elif action == IndexAction.FINISHED: 132 | del futures[args] 133 | 134 | # Helper function to modify walk results for DB usage 135 | def db_insert(child, full_path, unsplitable_root): 136 | return (str(full_path), child.size, str(unsplitable_root)) 137 | 138 | self.db.commit() 139 | if full_scan: 140 | self.db.truncate_files() 141 | self.db.insert_file_paths(path_tree.walk(db_insert)) 142 | self.db.commit() 143 | 144 | def _match_ignore_pattern(self, ignore_patterns, p, ignore_case=False): 145 | name = p.name 146 | if ignore_case: 147 | name = name.lower() 148 | for ignore_pattern in ignore_patterns: 149 | if ignore_case: 150 | if fnmatch(name, ignore_pattern.lower()): 151 | return True 152 | else: 153 | if fnmatch(name, ignore_pattern): 154 | return True 155 | return False 156 | 157 | def _scan_path_thread(self, path, queue, root_thread=False): 158 | files = [] 159 | def _handle_file(p): 160 | if p.is_dir(): 161 | if self.ignore_directory_patterns and self._match_ignore_pattern( 162 | self.ignore_directory_patterns, Path(p), ignore_case=True 163 | ): 164 | return 165 | self._scan_path_thread(Path(p), queue) 166 | elif p.is_file(): 167 | if self.ignore_file_patterns and 
self._match_ignore_pattern( 168 | self.ignore_file_patterns, Path(p) 169 | ): 170 | return 171 | files.append(Path(p)) 172 | size = p.stat().st_size 173 | queue.put((IndexAction.ADD, (Path(p), size))) 174 | 175 | try: 176 | if path.is_file(): 177 | _handle_file(path) 178 | else: 179 | for p in os.scandir(path): 180 | _handle_file(p) 181 | 182 | # TODO: probably not utf-8 problems resilient 183 | if is_unsplitable(files): # TODO: prevent duplicate work (?) 184 | unsplitable_root = get_root_of_unsplitable(path) 185 | queue.put((IndexAction.MARK_UNSPLITABLE, (unsplitable_root,))) 186 | except OSError as e: 187 | logger.error(f"Failed to scan {path}: {e}") 188 | 189 | if root_thread: 190 | queue.put((IndexAction.FINISHED, (str(path)))) 191 | 192 | def scan_clients(self, clients, full_scan=False, fast_scan=False): 193 | for name, client in clients.items(): 194 | if full_scan: 195 | self.db.truncate_torrent_files(name) 196 | self._scan_client(name, client, not full_scan and fast_scan) 197 | self.db.commit() 198 | 199 | def _scan_client(self, client_name, client, fast_scan): 200 | torrents = client.list() 201 | insert_queue = [] 202 | 203 | def get_file_inode(path): 204 | if self.include_inodes: 205 | return path.stat().st_ino 206 | else: 207 | return -1 208 | 209 | for torrent in torrents: 210 | _, current_download_path = self.db.get_torrent_file_info( 211 | client_name, torrent.infohash 212 | ) 213 | if fast_scan and current_download_path is not None: 214 | logger.debug( 215 | f"torrent:{torrent!r} client:{client!r} Skip indexing because it is already there and fast-scan is enabled" 216 | ) 217 | continue 218 | 219 | download_path = client.get_download_path(torrent.infohash) 220 | if str(download_path) == current_download_path: 221 | logger.debug( 222 | f"torrent:{torrent!r} client:{client!r} Skip indexing because download path not changed" 223 | ) 224 | continue 225 | 226 | files = client.get_files(torrent.infohash) 227 | if not files: 228 | logger.debug("No files 
found, not loaded") 229 | paths = [] 230 | for f in files: 231 | f_path = download_path / f.path 232 | inode = get_file_inode(f_path) 233 | paths.append((str(f_path), f.size, inode)) 234 | f_path_resolved = f_path.resolve() 235 | if f_path_resolved != f_path: 236 | paths.append((str(f_path_resolved), f.size, inode)) 237 | insert_queue.append( 238 | InsertTorrentFile(torrent.infohash, torrent.name, download_path, paths) 239 | ) 240 | if len(insert_queue) > INSERT_QUEUE_MAX_SIZE: 241 | self.db.insert_torrent_files_paths(client_name, insert_queue) 242 | insert_queue = [] 243 | if insert_queue: 244 | self.db.insert_torrent_files_paths(client_name, insert_queue) 245 | 246 | self.db.remove_non_existing_infohashes( 247 | client_name, [torrent.infohash for torrent in torrents] 248 | ) 249 | -------------------------------------------------------------------------------- /src/autotorrent/matcher.py: -------------------------------------------------------------------------------- 1 | import errno 2 | import logging 3 | import os 4 | from collections import namedtuple 5 | from math import ceil 6 | from pathlib import Path 7 | 8 | from .utils import ( 9 | can_potentially_miss_in_unsplitable, 10 | get_root_of_unsplitable, 11 | is_unsplitable, 12 | parse_torrent, 13 | ) 14 | 15 | MatchedFile = namedtuple("MatchedFile", ["torrent_file", "searched_files"]) 16 | MatchResult = namedtuple("MatchResult", ["root_path", "matched_files", "size"]) 17 | MappedFile = namedtuple("MappedFile", ["size", "clients", "indirect_clients"]) 18 | MapResult = namedtuple( 19 | "MapResult", ["total_size", "seeded_size", "indirect_seeded_size", "files"] 20 | ) 21 | DynamicMatchResult = namedtuple( 22 | "DynamicMatchResult", ["success", "missing_size", "matched_files", "touched_files"] 23 | ) 24 | 25 | logger = logging.getLogger(__name__) 26 | 27 | EXACT_MATCH_FACTOR = 0.05 28 | 29 | 30 | def is_relative_to(path, *other): 31 | """Return True if the path is relative to another path or False.""" 32 | try: 
33 | path.relative_to(*other) 34 | return True 35 | except ValueError: 36 | return False 37 | 38 | 39 | class Matcher: 40 | def __init__(self, rewriter, db, include_inodes=False): 41 | self.rewriter = rewriter 42 | self.db = db 43 | self.include_inodes = include_inodes 44 | 45 | def _match_filelist_exact( 46 | self, 47 | filelist, 48 | skip_prefix_path=None, 49 | match_normalized_filename=False, 50 | ): 51 | if skip_prefix_path: 52 | skip_prefix_path = Path(skip_prefix_path.strip(os.sep)) 53 | filelist = [f for f in filelist if is_relative_to(f.path, skip_prefix_path)] 54 | filelist = sorted( 55 | filelist, 56 | key=lambda f: (not can_potentially_miss_in_unsplitable(f.path), f.size), 57 | reverse=True, 58 | ) 59 | 60 | if not filelist: 61 | logger.warning( 62 | f"Empty filelist, bailing - skip_prefix_path:{skip_prefix_path}" 63 | ) 64 | return None 65 | 66 | handled_root_paths = set() 67 | match_results = [] 68 | for search_file in filelist[: ceil(len(filelist) * EXACT_MATCH_FACTOR)]: 69 | path_postfix = search_file.path.parent 70 | if match_normalized_filename: 71 | entry_matched_files = self.db.search_file( 72 | normalized_filename=search_file.path.name, 73 | size=search_file.size, 74 | path_postfix=path_postfix, 75 | ) 76 | else: 77 | entry_matched_files = self.db.search_file( 78 | filename=search_file.path.name, 79 | size=search_file.size, 80 | path_postfix=path_postfix, 81 | ) 82 | for entry_matched_file in entry_matched_files: 83 | if search_file.path: 84 | root_path = entry_matched_file.path 85 | for _ in search_file.path.parts[1:]: 86 | root_path = root_path.parent 87 | else: 88 | root_path = entry_matched_file.path 89 | if root_path in handled_root_paths: 90 | logger.debug( 91 | f"Skipping scan of root_path {handled_root_paths} for matches" 92 | ) 93 | continue 94 | handled_root_paths.add(root_path) 95 | logger.debug(f"Scanning root_path {root_path} for matches") 96 | 97 | matched_files = [MatchedFile(search_file, [entry_matched_file])] 98 | 
matched_file_size = entry_matched_file.size 99 | for f in filelist: 100 | if f == search_file: 101 | continue 102 | f_path = root_path / f.path 103 | f_name = f_path.name 104 | f_path = f_path.parent 105 | if match_normalized_filename: 106 | search_result = self.db.search_file( 107 | normalized_filename=f_name, size=f.size, path=f_path 108 | ) 109 | else: 110 | search_result = self.db.search_file( 111 | filename=f_name, size=f.size, path=f_path 112 | ) 113 | matched_files.append(MatchedFile(f, search_result)) 114 | if search_result: 115 | matched_file_size += f.size 116 | match_results.append( 117 | MatchResult(root_path, matched_files, matched_file_size) 118 | ) 119 | 120 | return match_results 121 | 122 | def _match_filelist_unsplitable( 123 | self, 124 | filelist, 125 | skip_prefix_path=None, 126 | match_normalized_filename=False, 127 | ): 128 | if skip_prefix_path: 129 | skip_prefix_path = Path(skip_prefix_path.strip(os.sep)) 130 | filelist = [f for f in filelist if is_relative_to(f.path, skip_prefix_path)] 131 | filelist = sorted( 132 | filelist, 133 | key=lambda f: (not can_potentially_miss_in_unsplitable(f.path), f.size), 134 | reverse=True, 135 | ) 136 | 137 | if not filelist: 138 | logger.warning( 139 | f"Empty filelist, bailing - skip_prefix_path:{skip_prefix_path}" 140 | ) 141 | return None 142 | 143 | handled_root_paths = set() 144 | match_results = [] 145 | for search_file in filelist[: ceil(len(filelist) * EXACT_MATCH_FACTOR)]: 146 | relative_path = search_file.path.relative_to(skip_prefix_path) 147 | if match_normalized_filename: 148 | entry_matched_files = self.db.search_file( 149 | normalized_filename=search_file.path.name, 150 | size=search_file.size, 151 | path_postfix=relative_path.parent, 152 | ) 153 | else: 154 | entry_matched_files = self.db.search_file( 155 | filename=search_file.path.name, 156 | size=search_file.size, 157 | path_postfix=relative_path.parent, 158 | ) 159 | 160 | for entry_matched_file in entry_matched_files: 161 | root_path 
= entry_matched_file.path 162 | for _ in range(len(relative_path.parts) - 1): 163 | root_path = root_path.parent 164 | if root_path in handled_root_paths: 165 | logger.debug( 166 | f"Skipping scan of root_path {handled_root_paths} for matches" 167 | ) 168 | continue 169 | handled_root_paths.add(root_path) 170 | 171 | root_path_is_correct_name = root_path.name == skip_prefix_path.name 172 | logger.debug( 173 | f"Scanning root_path {root_path} for matches with root_path_is_correct_name={root_path_is_correct_name}" 174 | ) 175 | 176 | matched_files = [MatchedFile(search_file, [entry_matched_file])] 177 | matched_file_size = entry_matched_file.size 178 | 179 | bad_path_found = False 180 | for f in filelist: 181 | if f == search_file: 182 | continue 183 | f_path = root_path / f.path.relative_to(skip_prefix_path) 184 | f_name = f_path.name 185 | f_path = f_path.parent 186 | if match_normalized_filename: 187 | search_result = self.db.search_file( 188 | normalized_filename=f_name, size=f.size, path=f_path 189 | ) 190 | else: 191 | search_result = self.db.search_file( 192 | filename=f_name, size=f.size, path=f_path 193 | ) 194 | matched_files.append(MatchedFile(f, search_result)) 195 | if search_result: 196 | matched_file_size += f.size 197 | 198 | if ( 199 | not search_result 200 | and not root_path_is_correct_name 201 | and not can_potentially_miss_in_unsplitable(f.path) 202 | ): 203 | bad_path_found = True 204 | break 205 | 206 | if bad_path_found: 207 | logger.debug(f"Bad path found wit root_path={root_path}") 208 | continue 209 | 210 | match_results.append( 211 | MatchResult(root_path, matched_files, matched_file_size) 212 | ) 213 | 214 | return match_results 215 | 216 | def _match_best_file( 217 | self, 218 | torrent, 219 | torrent_file, 220 | searched_files, 221 | hash_probe=False, 222 | match_hash_size=False, 223 | ): 224 | searched_files = sorted( 225 | searched_files, key=lambda x: x.name == torrent_file.path.name, reverse=True 226 | ) 227 | for searched_file in 
searched_files: 228 | if hash_probe: 229 | searched_file_path = searched_file.path / searched_file.name 230 | with searched_file_path.open("rb") as fp: 231 | matched_hash_probe = torrent_file.pieces.probe_hash( 232 | searched_file.size, fp 233 | ) 234 | if ( 235 | matched_hash_probe is False 236 | or not matched_hash_probe 237 | and match_hash_size 238 | ): 239 | logger.debug( 240 | f"File {searched_file_path} matched against {torrent_file.path} failed hash probe, skipping" 241 | ) 242 | continue 243 | return searched_file 244 | return None 245 | 246 | def _select_best_candidate( 247 | self, torrent, candidates, hash_probe=False, match_hash_size=False 248 | ): 249 | evaluated_candidates = [] 250 | for match_result in candidates: 251 | candidate_result = {} 252 | for matched_file in match_result.matched_files: 253 | candidate_result[matched_file.torrent_file.path] = ( 254 | self._match_best_file( 255 | torrent, 256 | matched_file.torrent_file, 257 | matched_file.searched_files, 258 | hash_probe=hash_probe, 259 | match_hash_size=match_hash_size, 260 | ) 261 | ) 262 | evaluated_candidates.append(candidate_result) 263 | return sorted( 264 | evaluated_candidates, 265 | key=lambda x: sum(y.size for y in x.values() if y is not None), 266 | reverse=True, 267 | )[0] 268 | 269 | def match_files_exact(self, torrent): 270 | torrent = parse_torrent(torrent, utf8_compat_mode=self.db.utf8_compat_mode) 271 | logger.info(f"Doing exact lookup for {torrent}") 272 | match_results = self._match_filelist_exact(torrent.filelist) 273 | usable_match_results = [] 274 | for match_result in match_results: 275 | if any( 276 | not matched_file.searched_files 277 | for matched_file in match_result.matched_files 278 | ): 279 | logger.debug("Match with missing files found, skipping") 280 | continue 281 | usable_match_results.append(match_result) 282 | if not usable_match_results: 283 | logger.info(f"No exact match found for {torrent}") 284 | return None 285 | 286 | return 
    def match_files_dynamic(
        self,
        torrent,
        match_hash_size=False,
        add_limit_size=0,
        add_limit_percent=0,
        hash_probe=False,
    ):
        """Match a torrent's files against the indexed database, allowing a
        bounded amount of missing data.

        Files are matched in two groups: "unsplitable" releases (scene/disc
        folders) are matched as whole directory trees, everything else is
        matched file-by-file by normalized name (or by size alone when
        ``match_hash_size`` is set, which implies ``hash_probe``).

        The torrent is rejected when the missing data exceeds BOTH limits:
        ``min(add_limit_size, add_limit_percent% of the torrent size)`` —
        i.e. the stricter of the two caps wins.

        Returns a DynamicMatchResult; on success its file mapping maps each
        torrent path to a full filesystem path (or None when missing), plus
        the list of files sharing pieces with missing data ("touched").
        """
        if match_hash_size:
            hash_probe = True
        torrent = parse_torrent(torrent, utf8_compat_mode=self.db.utf8_compat_mode)

        # Group torrent files by their parent directory inside the torrent.
        path_files = {}
        for f in torrent.filelist:
            path_files.setdefault(f.path.parent, []).append(f)

        # Detect unsplitable roots (e.g. rar sets, DVD/Bluray trees). The
        # while/else only runs the is_unsplitable check for paths that are
        # not already inside a previously found unsplitable root.
        unsplitable_roots = set()
        for path, files in path_files.items():
            parts = path.parts
            while parts:
                if parts in unsplitable_roots:
                    break
                parts = parts[:-1]
            else:
                if is_unsplitable([f.path for f in files]):
                    unsplitable_root = get_root_of_unsplitable(Path(path))
                    unsplitable_roots.add(unsplitable_root.parts)

        # Phase 1: collect candidate root paths for each unsplitable tree,
        # best (largest matched size) first.
        best_possible_size = 0
        candidate_paths = {}
        for unsplitable_root in unsplitable_roots:
            # Unsplitable paths cannot be matched with hash_size, this is
            # because it will often contain lots of file of the same size
            # and there would be too many candidates.
            # match_results = self._match_filelist_exact(
            #     torrent.filelist,
            #     skip_prefix_path=os.path.sep.join(unsplitable_root),
            #     match_normalized_filename=True,
            # )
            match_results = self._match_filelist_unsplitable(
                torrent.filelist,
                skip_prefix_path=os.path.sep.join(unsplitable_root),
                match_normalized_filename=True,
            )
            candidate_paths[unsplitable_root] = sorted(
                match_results, key=lambda x: -x.size
            )
            if candidate_paths[unsplitable_root]:
                best_possible_size += candidate_paths[unsplitable_root][0].size

        # Phase 2: per-file candidates for everything outside unsplitable
        # roots (same while/else skip pattern as above).
        candidate_files = {}
        for path, files in path_files.items():
            parts = path.parts
            while parts:
                if parts in unsplitable_roots:
                    break
                parts = parts[:-1]
            else:
                for torrent_file in files:
                    if match_hash_size:
                        searched_files = self.db.search_file(size=torrent_file.size)
                    else:
                        searched_files = self.db.search_file(
                            normalized_filename=torrent_file.path.name,
                            size=torrent_file.size,
                        )
                    candidate_files[torrent_file.path] = (torrent_file, searched_files)
                    if searched_files:
                        best_possible_size += torrent_file.size

        # Early bail-out using the optimistic best case, before doing any
        # (expensive) hash probing.
        max_missing_size = min(
            add_limit_size, (add_limit_percent * torrent.size) // 100
        )
        current_missing_size = torrent.size - best_possible_size
        if current_missing_size > max_missing_size:
            logger.info(
                f"Torrent missing too much data, size:{torrent.size}, found data size:{best_possible_size}"
            )
            return DynamicMatchResult(False, current_missing_size, None, None)

        # Resolve the actual file mapping: best unsplitable candidate trees
        # first, then individual files (which may overwrite nothing — keys
        # are disjoint by construction).
        result_mapping = {}
        for path, candidates in candidate_paths.items():
            result_mapping.update(
                self._select_best_candidate(
                    torrent,
                    candidates,
                    hash_probe=hash_probe,
                    match_hash_size=match_hash_size,
                )
            )

        for path, (torrent_file, searched_files) in candidate_files.items():
            result_mapping[torrent_file.path] = self._match_best_file(
                torrent,
                torrent_file,
                searched_files,
                hash_probe=hash_probe,
                match_hash_size=match_hash_size,
            )

        # Recompute what is actually missing after matching, and track which
        # boundary pieces are shared between found and missing files — those
        # found files will be partially rewritten ("touched") by the client.
        missing_pieces = set()
        found_pieces = set()
        found_file_piece_mapping = {}
        current_missing_size = 0
        for torrent_file in torrent.filelist:
            piece_calculation = torrent_file.pieces.calculate_offsets(torrent_file.size)
            if result_mapping[torrent_file.path]:
                found_pieces.add(piece_calculation.start_piece)
                found_pieces.add(piece_calculation.end_piece)
                found_file_piece_mapping.setdefault(
                    piece_calculation.start_piece, []
                ).append(torrent_file.path)
                found_file_piece_mapping.setdefault(
                    piece_calculation.end_piece, []
                ).append(torrent_file.path)
            else:
                missing_pieces.add(piece_calculation.start_piece)
                missing_pieces.add(piece_calculation.end_piece)
                current_missing_size += torrent_file.size

        if current_missing_size > max_missing_size:
            logger.info(
                f"Torrent missing too much data after matching files, size:{torrent.size}, found data size:{best_possible_size} missing size:{current_missing_size} max missing size:{max_missing_size}"
            )
            return DynamicMatchResult(False, current_missing_size, None, None)

        # Files that share a piece with missing data.
        touched_files = set()
        for piece in missing_pieces & found_pieces:
            touched_files |= set(found_file_piece_mapping[piece])

        return DynamicMatchResult(
            True,
            current_missing_size,
            {
                path: (searched_file and searched_file.to_full_path())
                for (path, searched_file) in result_mapping.items()
            },
            list(touched_files),
        )
431 | """ 432 | scanned_folders = set() 433 | total = {"size": 0} 434 | real_files_seen = set() 435 | real_files_mapping = {} 436 | path_seeded = {} 437 | path_check_queue = [] 438 | 439 | def flush_check_queue(): 440 | logger.debug("Flushing queue") 441 | path_inodes = {} 442 | for p in path_check_queue: 443 | resolved_p = p.resolve() 444 | stat = p.stat() 445 | size = stat.st_size 446 | if self.include_inodes: 447 | if stat.st_ino not in path_inodes: 448 | path_inodes[stat.st_ino] = [] 449 | path_inodes[stat.st_ino].append((p, stat.st_dev)) 450 | if resolved_p not in real_files_seen: 451 | total["size"] += size 452 | real_files_seen.add(resolved_p) 453 | 454 | real_files_mapping[p] = resolved_p 455 | path_seeded[p] = MappedFile(size=size, clients=[], indirect_clients=[]) 456 | 457 | seeded_files, indirect_seeded_files = self.db.get_seeded_paths( 458 | path_check_queue, path_inodes 459 | ) 460 | 461 | for seeded_file in seeded_files: 462 | path_seeded[seeded_file.path].clients.append( 463 | (seeded_file.client, seeded_file.infohash) 464 | ) 465 | 466 | for indirect_seeded_file in indirect_seeded_files: 467 | path_seeded[indirect_seeded_file.path].indirect_clients.append( 468 | (indirect_seeded_file.client, indirect_seeded_file.infohash) 469 | ) 470 | 471 | path_check_queue.clear() 472 | 473 | def looper(path, initial_path=False): 474 | if path in scanned_folders: 475 | return 476 | logger.debug(f"Scanning path {path!s}") 477 | scanned_folders.add(path) 478 | 479 | for rewritten_path in self.rewriter.rewrite_path( 480 | path, prefix_match=initial_path 481 | ): 482 | if rewritten_path.is_file(): 483 | path_check_queue.append(rewritten_path) 484 | continue 485 | try: 486 | for p in rewritten_path.iterdir(): 487 | if p.is_dir(): 488 | looper(p) 489 | elif p.is_file(): 490 | path_check_queue.append(p) 491 | except OSError as e: 492 | if e.errno != errno.ELOOP: 493 | raise e 494 | 495 | if len(path_check_queue) > 1000: 496 | flush_check_queue() 497 | 498 | looper(path) 
class ReadWriteFileCache:
    """Cache that holds writable copies of files handed to torrent clients.

    Each cached file lives in its own folder under ``path`` together with a
    small JSON config (``CW_CACHE_CONF_NAME``) recording the original source
    path and every link that points into the cache. Expired entries are
    re-linked back to the original source and removed.
    """

    def __init__(self, path, ttl, chown_str=None):
        # Root directory holding one sub-folder per cached file.
        self.path = Path(path)
        # Max age in seconds (compared against the folder's mtime).
        self.ttl = ttl
        # Optional "user:group" string applied to cached copies.
        self.chown_str = chown_str

    def cleanup_cache(self):
        """Remove cache entries older than ``ttl``.

        For each expired entry, every recorded target link is re-pointed at
        the original source file before the cache folder is deleted.

        Returns the list of removed cache folder paths.
        """
        removed_paths = []
        for path in self.path.iterdir():
            # cache_file() touches the folder on every use, so mtime acts as
            # a "last used" timestamp.
            if time.time() - path.stat().st_mtime > self.ttl:
                logger.debug(f"Path {path} is older than ttl and should be deleted")
                conf_path = path / CW_CACHE_CONF_NAME
                conf = json.loads(conf_path.read_text())
                source_path = Path(conf["source_path"])
                for target_path in conf["target_paths"]:
                    link_type = target_path["link_type"]
                    target_path = Path(target_path["path"])
                    if not target_path.exists():
                        logger.warning(f"Target path {target_path!s} does not exist")
                        continue
                    # Replace the link into the cache with a link to the
                    # original source file.
                    logger.debug(f"Rewriting {target_path!s} to {source_path!s}")
                    target_path.unlink()
                    create_link(source_path, target_path, link_type)
                removed_paths.append(path)
                shutil.rmtree(path)
        return removed_paths

    def cache_file(self, path, target_path, link_type):
        """Copy ``path`` into the cache (once) and record ``target_path``.

        The cache folder name combines truncated path parts with a sha1 of
        the full path — presumably to keep names short while avoiding
        collisions (TODO confirm uniqueness assumption for very long paths).

        Returns the path of the cached copy the caller should link to.
        """
        full_folder_name = "__".join(path.parts[1:])
        folder_name = f"{full_folder_name[:25]}__{full_folder_name[-50:]}__{hashlib.sha1(str(path).encode()).hexdigest()}"
        folder_path = self.path / folder_name
        folder_data_path = folder_path / RW_CACHE_DATA_PATH
        folder_data_file = folder_data_path / path.name
        conf_path = folder_path / CW_CACHE_CONF_NAME
        if not folder_path.exists():
            logger.info(
                f"Seems like folder {folder_path!s} does not exist, copying file from {path!s}"
            )
            folder_path.mkdir()
            folder_data_path.mkdir()
            shutil.copyfile(path, folder_data_file)
            if self.chown_str is not None:
                chown(self.chown_str, folder_data_file)
            conf_path.write_text(
                json.dumps(
                    {
                        "source_path": str(path),
                        "target_paths": [],
                    }
                )
            )

        # Refresh mtime so cleanup_cache() sees this entry as recently used.
        folder_path.touch()
        conf = json.loads(conf_path.read_text())
        conf["target_paths"].append(
            {
                "path": str(target_path),
                "link_type": link_type,
            }
        )
        conf_path.write_text(json.dumps(conf))
        return folder_data_file
def decode_str(s, try_fix=False):
    """Best-effort conversion of an arbitrary value to ``str``.

    Accepts str, bytes or anything coercible via ``str()``. Returns the
    decoded string, or None when a conversion step fails and ``try_fix`` is
    False. With ``try_fix=True`` it keeps going: charset detection via
    chardet, then ``os.fsdecode``, then a lossy ``errors="replace"`` decode.
    """
    orig_s = s
    # Coerce non-str/bytes values (e.g. ints from bdecoded data) to str.
    if not isinstance(s, str) and not isinstance(s, bytes):
        s = str(s)

    if isinstance(s, str):
        try:
            # Round-trip through bytes so the common decode path below is shared.
            s = s.encode()
        except UnicodeEncodeError:
            if not try_fix:
                return None
            # try_fix: fall through with s still a str; the else-branch
            # below then retries from the original value.

    if isinstance(s, bytes):
        try:
            return s.decode()
        except UnicodeDecodeError:
            # try_fix: fall through to the chardet/fsdecode fallbacks below.
            if not try_fix:
                return None
    else:
        # s is still a str (encode failed above): retry from the original
        # value. NOTE(review): if orig_s is itself a str, bytes(orig_s)
        # raises TypeError; with try_fix=True, chardet then receives a str —
        # looks unintended, confirm. Also note that when bytes(orig_s)
        # succeeds, the fallbacks below run even with try_fix=False.
        try:
            s = bytes(orig_s)
        except TypeError:
            if not try_fix:
                return None

    # Fallback 1: guess the encoding.
    encoding = chardet.detect(s)
    if encoding["encoding"]:
        try:
            return s.decode(encoding["encoding"])
        except UnicodeDecodeError:
            pass

    # Fallback 2: filesystem decoding, then lossy replacement decode.
    try:
        return os.fsdecode(s)
    except UnicodeDecodeError:
        return s.decode(errors="replace")
class PathRewriter:
    """Rewrites a path to its equivalent locations in other mapped folders.

    ``path_mappings`` is a list of groups; all paths within a group are
    considered interchangeable mount points of the same data.
    """

    def __init__(self, path_mappings):
        # Maps a path (as a tuple of parts) to its group index.
        self.paths = {}
        # Maps a group index to the list of equivalent Path objects.
        self.path_groups = {}
        self.handle_path_mappings(path_mappings)

    def handle_path_mappings(self, path_mappings):
        """Register every path of every mapping group."""
        for i, path_mapping in enumerate(path_mappings):
            for path in path_mapping:
                path = Path(path)
                path_tuple = self._tuplify(path)
                self.paths[path_tuple] = i
                self.path_groups.setdefault(i, []).append(path)

    def rewrite_path(self, path, prefix_match=False):
        """Return all equivalent locations for ``path``.

        With ``prefix_match=True`` any ancestor of ``path`` may match a
        mapped folder and the remaining suffix is re-appended to every
        path in the matched group. Without it, only ``path`` itself (or
        its direct parent) is tried. Returns ``[path]`` when nothing
        matches.
        """
        postfix_path = None
        orig_path = path
        while True:
            if postfix_path and not prefix_match:  # nothing matched
                break

            path_tuple = self._tuplify(path)
            path_group_id = self.paths.get(path_tuple)
            if path_group_id is not None:
                if postfix_path is None:
                    return list(self.path_groups[path_group_id])
                else:
                    return [p / postfix_path for p in self.path_groups[path_group_id]]

            if postfix_path is None:
                postfix_path = Path(path.name)
            else:
                postfix_path = Path(path.name) / postfix_path

            # BUG FIX: the original looped on `while path:`, but Path objects
            # are always truthy and the parent of the filesystem root is the
            # root itself, so an unmatched absolute path with
            # prefix_match=True never terminated. Stop once the parent no
            # longer changes.
            if path.parent == path:
                break
            path = path.parent

        return [orig_path]

    def _tuplify(self, path):
        """Convert a Path to a tuple of its parts, excluding the anchor."""
        p = []
        while path.name:
            p.append(path.name)
            path = path.parent
        return tuple(p[::-1])
cleanup_torrent_path_segment(path_segment): # TODO: more here? 231 | if not path_segment: 232 | return path_segment 233 | return path_segment.strip("/") 234 | 235 | 236 | PieceCalculation = namedtuple( 237 | "PieceCalculation", 238 | [ 239 | "start_piece", 240 | "start_offset", 241 | "first_complete_piece", 242 | "end_piece", 243 | "end_offset", 244 | "last_complete_piece", 245 | "pieces", 246 | "complete_pieces", 247 | ], 248 | ) 249 | 250 | 251 | class Pieces: 252 | def __init__(self, piece_length, pieces, start_size=0): 253 | self.piece_length = piece_length 254 | if not isinstance(pieces, list): 255 | pieces = [ 256 | pieces[i : i + PIECE_SIZE] for i in range(0, len(pieces), PIECE_SIZE) 257 | ] 258 | self.pieces = pieces 259 | self.start_size = start_size 260 | 261 | def __getitem__(self, key): 262 | if not isinstance(key, slice): 263 | raise TypeError("Must be a slice") 264 | 265 | if not isinstance(key.start, int): 266 | raise TypeError("The start must be an integer") 267 | 268 | if key.stop is not None: 269 | raise TypeError("The stop must be None") 270 | 271 | if key.step is not None: 272 | raise TypeError("The step must be None") 273 | 274 | return self.__class__( 275 | self.piece_length, self.pieces, self.start_size + key.start 276 | ) 277 | 278 | def hash_piece(self, f): 279 | """Hashes a full piece from a single file, returns the hash-digest""" 280 | missing_size = self.piece_length 281 | hasher = hashlib.sha1() 282 | logger.debug(f"Trying to read {missing_size} bytes") 283 | 284 | while missing_size: 285 | d = f.read(min(16384, missing_size)) 286 | if not d: 287 | logger.warning( 288 | f"We expected to be able to read more data with missing size {missing_size}, bailing" 289 | ) 290 | return None 291 | missing_size -= len(d) 292 | hasher.update(d) 293 | 294 | return hasher.digest() 295 | 296 | def calculate_offsets(self, size, is_last_file=False): 297 | start_piece, start_offset = divmod(self.start_size, self.piece_length) 298 | first_complete_piece = 
start_piece 299 | if start_offset: 300 | first_complete_piece += 1 301 | start_offset = self.piece_length - start_offset 302 | 303 | end_size = self.start_size + size 304 | end_piece, end_offset = divmod(end_size, self.piece_length) 305 | last_complete_piece = end_piece 306 | if end_offset and not is_last_file: 307 | last_complete_piece -= 1 308 | 309 | piece_calculation = PieceCalculation( 310 | start_piece, 311 | start_offset, 312 | first_complete_piece, 313 | end_piece, 314 | end_offset, 315 | last_complete_piece, 316 | self.pieces[start_piece : end_piece + 1], 317 | self.pieces[first_complete_piece : last_complete_piece + 1], 318 | ) 319 | logger.debug( 320 | f"Piece calculation start_piece: {piece_calculation.start_piece} " 321 | f"start_offset: {piece_calculation.start_offset} " 322 | f"first_complete_piece: {piece_calculation.first_complete_piece} " 323 | f"end_piece: {piece_calculation.end_piece} " 324 | f"end_offset: {piece_calculation.end_offset} " 325 | f"last_complete_piece: {piece_calculation.last_complete_piece}" 326 | ) 327 | return piece_calculation 328 | 329 | def probe_hash(self, size, fp): 330 | """ 331 | Test a few pieces against the file if possible. 
332 | 333 | Returns True if passed, False if failed, None if not possible 334 | """ 335 | piece_calculation = self.calculate_offsets(size) 336 | if not piece_calculation.complete_pieces: 337 | return None 338 | 339 | pieces_to_verify = set([0]) 340 | if len(piece_calculation.complete_pieces) > 1: 341 | pieces_to_verify.add(len(piece_calculation.complete_pieces) - 1) 342 | 343 | for piece in pieces_to_verify: 344 | fp.seek(piece_calculation.start_offset + piece * self.piece_length) 345 | if self.hash_piece(fp) != piece_calculation.complete_pieces[piece]: 346 | return False 347 | 348 | return True 349 | 350 | 351 | class Torrent( 352 | namedtuple( 353 | "Torrent", 354 | ["name", "size", "piece_length", "filelist", "filelist_mapped", "trackers"], 355 | ) 356 | ): 357 | def is_problematic(self): 358 | # TODO: check if the torrent can cause problems with some clients 359 | return False 360 | 361 | def verify_hash(self, fnmatches, file_mapping): 362 | """Returns a torrent_file mapping of failed and successful matched files""" 363 | # loop files, build list of pieces to verify 364 | pieces_to_verify = set() 365 | missing_pieces = set() 366 | for torrent_file in self.filelist: 367 | piece_calculation = torrent_file.pieces.calculate_offsets( 368 | torrent_file.size, is_last_file=torrent_file.is_last_file 369 | ) 370 | torrent_file_pieces = set( 371 | range(piece_calculation.start_piece, piece_calculation.end_piece + 1) 372 | ) 373 | for pattern in fnmatches: 374 | if fnmatch(torrent_file.path.name, pattern): 375 | pieces_to_verify |= torrent_file_pieces 376 | break 377 | else: 378 | if not file_mapping[torrent_file.path]: 379 | missing_pieces |= torrent_file_pieces 380 | 381 | piece_status = {} 382 | file_piece_mapping = {} 383 | file_has_inner_pieces = {} 384 | hasher, hasher_piece, data_left, fp, skip_to_piece = ( 385 | None, 386 | None, 387 | None, 388 | None, 389 | None, 390 | ) 391 | for torrent_file in self.filelist: 392 | piece_calculation = 
torrent_file.pieces.calculate_offsets( 393 | torrent_file.size, is_last_file=torrent_file.is_last_file 394 | ) 395 | file_has_inner_pieces[torrent_file] = ( 396 | piece_calculation.first_complete_piece 397 | <= piece_calculation.last_complete_piece 398 | ) 399 | full_path = file_mapping[torrent_file.path] 400 | if not full_path: 401 | piece_status[piece_calculation.start_piece] = None 402 | piece_status[piece_calculation.end_piece] = None 403 | skip_to_piece = piece_calculation.end_piece + 1 404 | continue 405 | 406 | for piece_index, piece in enumerate( 407 | piece_calculation.pieces, piece_calculation.start_piece 408 | ): 409 | file_piece_mapping.setdefault(piece_index, []).append(torrent_file) 410 | if skip_to_piece is not None and skip_to_piece > piece_index: 411 | continue 412 | 413 | if piece_index not in pieces_to_verify: 414 | continue 415 | 416 | if piece_index in piece_status: 417 | continue 418 | 419 | if piece_index > piece_calculation.start_piece: 420 | expected_tell = piece_calculation.start_offset + ( 421 | (piece_index - piece_calculation.first_complete_piece) 422 | * self.piece_length 423 | ) 424 | else: 425 | expected_tell = 0 426 | 427 | if not fp: 428 | fp = full_path.open("rb") 429 | if expected_tell: 430 | fp.seek(expected_tell) 431 | 432 | if hasher_piece != piece_index: 433 | hasher = hashlib.new("sha1", usedforsecurity=False) 434 | hasher_piece = piece_index 435 | data_left = min( 436 | self.size - (piece_index * self.piece_length), self.piece_length 437 | ) 438 | if fp.tell() != expected_tell: 439 | fp.seek(expected_tell) 440 | 441 | while data_left > 0: 442 | data = fp.read(min(HASHER_READ_BLOCK_SIZE, data_left)) 443 | hasher.update(data) 444 | data_left -= len(data) 445 | if not data: 446 | break 447 | 448 | if data_left == 0: 449 | piece_status[hasher_piece] = hasher.digest() == piece 450 | if not piece_status[hasher_piece]: 451 | skip_to_piece = piece_calculation.end_piece 452 | 453 | if fp: 454 | fp.close() 455 | fp = None 456 | 457 | 
file_status_mapping = {} 458 | for torrent_file in self.filelist: 459 | for pattern in fnmatches: 460 | if fnmatch(torrent_file.path.name, pattern): 461 | piece_calculation = torrent_file.pieces.calculate_offsets( 462 | torrent_file.size, is_last_file=torrent_file.is_last_file 463 | ) 464 | 465 | inner_piece_status = [ 466 | piece_status.get(p) 467 | for p in range( 468 | piece_calculation.first_complete_piece, 469 | piece_calculation.last_complete_piece + 1, 470 | ) 471 | ] 472 | edge_piece_status = [] 473 | if ( 474 | piece_calculation.start_piece 475 | != piece_calculation.first_complete_piece 476 | ): 477 | edge_piece_status.append( 478 | piece_status.get(piece_calculation.start_piece) 479 | ) 480 | # check other files in same piece 481 | if ( 482 | piece_calculation.end_piece 483 | != piece_calculation.last_complete_piece 484 | ): 485 | edge_piece_status.append( 486 | piece_status.get(piece_calculation.end_piece) 487 | ) 488 | 489 | if ( 490 | inner_piece_status 491 | and all(inner_piece_status) 492 | and all([p is not False for p in edge_piece_status]) 493 | ): 494 | file_status_mapping[torrent_file] = "hash-success" 495 | elif not inner_piece_status and all(edge_piece_status): 496 | file_status_mapping[torrent_file] = "hash-success" 497 | elif ( 498 | inner_piece_status 499 | and all(inner_piece_status) 500 | and all( 501 | [ 502 | file_has_inner_pieces[tf] 503 | for tf in file_piece_mapping[ 504 | piece_calculation.start_piece 505 | ] 506 | ] 507 | ) 508 | and all( 509 | [ 510 | file_has_inner_pieces[tf] 511 | for tf in file_piece_mapping[ 512 | piece_calculation.end_piece 513 | ] 514 | ] 515 | ) 516 | ): 517 | file_status_mapping[torrent_file] = "hash-success" 518 | else: 519 | file_status_mapping[torrent_file] = "hash-failed" 520 | 521 | break 522 | 523 | file_touch_status_mapping = {} 524 | for torrent_file in self.filelist: 525 | # if hash-failed or any pieces are failed, then it is touch-failed 526 | # if any of the files in any of the pieces are 
def parse_torrent(
    torrent, utf8_compat_mode=False
):  # TODO: validate path, add support for transmission rewrite?
    """Parse a bdecoded metainfo dict (bytes keys) into a Torrent tuple.

    utf8_compat_mode enables best-effort decoding of broken name/path
    strings (see decode_str). Raises FailedToParseTorrentException for a
    missing info dict, undecodable names, or empty/illegal path entries.
    """
    if b"info" not in torrent:
        raise FailedToParseTorrentException("Info dict not found")
    info = torrent[b"info"]
    name = cleanup_torrent_path_segment(
        decode_str(info[b"name"], try_fix=utf8_compat_mode)
    )
    if name is None:
        raise FailedToParseTorrentException("Unable to parse name of torrent")

    pieces = Pieces(info[b"piece length"], info[b"pieces"])
    length = 0
    filelist = []
    if b"files" in info:
        # Multi-file torrent: each file gets a Pieces view offset by the
        # total size of the files before it.
        last_i = len(info[b"files"]) - 1
        for i, f in enumerate(info[b"files"]):
            path = [
                cleanup_torrent_path_segment(decode_str(p, try_fix=utf8_compat_mode))
                for p in f[b"path"]
                if p
            ]
            if any(p is None for p in path):
                raise FailedToParseTorrentException(
                    "Broken path elements found in torrent, try utf-8 compat mode"
                )
            if not path:
                raise FailedToParseTorrentException("Empty path")
            if any(not validate_path(p) for p in path):
                raise FailedToParseTorrentException(
                    "Illegal entry in torrent file path"
                )

            path = os.path.sep.join([name] + path)
            filelist.append(
                TorrentFile(
                    PurePath(path),
                    f[b"length"],
                    pieces[length:],
                    is_last_file=(i == last_i),
                )
            )
            length += f[b"length"]
    else:
        # Single-file torrent.
        filelist.append(
            TorrentFile(PurePath(name), info[b"length"], pieces, is_last_file=True)
        )
        length += info[b"length"]

    filelist_mapped = {f.path: f for f in filelist}

    # Collect unique trackers: announce first, then announce-list tiers
    # (BEP 12); empty entries are filtered out at the end.
    trackers = [torrent.get(b"announce", b"").decode()]
    for tracker_group in torrent.get(b"announce-list", []):
        if not isinstance(tracker_group, list):
            tracker_group = [tracker_group]
        for tracker in tracker_group:
            if not isinstance(tracker, bytes):
                continue
            tracker = tracker.decode()
            if tracker not in trackers:
                # BUG FIX: previously appended the `trackers` list itself
                # (trackers.append(trackers)) instead of the tracker URL.
                trackers.append(tracker)
    trackers = [t for t in trackers if t]
    return Torrent(
        name, length, info[b"piece length"], filelist, filelist_mapped, trackers
    )
def create_link(actual_path, link_path, link_type):
    """Create a link at ``link_path`` pointing at ``actual_path``.

    ``link_type`` is one of "soft" (symlink), "hard" (hard link) or
    "reflink" (copy-on-write clone). Unknown types are silently ignored.
    """
    actions = {
        "soft": lambda: link_path.symlink_to(actual_path),
        "hard": lambda: os.link(actual_path, link_path),
        "reflink": lambda: reflink(str(actual_path), str(link_path)),
    }
    action = actions.get(link_type)
    if action is not None:
        action()
(https://github.com/iterative/dvc/blob/f4bec650eddc8874b3f7ab2f8b34bc5dfe60fd49/dvc/system.py#L105). 759 | These libraries are available under the Apache 2.0 license, which can be obtained from http://www.apache.org/licenses/LICENSE-2.0. 760 | """ 761 | system = platform.system() 762 | logger.debug(f"platform is {system}") 763 | try: 764 | if system == "Windows": 765 | ret = _reflink_windows(path, destination) 766 | elif system == "Darwin": 767 | ret = _reflink_darwin(path, destination) 768 | elif system == "Linux": 769 | ret = _reflink_linux(path, destination) 770 | else: 771 | ret = -1 772 | except IOError: 773 | ret = -1 774 | 775 | if ret != 0: 776 | raise Exception("reflink is not supported") 777 | 778 | 779 | def _reflink_linux(path, destination): 780 | """ 781 | Linux only reflink via syscall FICLONE on supported filesystems 782 | """ 783 | import fcntl 784 | import os 785 | 786 | FICLONE = 0x40049409 787 | 788 | try: 789 | ret = 255 790 | with open(path, "r") as s, open(destination, "w+") as d: 791 | ret = fcntl.ioctl(d.fileno(), FICLONE, s.fileno()) 792 | finally: 793 | if ret != 0: 794 | os.unlink(destination) 795 | 796 | return ret 797 | 798 | 799 | def _reflink_windows(self, path, destination): 800 | return -1 801 | 802 | 803 | def _reflink_darwin(self, path, destination): 804 | import ctypes 805 | 806 | LIBC = "libc.dylib" 807 | LIBC_FALLBACK = "/usr/lib/libSystem.dylib" 808 | try: 809 | clib = ctypes.CDLL(LIBC) 810 | except OSError as exc: 811 | logger.debug( 812 | f"unable to access '{LIBC}' (errno '{exc.errno}'). Falling back to '{LIBC_FALLBACK}'." 
813 | ) 814 | if exc.errno != errno.ENOENT: 815 | raise 816 | # NOTE: trying to bypass System Integrity Protection (SIP) 817 | clib = ctypes.CDLL(LIBC_FALLBACK) 818 | 819 | if not hasattr(clib, "clonefile"): 820 | return -1 821 | 822 | clonefile = clib.clonefile 823 | clonefile.argtypes = [ctypes.c_char_p, ctypes.c_char_p, ctypes.c_int] 824 | clonefile.restype = ctypes.c_int 825 | 826 | return clonefile( 827 | ctypes.c_char_p(path.encode("utf-8")), 828 | ctypes.c_char_p(destination.encode("utf-8")), 829 | ctypes.c_int(0), 830 | ) 831 | 832 | 833 | def humanize_bytes( 834 | bytes, precision=1 835 | ): # All credit goes to: http://code.activestate.com/recipes/577081-humanized-representation-of-a-number-of-bytes/ 836 | """Return a humanized string representation of a number of bytes. 837 | >>> humanize_bytes(1) 838 | '1 byte' 839 | >>> humanize_bytes(1024) 840 | '1.0 kB' 841 | >>> humanize_bytes(1024*123) 842 | '123.0 kB' 843 | >>> humanize_bytes(1024*12342) 844 | '12.1 MB' 845 | >>> humanize_bytes(1024*12342,2) 846 | '12.05 MB' 847 | >>> humanize_bytes(1024*1234,2) 848 | '1.21 MB' 849 | >>> humanize_bytes(1024*1234*1111,2) 850 | '1.31 GB' 851 | >>> humanize_bytes(1024*1234*1111,1) 852 | '1.3 GB' 853 | """ 854 | abbrevs = ( 855 | (1 << 50, "PB"), 856 | (1 << 40, "TB"), 857 | (1 << 30, "GB"), 858 | (1 << 20, "MB"), 859 | (1 << 10, "kB"), 860 | (1, "bytes"), 861 | ) 862 | if bytes == 1: 863 | return "1 byte" 864 | for factor, suffix in abbrevs: 865 | if bytes >= factor: 866 | break 867 | return "%.*f %s" % (precision, bytes / factor, suffix) 868 | 869 | 870 | def add_status_formatter(status, torrent_path, message): 871 | status_specs = { 872 | "seeded": ["blue", "Seeded"], 873 | "exists": ["yellow", "Exists"], 874 | "missing_files": ["red", "Missing"], 875 | "failed": ["magenta", "Failed"], 876 | "added": ["green", "Added"], 877 | } 878 | status_spec = status_specs[status] 879 | 880 | status_msg = f"[{click.style(status_spec[1], fg=status_spec[0])}]" 881 | click.echo(f" 
{status_msg:18s} {torrent_path.name!r} {message}") 882 | 883 | 884 | def filter_torrents(client, torrents_or_infohashes, query): 885 | """Extract all infohashes from client, filter them against sqlite query and torrents variable.""" 886 | 887 | db = sqlite3.connect(":memory:") 888 | c = db.cursor() 889 | c.execute( 890 | """CREATE TABLE torrents ( 891 | infohash TEXT UNIQUE, 892 | name TEXT, 893 | size INTEGER, 894 | state TEXT, 895 | progress REAL, 896 | uploaded INTEGER, 897 | added DATETIME, 898 | tracker TEXT, 899 | upload_rate INTEGER, 900 | download_rate INTEGER, 901 | label TEXT, 902 | 903 | ratio REAL, 904 | complete BOOL 905 | )""" 906 | ) 907 | db.commit() 908 | 909 | torrents = client.list() 910 | while torrents: 911 | c.executemany( 912 | "INSERT INTO torrents VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)", 913 | [ 914 | ( 915 | t.infohash, 916 | t.name, 917 | t.size, 918 | t.state, 919 | t.progress, 920 | t.uploaded, 921 | t.added, 922 | t.tracker, 923 | t.upload_rate, 924 | t.download_rate, 925 | t.label, 926 | ( 927 | t.progress > 0 928 | and (t.uploaded / ((t.progress * t.size) / 100)) 929 | or None 930 | ), 931 | t.progress == 100.0, 932 | ) 933 | for t in torrents[:500] 934 | ], 935 | ) 936 | torrents = torrents[500:] 937 | 938 | infohashes = set( 939 | [ 940 | infohash 941 | for (infohash,) in c.execute( 942 | f"SELECT infohash FROM torrents WHERE {query}" 943 | ).fetchall() 944 | ] 945 | ) 946 | return [ 947 | torrents_or_infohash 948 | for torrents_or_infohash in torrents_or_infohashes 949 | if ( 950 | isinstance(torrents_or_infohash, str) and torrents_or_infohash in infohashes 951 | ) 952 | or ( 953 | hasattr(torrents_or_infohash, "infohash") 954 | and torrents_or_infohash.infohash in infohashes 955 | ) 956 | ] 957 | -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/JohnDoee/autotorrent2/dcbdf2899ddb326fef9d4845b6250bf828ea2029/tests/__init__.py -------------------------------------------------------------------------------- /tests/fixtures.py: -------------------------------------------------------------------------------- 1 | import shutil 2 | from pathlib import Path, PurePosixPath 3 | 4 | import click 5 | import libtc 6 | import pytest 7 | import toml 8 | from click.testing import CliRunner 9 | from libtc.clients.tests.utils_testclient import TestClient 10 | 11 | import autotorrent.__main__ 12 | from autotorrent.db import Database 13 | from autotorrent.indexer import Indexer 14 | from autotorrent.matcher import Matcher 15 | from autotorrent.utils import PathRewriter 16 | from autotorrent.__main__ import cli 17 | 18 | __all__ = [ 19 | "db", 20 | "indexer", 21 | "matcher", 22 | "client", 23 | "rewriter", 24 | "testfiles", 25 | "configfile", 26 | ] 27 | 28 | 29 | @pytest.fixture 30 | def db(tmp_path): 31 | return Database(tmp_path / "autotorrent.db") 32 | 33 | 34 | @pytest.fixture 35 | def indexer(db): 36 | return Indexer(db) 37 | 38 | 39 | @pytest.fixture 40 | def rewriter(): 41 | return PathRewriter([]) 42 | 43 | 44 | @pytest.fixture 45 | def matcher(db, rewriter): 46 | return Matcher(rewriter, db) 47 | 48 | 49 | @pytest.fixture 50 | def client(): 51 | return TestClient() 52 | 53 | 54 | @pytest.fixture 55 | def testfiles(tmp_path): 56 | testfiles = Path(__file__).parent / "testfiles" 57 | shutil.copytree(testfiles, tmp_path / "testfiles") 58 | return tmp_path / "testfiles" 59 | 60 | 61 | class ConfigFile: 62 | config = None 63 | 64 | def __init__(self, tmp_path, client): 65 | self.config_path = tmp_path / 'config.toml' 66 | self.client = client 67 | 68 | def create_config(self): 69 | runner = CliRunner() 70 | result = runner.invoke(cli, ['check-config']) 71 | self.config = toml.loads(self.config_path.read_text()) 72 | 73 | def save_config(self): 74 | 
self.config_path.write_text(toml.dumps(self.config)) 75 | 76 | 77 | @pytest.fixture 78 | def configfile(tmp_path, monkeypatch, client): 79 | monkeypatch.setattr(click, "get_app_dir", lambda app: str(tmp_path.resolve())) 80 | monkeypatch.setattr(autotorrent.__main__, "parse_clients_from_toml_dict", lambda x: {"testclient": {"client": client, "display_name": "testclient"}}) 81 | 82 | cf = ConfigFile(tmp_path, client) 83 | cf.create_config() 84 | store_path = tmp_path.resolve() / "store_path" / "{torrent_name}" 85 | store_path.mkdir(parents=True, exist_ok=True) 86 | cf.config["autotorrent"]["store_path"] = str(store_path) 87 | cf.save_config() 88 | return cf 89 | 90 | 91 | -------------------------------------------------------------------------------- /tests/test_cli.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | import libtc 4 | 5 | from click.testing import CliRunner 6 | from libtc import bdecode, bencode 7 | from pathlib import Path 8 | 9 | from autotorrent.__main__ import cli 10 | 11 | from .fixtures import * 12 | 13 | @pytest.mark.parametrize("linktype", ["soft", "hard"]) # test server with reflink support? 
14 | def test_cli_add_link_type(testfiles, indexer, matcher, client, configfile, tmp_path, linktype): 15 | configfile.config["autotorrent"]["link_type"] = linktype 16 | configfile.save_config() 17 | 18 | runner = CliRunner() 19 | result = runner.invoke(cli, ['scan', '-p', str(testfiles)], catch_exceptions=False) 20 | assert result.exit_code == 0 21 | result = runner.invoke(cli, ['add', 'testclient', str(testfiles / "test.torrent")], catch_exceptions=False) 22 | assert result.exit_code == 0 23 | action, kwargs = client._action_queue[0] 24 | assert action == "add" 25 | link_file = (kwargs["destination_path"] / "testfiles" / "file_a.txt") 26 | assert not kwargs["stopped"] 27 | assert link_file.exists() 28 | if linktype == "soft": 29 | assert link_file.is_symlink() 30 | elif linktype == "hard": 31 | assert not link_file.is_symlink() 32 | else: 33 | raise Exception(f"Unknown link type {linktype}") 34 | 35 | 36 | def test_cli_ignore_file_patterns(testfiles, indexer, matcher, client, configfile, tmp_path): 37 | configfile.config["autotorrent"]["ignore_file_patterns"] = ["*.txt"] 38 | configfile.save_config() 39 | 40 | runner = CliRunner() 41 | result = runner.invoke(cli, ['scan', '-p', str(testfiles)], catch_exceptions=False) 42 | assert result.exit_code == 0 43 | result = runner.invoke(cli, ['add', 'testclient', str(testfiles / "test.torrent")], catch_exceptions=False) 44 | assert result.exit_code == 0 45 | assert 'Failed' in result.output 46 | 47 | 48 | def test_cli_ignore_directory_patterns(testfiles, indexer, matcher, client, configfile, tmp_path): 49 | configfile.config["autotorrent"]["ignore_directory_patterns"] = ['SaMpl*'] 50 | configfile.save_config() 51 | 52 | runner = CliRunner() 53 | result = runner.invoke(cli, ['scan', '-p', str(testfiles)], catch_exceptions=False) 54 | assert result.exit_code == 0 55 | result = runner.invoke(cli, ['add', 'testclient', str(testfiles / "Some-Release.torrent")], catch_exceptions=False) 56 | assert result.exit_code == 0 57 | 
assert not client._action_queue 58 | 59 | 60 | def test_cli_add_symlink_folder(testfiles, indexer, matcher, client, configfile, tmp_path): 61 | symlinked_testfiles = tmp_path / 'symlinked' / 'testfiles' 62 | symlinked_testfiles.parent.mkdir(parents=True, exist_ok=True) 63 | symlinked_testfiles.symlink_to(testfiles) 64 | 65 | runner = CliRunner() 66 | result = runner.invoke(cli, ['scan', '-p', str(symlinked_testfiles)], catch_exceptions=False) 67 | assert result.exit_code == 0 68 | result = runner.invoke(cli, ['add', 'testclient', '-e', str(testfiles / "test.torrent")], catch_exceptions=False) 69 | assert result.exit_code == 0 70 | action, kwargs = client._action_queue[0] 71 | assert action == "add" 72 | assert symlinked_testfiles.relative_to(kwargs["destination_path"]) == Path('testfiles') 73 | 74 | 75 | def test_cli_add_symlink_files(testfiles, indexer, matcher, client, configfile, tmp_path): 76 | symlinked_testfiles = tmp_path / 'symlinked' / 'testfiles' 77 | symlinked_testfiles.mkdir(parents=True, exist_ok=True) 78 | for f in testfiles.iterdir(): 79 | (symlinked_testfiles / f.name).symlink_to(f) 80 | 81 | runner = CliRunner() 82 | result = runner.invoke(cli, ['scan', '-p', str(symlinked_testfiles)], catch_exceptions=False) 83 | assert result.exit_code == 0 84 | result = runner.invoke(cli, ['add', 'testclient', '-e', str(testfiles / "test.torrent")], catch_exceptions=False) 85 | assert result.exit_code == 0 86 | action, kwargs = client._action_queue[0] 87 | assert action == "add" 88 | assert symlinked_testfiles.relative_to(kwargs["destination_path"]) == Path('testfiles') 89 | 90 | 91 | def test_cli_add_symlink_store_path(testfiles, indexer, matcher, client, configfile, tmp_path): 92 | store_path = tmp_path.resolve() / "symlinked_store_path" 93 | store_path.symlink_to(configfile.config["autotorrent"]["store_path"]) 94 | configfile.config["autotorrent"]["store_path"] = str(store_path / "{torrent_name}") 95 | configfile.save_config() 96 | 97 | runner = CliRunner() 
98 | result = runner.invoke(cli, ['scan', '-p', str(testfiles)], catch_exceptions=False) 99 | assert result.exit_code == 0 100 | result = runner.invoke(cli, ['add', 'testclient', str(testfiles / "test.torrent")], catch_exceptions=False) 101 | assert result.exit_code == 0 102 | action, kwargs = client._action_queue[0] 103 | assert action == "add" 104 | assert (store_path / "test" / "data") == kwargs["destination_path"] 105 | 106 | 107 | def test_cli_add_stopped_state(testfiles, indexer, matcher, client, configfile, tmp_path): 108 | runner = CliRunner() 109 | result = runner.invoke(cli, ['scan', '-p', str(testfiles)], catch_exceptions=False) 110 | assert result.exit_code == 0 111 | result = runner.invoke(cli, ['add', 'testclient', '--stopped', '-e', str(testfiles / "test.torrent")], catch_exceptions=False) 112 | assert result.exit_code == 0 113 | action, kwargs = client._action_queue[0] 114 | assert action == "add" 115 | assert kwargs["stopped"] 116 | 117 | 118 | def test_cli_add_extreme_limits(testfiles, indexer, matcher, client, configfile, tmp_path): 119 | configfile.config["autotorrent"]["add_limit_percent"] = 200 120 | configfile.save_config() 121 | 122 | runner = CliRunner() 123 | result = runner.invoke(cli, ['add', 'testclient', str(testfiles / "test.torrent")], catch_exceptions=False) 124 | assert result.exit_code == 0 125 | action, kwargs = client._action_queue[0] 126 | assert action == "add" 127 | 128 | 129 | def test_cli_missing_variable_store_path(testfiles, indexer, matcher, client, configfile, tmp_path): 130 | store_path = tmp_path / "inaccessible" 131 | configfile.config["autotorrent"]["store_path"] = str(tmp_path / "garbage") 132 | configfile.save_config() 133 | 134 | runner = CliRunner() 135 | result = runner.invoke(cli, ['add', 'testclient', str(testfiles / "test.torrent")], catch_exceptions=False) 136 | assert result.exit_code == 1 137 | 138 | 139 | def test_cli_inaccessible_store_path(testfiles, indexer, matcher, client, configfile, tmp_path): 140 
| store_path = tmp_path / "inaccessible" 141 | store_path.mkdir(mode=0o000) 142 | try: 143 | configfile.config["autotorrent"]["store_path"] = str(store_path / "{torrent_name}") 144 | configfile.save_config() 145 | 146 | runner = CliRunner() 147 | result = runner.invoke(cli, ['scan', '-p', str(testfiles)], catch_exceptions=False) 148 | assert result.exit_code == 0 149 | result = runner.invoke(cli, ['add', 'testclient', str(testfiles / "test.torrent")], catch_exceptions=False) 150 | assert result.exit_code == 0 151 | finally: 152 | store_path.chmod(0o777) 153 | 154 | 155 | 156 | def test_cli_store_path_passed_variables(testfiles, indexer, matcher, client, configfile, tmp_path): 157 | store_path = tmp_path.resolve() / "store_path" 158 | configfile.config["autotorrent"]["store_path"] = str(store_path / "{custom_variable}"/ "{torrent_name}") 159 | configfile.save_config() 160 | 161 | runner = CliRunner() 162 | result = runner.invoke(cli, ['scan', '-p', str(testfiles)], catch_exceptions=False) 163 | assert result.exit_code == 0 164 | result = runner.invoke(cli, ['add', 'testclient', '--store-path-variable', 'custom_variable=smart-choice', '--store-path-variable', 'unused=something', str(testfiles / "test.torrent")], catch_exceptions=False) 165 | assert result.exit_code == 0 166 | 167 | assert (store_path / "smart-choice" / "test").exists() 168 | 169 | 170 | def test_cli_store_path_inferred_variables(testfiles, indexer, matcher, client, configfile, tmp_path): 171 | torrent_data = bdecode((testfiles / "test.torrent").read_bytes()) 172 | torrent_data[b"info"][b"source"] = b"real-source" 173 | torrent_data[b"announce"] = b"http://example.com/horse?example=1" 174 | torrent_data[b"announce-list"] = [[b"http://example.com/horse?example=3", b"http://example.com/horse?example=2"]] 175 | (testfiles / "test.torrent").write_bytes(bencode(torrent_data)) 176 | 177 | store_path = tmp_path.resolve() / "store_path" 178 | configfile.config["autotorrent"]["store_path"] = str(store_path / 
"{tracker_domain}" / "{torrent_source}" / "{torrent_name}") 179 | configfile.save_config() 180 | 181 | runner = CliRunner() 182 | result = runner.invoke(cli, ['scan', '-p', str(testfiles)], catch_exceptions=False) 183 | assert result.exit_code == 0 184 | result = runner.invoke(cli, ['add', 'testclient', '--store-path-variable', 'custom_variable=smart-choice', '--store-path-variable', 'unused=something', str(testfiles / "test.torrent")], catch_exceptions=False) 185 | assert result.exit_code == 0 186 | 187 | assert (store_path / "example.com" / "real-source" / "test").exists() 188 | 189 | 190 | def test_cli_store_path_missing_in_torrent_variables(testfiles, indexer, matcher, client, configfile, tmp_path): 191 | torrent_data = bdecode((testfiles / "test.torrent").read_bytes()) 192 | torrent_data[b"announce"] = b"http://example.com/horse?example=1" 193 | (testfiles / "test.torrent").write_bytes(bencode(torrent_data)) 194 | 195 | store_path = tmp_path.resolve() / "store_path" 196 | configfile.config["autotorrent"]["store_path"] = str(store_path / "{tracker_domain}" / "{torrent_source}" / "{torrent_name}") 197 | configfile.save_config() 198 | 199 | runner = CliRunner() 200 | result = runner.invoke(cli, ['scan', '-p', str(testfiles)], catch_exceptions=False) 201 | assert result.exit_code == 0 202 | result = runner.invoke(cli, ['add', 'testclient', '--store-path-variable', 'torrent_source=other-source', '--store-path-variable', 'unused=something', str(testfiles / "test.torrent")], catch_exceptions=False) 203 | assert result.exit_code == 0 204 | 205 | assert (store_path / "example.com" / "other-source" / "test").exists() 206 | 207 | def test_cli_inaccessible_scan_path(testfiles, indexer, matcher, client, configfile, tmp_path): 208 | inaccessible_test_file = testfiles / "inaccessible" 209 | inaccessible_test_file.mkdir(mode=0o000) 210 | try: 211 | runner = CliRunner() 212 | result = runner.invoke(cli, ['scan', '-p', str(testfiles)], catch_exceptions=False) 213 | assert 
result.exit_code == 0 214 | finally: 215 | inaccessible_test_file.chmod(0o777) 216 | 217 | def test_cli_add_skip_store_metadata_disabled(testfiles, indexer, matcher, client, configfile, tmp_path): 218 | configfile.config["autotorrent"]["skip_store_metadata"] = False 219 | configfile.save_config() 220 | 221 | runner = CliRunner() 222 | result = runner.invoke(cli, ['scan', '-p', str(testfiles)], catch_exceptions=False) 223 | assert result.exit_code == 0 224 | result = runner.invoke(cli, ['add', 'testclient', str(testfiles / "test.torrent")], catch_exceptions=False) 225 | assert result.exit_code == 0 226 | action, kwargs = client._action_queue[0] 227 | assert action == "add" 228 | assert kwargs["destination_path"].name == "data" 229 | 230 | 231 | def test_cli_add_skip_store_metadata_enabled(testfiles, indexer, matcher, client, configfile, tmp_path): 232 | configfile.config["autotorrent"]["skip_store_metadata"] = True 233 | configfile.save_config() 234 | 235 | runner = CliRunner() 236 | result = runner.invoke(cli, ['scan', '-p', str(testfiles)], catch_exceptions=False) 237 | assert result.exit_code == 0 238 | result = runner.invoke(cli, ['add', 'testclient', str(testfiles / "test.torrent")], catch_exceptions=False) 239 | assert result.exit_code == 0 240 | action, kwargs = client._action_queue[0] 241 | assert action == "add" 242 | assert kwargs["destination_path"].name != "data" 243 | 244 | 245 | def test_cli_scan_single_file(testfiles, indexer, matcher, client, configfile, tmp_path): 246 | runner = CliRunner() 247 | result = runner.invoke(cli, ['scan', '-p', str(testfiles / 'file_b.txt')], catch_exceptions=False) 248 | assert result.exit_code == 0 249 | -------------------------------------------------------------------------------- /tests/test_rm_clients.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/JohnDoee/autotorrent2/dcbdf2899ddb326fef9d4845b6250bf828ea2029/tests/test_rm_clients.py 
-------------------------------------------------------------------------------- /tests/test_scan_add_torrents.py: -------------------------------------------------------------------------------- 1 | import shutil 2 | from datetime import datetime 3 | from pathlib import Path, PurePosixPath 4 | 5 | import pytest 6 | from libtc import TorrentData, TorrentFile, TorrentState, bdecode 7 | 8 | from .fixtures import * 9 | 10 | 11 | def test_scan_match_exact_client(testfiles, indexer, matcher, client): 12 | indexer.scan_paths([testfiles]) 13 | assert ( 14 | matcher.match_files_exact(bdecode((testfiles / "test.torrent").read_bytes())) 15 | == testfiles.parent 16 | ) 17 | assert ( 18 | matcher.match_files_exact( 19 | bdecode((testfiles / "test_single.torrent").read_bytes()) 20 | ) 21 | == testfiles 22 | ) 23 | assert ( 24 | matcher.match_files_exact( 25 | bdecode((testfiles / "My-Bluray.torrent").read_bytes()) 26 | ) 27 | == testfiles 28 | ) 29 | assert ( 30 | matcher.match_files_exact(bdecode((testfiles / "My-DVD.torrent").read_bytes())) 31 | == testfiles 32 | ) 33 | assert ( 34 | matcher.match_files_exact( 35 | bdecode((testfiles / "Some-CD-Release.torrent").read_bytes()) 36 | ) 37 | == testfiles 38 | ) 39 | assert ( 40 | matcher.match_files_exact( 41 | bdecode((testfiles / "Some-Release.torrent").read_bytes()) 42 | ) 43 | == testfiles 44 | ) 45 | 46 | (testfiles / "file_a.txt").write_bytes(b"not a good match") 47 | 48 | indexer.scan_paths([testfiles]) 49 | 50 | assert ( 51 | matcher.match_files_exact(bdecode((testfiles / "test.torrent").read_bytes())) 52 | == None 53 | ) 54 | assert ( 55 | matcher.match_files_exact( 56 | bdecode((testfiles / "test_single.torrent").read_bytes()) 57 | ) 58 | == None 59 | ) 60 | assert ( 61 | matcher.match_files_exact( 62 | bdecode((testfiles / "My-Bluray.torrent").read_bytes()) 63 | ) 64 | == testfiles 65 | ) 66 | assert ( 67 | matcher.match_files_exact(bdecode((testfiles / "My-DVD.torrent").read_bytes())) 68 | == testfiles 69 | ) 70 | 
assert ( 71 | matcher.match_files_exact( 72 | bdecode((testfiles / "Some-CD-Release.torrent").read_bytes()) 73 | ) 74 | == testfiles 75 | ) 76 | assert ( 77 | matcher.match_files_exact( 78 | bdecode((testfiles / "Some-Release.torrent").read_bytes()) 79 | ) 80 | == testfiles 81 | ) 82 | 83 | (testfiles / "file_a.txt").unlink() 84 | 85 | indexer.scan_paths([testfiles]) 86 | 87 | assert ( 88 | matcher.match_files_exact(bdecode((testfiles / "test.torrent").read_bytes())) 89 | == None 90 | ) 91 | assert ( 92 | matcher.match_files_exact( 93 | bdecode((testfiles / "test_single.torrent").read_bytes()) 94 | ) 95 | == None 96 | ) 97 | assert ( 98 | matcher.match_files_exact( 99 | bdecode((testfiles / "My-Bluray.torrent").read_bytes()) 100 | ) 101 | == testfiles 102 | ) 103 | assert ( 104 | matcher.match_files_exact(bdecode((testfiles / "My-DVD.torrent").read_bytes())) 105 | == testfiles 106 | ) 107 | assert ( 108 | matcher.match_files_exact( 109 | bdecode((testfiles / "Some-CD-Release.torrent").read_bytes()) 110 | ) 111 | == testfiles 112 | ) 113 | assert ( 114 | matcher.match_files_exact( 115 | bdecode((testfiles / "Some-Release.torrent").read_bytes()) 116 | ) 117 | == testfiles 118 | ) 119 | 120 | 121 | def test_scan_match_dynamic_basic_as_exact(testfiles, indexer, matcher, client): 122 | indexer.scan_paths([testfiles]) 123 | 124 | result = matcher.match_files_dynamic( 125 | bdecode((testfiles / "test.torrent").read_bytes()) 126 | ) 127 | assert result.touched_files == [] 128 | assert result.matched_files == { 129 | PurePosixPath("testfiles/file_a.txt"): testfiles / "file_a.txt", 130 | PurePosixPath("testfiles/file_b.txt"): testfiles / "file_b.txt", 131 | PurePosixPath("testfiles/file_c.txt"): testfiles / "file_c.txt", 132 | } 133 | 134 | result = matcher.match_files_dynamic( 135 | bdecode((testfiles / "test_single.torrent").read_bytes()) 136 | ) 137 | assert result.touched_files == [] 138 | assert result.matched_files == { 139 | PurePosixPath("file_a.txt"): testfiles / 
"file_a.txt", 140 | } 141 | 142 | result = matcher.match_files_dynamic( 143 | bdecode((testfiles / "My-Bluray.torrent").read_bytes()) 144 | ) 145 | assert result.touched_files == [] 146 | assert result.matched_files == { 147 | PurePosixPath("My-Bluray/BDMV/BACKUP/MovieObject.bdmv"): testfiles 148 | / "My-Bluray/BDMV/BACKUP/MovieObject.bdmv", 149 | PurePosixPath("My-Bluray/BDMV/BACKUP/PLAYLIST/00000.mpls"): testfiles 150 | / "My-Bluray/BDMV/BACKUP/PLAYLIST/00000.mpls", 151 | PurePosixPath("My-Bluray/BDMV/BACKUP/index.bdmv"): testfiles 152 | / "My-Bluray/BDMV/BACKUP/index.bdmv", 153 | PurePosixPath("My-Bluray/BDMV/MovieObject.bdmv"): testfiles 154 | / "My-Bluray/BDMV/MovieObject.bdmv", 155 | PurePosixPath("My-Bluray/BDMV/PLAYLIST/00000.mpls"): testfiles 156 | / "My-Bluray/BDMV/PLAYLIST/00000.mpls", 157 | PurePosixPath("My-Bluray/BDMV/STREAM/00000.m2ts"): testfiles 158 | / "My-Bluray/BDMV/STREAM/00000.m2ts", 159 | PurePosixPath("My-Bluray/BDMV/index.bdmv"): testfiles 160 | / "My-Bluray/BDMV/index.bdmv", 161 | } 162 | 163 | result = matcher.match_files_dynamic( 164 | bdecode((testfiles / "My-DVD.torrent").read_bytes()) 165 | ) 166 | assert result.touched_files == [] 167 | assert result.matched_files == { 168 | PurePosixPath("My-DVD/VIDEO_TS/VIDEO_TS.BUP"): testfiles 169 | / "My-DVD/VIDEO_TS/VIDEO_TS.BUP", 170 | PurePosixPath("My-DVD/VIDEO_TS/VIDEO_TS.IFO"): testfiles 171 | / "My-DVD/VIDEO_TS/VIDEO_TS.IFO", 172 | PurePosixPath("My-DVD/VIDEO_TS/VTS_01_0.BUP"): testfiles 173 | / "My-DVD/VIDEO_TS/VTS_01_0.BUP", 174 | PurePosixPath("My-DVD/VIDEO_TS/VTS_01_0.IFO"): testfiles 175 | / "My-DVD/VIDEO_TS/VTS_01_0.IFO", 176 | PurePosixPath("My-DVD/VIDEO_TS/VTS_01_0.VOB"): testfiles 177 | / "My-DVD/VIDEO_TS/VTS_01_0.VOB", 178 | PurePosixPath("My-DVD/VIDEO_TS/VTS_01_1.VOB"): testfiles 179 | / "My-DVD/VIDEO_TS/VTS_01_1.VOB", 180 | } 181 | 182 | result = matcher.match_files_dynamic( 183 | bdecode((testfiles / "Some-CD-Release.torrent").read_bytes()) 184 | ) 185 | assert 
result.touched_files == [] 186 | assert result.matched_files == { 187 | PurePosixPath("Some-CD-Release/CD1/somestuff-1.r00"): testfiles 188 | / "Some-CD-Release/CD1/somestuff-1.r00", 189 | PurePosixPath("Some-CD-Release/CD1/somestuff-1.r01"): testfiles 190 | / "Some-CD-Release/CD1/somestuff-1.r01", 191 | PurePosixPath("Some-CD-Release/CD1/somestuff-1.r02"): testfiles 192 | / "Some-CD-Release/CD1/somestuff-1.r02", 193 | PurePosixPath("Some-CD-Release/CD1/somestuff-1.r03"): testfiles 194 | / "Some-CD-Release/CD1/somestuff-1.r03", 195 | PurePosixPath("Some-CD-Release/CD1/somestuff-1.r04"): testfiles 196 | / "Some-CD-Release/CD1/somestuff-1.r04", 197 | PurePosixPath("Some-CD-Release/CD1/somestuff-1.r05"): testfiles 198 | / "Some-CD-Release/CD1/somestuff-1.r05", 199 | PurePosixPath("Some-CD-Release/CD1/somestuff-1.r06"): testfiles 200 | / "Some-CD-Release/CD1/somestuff-1.r06", 201 | PurePosixPath("Some-CD-Release/CD1/somestuff-1.rar"): testfiles 202 | / "Some-CD-Release/CD1/somestuff-1.rar", 203 | PurePosixPath("Some-CD-Release/CD1/somestuff-1.sfv"): testfiles 204 | / "Some-CD-Release/CD1/somestuff-1.sfv", 205 | PurePosixPath("Some-CD-Release/CD2/somestuff-2.r00"): testfiles 206 | / "Some-CD-Release/CD2/somestuff-2.r00", 207 | PurePosixPath("Some-CD-Release/CD2/somestuff-2.r01"): testfiles 208 | / "Some-CD-Release/CD2/somestuff-2.r01", 209 | PurePosixPath("Some-CD-Release/CD2/somestuff-2.r02"): testfiles 210 | / "Some-CD-Release/CD2/somestuff-2.r02", 211 | PurePosixPath("Some-CD-Release/CD2/somestuff-2.r03"): testfiles 212 | / "Some-CD-Release/CD2/somestuff-2.r03", 213 | PurePosixPath("Some-CD-Release/CD2/somestuff-2.r04"): testfiles 214 | / "Some-CD-Release/CD2/somestuff-2.r04", 215 | PurePosixPath("Some-CD-Release/CD2/somestuff-2.r05"): testfiles 216 | / "Some-CD-Release/CD2/somestuff-2.r05", 217 | PurePosixPath("Some-CD-Release/CD2/somestuff-2.r06"): testfiles 218 | / "Some-CD-Release/CD2/somestuff-2.r06", 219 | PurePosixPath("Some-CD-Release/CD2/somestuff-2.r07"): 
testfiles 220 | / "Some-CD-Release/CD2/somestuff-2.r07", 221 | PurePosixPath("Some-CD-Release/CD2/somestuff-2.rar"): testfiles 222 | / "Some-CD-Release/CD2/somestuff-2.rar", 223 | PurePosixPath("Some-CD-Release/CD2/somestuff-2.sfv"): testfiles 224 | / "Some-CD-Release/CD2/somestuff-2.sfv", 225 | PurePosixPath("Some-CD-Release/Sample/some-rls.mkv"): testfiles 226 | / "Some-CD-Release/Sample/some-rls.mkv", 227 | PurePosixPath("Some-CD-Release/Subs/somestuff-subs.r00"): testfiles 228 | / "Some-CD-Release/Subs/somestuff-subs.r00", 229 | PurePosixPath("Some-CD-Release/Subs/somestuff-subs.rar"): testfiles 230 | / "Some-CD-Release/Subs/somestuff-subs.rar", 231 | PurePosixPath("Some-CD-Release/Subs/somestuff-subs.sfv"): testfiles 232 | / "Some-CD-Release/Subs/somestuff-subs.sfv", 233 | PurePosixPath("Some-CD-Release/crap.nfo"): testfiles 234 | / "Some-CD-Release/crap.nfo", 235 | } 236 | 237 | result = matcher.match_files_dynamic( 238 | bdecode((testfiles / "Some-Release.torrent").read_bytes()) 239 | ) 240 | assert result.touched_files == [] 241 | assert result.matched_files == { 242 | PurePosixPath("Some-Release/Sample/some-rls.mkv"): testfiles 243 | / "Some-Release/Sample/some-rls.mkv", 244 | PurePosixPath("Some-Release/Subs/some-subs.rar"): testfiles 245 | / "Some-Release/Subs/some-subs.rar", 246 | PurePosixPath("Some-Release/Subs/some-subs.sfv"): testfiles 247 | / "Some-Release/Subs/some-subs.sfv", 248 | PurePosixPath("Some-Release/some-rls.sfv"): testfiles 249 | / "Some-Release/some-rls.sfv", 250 | PurePosixPath("Some-Release/some-rls.nfo"): testfiles 251 | / "Some-Release/some-rls.nfo", 252 | PurePosixPath("Some-Release/some-rls.rar"): testfiles 253 | / "Some-Release/some-rls.rar", 254 | PurePosixPath("Some-Release/some-rls.r00"): testfiles 255 | / "Some-Release/some-rls.r00", 256 | PurePosixPath("Some-Release/some-rls.r01"): testfiles 257 | / "Some-Release/some-rls.r01", 258 | PurePosixPath("Some-Release/some-rls.r02"): testfiles 259 | / "Some-Release/some-rls.r02", 
260 | PurePosixPath("Some-Release/some-rls.r03"): testfiles 261 | / "Some-Release/some-rls.r03", 262 | PurePosixPath("Some-Release/some-rls.r04"): testfiles 263 | / "Some-Release/some-rls.r04", 264 | PurePosixPath("Some-Release/some-rls.r05"): testfiles 265 | / "Some-Release/some-rls.r05", 266 | PurePosixPath("Some-Release/some-rls.r06"): testfiles 267 | / "Some-Release/some-rls.r06", 268 | } 269 | 270 | 271 | def test_scan_match_dynamic_hash_probe(testfiles, indexer, matcher, client): 272 | indexer.scan_paths([testfiles]) 273 | 274 | result = matcher.match_files_dynamic( 275 | bdecode((testfiles / "test.torrent").read_bytes()), hash_probe=True 276 | ) 277 | assert result.touched_files == [] 278 | assert result.matched_files == { 279 | PurePosixPath("testfiles/file_a.txt"): testfiles / "file_a.txt", 280 | PurePosixPath("testfiles/file_b.txt"): testfiles / "file_b.txt", 281 | PurePosixPath("testfiles/file_c.txt"): testfiles / "file_c.txt", 282 | } 283 | 284 | with (testfiles / "file_a.txt").open("r+b") as f: 285 | f.seek(0) 286 | f.write(b"2") 287 | 288 | result = matcher.match_files_dynamic( 289 | bdecode((testfiles / "test.torrent").read_bytes()), hash_probe=False 290 | ) 291 | assert result.touched_files == [] 292 | assert result.matched_files == { 293 | PurePosixPath("testfiles/file_a.txt"): testfiles / "file_a.txt", 294 | PurePosixPath("testfiles/file_b.txt"): testfiles / "file_b.txt", 295 | PurePosixPath("testfiles/file_c.txt"): testfiles / "file_c.txt", 296 | } 297 | 298 | result = matcher.match_files_dynamic( 299 | bdecode((testfiles / "test.torrent").read_bytes()), 300 | add_limit_percent=100, 301 | add_limit_size=9999999, 302 | hash_probe=True, 303 | ) 304 | assert result.touched_files == [PurePosixPath("testfiles/file_b.txt")] 305 | assert result.matched_files == { 306 | PurePosixPath("testfiles/file_a.txt"): None, 307 | PurePosixPath("testfiles/file_b.txt"): testfiles / "file_b.txt", 308 | PurePosixPath("testfiles/file_c.txt"): testfiles / "file_c.txt", 
    }


def test_scan_match_dynamic_cutoff(testfiles, indexer, matcher, client):
    # Corrupt the first byte of file_a so a hash probe can detect the
    # mismatch, then check how the add-limit thresholds interact with
    # hash probing.
    indexer.scan_paths([testfiles])

    with (testfiles / "file_a.txt").open("r+b") as f:
        f.seek(0)
        f.write(b"2")

    # Probe enabled: the corrupted file is detected and the tight 5% add
    # limit is exceeded -> overall match fails.
    result = matcher.match_files_dynamic(
        bdecode((testfiles / "test.torrent").read_bytes()),
        add_limit_percent=5,
        add_limit_size=9999999,
        hash_probe=True,
    )
    assert result.success == False

    # Probe disabled: the corruption goes unnoticed, so matching succeeds.
    result = matcher.match_files_dynamic(
        bdecode((testfiles / "test.torrent").read_bytes()),
        add_limit_percent=5,
        add_limit_size=9999999,
        hash_probe=False,
    )
    assert result.success == True

    # A 99% add limit is permissive enough to tolerate the bad file even
    # with probing on.
    result = matcher.match_files_dynamic(
        bdecode((testfiles / "test.torrent").read_bytes()),
        add_limit_percent=99,
        add_limit_size=9999999,
        hash_probe=True,
    )
    assert result.success == True

    # Remove the file entirely and rescan: now the file is known missing,
    # so the tight limit fails with or without a hash probe.
    (testfiles / "file_a.txt").unlink()

    indexer.scan_paths([testfiles])

    result = matcher.match_files_dynamic(
        bdecode((testfiles / "test.torrent").read_bytes()),
        add_limit_percent=5,
        add_limit_size=9999999,
        hash_probe=True,
    )
    assert result.success == False

    result = matcher.match_files_dynamic(
        bdecode((testfiles / "test.torrent").read_bytes()),
        add_limit_percent=5,
        add_limit_size=9999999,
        hash_probe=False,
    )
    assert result.success == False

    # The permissive limit still succeeds despite the missing file.
    result = matcher.match_files_dynamic(
        bdecode((testfiles / "test.torrent").read_bytes()),
        add_limit_percent=99,
        add_limit_size=9999999,
        hash_probe=True,
    )
    assert result.success == True


def test_scan_match_dynamic_match_hash_size(testfiles, indexer, matcher, client):
    indexer.scan_paths([testfiles])

    result = matcher.match_files_dynamic(
        bdecode((testfiles / "test.torrent").read_bytes()),
        match_hash_size=True,
        add_limit_percent=50,
        add_limit_size=9999999,
    )
    # With match_hash_size, file_a and file_c are matched (and reported as
    # touched) while file_b stays unmatched.
    assert sorted(result.touched_files) == sorted(
        [PurePosixPath("testfiles/file_a.txt"), PurePosixPath("testfiles/file_c.txt")]
    )
    assert result.matched_files == {
        PurePosixPath("testfiles/file_a.txt"): testfiles / "file_a.txt",
        PurePosixPath("testfiles/file_b.txt"): None,
        PurePosixPath("testfiles/file_c.txt"): testfiles / "file_c.txt",
    }

    # Rename the files so nothing can be matched by name anymore; the
    # hash/size based matching should still locate the same content under
    # the new names after a rescan.
    (testfiles / "file_a.txt").rename(testfiles / "secret_1.txt")
    (testfiles / "file_b.txt").rename(testfiles / "secret_2.txt")
    (testfiles / "file_c.txt").rename(testfiles / "secret_3.txt")

    indexer.scan_paths([testfiles])

    result = matcher.match_files_dynamic(
        bdecode((testfiles / "test.torrent").read_bytes()),
        match_hash_size=True,
        add_limit_percent=50,
        add_limit_size=9999999,
    )
    assert sorted(result.touched_files) == sorted(
        [PurePosixPath("testfiles/file_a.txt"), PurePosixPath("testfiles/file_c.txt")]
    )
    assert result.matched_files == {
        PurePosixPath("testfiles/file_a.txt"): testfiles / "secret_1.txt",
        PurePosixPath("testfiles/file_b.txt"): None,
        PurePosixPath("testfiles/file_c.txt"): testfiles / "secret_3.txt",
    }


def test_scan_match_dynamic_mixed_unsplitable_normal(
    testfiles, indexer, matcher, client
):
    # Case-mangle the loose files; matching is expected to still find them
    # (normalized name matching), while the unsplitable Some-CD-Release
    # tree matches as-is.
    (testfiles / "file_a.txt").rename(testfiles / "FILE A.txt")
    (testfiles / "file_b.txt").rename(testfiles / "file_B.txt")
    (testfiles / "file_c.txt").rename(testfiles / "filE-c.txt")

    indexer.scan_paths([testfiles])

    result = matcher.match_files_dynamic(
        bdecode((testfiles / "test-unsplitable-normal.torrent").read_bytes())
    )
    assert result.touched_files == []
    assert result.matched_files == {
        PurePosixPath("testfiles/file_a.txt"): testfiles / "FILE A.txt",
        PurePosixPath("testfiles/file_b.txt"):
testfiles / "file_B.txt", 428 | PurePosixPath("testfiles/file_c.txt"): testfiles / "filE-c.txt", 429 | PurePosixPath("testfiles/Some-CD-Release/CD1/somestuff-1.r00"): testfiles 430 | / "Some-CD-Release/CD1/somestuff-1.r00", 431 | PurePosixPath("testfiles/Some-CD-Release/CD1/somestuff-1.r01"): testfiles 432 | / "Some-CD-Release/CD1/somestuff-1.r01", 433 | PurePosixPath("testfiles/Some-CD-Release/CD1/somestuff-1.r02"): testfiles 434 | / "Some-CD-Release/CD1/somestuff-1.r02", 435 | PurePosixPath("testfiles/Some-CD-Release/CD1/somestuff-1.r03"): testfiles 436 | / "Some-CD-Release/CD1/somestuff-1.r03", 437 | PurePosixPath("testfiles/Some-CD-Release/CD1/somestuff-1.r04"): testfiles 438 | / "Some-CD-Release/CD1/somestuff-1.r04", 439 | PurePosixPath("testfiles/Some-CD-Release/CD1/somestuff-1.r05"): testfiles 440 | / "Some-CD-Release/CD1/somestuff-1.r05", 441 | PurePosixPath("testfiles/Some-CD-Release/CD1/somestuff-1.r06"): testfiles 442 | / "Some-CD-Release/CD1/somestuff-1.r06", 443 | PurePosixPath("testfiles/Some-CD-Release/CD1/somestuff-1.rar"): testfiles 444 | / "Some-CD-Release/CD1/somestuff-1.rar", 445 | PurePosixPath("testfiles/Some-CD-Release/CD1/somestuff-1.sfv"): testfiles 446 | / "Some-CD-Release/CD1/somestuff-1.sfv", 447 | PurePosixPath("testfiles/Some-CD-Release/CD2/somestuff-2.r00"): testfiles 448 | / "Some-CD-Release/CD2/somestuff-2.r00", 449 | PurePosixPath("testfiles/Some-CD-Release/CD2/somestuff-2.r01"): testfiles 450 | / "Some-CD-Release/CD2/somestuff-2.r01", 451 | PurePosixPath("testfiles/Some-CD-Release/CD2/somestuff-2.r02"): testfiles 452 | / "Some-CD-Release/CD2/somestuff-2.r02", 453 | PurePosixPath("testfiles/Some-CD-Release/CD2/somestuff-2.r03"): testfiles 454 | / "Some-CD-Release/CD2/somestuff-2.r03", 455 | PurePosixPath("testfiles/Some-CD-Release/CD2/somestuff-2.r04"): testfiles 456 | / "Some-CD-Release/CD2/somestuff-2.r04", 457 | PurePosixPath("testfiles/Some-CD-Release/CD2/somestuff-2.r05"): testfiles 458 | / 
"Some-CD-Release/CD2/somestuff-2.r05",
        PurePosixPath("testfiles/Some-CD-Release/CD2/somestuff-2.r06"): testfiles
        / "Some-CD-Release/CD2/somestuff-2.r06",
        PurePosixPath("testfiles/Some-CD-Release/CD2/somestuff-2.r07"): testfiles
        / "Some-CD-Release/CD2/somestuff-2.r07",
        PurePosixPath("testfiles/Some-CD-Release/CD2/somestuff-2.rar"): testfiles
        / "Some-CD-Release/CD2/somestuff-2.rar",
        PurePosixPath("testfiles/Some-CD-Release/CD2/somestuff-2.sfv"): testfiles
        / "Some-CD-Release/CD2/somestuff-2.sfv",
        PurePosixPath("testfiles/Some-CD-Release/Sample/some-rls.mkv"): testfiles
        / "Some-CD-Release/Sample/some-rls.mkv",
        PurePosixPath("testfiles/Some-CD-Release/Subs/somestuff-subs.r00"): testfiles
        / "Some-CD-Release/Subs/somestuff-subs.r00",
        PurePosixPath("testfiles/Some-CD-Release/Subs/somestuff-subs.rar"): testfiles
        / "Some-CD-Release/Subs/somestuff-subs.rar",
        PurePosixPath("testfiles/Some-CD-Release/Subs/somestuff-subs.sfv"): testfiles
        / "Some-CD-Release/Subs/somestuff-subs.sfv",
        PurePosixPath("testfiles/Some-CD-Release/crap.nfo"): testfiles
        / "Some-CD-Release/crap.nfo",
    }


def test_scan_match_dynamic_normalized(testfiles, indexer, matcher, client):
    # Only filename casing/separators change; matching should still find
    # the renamed files without touching (creating) anything.
    (testfiles / "file_a.txt").rename(testfiles / "FILE A.txt")
    (testfiles / "file_b.txt").rename(testfiles / "file_B.txt")
    (testfiles / "file_c.txt").rename(testfiles / "filE-c.txt")

    indexer.scan_paths([testfiles])

    result = matcher.match_files_dynamic(
        bdecode((testfiles / "test.torrent").read_bytes())
    )
    assert result.touched_files == []
    assert result.matched_files == {
        PurePosixPath("testfiles/file_a.txt"): testfiles / "FILE A.txt",
        PurePosixPath("testfiles/file_b.txt"): testfiles / "file_B.txt",
        PurePosixPath("testfiles/file_c.txt"): testfiles / "filE-c.txt",
    }


def test_scan_match_partial_scan(testfiles, indexer, matcher, client):
    # A follow-up partial scan (full_scan=False) should pick up the
    # renamed-back file without requiring a full rescan.
    (testfiles / "file_a.txt").rename(testfiles / "wrong.txt")

    indexer.scan_paths([testfiles])
    (testfiles / "wrong.txt").rename(testfiles / "file_a.txt")
    indexer.scan_paths([testfiles], full_scan=False)

    result = matcher.match_files_dynamic(
        bdecode((testfiles / "test.torrent").read_bytes())
    )
    assert result.touched_files == []
    assert result.matched_files == {
        PurePosixPath("testfiles/file_a.txt"): testfiles / "file_a.txt",
        PurePosixPath("testfiles/file_b.txt"): testfiles / "file_b.txt",
        PurePosixPath("testfiles/file_c.txt"): testfiles / "file_c.txt",
    }


def test_scan_invalid_encoding(testfiles, indexer, matcher, client):
    """Should just not error out when hitting non-utf-8"""
    # Create a file whose on-disk name is raw Shift-JIS bytes (invalid
    # UTF-8), then scan twice: once normally and once with the db's
    # utf8 compat mode enabled. Neither scan should raise.
    with open(
        str(testfiles).encode() + b"/" + b"\x82\xa9\x82\xed\x82\xa2\x82\xa2\x94n", "wb"
    ) as f:  # Shift JIS
        f.write(b"cute horse")

    indexer.scan_paths([testfiles])
    indexer.db.utf8_compat_mode = True
    indexer.scan_paths([testfiles])


def test_scan_different_folder_in_torrent_unsplitable(testfiles, indexer, matcher, client):
    # Torrent root folder ("Some-Release [test]") differs from the on-disk
    # folder ("Some-Release"); the unsplitable release should still map
    # file-by-file onto the existing tree.
    indexer.scan_paths([testfiles])

    result = matcher.match_files_dynamic(
        bdecode((testfiles / "Some-Release [test].torrent").read_bytes()),
        add_limit_size=100,
        add_limit_percent=10,
    )
    assert result.matched_files == {
        PurePosixPath("Some-Release [test]/Sample/some-rls.mkv"): testfiles
        / "Some-Release/Sample/some-rls.mkv",
        PurePosixPath("Some-Release [test]/Subs/some-subs.rar"): testfiles
        / "Some-Release/Subs/some-subs.rar",
        PurePosixPath("Some-Release [test]/Subs/some-subs.sfv"): testfiles
        / "Some-Release/Subs/some-subs.sfv",
        PurePosixPath("Some-Release [test]/some-rls.sfv"): testfiles
        / "Some-Release/some-rls.sfv",
        PurePosixPath("Some-Release
[test]/some-rls.nfo"): testfiles 546 | / "Some-Release/some-rls.nfo", 547 | PurePosixPath("Some-Release [test]/some-rls.rar"): testfiles 548 | / "Some-Release/some-rls.rar", 549 | PurePosixPath("Some-Release [test]/some-rls.r00"): testfiles 550 | / "Some-Release/some-rls.r00", 551 | PurePosixPath("Some-Release [test]/some-rls.r01"): testfiles 552 | / "Some-Release/some-rls.r01", 553 | PurePosixPath("Some-Release [test]/some-rls.r02"): testfiles 554 | / "Some-Release/some-rls.r02", 555 | PurePosixPath("Some-Release [test]/some-rls.r03"): testfiles 556 | / "Some-Release/some-rls.r03", 557 | PurePosixPath("Some-Release [test]/some-rls.r04"): testfiles 558 | / "Some-Release/some-rls.r04", 559 | PurePosixPath("Some-Release [test]/some-rls.r05"): testfiles 560 | / "Some-Release/some-rls.r05", 561 | PurePosixPath("Some-Release [test]/some-rls.r06"): testfiles 562 | / "Some-Release/some-rls.r06", 563 | } 564 | 565 | 566 | def test_scan_different_folder_in_torrent_missing_files_should_be_there_unsplitable(testfiles, indexer, matcher, client): 567 | (testfiles / "Some-Release" / "some-rls.r03").unlink() 568 | indexer.scan_paths([testfiles]) 569 | 570 | result = matcher.match_files_dynamic( 571 | bdecode((testfiles / "Some-Release [test].torrent").read_bytes()), 572 | add_limit_size=100, 573 | add_limit_percent=10, 574 | ) 575 | assert result.matched_files is None 576 | 577 | 578 | def test_scan_different_folder_in_torrent_missing_files_can_not_be_there_unsplitable(testfiles, indexer, matcher, client): 579 | (testfiles / "Some-Release" / "some-rls.nfo").unlink() 580 | indexer.scan_paths([testfiles]) 581 | 582 | result = matcher.match_files_dynamic( 583 | bdecode((testfiles / "Some-Release [test].torrent").read_bytes()), 584 | add_limit_size=100, 585 | add_limit_percent=10, 586 | ) 587 | assert result.matched_files is not None 588 | 589 | 590 | def test_scan_extra_folder_in_torrent_unsplitable(testfiles, indexer, matcher, client): 591 | indexer.scan_paths([testfiles]) 592 | 
    result = matcher.match_files_dynamic(
        bdecode((testfiles / "folder-does-not-exist.torrent").read_bytes()),
        add_limit_size=100,
        add_limit_percent=10,
    )
    # The torrent wraps the release in an extra top-level folder that does
    # not exist on disk; the matcher should still map every torrent file
    # onto the existing Some-Release tree.
    assert result.matched_files == {
        PurePosixPath("folder-does-not-exist/Some-Release/Sample/some-rls.mkv"): testfiles
        / "Some-Release/Sample/some-rls.mkv",
        PurePosixPath("folder-does-not-exist/Some-Release/Subs/some-subs.rar"): testfiles
        / "Some-Release/Subs/some-subs.rar",
        PurePosixPath("folder-does-not-exist/Some-Release/Subs/some-subs.sfv"): testfiles
        / "Some-Release/Subs/some-subs.sfv",
        PurePosixPath("folder-does-not-exist/Some-Release/some-rls.sfv"): testfiles
        / "Some-Release/some-rls.sfv",
        PurePosixPath("folder-does-not-exist/Some-Release/some-rls.nfo"): testfiles
        / "Some-Release/some-rls.nfo",
        PurePosixPath("folder-does-not-exist/Some-Release/some-rls.rar"): testfiles
        / "Some-Release/some-rls.rar",
        PurePosixPath("folder-does-not-exist/Some-Release/some-rls.r00"): testfiles
        / "Some-Release/some-rls.r00",
        PurePosixPath("folder-does-not-exist/Some-Release/some-rls.r01"): testfiles
        / "Some-Release/some-rls.r01",
        PurePosixPath("folder-does-not-exist/Some-Release/some-rls.r02"): testfiles
        / "Some-Release/some-rls.r02",
        PurePosixPath("folder-does-not-exist/Some-Release/some-rls.r03"): testfiles
        / "Some-Release/some-rls.r03",
        PurePosixPath("folder-does-not-exist/Some-Release/some-rls.r04"): testfiles
        / "Some-Release/some-rls.r04",
        PurePosixPath("folder-does-not-exist/Some-Release/some-rls.r05"): testfiles
        / "Some-Release/some-rls.r05",
        PurePosixPath("folder-does-not-exist/Some-Release/some-rls.r06"): testfiles
        / "Some-Release/some-rls.r06",
    }

def test_scan_ignore_patterns(testfiles, indexer, matcher, client):
    # Files matching ignore_file_patterns are skipped during indexing, so
    # an exact match against the *.txt payload should not be possible.
    indexer.ignore_file_patterns = ["*.txt"]
    indexer.scan_paths([testfiles])
    assert (
matcher.match_files_exact(bdecode((testfiles / "test.torrent").read_bytes())) 632 | is None 633 | ) 634 | -------------------------------------------------------------------------------- /tests/test_scan_ls_clients.py: -------------------------------------------------------------------------------- 1 | from datetime import datetime 2 | from pathlib import Path 3 | 4 | import pytest 5 | from libtc import TorrentData, TorrentFile, TorrentState 6 | 7 | from .fixtures import * 8 | from .fixtures import client as client2 9 | from .fixtures import client as client3 10 | 11 | 12 | def test_scan_ls_client(tmp_path, indexer, matcher, client): 13 | infohash = "da39a3ee5e6b4b0d3255bfef95601890afd80709" 14 | name = "test torrent 1" 15 | download_path = tmp_path / "test torrent 1" 16 | files = [] 17 | size = 0 18 | seeded_files = [("file1", 400, True), ("file2", 600, True), ("file3", 100, False)] 19 | for fn, fsize, add_to_files in seeded_files: 20 | fp = download_path / Path(fn) 21 | fp.parent.mkdir(parents=True, exist_ok=True) 22 | fp.write_bytes(b"a" * fsize) 23 | if add_to_files: 24 | size += fsize 25 | files.append(TorrentFile(fn, fsize, 100)) 26 | 27 | client._inject_torrent( 28 | TorrentData( 29 | infohash, 30 | name, 31 | size, 32 | TorrentState.ACTIVE, 33 | 100, 34 | 1000, 35 | datetime(2020, 1, 1, 1, 1), 36 | "example.com", 37 | 0, 38 | 0, 39 | None, 40 | ), 41 | files, 42 | download_path, 43 | ) 44 | indexer.scan_clients({"test_client": client}, full_scan=False, fast_scan=False) 45 | 46 | not_seeded_file = tmp_path / "not_seeded.bin" 47 | not_seeded_file.write_bytes(b"a" * 60) 48 | 49 | map_result = matcher.map_path_to_clients(download_path) 50 | assert map_result.total_size == 1100 51 | assert map_result.seeded_size == 1000 52 | assert len(map_result.files) == 3 53 | 54 | expected_files = {sf[0]: sf[1:] for sf in seeded_files} 55 | for f, mf in map_result.files.items(): 56 | expected_file = expected_files.pop(f.name) 57 | assert expected_file[0] == mf.size 58 | if 
expected_file[1]: 59 | assert len(mf.clients) == 1 60 | assert mf.clients[0][0] == "test_client" 61 | assert mf.clients[0][1] == infohash 62 | else: 63 | assert not mf.clients 64 | 65 | 66 | def test_new_download_path_fast_scan(tmp_path, indexer, matcher, client): 67 | infohash = "da39a3ee5e6b4b0d3255bfef95601890afd80709" 68 | name = "test torrent 1" 69 | download_path = tmp_path / "test torrent 1" 70 | files = [] 71 | size = 0 72 | seeded_files = [("file1", 400, True), ("file2", 600, True)] 73 | for fn, fsize, add_to_files in seeded_files: 74 | fp = download_path / Path(fn) 75 | fp.parent.mkdir(parents=True, exist_ok=True) 76 | fp.write_bytes(b"a" * fsize) 77 | if add_to_files: 78 | size += fsize 79 | files.append(TorrentFile(fn, fsize, 100)) 80 | 81 | client._inject_torrent( 82 | TorrentData( 83 | infohash, 84 | name, 85 | size, 86 | TorrentState.ACTIVE, 87 | 100, 88 | 1000, 89 | datetime(2020, 1, 1, 1, 1), 90 | "example.com", 91 | 0, 92 | 0, 93 | None, 94 | ), 95 | files, 96 | download_path, 97 | ) 98 | indexer.scan_clients({"test_client": client}, full_scan=False, fast_scan=True) 99 | 100 | map_result = matcher.map_path_to_clients(download_path) 101 | assert map_result.seeded_size == 1000 102 | 103 | new_download_path = tmp_path / "test torrent 2" 104 | download_path.rename(new_download_path) 105 | download_path.mkdir() 106 | 107 | map_result = matcher.map_path_to_clients(download_path) 108 | assert map_result.seeded_size == 0 109 | 110 | map_result = matcher.map_path_to_clients(new_download_path) 111 | assert map_result.seeded_size == 0 112 | assert map_result.total_size == 1000 113 | 114 | map_result = matcher.map_path_to_clients(new_download_path) 115 | assert map_result.seeded_size == 0 116 | assert map_result.total_size == 1000 117 | 118 | client._inject_torrent( 119 | TorrentData( 120 | infohash, 121 | name, 122 | size, 123 | TorrentState.ACTIVE, 124 | 100, 125 | 1000, 126 | datetime(2020, 1, 1, 1, 1), 127 | "example.com", 128 | 0, 129 | 0, 130 | None, 
131 | ), 132 | files, 133 | new_download_path, 134 | ) 135 | 136 | indexer.scan_clients({"test_client": client}, full_scan=False, fast_scan=True) 137 | 138 | map_result = matcher.map_path_to_clients(new_download_path) 139 | assert map_result.seeded_size == 0 140 | assert map_result.total_size == 1000 141 | 142 | indexer.scan_clients({"test_client": client}, full_scan=False, fast_scan=False) 143 | 144 | map_result = matcher.map_path_to_clients(new_download_path) 145 | assert map_result.seeded_size == 1000 146 | assert map_result.total_size == 1000 147 | 148 | 149 | def test_multiple_clients(tmp_path, indexer, matcher, client, client2): 150 | infohash_1 = "1a39a3ee5e6b4b0d3255bfef95601890afd80709" 151 | infohash_2 = "2a39a3ee5e6b4b0d3255bfef95601890afd80709" 152 | infohash_3_1 = "3a39a3ee5e6b4b0d3255bfef95601890afd80709" 153 | infohash_3_2 = "3b39a3ee5e6b4b0d3255bfef95601890afd80709" 154 | files_1, files_2, files_3 = [], [], [] 155 | name_1 = "test torrent 1" 156 | name_2 = "test torrent 2" 157 | name_3 = "test torrent 3" 158 | download_path_1 = tmp_path / "torrent 1" 159 | download_path_2 = tmp_path / "torrent 2" 160 | download_path_3 = tmp_path / "torrent 3" 161 | 162 | download_path_1.mkdir() 163 | file_1 = download_path_1 / "file 1.bin" 164 | file_1.write_bytes(b"a" * 100) 165 | client._inject_torrent( 166 | TorrentData( 167 | infohash_1, 168 | name_2, 169 | 100, 170 | TorrentState.ACTIVE, 171 | 100, 172 | 1000, 173 | datetime(2020, 1, 1, 1, 1), 174 | "example.com", 175 | 0, 176 | 0, 177 | None, 178 | ), 179 | [TorrentFile("file 1.bin", 100, 100)], 180 | download_path_1, 181 | ) 182 | 183 | download_path_2.mkdir() 184 | file_2 = download_path_2 / "file 1.bin" 185 | file_2.write_bytes(b"a" * 200) 186 | client2._inject_torrent( 187 | TorrentData( 188 | infohash_2, 189 | name_2, 190 | 200, 191 | TorrentState.ACTIVE, 192 | 100, 193 | 1000, 194 | datetime(2020, 1, 1, 1, 1), 195 | "example.com", 196 | 0, 197 | 0, 198 | None, 199 | ), 200 | [TorrentFile("file 1.bin", 
200, 100)], 201 | download_path_2, 202 | ) 203 | 204 | download_path_3.mkdir() 205 | file_3 = download_path_3 / "file 1.bin" 206 | file_3.write_bytes(b"a" * 300) 207 | client._inject_torrent( 208 | TorrentData( 209 | infohash_3_1, 210 | name_3, 211 | 300, 212 | TorrentState.ACTIVE, 213 | 100, 214 | 1000, 215 | datetime(2020, 1, 1, 1, 1), 216 | "example.com", 217 | 0, 218 | 0, 219 | None, 220 | ), 221 | [TorrentFile("file 1.bin", 300, 100)], 222 | download_path_3, 223 | ) 224 | client2._inject_torrent( 225 | TorrentData( 226 | infohash_3_2, 227 | name_3, 228 | 300, 229 | TorrentState.ACTIVE, 230 | 100, 231 | 1000, 232 | datetime(2020, 1, 1, 1, 1), 233 | "example.com", 234 | 0, 235 | 0, 236 | None, 237 | ), 238 | [TorrentFile("file 1.bin", 300, 100)], 239 | download_path_3, 240 | ) 241 | 242 | clients = { 243 | "client1": client, 244 | "client2": client2, 245 | } 246 | indexer.scan_clients(clients) 247 | indexer.scan_clients({"client1": client}, full_scan=True) 248 | 249 | map_result = matcher.map_path_to_clients(download_path_1) 250 | assert map_result.seeded_size == 100 251 | assert len(map_result.files) == 1 252 | assert list(map_result.files.keys())[0].name == "file 1.bin" 253 | assert len(list(map_result.files.values())[0].clients) == 1 254 | assert list(map_result.files.values())[0].clients[0] == ("client1", infohash_1) 255 | 256 | map_result = matcher.map_path_to_clients(download_path_2) 257 | assert map_result.seeded_size == 200 258 | assert len(map_result.files) == 1 259 | assert list(map_result.files.keys())[0].name == "file 1.bin" 260 | assert len(list(map_result.files.values())[0].clients) == 1 261 | assert list(map_result.files.values())[0].clients[0] == ("client2", infohash_2) 262 | 263 | map_result = matcher.map_path_to_clients(download_path_3) 264 | assert map_result.seeded_size == 300 265 | assert len(map_result.files) == 1 266 | assert list(map_result.files.keys())[0].name == "file 1.bin" 267 | assert len(list(map_result.files.values())[0].clients) 
== 2 268 | assert sorted(list(map_result.files.values())[0].clients) == sorted( 269 | [("client1", infohash_3_1), ("client2", infohash_3_2)] 270 | ) 271 | 272 | 273 | @pytest.mark.parametrize("include_inodes", [False, True]) 274 | def test_symlink(tmp_path, indexer, matcher, client, client2, client3, include_inodes): 275 | indexer.include_inodes = include_inodes 276 | matcher.include_inodes = include_inodes 277 | infohash = "da39a3ee5e6b4b0d3255bfef95601890afd80709" 278 | name = "test torrent 1" 279 | download_path = tmp_path / "test torrent 1" 280 | files = [] 281 | size = 0 282 | seeded_files = [("file1", 400, True), ("file2", 600, True)] 283 | for fn, fsize, add_to_files in seeded_files: 284 | fp = download_path / Path(fn) 285 | fp.parent.mkdir(parents=True, exist_ok=True) 286 | fp.write_bytes(b"a" * fsize) 287 | if add_to_files: 288 | size += fsize 289 | files.append(TorrentFile(fn, fsize, 100)) 290 | 291 | client._inject_torrent( 292 | TorrentData( 293 | infohash, 294 | name, 295 | size, 296 | TorrentState.ACTIVE, 297 | 100, 298 | 1000, 299 | datetime(2020, 1, 1, 1, 1), 300 | "example.com", 301 | 0, 302 | 0, 303 | None, 304 | ), 305 | files, 306 | download_path, 307 | ) 308 | 309 | download_path_symlink = tmp_path / "test torrent 2" 310 | download_path_symlink.symlink_to(download_path) 311 | 312 | download_path_symlink_files = tmp_path / "test torrent 3" 313 | download_path_symlink_files.mkdir() 314 | (download_path_symlink_files / "file1").symlink_to(download_path_symlink / "file1") 315 | (download_path_symlink_files / "file2").symlink_to(download_path_symlink / "file2") 316 | indexer.scan_clients({"test_client": client}, full_scan=False, fast_scan=False) 317 | 318 | map_result = matcher.map_path_to_clients(download_path) 319 | assert map_result.seeded_size == 1000 320 | assert len(map_result.files) == 2 321 | for mf in map_result.files.values(): 322 | assert len(mf.clients) == 1 323 | 324 | map_result = matcher.map_path_to_clients(download_path_symlink) 325 
| assert map_result.seeded_size == 0 326 | assert map_result.total_size == 1000 327 | assert len(map_result.files) == 2 328 | for f, mf in map_result.files.items(): 329 | assert len(mf.clients) == 0 330 | 331 | map_result = matcher.map_path_to_clients(download_path_symlink_files) 332 | assert map_result.seeded_size == 0 333 | assert map_result.total_size == 1000 334 | assert len(map_result.files) == 2 335 | for f, mf in map_result.files.items(): 336 | assert len(mf.clients) == 0 337 | 338 | client2._inject_torrent( 339 | TorrentData( 340 | infohash, 341 | name, 342 | size, 343 | TorrentState.ACTIVE, 344 | 100, 345 | 1000, 346 | datetime(2020, 1, 1, 1, 1), 347 | "example.com", 348 | 0, 349 | 0, 350 | None, 351 | ), 352 | files, 353 | download_path_symlink, 354 | ) 355 | 356 | client3._inject_torrent( 357 | TorrentData( 358 | infohash, 359 | name, 360 | size, 361 | TorrentState.ACTIVE, 362 | 100, 363 | 1000, 364 | datetime(2020, 1, 1, 1, 1), 365 | "example.com", 366 | 0, 367 | 0, 368 | None, 369 | ), 370 | files, 371 | download_path_symlink_files, 372 | ) 373 | 374 | indexer.scan_clients({"test_client2": client2, "test_client3": client3}, full_scan=False, fast_scan=False) 375 | 376 | map_result = matcher.map_path_to_clients(download_path) 377 | assert map_result.seeded_size == 1000 378 | assert len(map_result.files) == 2 379 | for mf in map_result.files.values(): 380 | assert sorted(mf.clients) == sorted( 381 | [ 382 | ("test_client", "da39a3ee5e6b4b0d3255bfef95601890afd80709"), 383 | ("test_client2", "da39a3ee5e6b4b0d3255bfef95601890afd80709"), 384 | ("test_client3", "da39a3ee5e6b4b0d3255bfef95601890afd80709"), 385 | ] 386 | ) 387 | assert len(mf.clients) == 3 388 | 389 | map_result = matcher.map_path_to_clients(download_path_symlink) 390 | assert map_result.seeded_size == 1000 391 | assert map_result.total_size == 1000 392 | assert len(map_result.files) == 2 393 | for f, mf in map_result.files.items(): 394 | assert len(mf.clients) == 1 395 | 396 | map_result = 
matcher.map_path_to_clients(download_path_symlink_files) 397 | assert map_result.seeded_size == 1000 398 | assert map_result.total_size == 1000 399 | assert len(map_result.files) == 2 400 | for f, mf in map_result.files.items(): 401 | assert len(mf.clients) == 1 402 | 403 | 404 | def test_rewrite(tmp_path, rewriter, indexer, matcher, client): 405 | infohash_1 = "1a39a3ee5e6b4b0d3255bfef95601890afd80709" 406 | infohash_2 = "2a39a3ee5e6b4b0d3255bfef95601890afd80709" 407 | name = "test torrent" 408 | 409 | download_path_1 = tmp_path / "test torrent 1" 410 | download_path_1.mkdir() 411 | (download_path_1 / "file1").write_bytes(b"a" * 100) 412 | 413 | download_path_2 = tmp_path / "test torrent 2" 414 | download_path_2.mkdir() 415 | (download_path_2 / "file1").write_bytes(b"a" * 100) 416 | 417 | download_path_3 = tmp_path / "test torrent 3" 418 | download_path_3_subpath = download_path_3 / "deeppath" 419 | download_path_3_subpath.mkdir(parents=True) 420 | (download_path_3_subpath / "file1").write_bytes(b"a" * 100) 421 | 422 | download_path_4 = tmp_path / "test torrent 4" 423 | download_path_4_subpath = download_path_4 / "deeppath" 424 | download_path_4_subpath.mkdir(parents=True) 425 | (download_path_4_subpath / "file1").write_bytes(b"a" * 100) 426 | 427 | client._inject_torrent( 428 | TorrentData( 429 | infohash_1, 430 | name, 431 | 100, 432 | TorrentState.ACTIVE, 433 | 100, 434 | 1000, 435 | datetime(2020, 1, 1, 1, 1), 436 | "example.com", 437 | 0, 438 | 0, 439 | None, 440 | ), 441 | [TorrentFile("file1", 100, 100)], 442 | download_path_1, 443 | ) 444 | 445 | indexer.scan_clients({"test_client": client}, full_scan=False, fast_scan=False) 446 | 447 | map_result = matcher.map_path_to_clients(download_path_1) 448 | assert map_result.seeded_size == 100 449 | assert len(map_result.files) == 1 450 | for mf in map_result.files.values(): 451 | assert len(mf.clients) == 1 452 | 453 | map_result = matcher.map_path_to_clients(download_path_2) 454 | assert map_result.seeded_size 
== 0 455 | assert len(map_result.files) == 1 456 | for mf in map_result.files.values(): 457 | assert len(mf.clients) == 0 458 | 459 | rewriter.handle_path_mappings( 460 | [ 461 | [str(download_path_1), str(download_path_2)], 462 | [str(download_path_3), str(download_path_4)], 463 | ] 464 | ) 465 | 466 | map_result = matcher.map_path_to_clients(download_path_2) 467 | assert map_result.seeded_size == 100 468 | assert len(map_result.files) == 2 469 | found_zero, found_one = False, False 470 | for mf in map_result.files.values(): 471 | if len(mf.clients) == 1: 472 | found_one = True 473 | if len(mf.clients) == 0: 474 | found_zero = True 475 | assert found_one 476 | assert found_zero 477 | 478 | map_result = matcher.map_path_to_clients(download_path_3) 479 | assert map_result.seeded_size == 0 480 | assert len(map_result.files) == 2 481 | for mf in map_result.files.values(): 482 | assert len(mf.clients) == 0 483 | 484 | 485 | @pytest.mark.parametrize("include_inodes", [False, True]) 486 | def test_hardlink(tmp_path, indexer, matcher, client, client2, client3, include_inodes): 487 | indexer.include_inodes = include_inodes 488 | matcher.include_inodes = include_inodes 489 | infohash = "da39a3ee5e6b4b0d3255bfef95601890afd80709" 490 | name = "test torrent 1" 491 | download_path = tmp_path / "test torrent 1" 492 | files = [] 493 | size = 0 494 | seeded_files = [("file1", 400, True), ("file2", 600, True)] 495 | for fn, fsize, add_to_files in seeded_files: 496 | fp = download_path / Path(fn) 497 | fp.parent.mkdir(parents=True, exist_ok=True) 498 | fp.write_bytes(b"a" * fsize) 499 | if add_to_files: 500 | size += fsize 501 | files.append(TorrentFile(fn, fsize, 100)) 502 | 503 | client._inject_torrent( 504 | TorrentData( 505 | infohash, 506 | name, 507 | size, 508 | TorrentState.ACTIVE, 509 | 100, 510 | 1000, 511 | datetime(2020, 1, 1, 1, 1), 512 | "example.com", 513 | 0, 514 | 0, 515 | None, 516 | ), 517 | files, 518 | download_path, 519 | ) 520 | 521 | download_path_symlink = 
tmp_path / "test torrent 2" 522 | download_path_symlink.symlink_to(download_path) 523 | 524 | download_path_hardlink_files = tmp_path / "test torrent 3" 525 | download_path_hardlink_files.mkdir() 526 | (download_path_hardlink_files / "file1").hardlink_to(download_path_symlink / "file1") 527 | (download_path_hardlink_files / "file2").hardlink_to(download_path_symlink / "file2") 528 | indexer.scan_clients({"test_client": client}, full_scan=False, fast_scan=False) 529 | 530 | map_result = matcher.map_path_to_clients(download_path) 531 | assert map_result.seeded_size == 1000 532 | assert len(map_result.files) == 2 533 | for mf in map_result.files.values(): 534 | assert len(mf.clients) == 1 535 | 536 | map_result = matcher.map_path_to_clients(download_path_symlink) 537 | assert map_result.seeded_size == 0 538 | assert map_result.total_size == 1000 539 | assert len(map_result.files) == 2 540 | for f, mf in map_result.files.items(): 541 | assert len(mf.clients) == 0 542 | 543 | map_result = matcher.map_path_to_clients(download_path_hardlink_files) 544 | assert map_result.seeded_size == 0 545 | assert map_result.total_size == 1000 546 | assert len(map_result.files) == 2 547 | for f, mf in map_result.files.items(): 548 | assert len(mf.clients) == 0 549 | 550 | client2._inject_torrent( 551 | TorrentData( 552 | infohash, 553 | name, 554 | size, 555 | TorrentState.ACTIVE, 556 | 100, 557 | 1000, 558 | datetime(2020, 1, 1, 1, 1), 559 | "example.com", 560 | 0, 561 | 0, 562 | None, 563 | ), 564 | files, 565 | download_path_symlink, 566 | ) 567 | 568 | indexer.scan_clients({"test_client2": client2, "test_client3": client3}, full_scan=False, fast_scan=False) 569 | 570 | map_result = matcher.map_path_to_clients(download_path) 571 | assert map_result.seeded_size == 1000 572 | assert len(map_result.files) == 2 573 | for mf in map_result.files.values(): 574 | assert len(mf.clients) == 2 575 | assert sorted(mf.clients) == sorted( 576 | [ 577 | ("test_client", 
"da39a3ee5e6b4b0d3255bfef95601890afd80709"), 578 | ("test_client2", "da39a3ee5e6b4b0d3255bfef95601890afd80709"), 579 | ] 580 | ) 581 | if include_inodes: 582 | assert sorted(mf.indirect_clients) == sorted( 583 | [ 584 | ("test_client2", "da39a3ee5e6b4b0d3255bfef95601890afd80709"), 585 | ] 586 | ) 587 | assert len(mf.indirect_clients) == 1 588 | else: 589 | assert len(mf.indirect_clients) == 0 590 | 591 | 592 | map_result = matcher.map_path_to_clients(download_path_symlink) 593 | assert map_result.seeded_size == 1000 594 | assert map_result.total_size == 1000 595 | assert len(map_result.files) == 2 596 | for f, mf in map_result.files.items(): 597 | assert len(mf.clients) == 1 598 | 599 | map_result = matcher.map_path_to_clients(download_path_hardlink_files) 600 | assert map_result.seeded_size == 0 601 | if include_inodes: 602 | assert map_result.indirect_seeded_size == 1000 603 | else: 604 | assert map_result.indirect_seeded_size == 0 605 | assert map_result.total_size == 1000 606 | assert len(map_result.files) == 2 607 | 608 | client3._inject_torrent( 609 | TorrentData( 610 | infohash, 611 | name, 612 | size, 613 | TorrentState.ACTIVE, 614 | 100, 615 | 1000, 616 | datetime(2020, 1, 1, 1, 1), 617 | "example.com", 618 | 0, 619 | 0, 620 | None, 621 | ), 622 | files, 623 | download_path_hardlink_files, 624 | ) 625 | 626 | indexer.scan_clients({"test_client3": client3}, full_scan=False, fast_scan=False) 627 | 628 | map_result = matcher.map_path_to_clients(download_path_hardlink_files) 629 | assert map_result.seeded_size == 1000 630 | assert map_result.total_size == 1000 631 | assert len(map_result.files) == 2 632 | for f, mf in map_result.files.items(): 633 | assert len(mf.clients) == 1 -------------------------------------------------------------------------------- /tests/test_utils_hash.py: -------------------------------------------------------------------------------- 1 | import shutil 2 | from datetime import datetime 3 | from pathlib import Path, PurePosixPath 4 | 
5 | import pytest 6 | from libtc import TorrentData, TorrentFile, TorrentState, bdecode 7 | 8 | from .fixtures import * 9 | from autotorrent.utils import parse_torrent 10 | 11 | 12 | def test_verify_hash_all_files_success(testfiles, indexer, matcher, client): 13 | torrent = parse_torrent(bdecode((testfiles / "test.torrent").read_bytes())) 14 | hash_result, touch_result = torrent.verify_hash(['*'], { 15 | PurePosixPath('testfiles/file_a.txt'): testfiles / 'file_a.txt', 16 | PurePosixPath('testfiles/file_b.txt'): testfiles / 'file_b.txt', 17 | PurePosixPath('testfiles/file_c.txt'): testfiles / 'file_c.txt', 18 | }) 19 | assert len(hash_result) == 3 20 | assert set(hash_result.values()) == {'hash-success'} 21 | assert not touch_result 22 | 23 | 24 | def test_verify_hash_missing_files(testfiles, indexer, matcher, client): 25 | torrent = parse_torrent(bdecode((testfiles / "test.torrent").read_bytes())) 26 | hash_result, touch_result = torrent.verify_hash(['*'], { 27 | PurePosixPath('testfiles/file_a.txt'): testfiles / 'file_a.txt', 28 | PurePosixPath('testfiles/file_b.txt'): testfiles / 'file_b.txt', 29 | PurePosixPath('testfiles/file_c.txt'): None, 30 | }) 31 | assert {k.path: v for (k, v) in hash_result.items()} == { 32 | PurePosixPath('testfiles/file_a.txt'): 'hash-success', 33 | PurePosixPath('testfiles/file_b.txt'): 'hash-failed', 34 | PurePosixPath('testfiles/file_c.txt'): 'hash-failed', 35 | } 36 | assert {k.path: v for (k, v) in touch_result.items()} == { 37 | PurePosixPath('testfiles/file_b.txt'): 'touch-failed', 38 | PurePosixPath('testfiles/file_c.txt'): 'touch-failed', 39 | } 40 | 41 | hash_result, touch_result = torrent.verify_hash(['*'], { 42 | PurePosixPath('testfiles/file_a.txt'): testfiles / 'file_a.txt', 43 | PurePosixPath('testfiles/file_b.txt'): None, 44 | PurePosixPath('testfiles/file_c.txt'): testfiles / 'file_c.txt', 45 | }) 46 | assert {k.path: v for (k, v) in hash_result.items()} == { 47 | PurePosixPath('testfiles/file_a.txt'): 'hash-success', 
48 | PurePosixPath('testfiles/file_b.txt'): 'hash-failed', 49 | PurePosixPath('testfiles/file_c.txt'): 'hash-success', 50 | } 51 | assert {k.path: v for (k, v) in touch_result.items()} == { 52 | PurePosixPath('testfiles/file_a.txt'): 'touch-success', 53 | PurePosixPath('testfiles/file_b.txt'): 'touch-failed', 54 | PurePosixPath('testfiles/file_c.txt'): 'touch-success', 55 | } 56 | 57 | hash_result, touch_result = torrent.verify_hash(['*'], { 58 | PurePosixPath('testfiles/file_a.txt'): None, 59 | PurePosixPath('testfiles/file_b.txt'): testfiles / 'file_b.txt', 60 | PurePosixPath('testfiles/file_c.txt'): testfiles / 'file_c.txt', 61 | }) 62 | assert {k.path: v for (k, v) in hash_result.items()} == { 63 | PurePosixPath('testfiles/file_a.txt'): 'hash-failed', 64 | PurePosixPath('testfiles/file_b.txt'): 'hash-failed', 65 | PurePosixPath('testfiles/file_c.txt'): 'hash-success', 66 | } 67 | assert {k.path: v for (k, v) in touch_result.items()} == { 68 | PurePosixPath('testfiles/file_a.txt'): 'touch-failed', 69 | PurePosixPath('testfiles/file_b.txt'): 'touch-failed', 70 | } 71 | 72 | 73 | def test_verify_hash_bad_files(testfiles, indexer, matcher, client): 74 | torrent = parse_torrent(bdecode((testfiles / "test.torrent").read_bytes())) 75 | bad_file_a = testfiles / 'file_a_bad.txt' 76 | bad_file_b = testfiles / 'file_b_bad.txt' 77 | bad_file_c = testfiles / 'file_c_bad.txt' 78 | 79 | # first piece only shared with file_a is bad 80 | shutil.copy(testfiles / 'file_a.txt', bad_file_a) 81 | with bad_file_a.open('rb+') as f: 82 | f.seek(1) 83 | f.write(b'\x00') 84 | 85 | hash_result, touch_result = torrent.verify_hash(['*'], { 86 | PurePosixPath('testfiles/file_a.txt'): bad_file_a, 87 | PurePosixPath('testfiles/file_b.txt'): testfiles / 'file_b.txt', 88 | PurePosixPath('testfiles/file_c.txt'): testfiles / 'file_c.txt', 89 | }) 90 | 91 | assert {k.path: v for (k, v) in hash_result.items()} == { 92 | PurePosixPath('testfiles/file_a.txt'): 'hash-failed', 93 | 
PurePosixPath('testfiles/file_b.txt'): 'hash-success', 94 | PurePosixPath('testfiles/file_c.txt'): 'hash-success', 95 | } 96 | assert {k.path: v for (k, v) in touch_result.items()} == { 97 | PurePosixPath('testfiles/file_a.txt'): 'touch-failed', 98 | } 99 | 100 | # second piece shared between file_a and file_b as edge pieces with file_a bad 101 | shutil.copy(testfiles / 'file_a.txt', bad_file_a) 102 | with bad_file_a.open('rb+') as f: 103 | f.seek(9) 104 | f.write(b'\x00') 105 | 106 | hash_result, touch_result = torrent.verify_hash(['*'], { 107 | PurePosixPath('testfiles/file_a.txt'): bad_file_a, 108 | PurePosixPath('testfiles/file_b.txt'): testfiles / 'file_b.txt', 109 | PurePosixPath('testfiles/file_c.txt'): testfiles / 'file_c.txt', 110 | }) 111 | 112 | assert {k.path: v for (k, v) in hash_result.items()} == { 113 | PurePosixPath('testfiles/file_a.txt'): 'hash-failed', 114 | PurePosixPath('testfiles/file_b.txt'): 'hash-failed', 115 | PurePosixPath('testfiles/file_c.txt'): 'hash-success', 116 | } 117 | assert {k.path: v for (k, v) in touch_result.items()} == { 118 | PurePosixPath('testfiles/file_a.txt'): 'touch-failed', 119 | PurePosixPath('testfiles/file_b.txt'): 'touch-failed', 120 | } 121 | 122 | # second piece shared between file_a and file_b as edge pieces with file_b bad 123 | shutil.copy(testfiles / 'file_b.txt', bad_file_b) 124 | with bad_file_b.open('rb+') as f: 125 | f.seek(1) 126 | f.write(b'\x00') 127 | 128 | hash_result, touch_result = torrent.verify_hash(['*'], { 129 | PurePosixPath('testfiles/file_a.txt'): testfiles / 'file_a.txt', 130 | PurePosixPath('testfiles/file_b.txt'): bad_file_b, 131 | PurePosixPath('testfiles/file_c.txt'): testfiles / 'file_c.txt', 132 | }) 133 | 134 | assert {k.path: v for (k, v) in hash_result.items()} == { 135 | PurePosixPath('testfiles/file_a.txt'): 'hash-failed', 136 | PurePosixPath('testfiles/file_b.txt'): 'hash-failed', 137 | PurePosixPath('testfiles/file_c.txt'): 'hash-success', 138 | } 139 | assert {k.path: v for 
(k, v) in touch_result.items()} == { 140 | PurePosixPath('testfiles/file_a.txt'): 'touch-failed', 141 | PurePosixPath('testfiles/file_b.txt'): 'touch-failed', 142 | } 143 | 144 | hash_result, touch_result = torrent.verify_hash(['file_b.txt'], { 145 | PurePosixPath('testfiles/file_a.txt'): testfiles / 'file_a.txt', 146 | PurePosixPath('testfiles/file_b.txt'): bad_file_b, 147 | PurePosixPath('testfiles/file_c.txt'): testfiles / 'file_c.txt', 148 | }) 149 | 150 | assert {k.path: v for (k, v) in hash_result.items()} == { 151 | PurePosixPath('testfiles/file_b.txt'): 'hash-failed', 152 | } 153 | assert {k.path: v for (k, v) in touch_result.items()} == { 154 | PurePosixPath('testfiles/file_a.txt'): 'touch-failed', 155 | PurePosixPath('testfiles/file_b.txt'): 'touch-failed', 156 | } 157 | 158 | shutil.copy(testfiles / 'file_c.txt', bad_file_c) 159 | with bad_file_c.open('rb+') as f: 160 | f.seek(0) 161 | f.write(b'\x00') 162 | 163 | hash_result, touch_result = torrent.verify_hash(['file_b.txt'], { 164 | PurePosixPath('testfiles/file_a.txt'): testfiles / 'file_a.txt', 165 | PurePosixPath('testfiles/file_b.txt'): bad_file_b, 166 | PurePosixPath('testfiles/file_c.txt'): bad_file_c, 167 | }) 168 | 169 | assert {k.path: v for (k, v) in hash_result.items()} == { 170 | PurePosixPath('testfiles/file_b.txt'): 'hash-failed', 171 | } 172 | assert {k.path: v for (k, v) in touch_result.items()} == { 173 | PurePosixPath('testfiles/file_a.txt'): 'touch-failed', 174 | PurePosixPath('testfiles/file_b.txt'): 'touch-failed', 175 | PurePosixPath('testfiles/file_c.txt'): 'touch-failed', 176 | } 177 | 178 | hash_result, touch_result = torrent.verify_hash(['file_c.txt'], { 179 | PurePosixPath('testfiles/file_a.txt'): testfiles / 'file_a.txt', 180 | PurePosixPath('testfiles/file_b.txt'): testfiles / 'file_b.txt', 181 | PurePosixPath('testfiles/file_c.txt'): bad_file_c, 182 | }) 183 | 184 | assert {k.path: v for (k, v) in hash_result.items()} == { 185 | PurePosixPath('testfiles/file_c.txt'): 
'hash-failed', 186 | } 187 | assert {k.path: v for (k, v) in touch_result.items()} == { 188 | PurePosixPath('testfiles/file_b.txt'): 'touch-failed', 189 | PurePosixPath('testfiles/file_c.txt'): 'touch-failed', 190 | } 191 | -------------------------------------------------------------------------------- /tests/testfiles/My-Bluray.torrent: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/JohnDoee/autotorrent2/dcbdf2899ddb326fef9d4845b6250bf828ea2029/tests/testfiles/My-Bluray.torrent -------------------------------------------------------------------------------- /tests/testfiles/My-Bluray/BDMV/BACKUP/MovieObject.bdmv: -------------------------------------------------------------------------------- 1 | aaaaaaaaaaaa 2 | -------------------------------------------------------------------------------- /tests/testfiles/My-Bluray/BDMV/BACKUP/PLAYLIST/00000.mpls: -------------------------------------------------------------------------------- 1 | aaaaaaaaaaaa 2 | -------------------------------------------------------------------------------- /tests/testfiles/My-Bluray/BDMV/BACKUP/index.bdmv: -------------------------------------------------------------------------------- 1 | aaaaaaaaaaaa 2 | -------------------------------------------------------------------------------- /tests/testfiles/My-Bluray/BDMV/MovieObject.bdmv: -------------------------------------------------------------------------------- 1 | aaaaaaaaaaaa 2 | -------------------------------------------------------------------------------- /tests/testfiles/My-Bluray/BDMV/PLAYLIST/00000.mpls: -------------------------------------------------------------------------------- 1 | aaaaaaaaaaaa 2 | -------------------------------------------------------------------------------- /tests/testfiles/My-Bluray/BDMV/STREAM/00000.m2ts: -------------------------------------------------------------------------------- 1 | aaaaaaaaaaaa 2 | 
-------------------------------------------------------------------------------- /tests/testfiles/My-Bluray/BDMV/index.bdmv: -------------------------------------------------------------------------------- 1 | aaaaaaaaaaaa 2 | -------------------------------------------------------------------------------- /tests/testfiles/My-DVD.torrent: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/JohnDoee/autotorrent2/dcbdf2899ddb326fef9d4845b6250bf828ea2029/tests/testfiles/My-DVD.torrent -------------------------------------------------------------------------------- /tests/testfiles/My-DVD/VIDEO_TS/VIDEO_TS.BUP: -------------------------------------------------------------------------------- 1 | aaaaaaaaaaaaa -------------------------------------------------------------------------------- /tests/testfiles/My-DVD/VIDEO_TS/VIDEO_TS.IFO: -------------------------------------------------------------------------------- 1 | aaaaaaaaaaaaa -------------------------------------------------------------------------------- /tests/testfiles/My-DVD/VIDEO_TS/VTS_01_0.BUP: -------------------------------------------------------------------------------- 1 | aaaaaaaaaaaaa -------------------------------------------------------------------------------- /tests/testfiles/My-DVD/VIDEO_TS/VTS_01_0.IFO: -------------------------------------------------------------------------------- 1 | aaaaaaaaaaaaa -------------------------------------------------------------------------------- /tests/testfiles/My-DVD/VIDEO_TS/VTS_01_0.VOB: -------------------------------------------------------------------------------- 1 | aaaaaaaaaaaaa -------------------------------------------------------------------------------- /tests/testfiles/My-DVD/VIDEO_TS/VTS_01_1.VOB: -------------------------------------------------------------------------------- 1 | aaaaaaaaaaaaa -------------------------------------------------------------------------------- 
/tests/testfiles/Some-CD-Release.torrent: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/JohnDoee/autotorrent2/dcbdf2899ddb326fef9d4845b6250bf828ea2029/tests/testfiles/Some-CD-Release.torrent -------------------------------------------------------------------------------- /tests/testfiles/Some-CD-Release/CD1/somestuff-1.r00: -------------------------------------------------------------------------------- 1 | aaaaaaaaaaa -------------------------------------------------------------------------------- /tests/testfiles/Some-CD-Release/CD1/somestuff-1.r01: -------------------------------------------------------------------------------- 1 | aaaaaaaaaaa -------------------------------------------------------------------------------- /tests/testfiles/Some-CD-Release/CD1/somestuff-1.r02: -------------------------------------------------------------------------------- 1 | aaaaaaaaaaa -------------------------------------------------------------------------------- /tests/testfiles/Some-CD-Release/CD1/somestuff-1.r03: -------------------------------------------------------------------------------- 1 | aaaaaaaaaaa -------------------------------------------------------------------------------- /tests/testfiles/Some-CD-Release/CD1/somestuff-1.r04: -------------------------------------------------------------------------------- 1 | aaaaaaaaaaa -------------------------------------------------------------------------------- /tests/testfiles/Some-CD-Release/CD1/somestuff-1.r05: -------------------------------------------------------------------------------- 1 | aaaaaaaaaaa -------------------------------------------------------------------------------- /tests/testfiles/Some-CD-Release/CD1/somestuff-1.r06: -------------------------------------------------------------------------------- 1 | aaaaaaaaaaa -------------------------------------------------------------------------------- 
/tests/testfiles/Some-CD-Release/CD1/somestuff-1.rar: -------------------------------------------------------------------------------- 1 | aaaaaaaaaaa -------------------------------------------------------------------------------- /tests/testfiles/Some-CD-Release/CD1/somestuff-1.sfv: -------------------------------------------------------------------------------- 1 | aaaaaaaaaaa -------------------------------------------------------------------------------- /tests/testfiles/Some-CD-Release/CD2/somestuff-2.r00: -------------------------------------------------------------------------------- 1 | aaaaaaaaaaa -------------------------------------------------------------------------------- /tests/testfiles/Some-CD-Release/CD2/somestuff-2.r01: -------------------------------------------------------------------------------- 1 | aaaaaaaaaaa -------------------------------------------------------------------------------- /tests/testfiles/Some-CD-Release/CD2/somestuff-2.r02: -------------------------------------------------------------------------------- 1 | aaaaaaaaaaa -------------------------------------------------------------------------------- /tests/testfiles/Some-CD-Release/CD2/somestuff-2.r03: -------------------------------------------------------------------------------- 1 | aaaaaaaaaaa -------------------------------------------------------------------------------- /tests/testfiles/Some-CD-Release/CD2/somestuff-2.r04: -------------------------------------------------------------------------------- 1 | aaaaaaaaaaa -------------------------------------------------------------------------------- /tests/testfiles/Some-CD-Release/CD2/somestuff-2.r05: -------------------------------------------------------------------------------- 1 | aaaaaaaaaaa -------------------------------------------------------------------------------- /tests/testfiles/Some-CD-Release/CD2/somestuff-2.r06: -------------------------------------------------------------------------------- 1 | 
aaaaaaaaaaa -------------------------------------------------------------------------------- /tests/testfiles/Some-CD-Release/CD2/somestuff-2.r07: -------------------------------------------------------------------------------- 1 | aaaaaaaaaaa -------------------------------------------------------------------------------- /tests/testfiles/Some-CD-Release/CD2/somestuff-2.rar: -------------------------------------------------------------------------------- 1 | aaaaaaaaaaa -------------------------------------------------------------------------------- /tests/testfiles/Some-CD-Release/CD2/somestuff-2.sfv: -------------------------------------------------------------------------------- 1 | aaaaaaaaaaa -------------------------------------------------------------------------------- /tests/testfiles/Some-CD-Release/Sample/some-rls.mkv: -------------------------------------------------------------------------------- 1 | aaaaaaaaaaaa -------------------------------------------------------------------------------- /tests/testfiles/Some-CD-Release/Subs/somestuff-subs.r00: -------------------------------------------------------------------------------- 1 | aaaaaaaaaaa -------------------------------------------------------------------------------- /tests/testfiles/Some-CD-Release/Subs/somestuff-subs.rar: -------------------------------------------------------------------------------- 1 | aaaaaaaaaaa -------------------------------------------------------------------------------- /tests/testfiles/Some-CD-Release/Subs/somestuff-subs.sfv: -------------------------------------------------------------------------------- 1 | aaaaaaaaaaa -------------------------------------------------------------------------------- /tests/testfiles/Some-CD-Release/crap.nfo: -------------------------------------------------------------------------------- 1 | aaaaaaaaaaa -------------------------------------------------------------------------------- /tests/testfiles/Some-Release [test].torrent: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/JohnDoee/autotorrent2/dcbdf2899ddb326fef9d4845b6250bf828ea2029/tests/testfiles/Some-Release [test].torrent -------------------------------------------------------------------------------- /tests/testfiles/Some-Release.torrent: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/JohnDoee/autotorrent2/dcbdf2899ddb326fef9d4845b6250bf828ea2029/tests/testfiles/Some-Release.torrent -------------------------------------------------------------------------------- /tests/testfiles/Some-Release/Sample/some-rls.mkv: -------------------------------------------------------------------------------- 1 | aaaaaaaaaaaa -------------------------------------------------------------------------------- /tests/testfiles/Some-Release/Subs/some-subs.rar: -------------------------------------------------------------------------------- 1 | aaaaaaaaaaaa -------------------------------------------------------------------------------- /tests/testfiles/Some-Release/Subs/some-subs.sfv: -------------------------------------------------------------------------------- 1 | aaaaaaaaaaaa -------------------------------------------------------------------------------- /tests/testfiles/Some-Release/some-rls.nfo: -------------------------------------------------------------------------------- 1 | aaaaaaaaaaa 2 | -------------------------------------------------------------------------------- /tests/testfiles/Some-Release/some-rls.r00: -------------------------------------------------------------------------------- 1 | aaaaaaaaaaa 2 | -------------------------------------------------------------------------------- /tests/testfiles/Some-Release/some-rls.r01: -------------------------------------------------------------------------------- 1 | aaaaaaaaaaa 2 | 
-------------------------------------------------------------------------------- /tests/testfiles/Some-Release/some-rls.r02: -------------------------------------------------------------------------------- 1 | aaaaaaaaaaa 2 | -------------------------------------------------------------------------------- /tests/testfiles/Some-Release/some-rls.r03: -------------------------------------------------------------------------------- 1 | aaaaaaaaaaa 2 | -------------------------------------------------------------------------------- /tests/testfiles/Some-Release/some-rls.r04: -------------------------------------------------------------------------------- 1 | aaaaaaaaaaa 2 | -------------------------------------------------------------------------------- /tests/testfiles/Some-Release/some-rls.r05: -------------------------------------------------------------------------------- 1 | aaaaaaaaaaa 2 | -------------------------------------------------------------------------------- /tests/testfiles/Some-Release/some-rls.r06: -------------------------------------------------------------------------------- 1 | aaaaaaaaaaa 2 | -------------------------------------------------------------------------------- /tests/testfiles/Some-Release/some-rls.rar: -------------------------------------------------------------------------------- 1 | aaaaaaaaaaa 2 | -------------------------------------------------------------------------------- /tests/testfiles/Some-Release/some-rls.sfv: -------------------------------------------------------------------------------- 1 | aaaaaaaaaaa 2 | -------------------------------------------------------------------------------- /tests/testfiles/file_a.txt: -------------------------------------------------------------------------------- 1 | 1111111111 2 | -------------------------------------------------------------------------------- /tests/testfiles/file_b.txt: -------------------------------------------------------------------------------- 1 | 2222222222 2 
| -------------------------------------------------------------------------------- /tests/testfiles/file_c.txt: -------------------------------------------------------------------------------- 1 | 3333333333 2 | -------------------------------------------------------------------------------- /tests/testfiles/folder-does-not-exist.torrent: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/JohnDoee/autotorrent2/dcbdf2899ddb326fef9d4845b6250bf828ea2029/tests/testfiles/folder-does-not-exist.torrent -------------------------------------------------------------------------------- /tests/testfiles/test-unsplitable-normal.torrent: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/JohnDoee/autotorrent2/dcbdf2899ddb326fef9d4845b6250bf828ea2029/tests/testfiles/test-unsplitable-normal.torrent -------------------------------------------------------------------------------- /tests/testfiles/test.torrent: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/JohnDoee/autotorrent2/dcbdf2899ddb326fef9d4845b6250bf828ea2029/tests/testfiles/test.torrent -------------------------------------------------------------------------------- /tests/testfiles/test_single.torrent: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/JohnDoee/autotorrent2/dcbdf2899ddb326fef9d4845b6250bf828ea2029/tests/testfiles/test_single.torrent --------------------------------------------------------------------------------