├── .env.example
├── .github
│   └── workflows
│       └── docker-image.yml
├── .gitignore
├── Dockerfile
├── README.md
├── docker-compose.yml
├── functions.py
├── refresh.py
└── unrestrict.py
/.env.example:
--------------------------------------------------------------------------------
RD_APITOKEN="your_token_here"
SLEEP=100 # Delay (ms) between requests - optional, default recommended
LONG_SLEEP=5000 # Long delay (ms) every 500 requests - optional, default recommended
--------------------------------------------------------------------------------
/.github/workflows/docker-image.yml:
--------------------------------------------------------------------------------
name: Docker Image CI

on:
  push:
    branches: [ "main" ]
  workflow_dispatch:

jobs:

  build:

    runs-on: ubuntu-latest

    steps:
      -
        name: Docker Setup QEMU
        uses: docker/setup-qemu-action@v3
        id: qemu
        with:
          platforms: amd64,arm64,arm
      -
        name: Docker Setup Buildx
        id: buildx
        uses: docker/setup-buildx-action@v3
      -
        name: Log into ghcr.io registry
        uses: docker/login-action@v3
        with:
          registry: ghcr.io
          username: ${{ github.repository_owner }}
          password: ${{ secrets.GITHUB_TOKEN }}
      -
        name: set lower case owner name
        run: |
          echo "GITHUB_OWNER_LC=${OWNER,,}" >>${GITHUB_ENV}
        env:
          OWNER: '${{ github.repository_owner }}'
      -
        name: Build and push Docker image
        uses: docker/build-push-action@v5
        with:
          platforms: linux/amd64,linux/arm64,linux/arm/v7
          push: true
          cache-from: type=gha
          cache-to: type=gha,mode=max
          tags: |
            ghcr.io/${{ env.GITHUB_OWNER_LC }}/${{ github.event.repository.name }}:latest
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
__pycache__
.env
.vscode
*.log
test.py
--------------------------------------------------------------------------------
/Dockerfile:
--------------------------------------------------------------------------------
FROM python:alpine

ENV PUID=1000
ENV PGID=1000
ENV PUSER=rd_refresh
ENV PGROUP=rd_refresh

RUN mkdir -p /opt/rd

RUN addgroup -g $PGID $PGROUP && \
    adduser --shell /sbin/nologin --disabled-password \
    --home /opt/rd --uid $PUID --ingroup $PGROUP $PUSER

RUN pip install --no-cache-dir requests python-dotenv rd_api_py

RUN apk update && apk add --no-cache git

RUN git clone https://github.com/s-krilla/rd_refresh.git /opt/rd/refresh

RUN chmod +x /opt/rd/refresh/unrestrict.py && \
    chmod +x /opt/rd/refresh/refresh.py

RUN chown -R $PUSER:$PGROUP /opt/rd

USER $PUSER

WORKDIR /opt/rd/refresh

ENTRYPOINT [ "python" ]
CMD [ "refresh.py" ]
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
# rd_refresh

A set of useful scripts for managing a Real-Debrid library.

## Installation

Install [rd_api_py](https://github.com/s-krilla/rd_api_py):

```
pip install rd_api_py
```

Set environment variables in `.env`:

```bash
RD_APITOKEN="your_token_here"
REFRESH_INTERVAL=15 # Interval (minutes) between refreshes - optional
LOGLEVEL=INFO       # Log level - optional, defaults to INFO

# rd_api_py configuration - optional, defaults recommended
SLEEP=2000          # Delay (ms) between requests
LONG_SLEEP=30000    # Long delay (ms) every 500 requests
```
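
For reference, the scripts pick these values up with python-dotenv at start-up, and the rd_api_py client then reads `RD_APITOKEN` (plus the optional `SLEEP`/`LONG_SLEEP` pacing values) from the environment. A minimal sketch of the pattern used in `functions.py` and `refresh.py`:

```python
import os
from dotenv import load_dotenv

load_dotenv()  # pull .env into the process environment

from rdapi import RD
RD = RD()  # the client reads RD_APITOKEN (and SLEEP/LONG_SLEEP pacing) from the environment

interval = int(os.getenv('REFRESH_INTERVAL', 15)) * 60  # minutes -> seconds
loglevel = os.getenv('LOGLEVEL', 'INFO').upper()        # passed to logging.basicConfig
```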

## Usage

Set up cron jobs to run the scripts - for example, make `refresh.py` executable:

```bash
sudo chmod +x refresh.py
```

and schedule it:

```
*/15 * * * * /path/to/refresh.py
```

Or use Docker.

## Docker

```yaml
version: "3"

services:
  rd_refresh:
    container_name: rd_refresh
    image: ghcr.io/s-krilla/rd_refresh:latest
    environment:
      - RD_APITOKEN=yourtokenhere
    restart: unless-stopped
```

### refresh.py

- Finds "dead" torrents
- Re-adds each torrent from its hash
- Selects the same files
- Deletes the old torrent
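
Condensed, the refresh step comes down to a handful of rd_api_py calls - a sketch of `refresh_torrent()` from `functions.py`, not a drop-in replacement for it:

```python
from rdapi import RD
RD = RD()  # token taken from RD_APITOKEN in the environment

def refresh_torrent(torrent):
    old = RD.torrents.info(torrent['id']).json()
    # keep exactly the files that were selected in the dead torrent
    keep = ','.join(str(f['id']) for f in old['files'] if f['selected'] == 1)
    new = RD.torrents.add_magnet(old['hash']).json()  # re-add the same hash
    RD.torrents.select_files(new['id'], keep)         # reselect the same files
    RD.torrents.delete(old['id'])                     # drop the dead copy
```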

### unrestrict.py

**Warning - excessive API calls**

- Compares torrents against downloads and finds restricted download links
- Unrestricts those links
- Refreshes the "bad" torrents whose links cannot be unrestricted

--------------------------------------------------------------------------------
/docker-compose.yml:
--------------------------------------------------------------------------------
version: "3"

services:
  rd_refresh:
    container_name: rd_refresh
    image: ghcr.io/s-krilla/rd_refresh:latest
    env_file:
      - .env # Copy .env.example to .env and set your API token.
    environment:
      - RD_APITOKEN=yourtokenhere # Alternatively, set the token here instead of in the .env file.
    restart: unless-stopped
--------------------------------------------------------------------------------
/functions.py:
--------------------------------------------------------------------------------
#!/usr/bin/env python3

import sys
import os
import logging

from dotenv import load_dotenv
load_dotenv()

from rdapi import RD
RD = RD()

LOGLEVEL = os.getenv('LOGLEVEL', 'INFO').upper()
logging.basicConfig(
    level=LOGLEVEL,
    format='%(asctime)s:%(levelname)s:%(message)s',
    handlers=[
        logging.FileHandler('rd_refresh.log'),
        logging.StreamHandler(sys.stdout)
    ]
)

def get_all(kind):
    """Page through all torrents or downloads (the API caps each page at 2500 items)."""
    page = 1
    processed = 0
    collected = []
    while True:
        if kind == 'torrents':
            request = RD.torrents.get(limit=2500, page=page)
        else:
            request = RD.downloads.get(limit=2500, page=page)
        data = request.json()
        collected += data
        total = int(request.headers['X-Total-Count'])
        processed = processed + len(data)
        remaining = total - processed
        page = page + 1

        if remaining == 0:
            break
    logging.info('Retrieved %s %s from RD', str(len(collected)), kind)
    return collected

def refresh_torrent(torrent):
    old_torrent = RD.torrents.info(torrent['id']).json()
    logging.warning('Refreshing old torrent:\n%s', str(old_torrent['filename']))
    old_torrent_files = old_torrent['files']
    files_to_keep = []
    for file in old_torrent_files:
        if file['selected'] == 1:
            files_to_keep.append(file['id'])
    cs_files_to_keep = ','.join(map(str, files_to_keep))
    logging.info('Files to keep:\n%s', cs_files_to_keep)
    new_torrent = RD.torrents.add_magnet(old_torrent['hash']).json()
    logging.info('New magnet added')
    RD.torrents.select_files(new_torrent['id'], cs_files_to_keep)
    logging.info('New files selected')
    RD.torrents.delete(old_torrent['id'])
    logging.info('Old torrent deleted')
    return

def find_torrent_links(torrents):
    torrent_links = []
    for torrent in torrents:
        if torrent['status'] == 'downloaded':
            if torrent['links'] != []:
                for link in torrent['links']:
                    torrent_links.append(link)
    logging.info('Found %s torrent link(s)', str(len(torrent_links)))
    return torrent_links

def find_download_links(downloads):
    download_links = []
    for download in downloads:
        download_links.append(download['link'])
    logging.info('Found %s download link(s)', str(len(download_links)))
    return download_links

def find_restricted_links(torrent_links, download_links):
    restricted_links = list(set(torrent_links).difference(download_links))
    logging.info('Found %s restricted link(s)', str(len(restricted_links)))
    return restricted_links

def find_bad_torrents(torrents, bad_links):
    bad_torrents = []
    for link in bad_links:
        match = find(torrents, link)
        # skip links that no longer map to a torrent, and de-duplicate
        if match is not None and match not in bad_torrents:
            bad_torrents.append(match)
    logging.warning('%s bad torrent(s)', str(len(bad_torrents)))
    return bad_torrents

def find(torrents, link):
    for torrent in torrents:
        if link in torrent["links"]:
            return torrent
--------------------------------------------------------------------------------
/refresh.py:
--------------------------------------------------------------------------------
#!/usr/bin/env python3

import sys
import os
import time
from functions import *

from dotenv import load_dotenv
load_dotenv()

interval = int(os.getenv('REFRESH_INTERVAL', 15)) * 60

while True:

    torrents = get_all('torrents')

    dead_torrents = [torrent for torrent in torrents if torrent['status'] == 'dead']

    if dead_torrents:
        logging.warning('Found %s dead torrent(s)', len(dead_torrents))
        for torrent in dead_torrents:
            refresh_torrent(torrent)
    else:
        logging.info('Found no dead torrents')

    logging.info('Done')

    logging.info('Sleeping %s minutes', int(interval / 60))

    time.sleep(interval)
--------------------------------------------------------------------------------
/unrestrict.py:
--------------------------------------------------------------------------------
#!/usr/bin/env python3

import sys
from functions import *

torrents = get_all('torrents')
downloads = get_all('downloads')

torrent_links = find_torrent_links(torrents)
download_links = find_download_links(downloads)
restricted_links = find_restricted_links(torrent_links, download_links)

logging.debug(restricted_links)

bad_links = []

for link in restricted_links:
    response = RD.unrestrict.link(link).json()
    if 'error_code' in response:
        if response['error_code'] == 19:
            bad_links.append(link)
            logging.warning('Found bad link:\n%s', str(link))
    else:
        logging.info('Unrestricted:\n%s', response['filename'])
    # time.sleep(0.2)

if len(bad_links) > 0:
    logging.warning('Found bad links:\n%s', str(bad_links))

    bad_torrents = find_bad_torrents(torrents, bad_links)

    for bad_torrent in bad_torrents:
        refresh_torrent(bad_torrent)
        # time.sleep(0.2)

logging.info('Done')

sys.exit()
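# Note: unlike refresh.py, which loops and sleeps REFRESH_INTERVAL minutes
# between passes, this script makes a single pass over the library and exits;
# rerun it (manually or on a schedule) whenever the links should be audited again.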
--------------------------------------------------------------------------------