├── AddTransmissionTrackers ├── AddTransmissionTrackers.sh └── AddTransmissionTrackers.sh.readme.md ├── AddqBittorrentTrackers ├── AddqBittorrentTrackers.py ├── AddqBittorrentTrackers.sh └── AddqBittorrentTrackers.sh.readme.md ├── AudioMediaChecker ├── AudioMediaChecker.py ├── Dockerfile ├── docker-compose.yml └── requirements.txt ├── LICENSE ├── README.md ├── TransmissionRemoveCompleteTorrent.sh ├── TransmissionRemoveCompleteTorrent.sh.readme.md ├── clean_samba_recycle.sh ├── eMulerrStalledChecker ├── Dockerfile ├── docker-compose.yml ├── eMulerr_Stalled_Checker.py └── requirements.txt ├── qBittorrentHardlinksChecker ├── qBittorrentHardlinksChecker.py ├── qBittorrentHardlinksChecker.py.readme.md ├── qBittorrentHardlinksChecker.sh └── qBittorrentHardlinksChecker.sh.readme.md ├── radarr_cleanup_packed_torrent.sh └── sonarr_cleanup_packed_torrent.sh
--------------------------------------------------------------------------------
/AddTransmissionTrackers/AddTransmissionTrackers.sh:
--------------------------------------------------------------------------------
#!/bin/bash
#
# Inject a list of public trackers into Transmission torrents, either manually
# (by name/id) or automatically when triggered by Sonarr/Radarr/Lidarr/Readarr.

########## CONFIGURATIONS ##########
# Host on which Transmission runs
transmission_host="http://10.0.0.100"
# Port -> the same port that is inside Transmission option -> Web UI
transmission_port="9091"
# Username to access to Web UI
transmission_username="transmission"
# Password to access to Web UI
transmission_password="transmission"

# If true (lowercase) the script will inject trackers inside private torrents too (not a good idea)
ignore_private=false

# Configure here your trackers list
declare -a live_trackers_list_urls=(
    "https://newtrackon.com/api/stable"
    "https://trackerslist.com/best.txt"
    "https://trackerslist.com/http.txt"
    "https://raw.githubusercontent.com/ngosang/trackerslist/master/trackers_best.txt"
)
########## CONFIGURATIONS ##########

jq_executable="$(command -v jq)"
curl_executable="$(command -v curl)"
auto_tor_grab=0
test_in_progress=0
applytheforce=0
all_torrent=0
list=""

if [[ -z $jq_executable ]]; then
    echo -e "\n\e[0;91;1mFail on jq. Aborting.\n\e[0m"
    echo "You can find it here: https://stedolan.github.io/jq/"
    echo "Or you can install it with -> sudo apt install jq"
    exit 1
fi

if [[ -z $curl_executable ]]; then
    echo -e "\n\e[0;91;1mFail on curl. Aborting.\n\e[0m"
    echo "You can install it with -> sudo apt install curl"
    exit 2
fi

########## FUNCTIONS ##########
# Download every configured tracker list, deduplicate the entries into the
# global $trackers_list and count them into $number_of_trackers_in_list.
# Falls back to a built-in static list when nothing could be downloaded.
# BUGFIX: the old code wrote the fallback heredoc to a *file* named after the
# variable's contents (cat >"${trackers_list}") instead of assigning the
# variable, and the $? it tested belonged to 'tr', not to the downloads.
generate_trackers_list () {
    local url tmp_trackers_list=""
    for url in "${live_trackers_list_urls[@]}"; do
        tmp_trackers_list+=$($curl_executable -sS "$url")
        tmp_trackers_list+=$'\n'
    done

    # Deduplicate while preserving order, one tracker per line.
    trackers_list=$(echo "$tmp_trackers_list" | awk '{for (i=1;i<=NF;i++) if (!a[$i]++) printf("%s%s",$i,FS)}{printf("\n")}' | xargs | tr ' ' '\n')
    if [[ -z "${trackers_list//[[:space:]]/}" ]]; then # every download failed
        echo "I can't download the list, I'll use a static one"
        trackers_list=$(cat <<'EOL'
udp://tracker.coppersurfer.tk:6969/announce
http://tracker.internetwarriors.net:1337/announce
udp://tracker.internetwarriors.net:1337/announce
udp://tracker.opentrackr.org:1337/announce
udp://9.rarbg.to:2710/announce
udp://exodus.desync.com:6969/announce
udp://explodie.org:6969/announce
http://explodie.org:6969/announce
udp://public.popcorn-tracker.org:6969/announce
udp://tracker.vanitycore.co:6969/announce
http://tracker.vanitycore.co:6969/announce
udp://tracker1.itzmx.com:8080/announce
http://tracker1.itzmx.com:8080/announce
udp://ipv4.tracker.harry.lu:80/announce
udp://tracker.torrent.eu.org:451/announce
udp://tracker.tiny-vps.com:6969/announce
udp://tracker.port443.xyz:6969/announce
udp://open.stealth.si:80/announce
udp://open.demonii.si:1337/announce
udp://denis.stalker.upeer.me:6969/announce
udp://bt.xxx-tracker.com:2710/announce
http://tracker.port443.xyz:6969/announce
udp://tracker2.itzmx.com:6961/announce
udp://retracker.lanta-net.ru:2710/announce
http://tracker2.itzmx.com:6961/announce
http://tracker4.itzmx.com:2710/announce
http://tracker3.itzmx.com:6961/announce
http://tracker.city9x.com:2710/announce
http://torrent.nwps.ws:80/announce
http://retracker.telecom.by:80/announce
http://open.acgnxtracker.com:80/announce
wss://ltrackr.iamhansen.xyz:443/announce
udp://zephir.monocul.us:6969/announce
udp://tracker.toss.li:6969/announce
http://opentracker.xyz:80/announce
http://open.trackerlist.xyz:80/announce
udp://tracker.swateam.org.uk:2710/announce
udp://tracker.kamigami.org:2710/announce
udp://tracker.iamhansen.xyz:2000/announce
udp://tracker.ds.is:6969/announce
udp://pubt.in:2710/announce
https://tracker.fastdownload.xyz:443/announce
https://opentracker.xyz:443/announce
http://tracker.torrentyorg.pl:80/announce
http://t.nyaatracker.com:80/announce
http://open.acgtracker.com:1096/announce
wss://tracker.openwebtorrent.com:443/announce
wss://tracker.fastcast.nz:443/announce
wss://tracker.btorrent.xyz:443/announce
udp://tracker.justseed.it:1337/announce
udp://thetracker.org:80/announce
udp://packages.crunchbangplusplus.org:6969/announce
https://1337.abcvg.info:443/announce
http://tracker.tfile.me:80/announce.php
http://tracker.tfile.me:80/announce
http://tracker.tfile.co:80/announce
http://retracker.mgts.by:80/announce
http://peersteers.org:80/announce
http://fxtt.ru:80/announce
EOL
)
    fi
    number_of_trackers_in_list=$(echo "$trackers_list" | wc -l)
}

# Add every tracker of $trackers_list to the torrent whose numeric id is $1,
# one "torrent-set" RPC call per tracker. Requires $qbt_cookie (session id
# header) to be already populated by get_cookie/get_torrent_list.
inject_trackers () {
    local tracker start=1
    while read -r tracker; do
        if [ -n "$tracker" ]; then
            echo -ne "\e[0;36;1m$start/$number_of_trackers_in_list - Adding tracker $tracker\e[0;36m"
            if $curl_executable --silent --fail --show-error --anyauth \
                --user "${transmission_username}:${transmission_password}" --header "$qbt_cookie" "${transmission_host}:${transmission_port}/transmission/rpc/" \
                -d "{\"method\":\"torrent-set\",\"arguments\": {\"fields\":[\"ids\",\"trackerAdd\"],\"ids\":[$1],\"trackerAdd\":[\"$tracker\"]}}"; then
                echo -e " -> \e[32mSuccess! "
            else
                echo -e " - \e[31m< Failed > "
            fi
        fi
        start=$((start+1))
    done <<< "$trackers_list"
    echo "Done!"
}

# Fetch id/name/hash/privacy/trackers for every torrent into $torrent_list (JSON).
get_torrent_list () {
    get_cookie
    torrent_list=$($curl_executable --silent --anyauth \
        --user "${transmission_username}:${transmission_password}" --header "$qbt_cookie" "${transmission_host}:${transmission_port}/transmission/rpc/" \
        -d "{\"method\":\"torrent-get\",\"arguments\": {\"fields\":[\"isPrivate\",\"id\",\"name\",\"hashString\",\"trackers\"]}}")
}

# Obtain the CSRF session header. Transmission answers the first request with
# 409 and the X-Transmission-Session-Id wrapped in <code>...</code>; extract it.
# BUGFIX: the first sed pattern had lost its '<code>' text, blanking the line.
get_cookie () {
    qbt_cookie=$($curl_executable --silent --anyauth \
        --user "${transmission_username}:${transmission_password}" "${transmission_host}:${transmission_port}/transmission/rpc/" \
        | sed 's/.*<code>//g;s/<\/code>.*//g')
}

# Return 0 when $1 looks like a torrent hash (32 or 40 hex chars), 1 otherwise.
hash_check() {
    case $1 in
        ( *[!0-9A-Fa-f]* | "" ) return 1 ;;
        ( * )
            case ${#1} in
                ( 32 | 40 ) return 0 ;;
                ( * ) return 1 ;;
            esac
    esac
}

# Visible countdown of $1 seconds (renamed from 'wait' to avoid shadowing the
# shell builtin of the same name).
countdown() {
    local w=$1
    echo "I'll wait ${w}s to be sure ..."
    while [ "$w" -gt 0 ]; do
        echo -ne "$w\033[0K\r"
        sleep 1
        w=$((w-1))
    done
}
########## FUNCTIONS ##########

if [ -t 1 ] ; then # interactive run: parse command line options
    if [[ ! $@ =~ ^\-.+ ]]; then
        echo "Arguments must be passed with - in front, like -n foo, or -i 5. Check the instructions"
        echo ""
        $0 -h
        exit
    fi

    [ $# -eq 0 ] && $0 -h

    if [ $# -eq 1 ] && [ "$1" == "-f" ]; then
        echo "Don't use only -f, you need to specify also the torrent!"
        exit
    fi

    while getopts ":aflhn:i:" opt; do
        case ${opt} in
            a ) # If used inject trackers into all torrents.
                all_torrent=1
                ;;
            f ) # If used force the injection also into private trackers.
                applytheforce=1
                ;;
            l ) # Print the list of the torrents where you can inject trackers.
                list=1
                get_torrent_list
                echo -e "\n\e[0;32;1mCurrent torrents:\e[0;32m"

                while IFS= read -r line; do
                    torrent_id_array+=("$line")
                done < <(echo "$torrent_list" | $jq_executable --raw-output '. | .arguments | .torrents | .[] | .id')

                while IFS= read -r line; do
                    torrent_name_array+=("$line")
                done < <(echo "$torrent_list" | $jq_executable --raw-output '. | .arguments | .torrents | .[] | .name')

                for i in "${!torrent_name_array[@]}"; do
                    echo "ID: ${torrent_id_array[$i]} ~~~ Name: ${torrent_name_array[$i]}"
                done
                exit
                ;;
            n ) # Specify the name of the torrent example -n foo or -n "foo bar", multiple -n can be used.
                tor_arg_names+=("$OPTARG")
                ;;
            i ) # Specify the id of the torrent example -i 5, multiple -i can be used.
                tor_arg_id+=("$OPTARG")
                ;;
            : )
                echo "Invalid option: -${OPTARG} requires an argument" 1>&2
                exit 0
                ;;
            \? )
                echo "Unknown option: -${OPTARG}" 1>&2
                exit 1
                ;;
            h | * ) # Display help.
                echo "Usage:"
                echo "$0 -a Inject trackers to all torrents in Transmission, this does not require any extra information"
                echo "$0 -f Force the injection of the trackers inside the private torrents too, this does not require any extra information"
                echo "$0 -l Print the list of the torrents where you can inject trackers, this does not require any extra information"
                echo "$0 -n Specify the torrent name or part of it, for example -n foo or -n 'foo bar'"
                echo "$0 -i Specify the torrent id, for example -i 5"
                echo "$0 -h Display this help"
                echo "NOTE:"
                echo "It's possible to specify more than -n and -i in one single command, even combined"
                echo "Just remember that if you set -a it is useless to add any extra -n, but -f can always be used"
                exit 2
                ;;
        esac
    done
    shift $((OPTIND -1))
else # non-interactive run: triggered by one of the *Arr applications
    if [[ -n "${sonarr_download_id}" ]] || [[ -n "${radarr_download_id}" ]] || [[ -n "${lidarr_download_id}" ]] || [[ -n "${readarr_download_id}" ]]; then
        countdown 5
        if [[ -n "${sonarr_download_id}" ]]; then
            echo "Sonarr variable found -> $sonarr_download_id"
            hash=$(echo "$sonarr_download_id" | awk '{print tolower($0)}')
        fi

        if [[ -n "${radarr_download_id}" ]]; then
            echo "Radarr variable found -> $radarr_download_id"
            hash=$(echo "$radarr_download_id" | awk '{print tolower($0)}')
        fi

        if [[ -n "${lidarr_download_id}" ]]; then
            echo "Lidarr variable found -> $lidarr_download_id"
            hash=$(echo "$lidarr_download_id" | awk '{print tolower($0)}')
        fi

        if [[ -n "${readarr_download_id}" ]]; then
            echo "Readarr variable found -> $readarr_download_id"
            hash=$(echo "$readarr_download_id" | awk '{print tolower($0)}')
        fi

        if ! hash_check "${hash}"; then
            echo "The download is not for a torrent client, I'll exit"
            exit 3
        fi
        auto_tor_grab="1"
    fi

    if [[ $sonarr_eventtype == "Test" ]] || [[ $radarr_eventtype == "Test" ]] || [[ $lidarr_eventtype == "Test" ]] || [[ $readarr_eventtype == "Test" ]]; then
        echo "Test in progress..."
        test_in_progress=1
    fi
fi

for i in "${tor_arg_names[@]}"; do
    if [[ -z "${i// }" ]]; then # reject blank / whitespace-only names
        echo "one or more argument for -n not valid, try again"
        exit
    fi
done

if [ ${#tor_arg_names[@]} -eq 0 ] && [ ${#tor_arg_id[@]} -eq 0 ] && [ $all_torrent -eq 0 ] && [[ -z "$list" ]] && [ $auto_tor_grab -eq 0 ]; then
    echo "No name, no ID or no -a passed, exiting"
    exit
fi

if [ ${#tor_arg_id[@]} -gt 0 ]; then
    re='^[0-9]+$'

    for i in "${tor_arg_id[@]}"; do
        if ! [[ $i =~ $re ]] ; then
            echo "Error: parameter for -i ${i} is not a number" >&2; exit 1
        fi
    done
fi

if [ $test_in_progress -eq 1 ]; then
    echo "Good-bye!"
elif [ $auto_tor_grab -eq 0 ]; then # manual run
    get_torrent_list

    if [ $all_torrent -eq 1 ]; then
        while IFS= read -r line; do
            torrent_id_array+=("$line")
        done < <(echo "$torrent_list" | $jq_executable --raw-output '. | .arguments | .torrents | .[] | .id')

        while IFS= read -r line; do
            torrent_name_array+=("$line")
        done < <(echo "$torrent_list" | $jq_executable --raw-output '. | .arguments | .torrents | .[] | .name')

        while IFS= read -r line; do
            torrent_private_array+=("$line")
        done < <(echo "$torrent_list" | $jq_executable --raw-output '. | .arguments | .torrents | .[] | .isPrivate')

    else
        # Collect torrents matching each -n argument (case-insensitive substring).
        for i in "${tor_arg_names[@]}"; do
            torrent_name_list=$(echo "$torrent_list" | $jq_executable --raw-output --arg tosearch "$i" '. | .arguments | .torrents | .[] | select(.name|test("\($tosearch)";"i")) .name')

            if [ -n "$torrent_name_list" ]; then # not empty
                torrent_name_check=1
                echo -e "\n\e[0;32;1mFor argument ### -n $i ###\e[0;32m"
                echo -e "\e[0;32;1mI found the following torrent:\e[0;32m"
                echo "$torrent_name_list"
            else
                torrent_name_check=0
            fi

            if [ $torrent_name_check -eq 0 ]; then
                # BUGFIX: report the argument actually searched ($i, not $1) and
                # drop the bogus 'shift' that corrupted the positional parameters.
                echo -e "\e[0;31;1mI didn't find a torrent with the text: \e[21m$i\e[0m"
                continue
            else
                while read -r single_found; do
                    torrent_name_array+=("$single_found")
                    id=$(echo "$torrent_list" | $jq_executable --raw-output --arg tosearch "$single_found" '. | .arguments | .torrents | .[] | select(.name == "\($tosearch)") .id')
                    torrent_id_array+=("$id")
                    private=$(echo "$torrent_list" | $jq_executable --raw-output --arg tosearch "$single_found" '. | .arguments | .torrents | .[] | select(.name == "\($tosearch)") .isPrivate')
                    torrent_private_array+=("$private")
                done <<< "$torrent_name_list"
            fi
        done

        # Collect torrents matching each -i argument (exact id).
        for i in "${tor_arg_id[@]}"; do
            torrent_name_list=$(echo "$torrent_list" | $jq_executable --raw-output --argjson tosearch "$i" '. | .arguments | .torrents | .[] | select(.id == $tosearch) .name')

            if [ -n "$torrent_name_list" ]; then # not empty
                torrent_name_check=1
                echo -e "\n\e[0;32;1mFor argument ### -i $i ###\e[0;32m"
                echo -e "\e[0;32;1mI found the following torrent:\e[0;32m"
                echo "$torrent_name_list"
            else
                torrent_name_check=0
            fi

            if [ $torrent_name_check -eq 0 ]; then
                # BUGFIX: same as above — print $i, no 'shift'.
                echo -e "\e[0;31;1mI didn't find a torrent with the id: \e[21m$i\e[0m"
                continue
            else
                while read -r single_found; do
                    torrent_name_array+=("$single_found")
                    id=$(echo "$torrent_list" | $jq_executable --raw-output --arg tosearch "$single_found" '. | .arguments | .torrents | .[] | select(.name == "\($tosearch)") .id')
                    torrent_id_array+=("$id")
                    private=$(echo "$torrent_list" | $jq_executable --raw-output --arg tosearch "$single_found" '. | .arguments | .torrents | .[] | select(.name == "\($tosearch)") .isPrivate')
                    torrent_private_array+=("$private")
                done <<< "$torrent_name_list"
            fi
        done
    fi

    if [ ${#torrent_name_array[@]} -gt 0 ]; then
        echo ""
        for i in "${!torrent_name_array[@]}"; do
            echo -ne "\n\e[0;1;4;32mFor the Torrent: \e[0;4;32m"
            echo "${torrent_name_array[$i]}"

            if [[ $ignore_private == true ]] || [ $applytheforce -eq 1 ]; then # Inject the trackers inside any torrent anyway
                if [ $applytheforce -eq 1 ]; then
                    echo -e "\e[0m\e[33mForce mode is active, I'll inject trackers anyway\e[0m"
                else
                    echo -e "\e[0m\e[33mignore_private set to true, I'll inject trackers anyway\e[0m"
                fi
                [[ -z "$trackers_list" ]] && generate_trackers_list
                inject_trackers "${torrent_id_array[$i]}"
            else
                if [[ ${torrent_private_array[$i]} == true ]]; then
                    private_tracker_name=$(echo "$torrent_list" | $jq_executable --raw-output --argjson tosearch "${torrent_id_array[$i]}" '. | .arguments | .torrents | .[] | select(.id == $tosearch) .trackers | .[] | .announce' | sed -e 's/[^/]*\/\/\([^@]*@\)\?\([^:/]*\).*/\2/')
                    echo -e "\e[31m< Private tracker found \e[0m\e[33m-> $private_tracker_name <- \e[0m\e[31mI'll not add any extra tracker >\e[0m"
                else
                    echo -e "\e[0m\e[33mThe torrent is not private, I'll inject trackers on it\e[0m"
                    [[ -z "$trackers_list" ]] && generate_trackers_list
                    inject_trackers "${torrent_id_array[$i]}"
                fi
            fi
        done
    else
        echo "No torrents found, exiting"
    fi
else # auto_tor_grab active, so some *Arr
    countdown 5
    get_torrent_list

    torrent_name=$(echo "$torrent_list" | $jq_executable --raw-output --arg tosearch "$hash" '. | .arguments | .torrents | .[] | select(.hashString == "\($tosearch)") .name')
    torrent_id=$(echo "$torrent_list" | $jq_executable --raw-output --arg tosearch "$hash" '. | .arguments | .torrents | .[] | select(.hashString == "\($tosearch)") .id')
    private_check=$(echo "$torrent_list" | $jq_executable --raw-output --arg tosearch "$hash" '. | .arguments | .torrents | .[] | select(.hashString == "\($tosearch)") .isPrivate')

    echo -ne "\n\e[0;1;4;32mFor the Torrent: \e[0;4;32m"
    echo "$torrent_name"

    if [[ $private_check == true ]]; then
        private_tracker_name=$(echo "$torrent_list" | $jq_executable --raw-output --argjson tosearch "$torrent_id" '. | .arguments | .torrents | .[] | select(.id == $tosearch) .trackers | .[] | .announce' | sed -e 's/[^/]*\/\/\([^@]*@\)\?\([^:/]*\).*/\2/')
        echo -e "\e[31m< Private tracker found \e[0m\e[33m-> $private_tracker_name <- \e[0m\e[31mI'll not add any extra tracker >\e[0m"
    else
        echo -e "\e[0m\e[33mThe torrent is not private, I'll inject trackers on it\e[0m"
        [[ -z "$trackers_list" ]] && generate_trackers_list
        inject_trackers "$torrent_id"
    fi
fi
--------------------------------------------------------------------------------
/AddTransmissionTrackers/AddTransmissionTrackers.sh.readme.md:
--------------------------------------------------------------------------------
# AddTransmissionTrackers.sh

The purpose of this script is to inject trackers inside the **Transmission torrent**

This can be used manually, or with Radarr/Sonarr automatically. To run the script manually, simply run the script `./AddTransmissionTrackers.sh` and see all the possible options.

When Radarr and/or Sonarr grabs a new torrent *and if the torrent is not from a private tracker*, the script is triggered and the custom tracker list populated to the torrent.

In the latest version, I've inserted a new way to call the script, with many options to inject trackers inside torrents.

N.B for those updating to the latest script, `Transmission-remote` is no longer needed. All commands have been switched to directly use `/rpc`. This is the very first release with this method.



I've also included the possibility to call the script and specify the name and/or ID where one adds trackers:

* First ensure your Radarr/Sonarr user can execute the script with something like this:
* Take ownership with: `chown USER:GROUP AddTransmissionTrackers.sh` where `USER:GROUP` is the same user and group as Transmission.
19 | * Ensure it is executable: `chmod +x AddTransmissionTrackers.sh` 20 | 21 | * Modify the script's `########## CONFIGURATIONS ##########` section: 22 | * `transmission_username`, `transmission_password`, `transmission_host` and `transmission_port`. These are all the same as your Transmission config. 23 | * `live_trackers_list_urls`, is the list of URLs where the trackers list is obtained. This is the default list. You may specify more than one URL, just follow the example in the file. 24 | * The script will automatically check if the torrent is private or public. 25 | 26 | The configuration is complete. 27 | 28 | 29 | If you are a **Radarr and/or Sonarr user**, personally I: 30 | 1. Create a custom script (settings -> connect -> add notification -> Custom Script). 31 | 2. The name is not important. I use Add Transmission Trackers, you can use any name you like. 32 | 3. Set "On Grab". 33 | 4. Inside the Path field, point to the `AddTransmissionTrackers.sh` script. 34 | 5. Save the custom script. 35 | 36 | 37 | 38 | One note about configuration and using the script manually. Before use you MUST configure the username, password, host and port within the script. Otherwise I would have to insert four new options to be called every time for manual user input, or "complicate" it by having a configuration file saved somewhere. If it's necessary I will do it, but for now I think it is easier to keep only the necessary options. 
39 | 40 | -------------------------------------------------------------------------------- /AddqBittorrentTrackers/AddqBittorrentTrackers.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | import sys 4 | import importlib 5 | 6 | required_packages = [ 7 | 'requests', # Not included in Python standard library 8 | ] 9 | 10 | def check_dependencies(): 11 | missing_packages = [] 12 | for package in required_packages: 13 | try: 14 | importlib.import_module(package) 15 | except ImportError: 16 | missing_packages.append(package) 17 | 18 | if missing_packages: 19 | print("The following dependencies are missing:") 20 | for package in missing_packages: 21 | print(f"- {package}") 22 | print("\nTo install them, you can use the command:") 23 | print(f"pip install {' '.join(missing_packages)}") 24 | sys.exit(1) 25 | 26 | ########## CONFIGURATIONS ########## 27 | qbt_host = "http://10.0.0.100" 28 | qbt_port = "8081" 29 | qbt_username = "admin" 30 | qbt_password = "adminadmin" 31 | 32 | ignore_private = False 33 | clean_existing_trackers = False 34 | 35 | exclude_download_client = "emulerr" # If not empty, download clients to exclude must be comma separated 36 | 37 | live_trackers_list_urls = [ 38 | "https://newtrackon.com/api/stable", 39 | "https://trackerslist.com/best.txt", 40 | "https://trackerslist.com/http.txt", 41 | "https://raw.githubusercontent.com/ngosang/trackerslist/master/trackers_best.txt", 42 | ] 43 | 44 | version = "v1.1" 45 | 46 | STATIC_TRACKERS_LIST = """ 47 | udp://tracker.coppersurfer.tk:6969/announce 48 | http://tracker.internetwarriors.net:1337/announce 49 | udp://tracker.internetwarriors.net:1337/announce 50 | udp://tracker.opentrackr.org:1337/announce 51 | udp://9.rarbg.to:2710/announce 52 | udp://exodus.desync.com:6969/announce 53 | udp://explodie.org:6969/announce 54 | http://explodie.org:6969/announce 55 | udp://public.popcorn-tracker.org:6969/announce 56 | 
udp://tracker.vanitycore.co:6969/announce 57 | http://tracker.vanitycore.co:6969/announce 58 | udp://tracker1.itzmx.com:8080/announce 59 | http://tracker1.itzmx.com:8080/announce 60 | udp://ipv4.tracker.harry.lu:80/announce 61 | udp://tracker.torrent.eu.org:451/announce 62 | udp://tracker.tiny-vps.com:6969/announce 63 | udp://tracker.port443.xyz:6969/announce 64 | udp://open.stealth.si:80/announce 65 | udp://open.demonii.si:1337/announce 66 | udp://denis.stalker.upeer.me:6969/announce 67 | udp://bt.xxx-tracker.com:2710/announce 68 | http://tracker.port443.xyz:6969/announce 69 | udp://tracker2.itzmx.com:6961/announce 70 | udp://retracker.lanta-net.ru:2710/announce 71 | http://tracker2.itzmx.com:6961/announce 72 | http://tracker4.itzmx.com:2710/announce 73 | http://tracker3.itzmx.com:6961/announce 74 | http://tracker.city9x.com:2710/announce 75 | http://torrent.nwps.ws:80/announce 76 | http://retracker.telecom.by:80/announce 77 | http://open.acgnxtracker.com:80/announce 78 | wss://ltrackr.iamhansen.xyz:443/announce 79 | udp://zephir.monocul.us:6969/announce 80 | udp://tracker.toss.li:6969/announce 81 | http://opentracker.xyz:80/announce 82 | http://open.trackerlist.xyz:80/announce 83 | udp://tracker.swateam.org.uk:2710/announce 84 | udp://tracker.kamigami.org:2710/announce 85 | udp://tracker.iamhansen.xyz:2000/announce 86 | udp://tracker.ds.is:6969/announce 87 | udp://pubt.in:2710/announce 88 | https://tracker.fastdownload.xyz:443/announce 89 | https://opentracker.xyz:443/announce 90 | http://tracker.torrentyorg.pl:80/announce 91 | http://t.nyaatracker.com:80/announce 92 | http://open.acgtracker.com:1096/announce 93 | wss://tracker.openwebtorrent.com:443/announce 94 | wss://tracker.fastcast.nz:443/announce 95 | wss://tracker.btorrent.xyz:443/announce 96 | udp://tracker.justseed.it:1337/announce 97 | udp://thetracker.org:80/announce 98 | udp://packages.crunchbangplusplus.org:6969/announce 99 | https://1337.abcvg.info:443/announce 100 | 
http://tracker.tfile.me:80/announce.php 101 | http://tracker.tfile.me:80/announce 102 | http://tracker.tfile.co:80/announce 103 | http://retracker.mgts.by:80/announce 104 | http://peersteers.org:80/announce 105 | http://fxtt.ru:80/announce 106 | """ 107 | 108 | ########## FUNCTIONS ########## 109 | def generate_trackers_list(): 110 | # Use a function attribute to store the state (equivalent to a static variable) 111 | if not hasattr(generate_trackers_list, "trackers_list_cache"): 112 | generate_trackers_list.trackers_list_cache = None 113 | 114 | # Check to see if the list is already populated 115 | if generate_trackers_list.trackers_list_cache is not None: 116 | print("Trackers list already populated. Skipping generation.") 117 | return generate_trackers_list.trackers_list_cache 118 | 119 | # If the list is not populated, generate a new one 120 | if not live_trackers_list_urls or len(live_trackers_list_urls) == 0: 121 | print("The URL list is empty. Using the static tracker list.") 122 | generate_trackers_list.trackers_list_cache = STATIC_TRACKERS_LIST.strip().split("\n") 123 | return generate_trackers_list.trackers_list_cache 124 | 125 | trackers_list = "" 126 | errors_count = 0 127 | 128 | for url in live_trackers_list_urls: 129 | try: 130 | response = requests.get(url) 131 | response.raise_for_status() 132 | trackers_list += response.text + "\n" 133 | except requests.RequestException as e: 134 | errors_count += 1 135 | print(f"Error downloading from {url}: {e}") 136 | 137 | if errors_count == len(live_trackers_list_urls): 138 | print("All URLs failed. 
Using the static tracker list.") 139 | generate_trackers_list.trackers_list_cache = STATIC_TRACKERS_LIST.strip().split("\n") 140 | else: 141 | generate_trackers_list.trackers_list_cache = trackers_list.strip().split("\n") 142 | 143 | return generate_trackers_list.trackers_list_cache 144 | 145 | def get_qbittorrent_session(qbt_host, qbt_port, qbt_username, qbt_password): 146 | url = f"{qbt_host}:{qbt_port}" 147 | session = requests.Session() 148 | try: 149 | response = session.post(f'{url}/api/v2/auth/login', data={'username': qbt_username, 'password': qbt_password}) 150 | response.raise_for_status() 151 | return session 152 | except requests.exceptions.RequestException as e: 153 | print(f"Error during authentication: {e}") 154 | return None 155 | 156 | def get_torrent_trackers(session, hash): 157 | try: 158 | response = session.get( 159 | f"{qbt_host}:{qbt_port}/api/v2/torrents/trackers?hash={hash}", 160 | ) 161 | response.raise_for_status() 162 | return json.loads(response.text) 163 | except Exception as e: 164 | print(f"An error occurred while getting torrent trackers: {e}") 165 | return None 166 | 167 | def inject_trackers(hash, session): 168 | print("Injecting... ", end="") 169 | 170 | trackers_data = get_torrent_trackers(session, hash) 171 | if trackers_data is None: 172 | print(" Error getting torrent trackers... ") 173 | torrent_trackers = [tracker["url"] for tracker in trackers_data[3:]] 174 | 175 | remove_trackers(hash, torrent_trackers, session) 176 | 177 | trackers_list = generate_trackers_list() 178 | 179 | if clean_existing_trackers: 180 | print(" But before a quick cleaning the existing trackers... 
") 181 | trackers_list = sorted(set(trackers_list)) 182 | else: 183 | trackers_list = sorted(set(trackers_list + torrent_trackers)) 184 | 185 | trackers_list = [tracker for tracker in trackers_list if tracker.strip()] 186 | 187 | number_of_trackers_in_list = len(trackers_list) 188 | 189 | # Format trackers into tiers 190 | formatted_trackers = "" 191 | for tracker in trackers_list: 192 | formatted_trackers += f"{tracker}\n\n" 193 | 194 | # Remove the last newlines if any 195 | formatted_trackers = formatted_trackers.rstrip("\n") 196 | 197 | response = session.post( 198 | f"{qbt_host}:{qbt_port}/api/v2/torrents/addTrackers", 199 | data={"hash": hash, "urls": formatted_trackers}, 200 | ) 201 | response.raise_for_status() 202 | 203 | print(f"done, injected {number_of_trackers_in_list} tracker{'s' if number_of_trackers_in_list > 1 else ''}!") 204 | 205 | def get_torrent_list(session): 206 | response = session.get( 207 | f"{qbt_host}:{qbt_port}/api/v2/torrents/info", 208 | ) 209 | response.raise_for_status() 210 | return json.loads(response.text) 211 | 212 | def hash_check(hash): 213 | if not hash or any(c not in '0123456789ABCDEFabcdef' for c in hash): 214 | return False 215 | return len(hash) in (32, 40) 216 | 217 | def remove_trackers(hash, urls, session): 218 | urls_string = "|".join(urls) 219 | response = session.post( 220 | f"{qbt_host}:{qbt_port}/api/v2/torrents/removeTrackers", 221 | data={"hash": hash, "urls": urls_string}, 222 | ) 223 | response.raise_for_status() 224 | 225 | def check_torrent_privacy(session, torrent_hash): 226 | try: 227 | response = session.get( 228 | f"{qbt_host}:{qbt_port}/api/v2/torrents/properties?hash={torrent_hash}", 229 | ) 230 | response.raise_for_status() 231 | private_check = json.loads(response.text)["is_private"] 232 | return private_check 233 | except Exception as e: 234 | print(f"An error occurred while checking torrent privacy: {e}") 235 | return None # Or any other value to signify an error 236 | 237 | def parse_arguments(): 
238 | import argparse 239 | 240 | parser = argparse.ArgumentParser(description="How to Inject trackers into qBittorrent") 241 | parser.add_argument("-a", action="store_true", help="Inject trackers to all torrent in qBittorrent, this not require any extra information") 242 | parser.add_argument("-c", action="store_true", help="Clean all the existing trackers before the injection, this not require any extra information") 243 | parser.add_argument("-f", action="store_true", help="Force the injection of the trackers inside the private torrent too, this not require any extra information") 244 | parser.add_argument("-l", action="store_true", help="Print the list of the torrent where you can inject trackers, this not require any extra information") 245 | parser.add_argument("-n", action="append", help="Specify the torrent name or part of it, for example -n foo or -n 'foo bar'") 246 | parser.add_argument("-s", action="append", help="Specify the exact category name, for example -s foo or -s 'foo bar'. If -s is passed empty, \"\", the \"Uncategorized\" category will be used") 247 | 248 | args = parser.parse_args() 249 | 250 | if not sys.stdin.isatty() and not any(os.path.abspath('.').lower().startswith(p) for p in ["qbittorrent"]): 251 | if args.f and len(sys.argv) == 2: 252 | print("Don't use only -f, you need to specify also the torrent!") 253 | sys.exit(1) 254 | else: 255 | if not any(arg.startswith('-') for arg in sys.argv[1:]): 256 | print("Arguments must be passed with - in front, like -n foo. 
Check instructions") 257 | parser.print_help() 258 | sys.exit(1) 259 | 260 | if len(sys.argv) == 1: 261 | parser.print_help() 262 | sys.exit(0) 263 | 264 | if args.n: 265 | for name in args.n: 266 | if not name.strip(): 267 | print("One or more arguments for -n not valid, try again") 268 | sys.exit(1) 269 | 270 | return args 271 | 272 | ########## MAIN ########## 273 | if __name__ == "__main__": 274 | 275 | check_dependencies() 276 | 277 | import os 278 | import sys 279 | import requests 280 | import json 281 | import urllib.parse 282 | import time 283 | 284 | args = parse_arguments() 285 | 286 | all_torrent = args.a 287 | clean_existing_trackers = args.c 288 | applytheforce = args.f 289 | list_torrents = args.l 290 | tor_arg_names = args.n or [] 291 | tor_categories = args.s or [] 292 | 293 | if not sys.stdin.isatty() and not any(os.path.abspath('.').lower().startswith(p) for p in ["qbittorrent"]): 294 | event_types = ["sonarr_eventtype", "radarr_eventtype", "lidarr_eventtype", "readarr_eventtype"] 295 | download_clients = ["sonarr_download_client", "radarr_download_client", "lidarr_download_client", "readarr_download_client"] 296 | download_ids = ["sonarr_download_id", "radarr_download_id", "lidarr_download_id", "readarr_download_id"] 297 | 298 | if any(os.environ.get(event_type) == "Test" for event_type in event_types): 299 | print("Test in progress... 
Good-bye!") 300 | sys.exit(0) 301 | 302 | if exclude_download_client: 303 | exclude_clients = exclude_download_client.split(',') 304 | exclude_clients = [client.strip() for client in exclude_clients if client.strip()] 305 | 306 | # Check clients to exclude only if exclude_download_client is not empty 307 | for download_client in download_clients: 308 | client = os.environ.get(download_client) 309 | if client and client in exclude_clients: 310 | print(f"Exiting because {download_client} matches an excluded client: {client}") 311 | sys.exit(4) 312 | 313 | session = get_qbittorrent_session(qbt_host, qbt_port, qbt_username, qbt_password) 314 | 315 | if session: 316 | # Controlling download_ids 317 | for download_id in download_ids: 318 | hash = os.environ.get(download_id) 319 | if hash: 320 | print(f"{download_id.replace('_download_id', '').capitalize()} variable found -> {hash}") 321 | hash = hash.lower() 322 | if hash_check(hash): 323 | print(f"I'll wait 5s to be sure ...") 324 | time.sleep(5) 325 | 326 | torrent_list = get_torrent_list(session) 327 | 328 | private_check = check_torrent_privacy(session, hash) 329 | 330 | if private_check and not (ignore_private or applytheforce): 331 | trackers_data = get_torrent_trackers(session, hash) 332 | 333 | if trackers_data is None: 334 | print("Error getting torrent trackers.") 335 | else: 336 | private_tracker_name = trackers_data[3]["url"].split("//")[1].split("@")[-1].split(":")[0] 337 | print(f"< Private tracker found -> {private_tracker_name} <- I'll not add any extra tracker >") 338 | else: 339 | if ignore_private and not applytheforce: 340 | print("ignore_private set to true, I'll inject trackers anyway") 341 | elif applytheforce: 342 | print("Force mode is active, I'll inject trackers anyway") 343 | else: 344 | print("The torrent is not private, I'll inject trackers on it") 345 | inject_trackers(hash, session) 346 | break 347 | else: 348 | print("No valid hash found for the torrent, I'll exit") 349 | sys.exit(3) 350 
| else: 351 | print("Failed to authenticate with qBittorrent.") 352 | else: 353 | session = get_qbittorrent_session(qbt_host, qbt_port, qbt_username, qbt_password) 354 | 355 | if session: 356 | if list_torrents: 357 | torrent_list = get_torrent_list(session) 358 | print(f"\n{len(torrent_list)} active torrent{'s' if len(torrent_list) > 1 else ''}:") 359 | for torrent in torrent_list: 360 | print(f"Name: {torrent['name']}, Category: {torrent['category'] if torrent['category'] else 'Uncategorized'}") 361 | sys.exit(0) 362 | 363 | torrent_list = get_torrent_list(session) 364 | 365 | torrent_name_array = [] 366 | torrent_hash_array = [] 367 | 368 | if all_torrent: 369 | for torrent in torrent_list: 370 | torrent_name_array.append(torrent["name"]) 371 | torrent_hash_array.append(torrent["hash"]) 372 | else: 373 | if tor_arg_names and tor_categories: 374 | for name in tor_arg_names: 375 | for category in tor_categories: 376 | filtered_torrents = [t for t in torrent_list if t["category"].lower() == category.lower() and name.lower() in t["name"].lower()] 377 | if filtered_torrents: 378 | print(f"\nFor the name ### {name} ### in category ### {'Uncategorized' if category == '' else category} ###") 379 | print(f"I found {len(filtered_torrents)} torrent{'s' if len(filtered_torrents) > 1 else ''}:") 380 | for torrent in filtered_torrents: 381 | print(torrent["name"]) 382 | torrent_name_array.append(torrent["name"]) 383 | torrent_hash_array.append(torrent["hash"]) 384 | else: 385 | print(f"\nI didn't find a torrent with name ### {name} ### in category ### {'Uncategorized' if category == '' else category} ###") 386 | elif tor_arg_names: 387 | for name in tor_arg_names: 388 | filtered_torrents = [t for t in torrent_list if name.lower() in t["name"].lower()] 389 | if filtered_torrents: 390 | print(f"\nFor the name ### {name} ###") 391 | print(f"I found {len(filtered_torrents)} torrent{'s' if len(filtered_torrents) > 1 else ''}:") 392 | for torrent in filtered_torrents: 393 | 
print(torrent["name"]) 394 | torrent_name_array.append(torrent["name"]) 395 | torrent_hash_array.append(torrent["hash"]) 396 | else: 397 | print(f"\nI didn't find a torrent with this part of the text: {name}") 398 | else: 399 | for category in tor_categories: 400 | filtered_torrents = [t for t in torrent_list if t["category"].lower() == category.lower()] 401 | if filtered_torrents: 402 | print(f"\nFor category ### {'Uncategorized' if category == '' else category} ###") 403 | print(f"I found {len(filtered_torrents)} torrent{'s' if len(filtered_torrents) > 1 else ''}:") 404 | for torrent in filtered_torrents: 405 | print(torrent["name"]) 406 | torrent_name_array.append(torrent["name"]) 407 | torrent_hash_array.append(torrent["hash"]) 408 | else: 409 | print(f"\nI didn't find a torrent in the category: {'Uncategorized' if category == '' else category}") 410 | 411 | if torrent_name_array: 412 | for i, name in enumerate(torrent_name_array): 413 | print(f"\nFor the Torrent: {name}") 414 | 415 | if ignore_private or applytheforce: 416 | if applytheforce: 417 | print("Force mode is active, I'll inject trackers anyway") 418 | else: 419 | print("ignore_private set to true, I'll inject trackers anyway") 420 | inject_trackers(torrent_hash_array[i], session) 421 | else: 422 | private_check = check_torrent_privacy(session, torrent_hash_array[i]) 423 | if private_check: 424 | trackers_data = get_torrent_trackers(session, torrent_hash_array[i]) 425 | if trackers_data is None: 426 | print("Error getting torrent trackers.") 427 | else: 428 | private_tracker_name = trackers_data[3]["url"].split("//")[1].split("@")[-1].split(":")[0] 429 | print(f"< Private tracker found -> {private_tracker_name} <- I'll not add any extra tracker >") 430 | else: 431 | print("The torrent is not private, I'll inject trackers on it") 432 | inject_trackers(torrent_hash_array[i], session) 433 | else: 434 | print("Exiting") 435 | sys.exit(1) 436 | else: 437 | print("Failed to authenticate with qBittorrent.") 
438 | -------------------------------------------------------------------------------- /AddqBittorrentTrackers/AddqBittorrentTrackers.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | ########## CONFIGURATIONS ########## 4 | # Host on which qBittorrent runs 5 | qbt_host="http://10.0.0.100" 6 | # Port -> the same port that is inside qBittorrent option -> Web UI -> Web User Interface 7 | qbt_port="8081" 8 | # Username to access to Web UI 9 | qbt_username="admin" 10 | # Password to access to Web UI 11 | qbt_password="adminadmin" 12 | 13 | # If true (lowercase) the script will inject trackers inside private torrent too (not a good idea) 14 | ignore_private=false 15 | 16 | # If true (lowercase) the script will remove all existing trackers before inject the new one, this functionality will works only for public trackers 17 | clean_existing_trackers=false 18 | 19 | # Configure here your trackers list 20 | declare -a live_trackers_list_urls=( 21 | "https://newtrackon.com/api/stable" 22 | "https://trackerslist.com/best.txt" 23 | "https://trackerslist.com/http.txt" 24 | "https://raw.githubusercontent.com/ngosang/trackerslist/master/trackers_best.txt" 25 | ) 26 | ########## CONFIGURATIONS ########## 27 | 28 | jq_executable="$(command -v jq)" 29 | curl_executable="$(command -v curl)" 30 | auto_tor_grab=0 31 | test_in_progress=0 32 | applytheforce=0 33 | all_torrent=0 34 | emptycategory=0 35 | 36 | if [[ -z $jq_executable ]]; then 37 | echo -e "\n\e[0;91;1mFail on jq. Aborting.\n\e[0m" 38 | echo "You can find it here: https://stedolan.github.io/jq/" 39 | echo "Or you can install it with -> sudo apt install jq" 40 | exit 1 41 | fi 42 | 43 | if [[ -z $curl_executable ]]; then 44 | echo -e "\n\e[0;91;1mFail on curl. 
Aborting.\n\e[0m" 45 | echo "You can install it with -> sudo apt install curl" 46 | exit 2 47 | fi 48 | 49 | if [[ $qbt_host == "https://"* ]]; then 50 | curl_executable="${curl_executable} --insecure" 51 | fi 52 | 53 | version="v3.16" 54 | 55 | STATIC_TRACKERS_LIST=$( 56 | cat <<'EOL' 57 | udp://tracker.coppersurfer.tk:6969/announce 58 | http://tracker.internetwarriors.net:1337/announce 59 | udp://tracker.internetwarriors.net:1337/announce 60 | udp://tracker.opentrackr.org:1337/announce 61 | udp://9.rarbg.to:2710/announce 62 | udp://exodus.desync.com:6969/announce 63 | udp://explodie.org:6969/announce 64 | http://explodie.org:6969/announce 65 | udp://public.popcorn-tracker.org:6969/announce 66 | udp://tracker.vanitycore.co:6969/announce 67 | http://tracker.vanitycore.co:6969/announce 68 | udp://tracker1.itzmx.com:8080/announce 69 | http://tracker1.itzmx.com:8080/announce 70 | udp://ipv4.tracker.harry.lu:80/announce 71 | udp://tracker.torrent.eu.org:451/announce 72 | udp://tracker.tiny-vps.com:6969/announce 73 | udp://tracker.port443.xyz:6969/announce 74 | udp://open.stealth.si:80/announce 75 | udp://open.demonii.si:1337/announce 76 | udp://denis.stalker.upeer.me:6969/announce 77 | udp://bt.xxx-tracker.com:2710/announce 78 | http://tracker.port443.xyz:6969/announce 79 | udp://tracker2.itzmx.com:6961/announce 80 | udp://retracker.lanta-net.ru:2710/announce 81 | http://tracker2.itzmx.com:6961/announce 82 | http://tracker4.itzmx.com:2710/announce 83 | http://tracker3.itzmx.com:6961/announce 84 | http://tracker.city9x.com:2710/announce 85 | http://torrent.nwps.ws:80/announce 86 | http://retracker.telecom.by:80/announce 87 | http://open.acgnxtracker.com:80/announce 88 | wss://ltrackr.iamhansen.xyz:443/announce 89 | udp://zephir.monocul.us:6969/announce 90 | udp://tracker.toss.li:6969/announce 91 | http://opentracker.xyz:80/announce 92 | http://open.trackerlist.xyz:80/announce 93 | udp://tracker.swateam.org.uk:2710/announce 94 | udp://tracker.kamigami.org:2710/announce 
95 | udp://tracker.iamhansen.xyz:2000/announce 96 | udp://tracker.ds.is:6969/announce 97 | udp://pubt.in:2710/announce 98 | https://tracker.fastdownload.xyz:443/announce 99 | https://opentracker.xyz:443/announce 100 | http://tracker.torrentyorg.pl:80/announce 101 | http://t.nyaatracker.com:80/announce 102 | http://open.acgtracker.com:1096/announce 103 | wss://tracker.openwebtorrent.com:443/announce 104 | wss://tracker.fastcast.nz:443/announce 105 | wss://tracker.btorrent.xyz:443/announce 106 | udp://tracker.justseed.it:1337/announce 107 | udp://thetracker.org:80/announce 108 | udp://packages.crunchbangplusplus.org:6969/announce 109 | https://1337.abcvg.info:443/announce 110 | http://tracker.tfile.me:80/announce.php 111 | http://tracker.tfile.me:80/announce 112 | http://tracker.tfile.co:80/announce 113 | http://retracker.mgts.by:80/announce 114 | http://peersteers.org:80/announce 115 | http://fxtt.ru:80/announce 116 | EOL 117 | ) 118 | 119 | ########## FUNCTIONS ########## 120 | generate_trackers_list () { 121 | # If trackers_list is already populated, do nothing and return 122 | if [[ -n "$trackers_list" ]]; then 123 | echo "Trackers list already populated. Skipping generation." 124 | return 125 | fi 126 | 127 | trackers_list="" # Local variable for dynamic trackers 128 | all_failed=true # Assume that all URLs fail 129 | 130 | # 1. Check if the list of URLs is empty 131 | if [[ ${#live_trackers_list_urls[@]} -eq 0 ]]; then 132 | echo "No live tracker URLs provided. Using the static list." 133 | trackers_list="$STATIC_TRACKERS_LIST" 134 | return 135 | fi 136 | 137 | # 2. Attempts to download trackers from each URL 138 | for url in "${live_trackers_list_urls[@]}"; do 139 | echo "Fetching trackers from: $url" 140 | # Download data, silently 141 | new_trackers=$($curl_executable -sS "$url") 142 | if [[ $? 
-eq 0 && -n "$new_trackers" ]]; then 143 | # If the download was successful, add the new trackers to trackers_list 144 | trackers_list+="$new_trackers"$'\n' 145 | all_failed=false # At least one URL worked 146 | else 147 | # If the download fails, report the error but continue 148 | echo "Warning: Failed to fetch trackers from $url" 149 | fi 150 | done 151 | 152 | # 3. Check if all downloads have failed 153 | if [[ "$all_failed" == true ]]; then 154 | echo "All live tracker URLs failed. Using the static list." 155 | trackers_list="$STATIC_TRACKERS_LIST" 156 | fi 157 | } 158 | 159 | inject_trackers () { 160 | echo -ne "\e[0;36;1mInjecting... \e[0;36m" 161 | 162 | torrent_trackers=$(echo "$qbt_cookie" | $curl_executable --silent --fail --show-error \ 163 | --cookie - \ 164 | --request GET "${qbt_host}:${qbt_port}/api/v2/torrents/trackers?hash=${1}" | $jq_executable --raw-output '.[] | .url' | tail -n +4) 165 | 166 | remove_trackers $1 "${torrent_trackers//$'\n'/|}" 167 | 168 | if [[ $clean_existing_trackers == true ]]; then 169 | echo -e " \e[32mBut before a quick cleaning the existing trackers... " 170 | trackers_list=$(echo "$trackers_list" | sort | uniq) 171 | else 172 | trackers_list=$(echo "$trackers_list"$'\n'"$torrent_trackers" | sort | uniq) 173 | fi 174 | 175 | trackers_list=$(sed '/^$/d' <<< "$trackers_list") 176 | 177 | number_of_trackers_in_list=$(echo "$trackers_list" | wc -l) 178 | 179 | urls=${trackers_list//$'\n'/%0A%0A} 180 | 181 | echo "$qbt_cookie" | $curl_executable --silent --fail --show-error \ 182 | -d "hash=${1}&urls=$urls" \ 183 | --cookie - \ 184 | --request POST "${qbt_host}:${qbt_port}/api/v2/torrents/addTrackers" 185 | 186 | echo -e "\e[32mdone, injected $number_of_trackers_in_list trackers!" 
187 | } 188 | 189 | get_torrent_list () { 190 | get_cookie 191 | torrent_list=$(echo "$qbt_cookie" | $curl_executable --silent --fail --show-error \ 192 | --cookie - \ 193 | --request GET "${qbt_host}:${qbt_port}/api/v2/torrents/info") 194 | } 195 | 196 | url_encode() { 197 | local string="${1}" 198 | 199 | # Check if xxd is available 200 | if command -v xxd >/dev/null 2>&1; then 201 | # If xxd is available, use xxd for encoding 202 | printf '%s' "$string" | xxd -p | sed 's/\(..\)/%\1/g' | tr -d '\n' 203 | else 204 | # If jq is available, use jq for encoding 205 | jq -nr --arg s "$string" '$s|@uri' 206 | fi 207 | } 208 | 209 | get_cookie () { 210 | encoded_username=$(url_encode "$qbt_username") 211 | encoded_password=$(url_encode "$qbt_password") 212 | 213 | # If encoding fails, exit the function 214 | if [ $? -ne 0 ]; then 215 | echo "Error during URL encoding" >&2 216 | return 1 217 | fi 218 | 219 | qbt_cookie=$($curl_executable --silent --fail --show-error \ 220 | --header "Referer: ${qbt_host}:${qbt_port}" \ 221 | --cookie-jar - \ 222 | --data "username=${encoded_username}&password=${encoded_password}" ${qbt_host}:${qbt_port}/api/v2/auth/login) 223 | } 224 | 225 | hash_check() { 226 | case $1 in 227 | ( *[!0-9A-Fa-f]* | "" ) return 1 ;; 228 | ( * ) 229 | case ${#1} in 230 | ( 32 | 40 ) return 0 ;; 231 | ( * ) return 1 ;; 232 | esac 233 | esac 234 | } 235 | 236 | remove_trackers () { 237 | hash="$1" 238 | single_url="$2" 239 | echo "$qbt_cookie" | $curl_executable --silent --fail --show-error \ 240 | -d "hash=${hash}&urls=${single_url}" \ 241 | --cookie - \ 242 | --request POST "${qbt_host}:${qbt_port}/api/v2/torrents/removeTrackers" 243 | } 244 | 245 | wait() { 246 | w=$1 247 | echo "I'll wait ${w}s to be sure ..." 248 | while [ $w -gt 0 ]; do 249 | echo -ne "$w\033[0K\r" 250 | sleep 1 251 | w=$((w-1)) 252 | done 253 | } 254 | ########## FUNCTIONS ########## 255 | 256 | if [ -t 1 ] || [[ "$PWD" == *qbittorrent* ]] ; then 257 | if [[ ! 
$@ =~ ^\-.+ ]]; then 258 | echo "Arguments must be passed with - in front, like -n foo. Check instructions" 259 | echo "" 260 | $0 -h 261 | exit 262 | fi 263 | 264 | [ $# -eq 0 ] && $0 -h 265 | 266 | if [ $# -eq 1 ] && [ $1 == "-f" ]; then 267 | echo "Don't use only -f, you need to specify also the torrent!" 268 | exit 269 | fi 270 | 271 | while getopts ":acflhn:s:" opt; do 272 | case ${opt} in 273 | a ) # If used inject trackers to all torrent. 274 | all_torrent=1 275 | ;; 276 | c ) # If used remove all the existing trackers before injecting the new ones. 277 | clean_existing_trackers=true 278 | ;; 279 | f ) # If used force the injection also in private trackers. 280 | applytheforce=1 281 | ;; 282 | l ) # Print the list of the torrent where you can inject trackers. 283 | get_torrent_list 284 | echo -e "\n\e[0;32;1mCurrent torrents:\e[0;32m" 285 | echo "$torrent_list" | $jq_executable --raw-output '.[] .name' 286 | exit 287 | ;; 288 | n ) # Specify the name of the torrent example -n foo or -n "foo bar", multiple -n can be used. 289 | tor_arg_names+=("$OPTARG") 290 | ;; 291 | s ) # Specify the category of the torrent example -s foo or -s "foo bar", multiple -s can be used. If -s is passed without arguments, the "default" categories will be used 292 | tor_categories+=("$OPTARG") 293 | ;; 294 | : ) 295 | echo "Invalid option: -${OPTARG} requires an argument" 1>&2 296 | exit 0 297 | ;; 298 | \? ) 299 | echo "Unknow option: -${OPTARG}" 1>&2 300 | exit 1 301 | ;; 302 | h | * ) # Display help. 
303 | echo "Usage:" 304 | echo "$0 -a Inject trackers to all torrent in qBittorrent, this not require any extra information" 305 | echo "$0 -c Clean all the existing trackers before the injection, this not require any extra information" 306 | echo "$0 -f Force the injection of the trackers inside the private torrent too, this not require any extra information" 307 | echo "$0 -l Print the list of the torrent where you can inject trackers, this not require any extra information" 308 | echo "$0 -n Specify the torrent name or part of it, for example -n foo or -n 'foo bar'" 309 | echo "$0 -s Specify the exact category name, for example -s foo or -s 'foo bar'. If -s is passed empty, \"\", the \"Uncategorized\" category will be used" 310 | echo "$0 -h Display this help" 311 | echo "" 312 | echo "NOTE:" 313 | echo "It's possible to specify more than -n in one single command" 314 | echo "It's possible to specify more than -s in one single command" 315 | echo "Is also possible use -n foo -s bar to select specific name in specific category" 316 | echo "Just remember that if you set -a, is useless to add any extra arguments, like -n, but -f can always be used" 317 | exit 2 318 | ;; 319 | esac 320 | done 321 | shift $((OPTIND -1)) 322 | else 323 | if [[ -n "${sonarr_download_id}" ]] || [[ -n "${radarr_download_id}" ]] || [[ -n "${lidarr_download_id}" ]] || [[ -n "${readarr_download_id}" ]]; then 324 | #wait 5 325 | if [[ -n "${sonarr_download_id}" ]]; then 326 | echo "Sonarr variable found -> $sonarr_download_id" 327 | hash=$(echo "$sonarr_download_id" | awk '{print tolower($0)}') 328 | fi 329 | 330 | if [[ -n "${radarr_download_id}" ]]; then 331 | echo "Radarr variable found -> $radarr_download_id" 332 | hash=$(echo "$radarr_download_id" | awk '{print tolower($0)}') 333 | fi 334 | 335 | if [[ -n "${lidarr_download_id}" ]]; then 336 | echo "Lidarr variable found -> $lidarr_download_id" 337 | hash=$(echo "$lidarr_download_id" | awk '{print tolower($0)}') 338 | fi 339 | 340 | if 
[[ -n "${readarr_download_id}" ]]; then 341 | echo "Readarr variable found -> $readarr_download_id" 342 | hash=$(echo "$readarr_download_id" | awk '{print tolower($0)}') 343 | fi 344 | 345 | hash_check "${hash}" 346 | if [[ $? -ne 0 ]]; then 347 | echo "No valid hash found for the torrent, I'll exit" 348 | exit 3 349 | fi 350 | auto_tor_grab=1 351 | fi 352 | 353 | if [[ $sonarr_eventtype == "Test" ]] || [[ $radarr_eventtype == "Test" ]] || [[ $lidarr_eventtype == "Test" ]] || [[ $readarr_eventtype == "Test" ]]; then 354 | echo "Test in progress..." 355 | test_in_progress=1 356 | fi 357 | fi 358 | 359 | for i in "${tor_arg_names[@]}"; do 360 | if [[ -z "${i// }" ]]; then 361 | echo "one or more argument for -n not valid, try again" 362 | exit 363 | fi 364 | done 365 | 366 | if [ $test_in_progress -eq 1 ]; then 367 | echo "Good-bye!" 368 | elif [ $auto_tor_grab -eq 0 ]; then # manual run 369 | get_torrent_list 370 | 371 | if [ $all_torrent -eq 1 ]; then 372 | while IFS= read -r line; do 373 | torrent_name_array+=("$line") 374 | done < <(echo $torrent_list | $jq_executable --raw-output '.[] | .name') 375 | 376 | while IFS= read -r line; do 377 | torrent_hash_array+=("$line") 378 | done < <(echo $torrent_list | $jq_executable --raw-output '.[] | .hash') 379 | else 380 | if [[ ${#tor_arg_names[@]} -gt 0 && ${#tor_categories[@]} -gt 0 ]]; then 381 | for name in "${tor_arg_names[@]}"; do 382 | for category in "${tor_categories[@]}"; do 383 | torrent_name_list=$(echo "$torrent_list" | $jq_executable --arg category "$category" --arg name "$name" --raw-output '.[] | select(.category | ascii_downcase == ($category | ascii_downcase)) | select(.name | ascii_downcase | contains($name | ascii_downcase)) | .name') 384 | 385 | if [ -n "$torrent_name_list" ]; then # not empty 386 | torrent_name_check=1 387 | 388 | if [[ $category == "" ]]; then 389 | echo -e "\n\e[0;32;1mFor the name ### $name ### in category ### Uncategorized ###\e[0;32m" 390 | else 391 | echo -e "\n\e[0;32;1mFor 
the name ### $name ### in category ### $category ###\e[0;32m" 392 | fi 393 | 394 | echo -e "\e[0;32;1mI found the following torrent(s):\e[0;32m" 395 | echo "$torrent_name_list" 396 | else 397 | torrent_name_check=0 398 | fi 399 | 400 | if [ $torrent_name_check -eq 0 ]; then 401 | if [[ $category == "" ]]; then 402 | echo -e "\n\e[0;31;1mI didn't find a torrent with name ### $name ### in category ### Uncategorized ###\e[0m" 403 | else 404 | echo -e "\n\e[0;31;1mI didn't find a torrent with name ### $name ### in category ### $category ###\e[0m" 405 | fi 406 | 407 | shift 408 | continue 409 | else 410 | while read -r single_found; do 411 | torrent_name_array+=("$single_found") 412 | hash=$(echo "$torrent_list" | $jq_executable --arg single "$single_found" --raw-output '.[] | select(.name == "\($single)") | .hash') 413 | torrent_hash_array+=("$hash") 414 | done <<< "$torrent_name_list" 415 | fi 416 | done 417 | done 418 | elif [[ ${#tor_arg_names[@]} -gt 0 ]]; then 419 | for name in "${tor_arg_names[@]}"; do 420 | torrent_name_list=$(echo "$torrent_list" | $jq_executable --arg name "$name" --raw-output '.[] | select(.name | ascii_downcase | contains($name | ascii_downcase)) | .name') #possible fix for ONIGURUMA regex libary 421 | 422 | if [ -n "$torrent_name_list" ]; then # not empty 423 | torrent_name_check=1 424 | echo -e "\n\e[0;32;1mFor the name ### $name ###\e[0;32m" 425 | echo -e "\e[0;32;1mI found the following torrent(s):\e[0;32m" 426 | echo "$torrent_name_list" 427 | else 428 | torrent_name_check=0 429 | fi 430 | 431 | if [ $torrent_name_check -eq 0 ]; then 432 | echo -e "\n\e[0;31;1mI didn't find a torrent with this part of the text: \e[21m$name\e[0m" 433 | shift 434 | continue 435 | else 436 | while read -r single_found; do 437 | torrent_name_array+=("$single_found") 438 | hash=$(echo "$torrent_list" | $jq_executable --arg single "$single_found" --raw-output '.[] | select(.name == "\($single)") | .hash') 439 | torrent_hash_array+=("$hash") 440 | done <<< 
"$torrent_name_list" 441 | fi 442 | done 443 | else 444 | for category in "${tor_categories[@]}"; do 445 | torrent_name_list=$(echo "$torrent_list" | $jq_executable --arg category "$category" --raw-output '.[] | select(.category | ascii_downcase == ($category | ascii_downcase)) | .name') 446 | 447 | if [ -n "$torrent_name_list" ]; then # not empty 448 | torrent_name_check=1 449 | 450 | if [[ $category == "" ]]; then 451 | echo -e "\n\e[0;32;1mFor category ### Uncategorized ###\e[0;32m" 452 | else 453 | echo -e "\n\e[0;32;1mFor category ### $category ###\e[0;32m" 454 | fi 455 | 456 | echo -e "\e[0;32;1mI found the following torrent(s):\e[0;32m" 457 | echo "$torrent_name_list" 458 | else 459 | torrent_name_check=0 460 | fi 461 | 462 | if [ $torrent_name_check -eq 0 ]; then 463 | echo -e "\n\e[0;31;1mI didn't find a torrent in the category: \e[21m$category\e[0m" 464 | shift 465 | continue 466 | else 467 | while read -r single_found; do 468 | torrent_name_array+=("$single_found") 469 | hash=$(echo "$torrent_list" | $jq_executable --arg single "$single_found" --raw-output '.[] | select(.name == "\($single)") | .hash') 470 | torrent_hash_array+=("$hash") 471 | done <<< "$torrent_name_list" 472 | fi 473 | done 474 | fi 475 | fi 476 | 477 | if [ ${#torrent_name_array[@]} -gt 0 ]; then 478 | echo "" 479 | for i in "${!torrent_name_array[@]}"; do 480 | echo -ne "\n\e[0;1;4;32mFor the Torrent: \e[0;4;32m" 481 | echo "${torrent_name_array[$i]}" 482 | 483 | if [[ $ignore_private == true ]] || [ $applytheforce -eq 1 ]; then # Inject anyway the trackers inside any torrent 484 | if [ $applytheforce -eq 1 ]; then 485 | echo -e "\e[0m\e[33mForce mode is active, I'll inject trackers anyway\e[0m" 486 | else 487 | echo -e "\e[0m\e[33mignore_private set to true, I'll inject trackers anyway\e[0m" 488 | fi 489 | generate_trackers_list 490 | inject_trackers ${torrent_hash_array[$i]} 491 | else 492 | private_check=$(echo "$qbt_cookie" | $curl_executable --silent --fail --show-error --cookie 
- --request GET "${qbt_host}:${qbt_port}/api/v2/torrents/properties?hash=$(echo "$torrent_list" | $jq_executable --raw-output --arg tosearch "${torrent_name_array[$i]}" '.[] | select(.name == "\($tosearch)") | .hash')" | $jq_executable --raw-output '.is_private') 493 | 494 | if [[ $private_check == true ]]; then 495 | private_tracker_name=$(echo "$qbt_cookie" | $curl_executable --silent --fail --show-error --cookie - --request GET "${qbt_host}:${qbt_port}/api/v2/torrents/trackers?hash=$(echo "$torrent_list" | $jq_executable --raw-output --arg tosearch "${torrent_name_array[$i]}" '.[] | select(.name == "\($tosearch)") | .hash')" | $jq_executable --raw-output '.[3] | .url' | sed -e 's/[^/]*\/\/\([^@]*@\)\?\([^:/]*\).*/\2/') 496 | echo -e "\e[31m< Private tracker found \e[0m\e[33m-> $private_tracker_name <- \e[0m\e[31mI'll not add any extra tracker >\e[0m" 497 | else 498 | echo -e "\e[0m\e[33mThe torrent is not private, I'll inject trackers on it\e[0m" 499 | generate_trackers_list 500 | inject_trackers ${torrent_hash_array[$i]} 501 | fi 502 | fi 503 | done 504 | else 505 | echo "No torrents found, exiting" 506 | fi 507 | else # auto_tor_grab active, so some *Arr 508 | wait 5 509 | get_torrent_list 510 | 511 | private_check=$(echo "$qbt_cookie" | $curl_executable --silent --fail --show-error --cookie - --request GET "${qbt_host}:${qbt_port}/api/v2/torrents/properties?hash=$hash" | $jq_executable --raw-output '.is_private') 512 | 513 | if [[ $private_check == true ]]; then 514 | private_tracker_name=$(echo "$qbt_cookie" | $curl_executable --silent --fail --show-error --cookie - --request GET "${qbt_host}:${qbt_port}/api/v2/torrents/trackers?hash=$hash" | $jq_executable --raw-output '.[3] | .url' | sed -e 's/[^/]*\/\/\([^@]*@\)\?\([^:/]*\).*/\2/') 515 | echo -e "\e[31m< Private tracker found \e[0m\e[33m-> $private_tracker_name <- \e[0m\e[31mI'll not add any extra tracker >\e[0m" 516 | else 517 | echo -e "\e[0m\e[33mThe torrent is not private, I'll inject trackers on 
it\e[0m" 518 | generate_trackers_list 519 | inject_trackers $hash 520 | fi 521 | fi 522 | -------------------------------------------------------------------------------- /AddqBittorrentTrackers/AddqBittorrentTrackers.sh.readme.md: -------------------------------------------------------------------------------- 1 | # AddqBittorrentTrackers.sh 2 | 3 | The purpose of this script is to inject trackers into your **qBittorrent downloads**. 4 | 5 | It may be executed manually or automatically with Radarr/Sonarr. 6 | 7 | This script works with the qBittorrent v4.1+ API. It may work with lower versions, but must be checked. Let me know if you use an earlier version and it works, so I can expand the version compatibility. 8 | 9 | To use this script you'll need: 10 | * [jq](https://stedolan.github.io/jq/). Check if `jq` is available for your distro with `sudo apt install jq` (or the appropriate package management tool) 11 | * Curl. Install it with `sudo apt install curl` 12 | 13 | * First make sure your Radarr/Sonarr user can execute the script with a process similar to this: 14 | * `chown USER:GROUP AddqBittorrentTrackers.sh` where `USER:GROUP` is the same user and group as qBittorrent. 15 | * Then be sure it is executable with: `chmod +x AddqBittorrentTrackers.sh` 16 | 17 | * Modify the script's `########## CONFIGURATIONS ##########` section: 18 | * `qbt_username` -> username to access the qBittorrent Web UI. 19 | * `qbt_password` -> password to access the qBittorrent Web UI. **Password MUST BE url encoded**, otherwise any special characters will break the curl request. 20 | * Note that if the script runs on the same device that runs qBittorrent, you can set `Bypass authentication for clients on localhost`. With this option set and when the script runs, the username and password are not required. 21 | * `qbt_host` -> if the script is on the same device as qBittorrent `http://localhost`, otherwise, set this to the remote device. 22 | * `qbt_port` -> is the Web UI port.
23 | * `live_trackers_list_urls` is the list of URLs from which the tracker lists are obtained. These lists are automatically generated. You can specify more than one URL, just follow the example in the file. 24 | * The script will automatically check if the torrent is private or public. 25 | 26 | Configuration is now complete. 27 | 28 | 29 | If you are a **Radarr and/or Sonarr user**, personally I: 30 | 1. Create a custom script (settings -> connect -> add notification -> Custom Script). 31 | 2. The name is not important. I use Add qBitTorrent Trackers, you can use any name you like. 32 | 3. Set "On Grab". 33 | 4. Inside Path field, point to the `AddqBittorrentTrackers.sh` script. 34 | 5. Save the custom script. 35 | 36 | Now, when _Radarr and/or Sonarr_ grabs a new torrent, the script will be automatically triggered and a custom tracker list will be added to the torrent. This is true only if the torrent is _not_ from a private tracker. 37 | 38 | To run the script manually, simply run `./AddqBittorrentTrackers.sh`. All the possible options will be shown. When calling the script, there are many options to add trackers to torrents. 39 | 40 | One note about configuration: if you want to use it manually, you must configure the username, password, host and port within the file. This is for simplicity. Otherwise I would have to insert four new options to be called every time manually, or "complicate" the script by checking for the possibility of a configuration file to be saved somewhere. If it is necessary I will do it, but for now I think it is easier to keep the necessary options hard coded.
41 | 42 | 43 | -------------------------------------------------------------------------------- /AudioMediaChecker/AudioMediaChecker.py: -------------------------------------------------------------------------------- 1 | import argparse 2 | import os 3 | import signal 4 | import sys 5 | import subprocess 6 | import json 7 | import random 8 | import logging 9 | from faster_whisper import WhisperModel 10 | from pydub import AudioSegment 11 | import pycountry 12 | import io 13 | from pathlib import Path 14 | import psutil 15 | from tqdm import tqdm 16 | import datetime 17 | 18 | def _setup_logger(verbose=False): 19 | """ 20 | Configures and returns a logger with stream on stdout. 21 | 22 | Arguments: 23 | verbose (bool): if True, sets the DEBUG level, otherwise INFO. 24 | 25 | Returns: 26 | logging.Logger: the configured logger. 27 | """ 28 | logger = logging.getLogger(__name__) 29 | # Evita duplicazioni di log se il logger è già configurato 30 | if not logger.handlers: 31 | handler = logging.StreamHandler(sys.stdout) 32 | formatter = logging.Formatter('%(asctime)s - %(levelname)s - %(message)s') 33 | handler.setFormatter(formatter) 34 | logger.addHandler(handler) 35 | logger.propagate = False 36 | logger.setLevel(logging.DEBUG if verbose else logging.INFO) 37 | return logger 38 | 39 | def find_files(folder_path: Path, max_depth: int, current_level: int = 0): 40 | """ 41 | Search for files with the .mkv extension in 'folder_path' by exploring to the specified depth. 42 | 43 | Arguments: 44 | folder_path (Path): starting directory. 45 | max_depth (int): depth levels to explore; 46 | if 0, includes all subdirectories (unlimited recursion). 47 | If > 0, only sub-levels such that current_level < max_depth are explored, 48 | where the starting directory equals level 0. 49 | current_level (int): current level in recursion (not to be set from outside). 50 | 51 | Returns: 52 | list[Path]: files found. 
53 | """ 54 | # If max_depth is 0, we perform an unlimited recursive search. 55 | if max_depth == 0: 56 | return list(folder_path.rglob('*.mkv')) 57 | 58 | files = list(folder_path.glob('*.mkv')) 59 | 60 | # If we have not reached the maximum level, we explore the subfolders. 61 | if current_level < max_depth: 62 | for subdir in folder_path.iterdir(): 63 | if subdir.is_dir(): 64 | files.extend(find_files(subdir, max_depth, current_level + 1)) 65 | return files 66 | 67 | class AudioMediaChecker: 68 | def __init__(self, file_path, check_all_tracks=False, verbose=False, dry_run=False, 69 | force_language=None, confidence_threshold=65, model='base', gpu=False, logger=None): 70 | """ 71 | Initialize the media file controller. 72 | 73 | Arguments: 74 | file_path (str): file path. 75 | check_all_tracks (bool): if True, parses all audio tracks. 76 | verbose (bool): enable detailed logging. 77 | dry_run (bool): if True, does not perform edit operations. 78 | force_language (str): force language code in ISO 639-2 format. 79 | confidence_threshold (int): confidence threshold. 80 | model (str): whisper model to be used. 81 | gpu (bool): if True, use GPU. 82 | logger (logging.Logger): logger to use. 83 | """ 84 | self.verbose = verbose 85 | self.file_path = Path(file_path) 86 | self.check_all_tracks = check_all_tracks 87 | self.dry_run = dry_run 88 | self.force_language = force_language 89 | self.confidence_threshold = confidence_threshold 90 | self.interrupted = False 91 | self.whisper_model_size = model 92 | self.gpu = gpu 93 | self.logger = logger if logger else _setup_logger(verbose) 94 | 95 | if self.file_path.suffix.lower() != '.mkv': 96 | raise ValueError(f"Formato file non supportato: {self.file_path}") 97 | 98 | # Get the multimedia information once and save it. 
99 | self.media_info = self.get_media_info() 100 | self.total_duration = float(self.media_info['format']['duration']) 101 | 102 | self._validate_model_ram() 103 | 104 | def _validate_model_ram(self): 105 | """ 106 | Verify that the available RAM is sufficient for the selected model. 107 | """ 108 | model_requirements = { 109 | 'tiny': 2, 'base': 3, 'small': 5, 'medium': 10, 'large': 16, 'large-v3': 16 110 | } 111 | required_ram = model_requirements.get(self.whisper_model_size, 4) 112 | 113 | if self._system_ram_gb() < required_ram: 114 | raise MemoryError(f"Il modello {self.whisper_model_size} richiede almeno {required_ram}GB di RAM") 115 | 116 | def _best_compute_type(self): 117 | """ 118 | Determines the best type of computation based on the model and available RAM. 119 | """ 120 | model_size_map = { 121 | 'tiny': 'int8', 122 | 'base': 'int8', 123 | 'small': 'int8', 124 | 'medium': 'int8' if self._system_ram_gb() >= 16 else 'float32', 125 | 'large': 'float32', 126 | 'large-v3': 'float32' 127 | } 128 | return model_size_map.get(self.whisper_model_size, 'int8') 129 | 130 | def _optimal_cpu_threads(self): 131 | """ 132 | It calculates the optimal number of CPU threads (maximum 8). 133 | """ 134 | available_cores = os.cpu_count() or 4 135 | return min(available_cores, 8) 136 | 137 | @staticmethod 138 | def _system_ram_gb(): 139 | """ 140 | Returns the amount of system RAM (in GB). 141 | """ 142 | try: 143 | return round(psutil.virtual_memory().total / (1024 ** 3)) 144 | except Exception: 145 | return 4 # Conservative value if RAM cannot be determined. 146 | 147 | def process_file(self): 148 | """ 149 | Main process for analyzing and possibly updating audio track tags. 
150 | """ 151 | if self.interrupted: 152 | return False 153 | 154 | # self.logger.info(f"File analysis: {self.file_path}") 155 | tqdm.write(f" - File analysis: {self.file_path}") 156 | 157 | if not self.file_path.exists(): 158 | self.logger.error("File not found") 159 | return False 160 | 161 | try: 162 | # Using the media_info obtained in __init__. 163 | audio_streams = [s for s in self.media_info['streams'] if s['codec_type'] == 'audio'] 164 | 165 | if not audio_streams: 166 | self.logger.warning("No audio track found in the file") 167 | return False 168 | 169 | # Select the tracks to be analyzed 170 | tracks_to_analyze = self.get_tracks_to_analyze(audio_streams) 171 | 172 | if not tracks_to_analyze: 173 | self.logger.info("There are no unknown audio tracks to analyze") 174 | self.logger.info("--" * 30) 175 | return True 176 | 177 | self.logger.info("--" * 30) 178 | num_tracks = len(tracks_to_analyze) 179 | self.logger.info(f"Analysis of {num_tracks} audio {'tracks' if num_tracks > 1 else 'track'}") 180 | self.logger.info("--" * 30) 181 | 182 | first_attempt_positions = [10, 35, 60, 85] 183 | first_attempt_duration = 30 184 | 185 | for track in tracks_to_analyze: 186 | if self.interrupted: 187 | return False 188 | 189 | stream = track['stream'] 190 | # Relative index for ffmpeg 191 | audio_position = track['relative_index'] 192 | # Absolute index as reported by ffprobe (used for mkvpropedit and log). 
193 | ffprobe_index = track['ffprobe_index'] 194 | 195 | self.log_stream_info(stream) 196 | self.logger.info("--" * 30) 197 | 198 | # First attempt 199 | self.logger.info(f"Attempt 1 - Track with ffprobe index {ffprobe_index}") 200 | self.logger.info("--" * 30) 201 | 202 | first_attempt_confidences = {} 203 | for start_percent in first_attempt_positions: 204 | audio_segment = self.extract_audio_sample(audio_position, start_percent, first_attempt_duration) 205 | if audio_segment is None: 206 | continue 207 | detected_lang, confidence = self.detect_language(audio_segment) 208 | 209 | if detected_lang not in first_attempt_confidences: 210 | first_attempt_confidences[detected_lang] = {'total_confidence': 0, 'count': 0} 211 | first_attempt_confidences[detected_lang]['total_confidence'] += confidence 212 | first_attempt_confidences[detected_lang]['count'] += 1 213 | 214 | self.logger.info(f"Position {start_percent}%: Language detected '{detected_lang}', Confidence {confidence * 100:.2f}%") 215 | 216 | # Save only if there are detections. 
217 | if first_attempt_confidences: 218 | total_detections = sum(lang_data['count'] for lang_data in first_attempt_confidences.values()) 219 | first_attempt_weighted_averages = {} 220 | for lang, stats in first_attempt_confidences.items(): 221 | average_confidence = stats['total_confidence'] / stats['count'] 222 | weighted_average = (average_confidence * stats['count']) / total_detections 223 | first_attempt_weighted_averages[lang] = weighted_average * 100 224 | 225 | self.logger.info("--" * 30) 226 | self.logger.info("Weighted averages of the confidences of each language surveyed:") 227 | for lang, weighted_avg in first_attempt_weighted_averages.items(): 228 | self.logger.info(f"-> {lang}: {weighted_avg:.2f}%") 229 | 230 | detected_lang = max(first_attempt_weighted_averages, key=first_attempt_weighted_averages.get) 231 | confidence_percent = first_attempt_weighted_averages[detected_lang] 232 | else: 233 | detected_lang = None 234 | confidence_percent = 0 235 | 236 | self.logger.info("--" * 30) 237 | self.logger.info(f"Language with higher weighted average: '{detected_lang}', Weighted average: {confidence_percent:.2f}%") 238 | 239 | if confidence_percent >= self.confidence_threshold: 240 | self.logger.info( 241 | f"Attempt 1 successful for trace with ffprobe index {ffprobe_index}. " 242 | f"Language detected: {detected_lang}, Confidence: {confidence_percent:.2f}% >= {self.confidence_threshold}%" 243 | ) 244 | self.handle_detection_result(ffprobe_index, detected_lang, confidence_percent / 100) 245 | self.logger.info("--" * 30) 246 | continue 247 | 248 | self.logger.info(f"Attempt 1 failed. 
Weighted average: {confidence_percent:.2f}% < {self.confidence_threshold}%") 249 | self.logger.info("--" * 30) 250 | 251 | # Subsequent attempts 252 | used_positions = set(first_attempt_positions) 253 | for attempt in range(2, 11): 254 | attempt_duration = random.randint(30, 90) 255 | attempt_positions = [] 256 | 257 | while len(attempt_positions) < 4: 258 | new_position = random.randint(5, 95) 259 | if new_position not in used_positions: 260 | attempt_positions.append(new_position) 261 | used_positions.add(new_position) 262 | 263 | self.logger.info(f"Attempt {attempt} - Track with ffprobe index {ffprobe_index}") 264 | self.logger.info("--" * 30) 265 | 266 | attempt_confidences = {} 267 | for start_percent in attempt_positions: 268 | audio_segment = self.extract_audio_sample(audio_position, start_percent, attempt_duration) 269 | if audio_segment is None: 270 | continue 271 | detected_lang, confidence = self.detect_language(audio_segment) 272 | 273 | if detected_lang not in attempt_confidences: 274 | attempt_confidences[detected_lang] = {'total_confidence': 0, 'count': 0} 275 | attempt_confidences[detected_lang]['total_confidence'] += confidence 276 | attempt_confidences[detected_lang]['count'] += 1 277 | 278 | self.logger.info(f"Position {start_percent}%: Language detected '{detected_lang}', Confidence {confidence * 100:.2f}%") 279 | 280 | if attempt_confidences: 281 | total_detections = sum(lang_data['count'] for lang_data in attempt_confidences.values()) 282 | attempt_weighted_averages = {} 283 | for lang, stats in attempt_confidences.items(): 284 | average_confidence = stats['total_confidence'] / stats['count'] 285 | weighted_average = (average_confidence * stats['count']) / total_detections 286 | attempt_weighted_averages[lang] = weighted_average * 100 287 | 288 | self.logger.info("--" * 30) 289 | self.logger.info("Weighted averages of the confidences of each language surveyed:") 290 | for lang, weighted_avg in attempt_weighted_averages.items(): 291 | 
self.logger.info(f"-> {lang}: {weighted_avg:.2f}%") 292 | 293 | detected_lang = max(attempt_weighted_averages, key=attempt_weighted_averages.get) 294 | confidence_percent = attempt_weighted_averages[detected_lang] 295 | else: 296 | detected_lang = None 297 | confidence_percent = 0 298 | 299 | self.logger.info(f"Language with higher weighted average: '{detected_lang}', Weighted average: {confidence_percent:.2f}%") 300 | 301 | if confidence_percent >= self.confidence_threshold: 302 | self.logger.info( 303 | f"Attempt {attempt} successful for trace with ffprobe index {ffprobe_index}. " 304 | f"Language detected: {detected_lang}, Confidence: {confidence_percent:.2f}% >= {self.confidence_threshold}%" 305 | ) 306 | self.handle_detection_result(ffprobe_index, detected_lang, confidence_percent / 100) 307 | self.logger.info("--" * 30) 308 | break 309 | 310 | self.logger.info(f"Attempt {attempt} failed. Weighted average: {confidence_percent:.2f}% < {self.confidence_threshold}%") 311 | self.logger.info("--" * 30) 312 | 313 | return True 314 | 315 | except Exception as e: 316 | self.logger.error(f"Error during file processing: {str(e)}", exc_info=self.verbose) 317 | return False 318 | 319 | def get_tracks_to_analyze(self, audio_streams): 320 | """Select the audio tracks to be analyzed according to the parameters. 
321 | 322 | For each track, store: 323 | - 'relative_index': the relative index among the audio tracks only (for ffmpeg) 324 | - 'ffprobe_index': the absolute index of the stream, as reported by ffprobe 325 | """ 326 | tracks = [] 327 | relative_index = 0 328 | for stream in audio_streams: 329 | tags = stream.get('tags', {}) 330 | current_lang = tags.get('LANGUAGE', None) or tags.get('language', None) 331 | if self.check_all_tracks or not current_lang: 332 | tracks.append({ 333 | 'stream': stream, 334 | 'relative_index': relative_index, # To use in ffmpeg: 0:a:{relative_index} 335 | 'ffprobe_index': stream.get('index') # Use with mkvpropedit to identify the exact track 336 | }) 337 | relative_index += 1 338 | return tracks 339 | 340 | def log_stream_info(self, stream): 341 | """ 342 | Logga le informazioni della traccia audio. 343 | """ 344 | bitrate = stream.get('bit_rate') 345 | info = { 346 | 'Index': stream.get('index', 'n.d.'), 347 | 'Codec': stream.get('codec_name', 'unknown'), 348 | 'Current language': stream.get('tags', {}).get('language', 'not set'), 349 | 'Bitrate': f"{int(bitrate) // 1000} Kb/s" if bitrate else 'unknown' 350 | } 351 | self.logger.info("Audio track details:") 352 | for k, v in info.items(): 353 | self.logger.info(f"• {k}: {v}") 354 | 355 | def handle_detection_result(self, stream_index, detected_lang, confidence): 356 | """ 357 | Handles the result of language detection: 358 | - converts the code to ISO 639-2 (using pycountry) 359 | - checks the confidence obtained 360 | - updates the trace tag if necessary 361 | 362 | Arguments: 363 | stream_index (int): stream index (ffprobe) 364 | detected_lang (str): detected language code 365 | confidence (float): confidence (0-1) 366 | """ 367 | self.logger.debug(f"Start handle_detection_result for trace {stream_index}") 368 | 369 | try: 370 | detected_lang_3 = pycountry.languages.get(alpha_2=detected_lang).alpha_3 371 | except AttributeError: 372 | self.logger.warning(f"Language code not found for 
{detected_lang}. Using the original code.") 373 | detected_lang_3 = detected_lang 374 | 375 | current_lang = self.media_info['streams'][stream_index].get('tags', {}).get('language') 376 | self.logger.debug(f"Current language by track {stream_index}: {current_lang}") 377 | 378 | confidence_percent = confidence * 100 379 | 380 | if detected_lang_3: 381 | self.logger.debug(f"Language detected for track {stream_index}: {detected_lang_3} (original: {detected_lang})") 382 | 383 | if confidence_percent >= self.confidence_threshold: 384 | if detected_lang_3 != current_lang: 385 | self.logger.info(f"Recognized language for track {stream_index}: {detected_lang_3} with confidence {confidence_percent:.2f}%") 386 | self.update_language_tag(stream_index, detected_lang_3) 387 | else: 388 | self.logger.info(f"Language by track {stream_index} remains unchanged: {detected_lang_3} with confidence {confidence_percent:.2f}%") 389 | else: 390 | if self.force_language is not None: 391 | if self.force_language == '': 392 | self.logger.info(f"Recognized language for track {stream_index}: {detected_lang_3} forced under threshold") 393 | self.update_language_tag(stream_index, detected_lang_3) 394 | else: 395 | self.logger.info(f"Forced language for track {stream_index}: {self.force_language}") 396 | self.update_language_tag(stream_index, self.force_language) 397 | else: 398 | self.logger.info(f"Recognized language for track {stream_index}: {detected_lang_3} with confidence {confidence_percent:.2f}%, but below the required threshold") 399 | else: 400 | self.logger.warning(f"No language detected by trace {stream_index} (confidence {confidence_percent:.2f}%)") 401 | if self.force_language and self.force_language != '': 402 | self.logger.info(f"Forced language for track {stream_index}: {self.force_language}") 403 | self.update_language_tag(stream_index, self.force_language) 404 | else: 405 | self.logger.debug(f"No update by track {stream_index}") 406 | 407 | self.logger.debug(f"Done 
handle_detection_result for track {stream_index}") 408 | 409 | def get_media_info(self): 410 | """ 411 | Extracts media file information using ffprobe. 412 | Returns: 413 | dict: information in JSON format. 414 | """ 415 | cmd = [ 416 | 'ffprobe', 417 | '-v', 'quiet', 418 | '-print_format', 'json', 419 | '-show_format', 420 | '-show_streams', 421 | str(self.file_path) 422 | ] 423 | result = subprocess.run(cmd, capture_output=True, text=True) 424 | if result.returncode != 0 or not result.stdout: 425 | raise RuntimeError("Error executing ffprobe") 426 | return json.loads(result.stdout) 427 | 428 | def update_language_tag(self, stream_index, language): 429 | """ 430 | Updates the language tag for the track identified by stream_index. 431 | 432 | Arguments: 433 | stream_index (int): stream index (ffprobe) to update the tag to. 434 | language (str): new language code (ISO 639-2). 435 | """ 436 | if not language: 437 | self.logger.info(f"No language to set by track {stream_index}") 438 | return 439 | 440 | cmd = [ 441 | 'mkvpropedit', 442 | str(self.file_path), 443 | '--edit', f'track:{stream_index + 1}', # mkvpropedit use index base-1 444 | '--set', f'language={language}' 445 | ] 446 | 447 | if not self.dry_run: 448 | try: 449 | result = subprocess.run(cmd, check=True, capture_output=True, text=True) 450 | self.logger.info(f"Updated language tag for track {stream_index}: {language}") 451 | except subprocess.CalledProcessError as e: 452 | self.logger.error(f"Error running mkvpropedit: {e.stderr}") 453 | else: 454 | self.logger.info(f"[DRY RUN] Update language tag per track {stream_index}: {language}") 455 | 456 | def detect_language(self, audio_file): 457 | """ 458 | Performs language detection using the Whisper model. 459 | 460 | Arguments: 461 | audio_file (file-like): audio sample in BytesIO format. 
462 | 463 | Returns: 464 | tuple: (language detected (str), confidence (float)) 465 | """ 466 | model_size = self.whisper_model_size 467 | device = 'cuda' if self.gpu else 'cpu' 468 | compute_type = self._best_compute_type() 469 | cpu_threads = self._optimal_cpu_threads() if device == 'cpu' else 0 470 | 471 | self.logger.info("Beginning language detection") 472 | self.logger.debug(f"Whisper configuration for {device}: compute_type={compute_type}, threads={cpu_threads or 'auto'}") 473 | 474 | # NOTE: here you load the model every time you call. It could be improved by loading it only once, 475 | # e.g., by saving it in self.whisper_model on first use. 476 | model = WhisperModel(model_size, 477 | device=device, 478 | compute_type=compute_type, 479 | cpu_threads=cpu_threads, 480 | download_root="/models") 481 | 482 | segments, info = model.transcribe(audio_file, language=None, beam_size=5) 483 | detected_language = info.language 484 | 485 | if self.verbose: 486 | self.logger.debug("Recognized text:") 487 | for segment in segments: 488 | self.logger.debug(f"[{segment.start:.2f}s -> {segment.end:.2f}s] {segment.text}") 489 | 490 | if self.verbose: 491 | self.logger.info(f"Detected language: {detected_language} with confidence: {info.language_probability:.2f}") 492 | 493 | return detected_language, info.language_probability 494 | 495 | def extract_audio_sample(self, audio_position, start_percent, duration_seconds): 496 | """ 497 | Extracts an audio sample in WAV format of the specified duration from a percentage of the file. 498 | 499 | Arguments: 500 | audio_position (int): index of the audio track (for ffmpeg). 501 | start_percent (float): start percentage of the sample. 502 | duration_seconds (float): duration of the sample in seconds. 503 | 504 | Return: 505 | BytesIO: campione audio; None in caso di errore. 
506 | """ 507 | try: 508 | audio_sample = io.BytesIO() 509 | 510 | # Reuse self.total_duration if available 511 | total_duration = self.total_duration 512 | start_time_seconds = (total_duration * start_percent) / 100 513 | 514 | extract_cmd = [ 515 | 'ffmpeg', '-y', 516 | '-ss', f'{start_time_seconds:.2f}', 517 | '-i', str(self.file_path), 518 | '-t', f'{duration_seconds:.2f}', 519 | '-map', f'0:a:{audio_position}', 520 | '-ac', '1', 521 | '-ar', '16000', 522 | '-acodec', 'pcm_s16le', 523 | '-f', 'wav', 524 | '-' 525 | ] 526 | 527 | with subprocess.Popen(extract_cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) as process: 528 | stdout, stderr = process.communicate() 529 | 530 | if process.returncode != 0: 531 | self.logger.error(f"Sample extraction error by position {start_percent}:") 532 | self.logger.error(f"Command: {' '.join(extract_cmd)}") 533 | self.logger.error(f"Error: {stderr.decode('utf-8', errors='ignore')}") 534 | raise subprocess.CalledProcessError(process.returncode, extract_cmd, stdout, stderr) 535 | 536 | audio_sample.write(stdout) 537 | audio_sample.seek(0) 538 | return audio_sample 539 | 540 | except Exception as e: 541 | self.logger.error(f"Sample extraction error: {str(e)}") 542 | return None 543 | 544 | def main(): 545 | checker = None 546 | try: 547 | VALID_MODELS = ['tiny', 'base', 'small', 'medium', 'large', 'large-v3'] 548 | 549 | parser = argparse.ArgumentParser(description='Analyzes and corrects language tags of audio tracks') 550 | parser.add_argument('--file', help='Path of the file to be analyzed') 551 | parser.add_argument('--folder', help='Directory path to be parsed') 552 | parser.add_argument('--recursive', nargs='?', const=0, type=int, 553 | help="""Depth levels to explore. 554 | If 0 or omitted, the search is unlimited (the starting folder and all subdirectories). 
555 | If > 0, the search is limited to that number of levels (starting folder is level 0).""") 556 | parser.add_argument('--check-all-tracks', action='store_true', 557 | help='Analyzes all audio tracks, not just those without tags') 558 | parser.add_argument('--verbose', action='store_true', 559 | help='Enable detailed logging') 560 | parser.add_argument('--dry-run', action='store_true', 561 | help='Simulates operations without modifying the file') 562 | parser.add_argument('--force-language', nargs='?', const='', 563 | help='Language to be set when detection fails. Use ISO 639-2 format (3 letters)') 564 | parser.add_argument('--confidence', type=int, default=65, 565 | help='Confidence threshold for language detection (default: 65)') 566 | parser.add_argument('--model', 567 | choices=VALID_MODELS, 568 | default='base', 569 | help=f"Whisper model (size): {' '.join(VALID_MODELS)}, default: %(default)s") 570 | parser.add_argument('--gpu', action='store_true', help='Use GPU for language detection (optional)') 571 | parser.add_argument('--help-languages', action='store_true', help='Show a list of available language codes') 572 | 573 | args = parser.parse_args() 574 | 575 | logger = _setup_logger(args.verbose) 576 | 577 | if args.help_languages: 578 | print("Available language codes (ISO 639-2 format):") 579 | for language in pycountry.languages: 580 | if hasattr(language, 'alpha_3'): 581 | print(f"{language.alpha_3} - {language.name}") 582 | sys.exit(0) 583 | 584 | if not args.file and not args.folder: 585 | parser.error("the following arguments are required: --file or --folder") 586 | 587 | # Forced language code validation 588 | if args.force_language: 589 | if args.force_language != '': 590 | language_obj = pycountry.languages.get(alpha_3=args.force_language) 591 | if language_obj is not None: 592 | logger.debug(f"Forced language set to: {args.force_language} -> {language_obj.name}") 593 | else: 594 | logger.info(f"Error: '{args.force_language}' is not a valid 
language code according to ISO 639-2.") 595 | logger.info("For a list of available codes, use the option --help-languages.") 596 | sys.exit(1) 597 | else: 598 | logger.debug("Forces language detection even if below threshold.") 599 | 600 | # Validation of confidence threshold 601 | if args.confidence < 1 or args.confidence > 100: 602 | print("Error: the confidence threshold should be between 1 and 100") 603 | sys.exit(1) 604 | 605 | files_to_process = [] 606 | 607 | if args.file: 608 | file_path = Path(args.file) 609 | if file_path.suffix.lower() != '.mkv': 610 | print(f"Error: the file must be in MKV format. File provided: {file_path}") 611 | sys.exit(1) 612 | files_to_process.append(file_path) 613 | 614 | if args.folder: 615 | folder_path = Path(args.folder) 616 | if not folder_path.is_dir(): 617 | print(f"Error: '{folder_path}' is not a valid directory.") 618 | sys.exit(1) 619 | 620 | # If --recursive is not passed, args.recursive will be None, 621 | # and then a NON-recursive search will be done (only in the source directory) 622 | if args.recursive is None: 623 | # In this case we consider only the indicated directory (no recursion) 624 | depth = 0 # We can decide to handle it as non-recursive 625 | files_to_process.extend(list(folder_path.glob('*.mkv'))) 626 | else: 627 | depth = args.recursive 628 | files_to_process.extend(find_files(folder_path, depth)) 629 | 630 | if not files_to_process: 631 | print("No MKV files found.") 632 | sys.exit(1) 633 | 634 | if args.verbose: 635 | params = { 636 | 'check_all_tracks': args.check_all_tracks, 637 | 'dry_run': args.dry_run, 638 | 'force_language': args.force_language if args.force_language is not None else 'False', 639 | 'confidence_threshold': args.confidence, 640 | 'model': args.model, 641 | 'gpu': args.gpu 642 | } 643 | logger.info("Execution parameters:") 644 | for param, value in params.items(): 645 | logger.info(f" {param}: {value}") 646 | logger.info("--" * 30) 647 | 648 | with 
tqdm(total=len(files_to_process), desc=" - INFO - Processing files", unit="file", initial=1, leave=False) as pbar: 649 | for file_path in files_to_process: 650 | 651 | now = datetime.datetime.now() 652 | timestamp = now.strftime("%Y-%m-%d %H:%M:%S,%f")[:-3] # Format the date and time with milliseconds 653 | pbar.set_description(f"{timestamp} - INFO - Processing files") 654 | 655 | checker = AudioMediaChecker( 656 | str(file_path), 657 | check_all_tracks=args.check_all_tracks, 658 | verbose=args.verbose, 659 | dry_run=args.dry_run, 660 | force_language=args.force_language, 661 | confidence_threshold=args.confidence, 662 | model=args.model, 663 | gpu=args.gpu, 664 | logger=logger 665 | ) 666 | checker.process_file() 667 | pbar.update(1) # Update the progress bar after processing the file 668 | 669 | 670 | logger.info("Script successfully completed.") 671 | sys.exit(0) 672 | 673 | except KeyboardInterrupt: 674 | print("\nOperation aborted by user.") 675 | sys.exit(1) 676 | except Exception as e: 677 | print(f"Unexpected error: {str(e)}") 678 | sys.exit(1) 679 | 680 | if __name__ == "__main__": 681 | main() 682 | -------------------------------------------------------------------------------- /AudioMediaChecker/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM nvidia/cuda:12.3.2-cudnn9-runtime-ubuntu22.04 2 | 3 | # Setup Python e dipendenze di sistema 4 | RUN apt-get update && apt-get install -y \ 5 | python3 \ 6 | python3-pip \ 7 | python3-dev \ 8 | gcc \ 9 | g++ \ 10 | mkvtoolnix \ 11 | ffmpeg \ 12 | && rm -rf /var/lib/apt/lists/* 13 | 14 | # Installazione delle dipendenze Python 15 | COPY requirements.txt . 16 | RUN pip install --no-cache-dir -r requirements.txt 17 | 18 | WORKDIR /app 19 | COPY . . 
20 | 21 | RUN ln -s /usr/bin/python3 /usr/bin/python 22 | 23 | CMD ["python3", "AudioMediaChecker.py"] 24 | -------------------------------------------------------------------------------- /AudioMediaChecker/docker-compose.yml: -------------------------------------------------------------------------------- 1 | services: 2 | audio-media-checker: 3 | build: . 4 | container_name: audio_processor_container 5 | image: audio-media-checker:latest 6 | volumes: 7 | - /path_to_yout_data:/data -------------------------------------------------------------------------------- /AudioMediaChecker/requirements.txt: -------------------------------------------------------------------------------- 1 | faster-whisper 2 | mutagen 3 | pydub 4 | ffmpeg-python 5 | pycountry 6 | psutil 7 | tqdm -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2018 Jorman 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # qBittorrent Helper Scripts 2 | 3 | This small, but useful collection of scripts is a fantastic way to help you manage, maintain and prune your Torrent file lists. 4 | 5 | They automate some of the tasks that become a bit boring after clicking buttons from within the client itself for the 350th time... 6 | 7 | There are FOUR main scripts and several sub-scripts that serve a specific purpose. 8 | 9 | Download the lot, review and modify them to suit your needs. 10 | 11 | ### AddqBittorrentTrackers.sh 12 | This script injects trackers into your **qBittorrent downloads**. 13 | 14 | 15 | ### AddTransmissionTrackers.sh 16 | This script injects trackers into **Transmission torrents**. 17 | 18 | 19 | ### TransmissionRemoveCompleteTorrent.sh 20 | This script removes completed torrents. 21 | 22 | 23 | ### qBittorrentHardlinksChecker.sh 24 | This script checks qBittorrent's hard links. 25 | 26 | 27 | # FEEDBACK and ERRORS 28 | 29 | Please star this project - if you find a problem, please do report it. 30 | 31 | While the scripts do not change often, the project is regularly reviewed. The world of Torrenting is well established, but tweaks, tricks and usage evolve. 32 | 33 | As things evolve, these scripts will be updated. If you would like to add, suggest ideas or propose new methods, please do open either an Issue or pop me a message. 
34 | 35 | 36 | 37 | # TODO 38 | Write a good explanation of how to use these scripts - see the Wiki (top) 39 | -------------------------------------------------------------------------------- /TransmissionRemoveCompleteTorrent.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | ########## CONFIGURATIONS ########## 4 | # Access Information for Transmission 5 | t_username= 6 | t_password= 7 | # Host where transmission runs 8 | t_host=localhost 9 | # Transmission port 10 | t_port=9091 11 | t_remote="$(command -v transmission-remote)" 12 | # Log: 0 to disable | 1 to enable 13 | t_log=1 14 | t_log_path="/data/Varie" 15 | # Folder for automatic downloads. Be careful: it must be different from, and not included in, the default download folder of Transmission. 16 | # For example, if your downloads go to /data/download and your automatic downloads from Transmission go to /data/download/automatic, 17 | # you have to point to that automatic folder, because when the script runs it will search for that folder name. 18 | automatic_folder="Automatici" 19 | # If greater than 0, this indicates the max seed time (in days) for automatic torrents. If reached, the torrent will be deleted. 20 | max_days_seed=7 21 | # If true, this will also delete data for non-automatic torrents 22 | remove_normal=true 23 | ########## CONFIGURATIONS ########## 24 | 25 | if [[ "$t_log" == "1" ]]; then 26 | if [[ !
-w "$t_log_path/${0##*/}.log" ]]; then 27 | touch "$t_log_path/${0##*/}.log" 28 | fi 29 | fi 30 | 31 | [[ "$t_log" == "1" ]] && echo "########## $(date) ##########" >> "$t_log_path/${0##*/}.log" 32 | 33 | # use transmission-remote to get torrent list from transmission-remote list 34 | torrent_list=`$t_remote $t_host:$t_port -n=$t_username:$t_password -l | awk '{print $1}' | grep -o '[0-9]*'` 35 | # for each torrent in the list 36 | for torrent_id in $torrent_list; do 37 | torrent_name=`$t_remote $t_host:$t_port -n=$t_username:$t_password -t $torrent_id -i | grep Name: | sed -e 's/\s\sName:\s//'` 38 | [[ "$t_log" == "1" ]] && echo "* * * * * Checking torrent Nr. $torrent_id -> $torrent_name * * * * *" >> "$t_log_path/${0##*/}.log" 39 | 40 | # check if torrent download is completed 41 | percent_done=`$t_remote $t_host:$t_port -n=$t_username:$t_password -t $torrent_id -i | grep 'Percent Done' | awk '{print $3}' | sed 's/.$//'` 42 | done_auto=`$t_remote $t_host:$t_port -n=$t_username:$t_password -t $torrent_id -i | grep Location | awk '{print $2}' | grep "$automatic_folder"` 43 | done_seed=`$t_remote $t_host:$t_port -n=$t_username:$t_password -t $torrent_id -i | grep Seeding | awk -F'[()]' '{print $2}' | grep -o '[[:digit:]]*'` 44 | 45 | # check torrents current state is "Stopped", "Finished", or "Idle" 46 | state_stopped=`$t_remote $t_host:$t_port -n=$t_username:$t_password -t $torrent_id -i | grep "State: Stopped\|Finished"` 47 | 48 | if [ "$percent_done" == "100" ]; then # torrent complete 49 | [[ "$t_log" == "1" ]] && echo " Torrent done at $percent_done%" >> "$t_log_path/${0##*/}.log" 50 | if [ "$done_auto" != "" ]; then # automatic torrent 51 | [[ "$t_log" == "1" ]] && echo " Torrent is under automatic folder ..." >> "$t_log_path/${0##*/}.log" 52 | if [ "$state_stopped" != "" ]; then # transmission stopped the torrent 53 | [[ "$t_log" == "1" ]] && echo " Torrent is stopped, I'll remove torrent and data!" 
>> "$t_log_path/${0##*/}.log" 54 | $t_remote $t_host:$t_port -n=$t_username:$t_password -t $torrent_id -rad 55 | elif [ $(( done_seed / 60)) -gt $(( max_days_seed * 60 * 24)) ] && [ $max_days_seed -gt 0 ]; then # maximum seed time reached 56 | [[ "$t_log" == "1" ]] && echo " Torrent have a good seed time ($(( done_seed / 60))/$(( max_days_seed * 60 * 24)) minutes). I'll also remove the data!" >> "$t_log_path/${0##*/}.log" 57 | $t_remote $t_host:$t_port -n=$t_username:$t_password -t $torrent_id -rad 58 | else 59 | [[ "$t_log" == "1" ]] && echo " Torrent not yet fully finished. Seed time ($(( done_seed / 60))/$(( max_days_seed * 60 * 24)) minutes)" >> "$t_log_path/${0##*/}.log" 60 | fi 61 | else # not automatic torrent 62 | [[ "$t_log" == "1" ]] && echo " This's a normal torrent ..." >> "$t_log_path/${0##*/}.log" 63 | if [ "$state_stopped" != "" ]; then # transmission stopped the torrent 64 | [[ "$t_log" == "1" ]] && echo " Torrent is stopped" >> "$t_log_path/${0##*/}.log" 65 | if [[ "$remove_normal" == "true" ]]; then 66 | [[ "$t_log" == "1" ]] && echo " Also remove normal torrent is active, I'll remove torrent and data!" >> "$t_log_path/${0##*/}.log" 67 | $t_remote $t_host:$t_port -n=$t_username:$t_password -t $torrent_id -rad 68 | else 69 | $t_remote $t_host:$t_port -n=$t_username:$t_password -t $torrent_id -r 70 | fi 71 | elif [ $(( done_seed / 60 / 60 / 24)) -gt $max_days_seed ] && [ $max_days_seed -gt 0 ]; then # maximum seed time reached 72 | [[ "$t_log" == "1" ]] && echo " Torrent have a good seed time ($(( done_seed / 60))/$(( max_days_seed * 60 * 24)) minutes)" >> "$t_log_path/${0##*/}.log" 73 | if [[ "$remove_normal" == "true" ]]; then 74 | [[ "$t_log" == "1" ]] && echo " Also remove normal torrent is active, I'll remove torrent and data!" 
>> "$t_log_path/${0##*/}.log" 75 | $t_remote $t_host:$t_port -n=$t_username:$t_password -t $torrent_id -rad 76 | else 77 | $t_remote $t_host:$t_port -n=$t_username:$t_password -t $torrent_id -r 78 | fi 79 | else 80 | [[ "$t_log" == "1" ]] && echo " Torrent not yet fully finished. Seed time ($(( done_seed / 60))/$(( max_days_seed * 60 * 24)) minutes)" >> "$t_log_path/${0##*/}.log" 81 | fi 82 | fi 83 | elif [ "$percent_done" == "99.9" ] && [ "$state_stopped" != "" ]; then # torrent stalled 84 | [[ "$t_log" == "1" ]] && echo " Seems that torrent Nr. #$torrent_id is stalled, I'll try to restart it!" >> "$t_log_path/${0##*/}.log" 85 | $t_remote $t_host:$t_port -n=$t_username:$t_password -t $torrent_id -s 86 | elif [ "$percent_done" == "nan" ]; then # torrent not yet started 87 | [[ "$t_log" == "1" ]] && echo " Torrent not yet started" >> "$t_log_path/${0##*/}.log" 88 | else # torrent not complete 89 | [[ "$t_log" == "1" ]] && echo " Torrent not yet finished done at $percent_done%" >> "$t_log_path/${0##*/}.log" 90 | fi 91 | [[ "$t_log" == "1" ]] && echo -e "* * * * * Checking torrent Nr. $torrent_id complete. * * * * *\n" >> "$t_log_path/${0##*/}.log" 92 | done -------------------------------------------------------------------------------- /TransmissionRemoveCompleteTorrent.sh.readme.md: -------------------------------------------------------------------------------- 1 | # TransmissionRemoveCompleteTorrent.sh 2 | 3 | The best way is to use it is to cronize it. 4 | 5 | This script uses `transmission-remote`, normally this is already installed if you use transmission. 6 | 7 | * First make sure your Radarr/Sonarr user can execute the script with someting like this: 8 | * `chown USER:USER TransmissionRemoveCompleteTorrent.sh` 9 | * Then ensure it is executable: `chmod +x TransmissionRemoveCompleteTorrent.sh` 10 | 11 | * Modify the scripts `########## CONFIGURATIONS ##########` section: 12 | * `t_username`, `t_password`, `t_host` and `t_port` are all Transmission related. 
Set them accordingly. 13 | * `t_log` is to enable the logfile. If set to 1 the logfile will be written to `t_log_path`. 14 | * The most important setting is `automatic_folder`. This is the folder that contains all the **automatic downloads** 15 | * I use this folder structure for automatic downloads that came from Radarr/Sonarr: 16 | - download 17 | - automatic 18 | - movie 19 | - tv_show 20 | 21 | * Within the files configuration example, I've set `automatic` for `automatic_folder` option. 22 | * `max_days_seed` is the maximum seed time. 23 | * `remove_normal`. Pay attention if you set this to true, because this enables a kind of **force** option that also checks all non-automatic downloads. 24 | 25 | * Lastly, consider using cron for the script. Add this to your cron scheduler with something like this (varies according to your own Linux installs cron manager): 26 | * `30 01 * * * /PATHOFTHESCRIPT/TransmissionRemoveCompleteTorrent.sh >/dev/null 2>&1` 27 | * this example will execute the script at 01:30 every day. 28 | -------------------------------------------------------------------------------- /clean_samba_recycle.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # cleanup recycle dir: 4 | # delete all files with last access time 5 | # older than a specific number of days and 6 | # remove all empty subdirs afterwards. 
7 | # 8 | # in your smb.conf: 9 | # make sure you set recycle:touch = yes 10 | # in order to periodically delete old files, you can 11 | # cron the script with something like 00 01 * * * clean_samba_recycle.sh 12 | 13 | # set vars 14 | recycle_dir1='/dir1' 15 | recycle_dir2='/dir2' 16 | recycle_dir3='/dir3' 17 | recycle_dir4='/dir4' 18 | recycle_dir5='/dir5' 19 | recycle_dir6='/dir6' 20 | lastaccess_50_maxdays=50 21 | lastaccess_10_maxdays=10 22 | lastaccess_5_maxdays=5 23 | 24 | # execute commands for recycle_dir1 25 | find $recycle_dir1 -atime +$lastaccess_10_maxdays -type f -delete 26 | find $recycle_dir1 -type d ! -path $recycle_dir1 -empty -delete 27 | 28 | # execute commands for recycle_dir2 29 | find $recycle_dir2 -atime +$lastaccess_50_maxdays -type f -delete 30 | find $recycle_dir2 -type d ! -path $recycle_dir2 -empty -delete 31 | 32 | # execute commands for recycle_dir3 33 | find $recycle_dir3 -atime +$lastaccess_50_maxdays -type f -delete 34 | find $recycle_dir3 -type d ! -path $recycle_dir3 -empty -delete 35 | 36 | # execute commands for recycle_dir4 37 | find $recycle_dir4 -atime +$lastaccess_5_maxdays -type f -delete 38 | find $recycle_dir4 -type d ! -path $recycle_dir4 -empty -delete 39 | 40 | # execute commands for recycle_dir5 41 | find $recycle_dir5 -atime +$lastaccess_10_maxdays -type f -delete 42 | find $recycle_dir5 -type d ! -path $recycle_dir5 -empty -delete 43 | 44 | # execute commands for recycle_dir6 45 | find $recycle_dir6 -atime +$lastaccess_10_maxdays -type f -delete 46 | find $recycle_dir6 -type d ! -path $recycle_dir6 -empty -delete 47 | -------------------------------------------------------------------------------- /eMulerrStalledChecker/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM python:3.11-alpine 2 | 3 | WORKDIR /app 4 | COPY requirements.txt . 5 | COPY eMulerr_Stalled_Checker.py . 
6 | 7 | RUN pip install -r requirements.txt 8 | 9 | CMD ["python", "eMulerr_Stalled_Checker.py"] -------------------------------------------------------------------------------- /eMulerrStalledChecker/docker-compose.yml: -------------------------------------------------------------------------------- 1 | services: 2 | emulerr-stalled-checker: 3 | build: . 4 | container_name: emulerr-stalled-checker 5 | environment: 6 | - CHECK_INTERVAL=10 # in minutes 7 | - EMULERR_HOST=http://10.0.0.100:3000 8 | - STALL_CHECKS=30 # number of checks before marking as stalled 9 | - STALL_DAYS=20 # days if the download has been never seen completed 10 | - RECENT_DOWNLOAD_GRACE_PERIOD=30 # how many minutes wait before checking if the download, apply only if the download is recent 11 | - DELETE_IF_UNMONITORED_SERIE=false # delete if the serie is not monitored 12 | - DELETE_IF_UNMONITORED_SEASON=false # delete if the season is not monitored 13 | - DELETE_IF_UNMONITORED_EPISODE=true # delete if the episode is not monitored 14 | - DELETE_IF_ONLY_ON_EMULERR=false # delete if the download is only on eMulerr but not inside *Arr, apply only for downloads with *Arr eMulerr category 15 | - PUSHOVER_USER_KEY=*** 16 | - PUSHOVER_APP_TOKEN=*** 17 | - LOG_LEVEL=info # or debug, warning, error, critical 18 | - LOG_TO_FILE=/path/to/logfile # optional, by default no log file is created, if set, the file will be created in the path specified, note volume must be mounted to the path 19 | - DRY_RUN=false 20 | - DOWNLOAD_CLIENT=emulerr # name of the download client inside *Arr 21 | - RADARR_HOST=http://10.0.0.100:7878 22 | - RADARR_API_KEY=*** 23 | - RADARR_CATEGORY=radarr-eMulerr # Radarr category for eMulerr 24 | - SONARR_HOST=http://10.0.0.100:8989 25 | - SONARR_API_KEY=*** 26 | - SONARR_CATEGORY=tv-sonarr-eMulerr # Sonarr category for eMulerr 27 | restart: unless-stopped 28 | healthcheck: 29 | test: ["CMD", "wget", "--spider", "http://10.0.0.100:3000"] 30 | interval: 1m 31 | timeout: 10s 32 | retries: 
3 -------------------------------------------------------------------------------- /eMulerrStalledChecker/eMulerr_Stalled_Checker.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | import time 4 | import sys 5 | import os 6 | import math 7 | from datetime import datetime 8 | from typing import List 9 | import logging 10 | from logging.handlers import RotatingFileHandler 11 | from urllib3.util.retry import Retry 12 | import requests 13 | from requests.adapters import HTTPAdapter 14 | from requests import Session 15 | 16 | # Sets the logging level based on the environment variable LOG_LEVEL 17 | log_level = os.environ.get('LOG_LEVEL', 'INFO').upper() 18 | numeric_level = getattr(logging, log_level, None) 19 | if not isinstance(numeric_level, int): 20 | raise ValueError(f'Invalid log level: {log_level}') 21 | 22 | # Get the environment variable for the log file directly 23 | log_to_file_path = os.getenv("LOG_TO_FILE", "") 24 | 25 | # Configure the logger 26 | logger = logging.getLogger(__name__) 27 | logger.setLevel(numeric_level) 28 | 29 | # Make sure there are no duplicate handlers 30 | for handler in logger.handlers[:]: 31 | logger.removeHandler(handler) 32 | 33 | # Log format 34 | log_format = logging.Formatter('%(asctime)s - %(levelname)s - %(message)s') 35 | 36 | # Handler for the console 37 | console_handler = logging.StreamHandler(sys.stdout) 38 | console_handler.setFormatter(log_format) 39 | logger.addHandler(console_handler) 40 | 41 | # Handler for the file if specified 42 | if log_to_file_path: 43 | try: 44 | # Make sure the directory exists 45 | os.makedirs(log_to_file_path, exist_ok=True) 46 | 47 | # Create the full path to the log file inside that directory 48 | log_file = os.path.join(log_to_file_path, "emulerr_stalled_checker.log") 49 | 50 | # Use this full path to the log file 51 | file_handler = RotatingFileHandler( 52 | log_file, 53 | maxBytes=2 * 1024 * 1024, # 2 MB 54 | 
backupCount=6, 55 | encoding="utf-8" 56 | ) 57 | file_handler.setFormatter(log_format) 58 | logger.addHandler(file_handler) 59 | logger.info("Log file configured in: %s", log_file) 60 | except Exception as e: 61 | logger.error("Log file configuration error: %s", e) 62 | 63 | class Config: 64 | # All environment variables must be provided by docker-compose.yml 65 | DRY_RUN = os.environ.get('DRY_RUN', 'false').lower() == 'true' # flags for dry running 66 | 67 | EMULERR_ENDPOING = '/download-client?_data=routes%2F_shell.download-client' 68 | EMULERR_HOST = f"{os.environ.get('EMULERR_HOST', '')}" 69 | 70 | CHECK_INTERVAL = int(os.environ.get('CHECK_INTERVAL')) # in minutes 71 | STALL_CHECKS = int(os.environ.get('STALL_CHECKS')) # number of checks before considering stall 72 | STALL_DAYS = int(os.environ.get('STALL_DAYS')) # days after which a complete visa file is considered stalled 73 | RECENT_DOWNLOAD_GRACE_PERIOD = int(os.environ.get('RECENT_DOWNLOAD_GRACE_PERIOD', '30')) # in minutes 74 | 75 | # New configuration options for monitoring checks 76 | DELETE_IF_UNMONITORED_SERIE = os.environ.get('DELETE_IF_UNMONITORED_SERIE', 'false').lower() == 'true' 77 | DELETE_IF_UNMONITORED_SEASON = os.environ.get('DELETE_IF_UNMONITORED_SEASON', 'false').lower() == 'true' 78 | DELETE_IF_UNMONITORED_EPISODE = os.environ.get('DELETE_IF_UNMONITORED_EPISODE', 'false').lower() == 'true' 79 | 80 | DELETE_IF_UNMONITORED_MOVIE = os.environ.get('DELETE_IF_UNMONITORED_MOVIE', 'false').lower() == 'true' 81 | 82 | DELETE_IF_ONLY_ON_EMULERR = os.environ.get('DELETE_IF_ONLY_ON_EMULERR', 'false').lower() == 'true' 83 | 84 | # Download client name 85 | DOWNLOAD_CLIENT = os.environ.get('DOWNLOAD_CLIENT', '') # download client name in Sonarr/Radarr 86 | 87 | # Radarr config (optional) 88 | RADARR_HOST = os.environ.get('RADARR_HOST', None) 89 | RADARR_API_KEY = os.environ.get('RADARR_API_KEY', None) 90 | RADARR_CATEGORY = os.environ.get('RADARR_CATEGORY', None) # category for Radarr downloads 91 | 
92 | # Sonarr config (optional) 93 | SONARR_HOST = os.environ.get('SONARR_HOST', None) 94 | SONARR_API_KEY = os.environ.get('SONARR_API_KEY', None) 95 | SONARR_CATEGORY = os.environ.get('SONARR_CATEGORY', None) # category for Sonarr downloads 96 | 97 | # Pushover configuration 98 | PUSHOVER_APP_TOKEN = os.environ.get('PUSHOVER_APP_TOKEN', '') 99 | PUSHOVER_USER_KEY = os.environ.get('PUSHOVER_USER_KEY', '') 100 | 101 | # Assigns API_URL directly in the body of the class 102 | API_URL = f"{os.environ.get('EMULERR_HOST', '')}{EMULERR_ENDPOING}" 103 | 104 | @staticmethod 105 | def validate(): 106 | mandatory_fields = [ 107 | 'CHECK_INTERVAL', 'API_URL', 'STALL_CHECKS', 'STALL_DAYS', 'DOWNLOAD_CLIENT', 'EMULERR_HOST' 108 | ] 109 | 110 | for field in mandatory_fields: 111 | value = getattr(Config, field) 112 | if value is None or value == '': 113 | logger.error("Environment variable %s must be set.", field) 114 | sys.exit(1) 115 | 116 | radarr_used = Config.RADARR_HOST is not None 117 | sonarr_used = Config.SONARR_HOST is not None 118 | 119 | if not radarr_used and not sonarr_used: 120 | logger.error("At least one of RADARR_HOST or SONARR_HOST must be set.") 121 | sys.exit(1) 122 | 123 | if radarr_used and not sonarr_used: 124 | if Config.RADARR_API_KEY is None or Config.RADARR_CATEGORY is None: 125 | logger.error("When using Radarr, RADARR_API_KEY and RADARR_CATEGORY must be set.") 126 | sys.exit(1) 127 | 128 | Config.SONARR_HOST = None 129 | Config.SONARR_API_KEY = None 130 | Config.SONARR_CATEGORY = None 131 | 132 | if sonarr_used and not radarr_used: 133 | if Config.SONARR_API_KEY is None or Config.SONARR_CATEGORY is None: 134 | logger.error("When using Sonarr, SONARR_API_KEY and SONARR_CATEGORY must be set.") 135 | sys.exit(1) 136 | 137 | Config.RADARR_HOST = None 138 | Config.RADARR_API_KEY = None 139 | Config.RADARR_CATEGORY = None 140 | 141 | # New validation for *_HOST variables 142 | host_variables = ['RADARR_HOST', 'SONARR_HOST', 'EMULERR_HOST'] 143 | 144 | 
for host_var in host_variables: 145 | host_value = os.environ.get(host_var) 146 | if host_value and not host_value.startswith(('http://', 'https://')): 147 | logger.error("Environment variable %s must start with 'http://' or 'https://'.", host_var) 148 | sys.exit(1) 149 | 150 | class EmulerrDownload: 151 | def __init__(self, file_data: dict): 152 | self.name = file_data.get('name', '') 153 | self.hash = file_data.get('hash', '') 154 | self.size = file_data.get('size', 0) 155 | self.size_done = file_data.get('size_done', 0) 156 | self.progress = file_data.get('progress', 0) * 100 # notare che viene moltiplicato per 100 157 | self.status = file_data.get('status_str', '') 158 | self.src_count = file_data.get('src_count', 0) 159 | self.src_count_a4af = file_data.get('src_count_a4af', 0) 160 | self.last_seen_complete = file_data.get('last_seen_complete', 0) 161 | self.category = file_data.get('meta', {}).get('category', 'unknown') 162 | self.addedOn = file_data.get('meta', {}).get('addedOn', 0) 163 | 164 | def __repr__(self): 165 | return ( 166 | f"EmulerrDownload(name={self.name!r}, hash={self.hash!r}, size={self.size}, " 167 | f"size_done={self.size_done}, progress={self.progress}, status={self.status!r}, " 168 | f"src_count={self.src_count}, src_count_a4af={self.src_count_a4af}, " 169 | f"last_seen_complete={self.last_seen_complete}, category={self.category!r}, " 170 | f"addedOn={self.addedOn})" 171 | ) 172 | 173 | class SonarrDownload: 174 | def __init__(self, record_data: dict): 175 | self.title = record_data.get('sourceTitle', '') 176 | self.downloadId = record_data.get('downloadId', '') 177 | self.download_client = record_data.get('downloadClientName', '') 178 | self.id = record_data.get('id', '') 179 | 180 | # Assicuriamoci che size sia un intero 181 | try: 182 | self.size = int(record_data.get('size', 0)) 183 | except (ValueError, TypeError): 184 | self.size = 0 185 | 186 | self.series_id = record_data.get('seriesId', None) 187 | self.season_number = 
record_data.get('seasonNumber', None) 188 | self.episode_id = record_data.get('episodeId', None) 189 | 190 | def __repr__(self): 191 | return ( 192 | f"SonarrDownload(title={self.title!r}, downloadId={self.downloadId!r}, " 193 | f"download_client={self.download_client!r}, id={self.id!r}, size={self.size}, " 194 | f"series_id={self.series_id!r}, season_number={self.season_number!r}, " 195 | f"episode_id={self.episode_id!r})" 196 | ) 197 | 198 | class RadarrDownload: 199 | def __init__(self, record_data: dict): 200 | self.title = record_data.get('sourceTitle', '') 201 | self.downloadId = record_data.get('downloadId', '') 202 | self.download_client = record_data.get('downloadClientName', '') 203 | self.id = record_data.get('id', '') 204 | 205 | # Convertiamo size in intero, nel caso non lo sia già 206 | try: 207 | self.size = int(record_data.get('size', 0)) 208 | except (ValueError, TypeError): 209 | self.size = 0 210 | 211 | self.movie_id = record_data.get('movieId', None) 212 | 213 | def __repr__(self): 214 | return ( 215 | f"RadarrDownload(title={self.title!r}, downloadId={self.downloadId!r}, " 216 | f"download_client={self.download_client!r}, id={self.id!r}, size={self.size}, " 217 | f"movie_id={self.movie_id!r})" 218 | ) 219 | 220 | def check_special_cases(emulerr_data): 221 | """ 222 | Processes the list of incomplete downloads: 223 | - For each download makes a paged request to the 'history' endpoint to get the records. 224 | - If no valid records (eventType "grabbed" and downloadClientName == Config.DOWNLOAD_CLIENT) 225 | is found, the download is considered present only on eMulerr and added to emulerr_downloads_to_remove. 226 | - If a valid record is present, an object is created (RadarrDownload or SonarrDownload) 227 | which also includes a reference to the original download and the valid record, and is added to the relevant queue. 
228 | 229 | Next, for each object in the queues: 230 | - For Radarr: if the movie is not monitored (verified via is_movie_monitored), the original download 231 | is added to sonarr_radarr_downloads_to_remove. 232 | - For Sonarr: if the series, season or episode is not monitored (verified via respective functions), 233 | the original download is added to sonarr_radarr_downloads_to_remove. 234 | 235 | Returns a tuple with: 236 | (emulerr_downloads_to_remove, sonarr_radarr_downloads_to_remove) 237 | """ 238 | # Output lists 239 | emulerr_downloads_to_remove = [] 240 | sonarr_radarr_downloads_to_remove = [] 241 | sonarr_queue = [] 242 | radarr_queue = [] 243 | 244 | def get_history_records(download, host, api_key, full_hash, page_size=10): 245 | headers = { 246 | "accept": "application/json", 247 | "X-Api-Key": api_key 248 | } 249 | all_records = [] 250 | page = 1 251 | 252 | while True: 253 | history_url = f"{host}/api/v3/history?page={page}&pageSize={page_size}&downloadId={full_hash}" 254 | try: 255 | response = requests.get(history_url, headers=headers, timeout=10) 256 | if response.status_code != 200: 257 | logger.error( 258 | f"Error ({response.status_code}) in the request history for '{download.name}' from {history_url}" 259 | ) 260 | break 261 | 262 | page_data = response.json() 263 | except Exception as e: 264 | logger.error("Exception during history request for '%s': %s", download.name, e) 265 | break 266 | 267 | records = page_data.get("records", []) 268 | all_records.extend(records) 269 | 270 | # Calculates the total number of pages based on totalRecords. 
271 | total_records = page_data.get("totalRecords", 0) 272 | total_pages = math.ceil(total_records / page_size) 273 | 274 | logger.debug("Page %s/%s for '%s', records obtained: %s", page, total_pages, download.name, len(records)) 275 | 276 | if page >= total_pages: 277 | break 278 | page += 1 279 | 280 | return all_records 281 | 282 | def get_series_monitor_status(host, api_key, series_id): 283 | """Gets series monitoring status.""" 284 | logger.debug("Getting series monitor status for series_id: %s", series_id) 285 | url = f"{host}/api/v3/series/{series_id}" 286 | headers = {"X-Api-Key": api_key} 287 | try: 288 | response = requests.get(url, headers=headers) 289 | logger.debug("Series API response status code: %s", response.status_code) 290 | if response.status_code == 200: 291 | series = response.json() 292 | logger.debug("Series monitored status: %s", series.get('monitored')) 293 | logger.debug("Number of seasons: %s", len(series.get('seasons', []))) 294 | return series.get('monitored', False), series.get('seasons', []) 295 | else: 296 | logger.error("Error in retrieving series information. Status code: %s", response.status_code) 297 | return False, [] 298 | except Exception as e: 299 | logger.error("Error in retrieving series information: %s", e) 300 | return False, [] 301 | 302 | def get_season_number_for_episode(sonarr_host, sonarr_api_key, episode_id): 303 | """ 304 | Retrieve the season number of the episode using the Sonarr API. 305 | 306 | Args: 307 | sonarr_host (str): base URL of the Sonarr instance (e.g., "http://localhost:8989") 308 | sonarr_api_key (str): API Key for the Sonarr instance. 309 | episode_id (int): ID of the episode to be queried. 310 | 311 | Returns: 312 | int or None: The season number if found, otherwise None. 
313 | """ 314 | url = f"{sonarr_host}/api/v3/episode/{episode_id}" 315 | params = { 316 | "apikey": sonarr_api_key 317 | } 318 | 319 | try: 320 | response = requests.get(url, params=params, timeout=10) 321 | response.raise_for_status() 322 | data = response.json() 323 | 324 | season_number = data.get("seasonNumber") 325 | if season_number is None: 326 | logger.error("Season number not found for episode %s in the answer: %s", episode_id, data) 327 | else: 328 | logger.debug("Season number for the episode %s: %s", episode_id, season_number) 329 | return season_number 330 | 331 | except requests.RequestException as e: 332 | logger.error("Error when calling Sonarr for the episode. %s: %s", episode_id, e) 333 | return None 334 | 335 | def get_season_monitor_status(seasons, season_number): 336 | """Gets the status of monitoring the season.""" 337 | logger.debug("Getting season monitor status for season_number: %s", season_number) 338 | for season in seasons: 339 | logger.debug("Checking season %s", season.get('seasonNumber')) 340 | if season.get('seasonNumber') == season_number: 341 | logger.debug("Season monitored status: %s", season.get('monitored')) 342 | return season.get('monitored', False) 343 | logger.debug("No season found with number %s", season_number) 344 | return False 345 | 346 | def get_episode_monitor_status(host, api_key, episode_id): 347 | """Gets the status of episode monitoring.""" 348 | logger.debug("Getting episode monitor status for episode_id: %s", episode_id) 349 | url = f"{host}/api/v3/episode/{episode_id}" 350 | headers = {"X-Api-Key": api_key} 351 | try: 352 | response = requests.get(url, headers=headers) 353 | logger.debug("Episode API response status code: %s", response.status_code) 354 | if response.status_code == 200: 355 | episode = response.json() 356 | logger.debug("Episode monitored status: %s", episode.get('monitored')) 357 | return episode.get('monitored', False) 358 | else: 359 | logger.error("Error in retrieving episode information. 
Status code: %s", response.status_code) 360 | return False 361 | except Exception as e: 362 | logger.error("Error in retrieving episode information: %s", e) 363 | return False 364 | 365 | def is_movie_monitored(host, api_key, movie_id): 366 | """Check if the film is monitored.""" 367 | logger.debug("Checking movie monitor status for movie_id: %s", movie_id) 368 | url = f"{host}/api/v3/movie/{movie_id}" 369 | headers = {"X-Api-Key": api_key} 370 | try: 371 | response = requests.get(url, headers=headers) 372 | logger.debug("Movie API response status code: %s", response.status_code) 373 | if response.status_code == 200: 374 | movie = response.json() 375 | logger.debug("Movie monitored status: %s", movie.get('monitored')) 376 | return movie.get('monitored', False) 377 | else: 378 | logger.error("Error in retrieving film information. Status Code: %s", response.status_code) 379 | return False 380 | except Exception as e: 381 | logger.error("Error in retrieving film information: %s", e) 382 | return False 383 | 384 | # First loop: processes each download by querying the history. 385 | for download in emulerr_data: 386 | # Constructs the full hash: assuming 32 characters + "00000000" 387 | full_hash = download.hash + "00000000" 388 | 389 | # Determines the client and connection details based on the category set in Config. 390 | client = None 391 | host = None 392 | api_key = None 393 | 394 | if Config.RADARR_CATEGORY is not None and download.category == Config.RADARR_CATEGORY: 395 | client = "radarr" 396 | host = Config.RADARR_HOST 397 | api_key = Config.RADARR_API_KEY 398 | elif Config.SONARR_CATEGORY is not None and download.category == Config.SONARR_CATEGORY: 399 | client = "sonarr" 400 | host = Config.SONARR_HOST 401 | api_key = Config.SONARR_API_KEY 402 | else: 403 | logger.warning( 404 | f"Category '{download.category}' does not match either RADARR_CATEGORY or SONARR_CATEGORY defined in Config.. " 405 | f"Skip processing for downloading '{download.name}'." 
406 | ) 407 | continue 408 | 409 | history_records = get_history_records(download, host, api_key, full_hash) 410 | 411 | valid_record = None 412 | # Cerca il primo record valido 413 | for record in history_records: 414 | if record.get("eventType") != "grabbed": 415 | continue 416 | data = record.get("data", {}) 417 | if data.get("downloadClientName") == Config.DOWNLOAD_CLIENT: 418 | valid_record = record 419 | break 420 | 421 | if valid_record is None: 422 | logger.info( 423 | f"Records present for '{download.name}' (hash: {download.hash}), but no one meets the criteria. " 424 | "Download considered present only on eMulerr." 425 | ) 426 | emulerr_downloads_to_remove.append(download) 427 | continue 428 | 429 | # If a valid record is present, creates the specific object and retains the record for later checking 430 | if client == "radarr": 431 | r_download = RadarrDownload(valid_record) 432 | radarr_queue.append(r_download) 433 | elif client == "sonarr": 434 | s_download = SonarrDownload(valid_record) 435 | sonarr_queue.append(s_download) 436 | 437 | # Second step: check monitoring for queued downloads. 438 | 439 | # For Radarr: if the movie is not monitored, flag the download for removal. 440 | for r_obj in radarr_queue: 441 | # Suppose the record contains "movieId" in data. 442 | movie_id = r_obj.movie_id 443 | if not movie_id and Config.DELETE_IF_ONLY_ON_EMULERR: 444 | logger.info("The record '%s' does not contain 'movieId', it will only be considered on eMulerr.", r_obj.title) 445 | sonarr_radarr_downloads_to_remove.append(r_obj) 446 | continue 447 | 448 | if not is_movie_monitored(Config.RADARR_HOST, Config.RADARR_API_KEY, movie_id) and Config.DELETE_IF_UNMONITORED_MOVIE: 449 | logger.warning("[RADARR] The movie '%s' Is not monitored. 
It will be marked for removal.", r_obj.title) 450 | sonarr_radarr_downloads_to_remove.append(r_obj) 451 | 452 | for s_obj in sonarr_queue: 453 | # Extract the main fields 454 | series_id = s_obj.series_id 455 | episode_id = s_obj.episode_id 456 | 457 | # Retrieve the season number using Sonarr's API for the episode. 458 | season_number = get_season_number_for_episode(Config.SONARR_HOST, Config.SONARR_API_KEY, episode_id) 459 | 460 | # Update the object with the obtained season_number. 461 | s_obj.season_number = season_number 462 | 463 | if not series_id and Config.DELETE_IF_ONLY_ON_EMULERR: 464 | logger.warning("The record '%s' does not contain 'seriesId', it will only be considered on eMulerr.", s_obj.title) 465 | sonarr_radarr_downloads_to_remove.append(s_obj) 466 | continue 467 | 468 | # Gets series status and season information. 469 | series_monitored, seasons = get_series_monitor_status(Config.SONARR_HOST, Config.SONARR_API_KEY, series_id) 470 | if not series_monitored and Config.DELETE_IF_UNMONITORED_SERIE: 471 | logger.warning("[SONARR] The show '%s' Is not monitored. It will be marked for removal.", s_obj.title) 472 | sonarr_radarr_downloads_to_remove.append(s_obj) 473 | continue 474 | 475 | if not episode_id and Config.DELETE_IF_ONLY_ON_EMULERR: 476 | logger.warning("The record '%s' does not contain 'episodeId', it will only be considered on eMulerr.", s_obj.title) 477 | sonarr_radarr_downloads_to_remove.append(s_obj) 478 | continue 479 | 480 | if season_number is None and Config.DELETE_IF_ONLY_ON_EMULERR: 481 | logger.warning("It was not possible to determine the season number for the episode %s.", episode_id) 482 | sonarr_radarr_downloads_to_remove.append(s_obj) 483 | continue 484 | 485 | # Check season tracking using season information. 486 | if not get_season_monitor_status(seasons, season_number) and Config.DELETE_IF_UNMONITORED_SEASON: 487 | logger.warning("[SONARR] The season %s for '%s' Is not monitored. 
It will be marked for removal.", season_number, s_obj.title) 488 | sonarr_radarr_downloads_to_remove.append(s_obj) 489 | continue 490 | 491 | # Check the monitoring of the episode 492 | if not get_episode_monitor_status(Config.SONARR_HOST, Config.SONARR_API_KEY, episode_id) and Config.DELETE_IF_UNMONITORED_EPISODE: 493 | logger.warning("[SONARR] The episode '%s' Is not monitored. It will be marked for removal.", s_obj.title) 494 | sonarr_radarr_downloads_to_remove.append(s_obj) 495 | 496 | return emulerr_downloads_to_remove, sonarr_radarr_downloads_to_remove, sonarr_queue, radarr_queue 497 | 498 | def emulerr_remove_download(hash_32: str, download_name: str, dry_run: bool = False): 499 | url = f"{Config.EMULERR_HOST}/api/v2/torrents/delete?_data=routes%2Fapi.v2.torrents.delete" 500 | headers = { 501 | 'Content-Type': 'application/x-www-form-urlencoded' 502 | } 503 | data = { 504 | '_data': 'routes/api.v2.torrents.delete', 505 | 'hashes': hash_32.upper() 506 | } 507 | 508 | if not dry_run: 509 | try: 510 | response = requests.post(url, headers=headers, data=data) 511 | response.raise_for_status() 512 | logger.info("%s successfully removed from eMulerr.", download_name) 513 | except requests.exceptions.RequestException as e: 514 | logger.error("Error removing '%s': %s", download_name, e) 515 | else: 516 | logger.debug("DRY_RUN: Would remove %s from eMulerr.", download_name) 517 | 518 | def handle_stalled_download(name: str, queue_id: str, host: str, api_key: str, dry_run: bool = True) -> bool: 519 | """ 520 | Mark as failed a download (identified by queue_id) using the 521 | endpoint /api/v3/history/failed/{id}. 522 | 523 | :param name: The name of the download to be marked as failed. 524 | :param queue_id: The id of the download in the queue (Radarr/Sonarr) to be marked as failed. 525 | :param host: The base host (URL) of the service (Radarr or Sonarr). 526 | :param api_key: The API key for authentication. 
527 | :param dry_run: If True, the function only logs the call without executing the action 528 | :return: True if the operation was successful, False otherwise 529 | """ 530 | 531 | url = f"{host}/api/v3/history/failed/{queue_id}" 532 | headers = { 533 | 'X-Api-Key': api_key, 534 | 'Content-Type': 'application/json' 535 | } 536 | 537 | if dry_run: 538 | logger.info("[DRY RUN] I would mark as failed the download with id %s using: %s", name, url) 539 | return True 540 | 541 | try: 542 | response = requests.post(url, headers=headers) 543 | if response.status_code == 200: 544 | logger.info("%s -> Successfully marked as failed", name) 545 | return True 546 | else: 547 | logger.error("Error in marking as failed the download with id %s: status code %s, response: %s", name, response.status_code, response.text) 548 | return False 549 | except Exception as e: 550 | logger.exception("Exception in marking the download as failed %s: %s", name, e) 551 | return False 552 | 553 | def send_pushover_notification(message: str, dry_run: bool = False): 554 | if dry_run: 555 | logger.debug("Dry run is active. 
Pushover notification not sent: %s", message) 556 | return 557 | 558 | if Config.PUSHOVER_APP_TOKEN and Config.PUSHOVER_USER_KEY: 559 | try: 560 | response = requests.post("https://api.pushover.net/1/messages.json", data={ 561 | "token": Config.PUSHOVER_APP_TOKEN, 562 | "user": Config.PUSHOVER_USER_KEY, 563 | "message": message 564 | }) 565 | response.raise_for_status() 566 | logger.debug("Pushover notification sent successfully: %s", message) 567 | except requests.RequestException as e: 568 | logger.error("Failed to send Pushover notification: %s", str(e)) 569 | else: 570 | logger.warning("Pushover notification not sent because PUSHOVER_APP_TOKEN or PUSHOVER_USER_KEY is not set.") 571 | 572 | class StallChecker: 573 | def __init__(self): 574 | self.warnings = {} 575 | self.previous_warnings = set() # To keep track of downloads previously in warning 576 | self.previous_downloads = [] # Download history for future reference 577 | 578 | def check_status(self, download: EmulerrDownload) -> tuple[bool, str, int]: 579 | current_hash = download.hash 580 | 581 | added_on = download.addedOn / 1000 # Convert to seconds 582 | recent_download_threshold = time.time() - (Config.RECENT_DOWNLOAD_GRACE_PERIOD * 60) 583 | if added_on > recent_download_threshold: 584 | if current_hash in self.warnings: 585 | del self.warnings[current_hash] 586 | return False, "", 0 587 | 588 | # Check if src_count_a4af > 0 589 | if download.src_count_a4af > 0: 590 | if current_hash in self.warnings: 591 | del self.warnings[current_hash] 592 | return False, "", 0 593 | 594 | # Check if download is 100% complete 595 | if download.progress >= 100: 596 | if current_hash in self.warnings: 597 | del self.warnings[current_hash] 598 | return False, "", 0 599 | 600 | # Check if size_done has changed 601 | if current_hash in self.warnings and download.size_done != self.warnings[current_hash]['last_size']: 602 | del self.warnings[current_hash] 603 | return False, "", 0 604 | 605 | if 
download.last_seen_complete == 0: 606 | reason = "Never seen complete" 607 | if current_hash in self.warnings: 608 | # Increment check_count if size_done hasn't changed 609 | self.warnings[current_hash]['count'] += 1 610 | self.warnings[current_hash]['last_size'] = download.size_done 611 | count = self.warnings[current_hash]['count'] 612 | 613 | if count > Config.STALL_CHECKS: 614 | return True, reason, count 615 | else: 616 | return False, reason, count 617 | else: 618 | # Add to warnings if not previously warned 619 | self.warnings[current_hash] = {'count': 1, 'last_size': download.size_done} 620 | return False, reason, 1 621 | 622 | # Rule 3: If last_seen_complete > STALL_DAYS 623 | if download.last_seen_complete > 0: 624 | stall_time = time.time() - (Config.STALL_DAYS * 24 * 60 * 60) 625 | if download.last_seen_complete < stall_time: 626 | reason = f"Last seen complete > {Config.STALL_DAYS} days ago" 627 | if current_hash in self.warnings: 628 | # Increment check_count if size_done hasn't changed 629 | self.warnings[current_hash]['count'] += 1 630 | self.warnings[current_hash]['last_size'] = download.size_done 631 | count = self.warnings[current_hash]['count'] 632 | 633 | if count > Config.STALL_CHECKS: 634 | return True, reason, count 635 | else: 636 | return False, reason, count 637 | else: 638 | # Add to warnings if not previously warned 639 | self.warnings[current_hash] = {'count': 1, 'last_size': download.size_done} 640 | return False, reason, 1 641 | else: 642 | if current_hash in self.warnings: 643 | del self.warnings[current_hash] 644 | return False, "", 0 645 | return False, "", 0 646 | 647 | def cleanup_warnings(self, current_hashes: set[str], downloads_map: dict): 648 | # Crea un dizionario di mapping hash -> nome prima della pulizia 649 | if not hasattr(self, 'hash_to_name_map'): 650 | self.hash_to_name_map = {} 651 | 652 | # Assicuriamoci che stalled_hashes esista 653 | if not hasattr(self, 'stalled_hashes'): 654 | self.stalled_hashes = set() 655 | 
656 | # Update mapping with all current downloads 657 | for download in self.previous_downloads: 658 | self.hash_to_name_map[download.hash] = download.name 659 | 660 | # Also update with new ones in the current map 661 | for hash_key, download in downloads_map.items(): 662 | self.hash_to_name_map[hash_key] = download.name 663 | 664 | # Remove hash from warnings 665 | to_remove = [h for h in self.warnings.keys() if h not in current_hashes] 666 | for h in to_remove: 667 | # Skip logging for stalled downloads 668 | if h in self.stalled_hashes: 669 | del self.warnings[h] 670 | continue 671 | 672 | if h in downloads_map: 673 | logger.info("Download '%s' removed from monitoring (no longer on download list)", downloads_map[h].name) 674 | elif h in self.hash_to_name_map: 675 | logger.info("Download '%s' removed from monitoring (no longer on download list)", self.hash_to_name_map[h]) 676 | else: 677 | logger.info("Download with hash %s... removed from monitoring (no longer on download list)", h[:8]) 678 | del self.warnings[h] 679 | 680 | # Update the warnings as usual 681 | self.previous_warnings = self.previous_warnings.intersection(current_hashes) 682 | self.previous_downloads = list(downloads_map.values()) 683 | 684 | def fetch_emulerr_data() -> List[EmulerrDownload]: 685 | """Retrieve active downloads from server with retry mechanism, filtering by SONARR_CATEGORY or RADARR_CATEGORY""" 686 | session = Session() 687 | retry_strategy = Retry( 688 | total=10, # Maximum number of attempts 689 | backoff_factor=30, # Interval of 30 second between attempts 690 | status_forcelist=[429, 500, 502, 503, 504], 691 | allowed_methods=["GET"] 692 | ) 693 | adapter = HTTPAdapter(max_retries=retry_strategy) 694 | session.mount("http://", adapter) 695 | session.mount("https://", adapter) 696 | 697 | try: 698 | response = session.get(Config.API_URL) 699 | response.raise_for_status() # This throws an exception for incorrect HTTP status codes 700 | 701 | data = response.json() 702 | files = 
data.get('files', []) 703 | logger.debug("Retrieved %s total file%s", len(files), 's' if len(files) != 1 else '') 704 | 705 | # Log categories of all files 706 | for file in files: 707 | meta = file.get('meta', {}) 708 | category = meta.get('category', 'Category not found') 709 | logger.debug("File category: %s", category) 710 | 711 | # Filter downloads based on category 712 | filtered_downloads = [ 713 | EmulerrDownload(file) for file in files 714 | if file.get('meta', {}).get('category') in [Config.SONARR_CATEGORY, Config.RADARR_CATEGORY] 715 | ] 716 | 717 | return filtered_downloads 718 | except requests.exceptions.RequestException as e: 719 | logger.error("Error retrieving downloads: %s", e) 720 | return [] 721 | 722 | def main(): 723 | stall_checker = StallChecker() 724 | 725 | logger.info("=== Configuration Summary ===") 726 | for attr, value in Config.__dict__.items(): 727 | if not callable(value) and not attr.startswith("__"): 728 | logger.info("%s: %s", attr, value) 729 | logger.info("=== Configuration Summary ===") 730 | 731 | while True: 732 | try: 733 | emulerr_data = fetch_emulerr_data() 734 | 735 | # Apply special case checks 736 | emulerr_downloads_to_remove, sonarr_radarr_downloads_to_remove, sonarr_queue, radarr_queue = check_special_cases(emulerr_data) 737 | 738 | for download in emulerr_downloads_to_remove + sonarr_radarr_downloads_to_remove: 739 | 740 | # If it is an EmulerrDownload, we directly use the 'hash' and 'name' field. 741 | if isinstance(download, EmulerrDownload): 742 | identifier = download.hash # already in the correct format (32 characters) 743 | name = download.name 744 | # If it is a SonarrDownload or RadarrDownload, we use the downloadId, removing the final 8 zeros if present. 
745 | elif isinstance(download, (SonarrDownload, RadarrDownload)): 746 | raw_id = download.downloadId 747 | identifier = raw_id[:-8] if raw_id.endswith("00000000") else raw_id 748 | name = download.title 749 | else: 750 | logger.debug("Download type not recognized: %s", download) 751 | continue 752 | 753 | logger.debug("Removal in progress for: %s, identifier: %s", name, identifier) 754 | 755 | # Invokes the function that removes the download from the server. 756 | emulerr_remove_download(identifier, name, Config.DRY_RUN) 757 | 758 | # Manually remove the hash from the monitoring if necessary 759 | if download.hash in stall_checker.warnings: 760 | del stall_checker.warnings[download.hash] 761 | 762 | if download.hash in stall_checker.previous_warnings: 763 | stall_checker.previous_warnings.remove(download.hash) 764 | 765 | # Removal from local emulerr_data list: 766 | # If the download is an EmulerrDownload, we remove it directly. 767 | if isinstance(download, EmulerrDownload): 768 | try: 769 | emulerr_data.remove(download) 770 | logger.debug("Removed correctly %s (EmulerrDownload) from emulerr_data.", name) 771 | except ValueError: 772 | logger.error("Unable to remove %s from emulerr_data.", name) 773 | # If the download is of type SonarrDownload or RadarrDownload, we look for the corresponding EmulerrDownload. 774 | # based on the hash (which is equivalent to the identifier). 
775 | elif isinstance(download, (SonarrDownload, RadarrDownload)): 776 | candidate = next( 777 | (d for d in emulerr_data if isinstance(d, EmulerrDownload) and d.hash == identifier), 778 | None 779 | ) 780 | if candidate: 781 | try: 782 | emulerr_data.remove(candidate) 783 | logger.debug("Removed correctly %s (found EmulerrDownload) from emulerr_data.", name) 784 | except ValueError: 785 | logger.error("Error in removing %s from emulerr_data, candidate found: %s", name, candidate) 786 | else: 787 | logger.error("Unable to remove %s from emulerr_data: no EmulerrDownload found with hash %s.", name, identifier) 788 | 789 | # Split emulerr_data into completed and incomplete 790 | incomplete_downloads = [d for d in emulerr_data if d.progress < 100] 791 | completed_downloads = [d for d in emulerr_data if d.progress == 100] 792 | 793 | stall_checker.previous_downloads = emulerr_data 794 | 795 | # Handle incomplete downloads 796 | if incomplete_downloads: 797 | download_states = {} 798 | stalled_downloads = [] 799 | warning_downloads = [] 800 | 801 | # We store the current hashes in warning 802 | current_warning_hashes = set() 803 | 804 | logger.debug("\nChecking %s incomplete file%s", len(incomplete_downloads), 's' if len(incomplete_downloads) != 1 else '') 805 | 806 | # Create a hash->download map for better lookups 807 | downloads_map = {d.hash: d for d in incomplete_downloads} 808 | current_hashes = set(downloads_map.keys()) 809 | 810 | current_hashes = {d.hash for d in incomplete_downloads} 811 | 812 | # Pass the map to cleanup_warnings 813 | stall_checker.cleanup_warnings(current_hashes, downloads_map) 814 | 815 | # Check the status once for each download 816 | for download in incomplete_downloads: 817 | is_stalled, stall_reason, check_count = stall_checker.check_status(download) 818 | download_states[download.hash] = (is_stalled, stall_reason, check_count) 819 | 820 | # Debug output 821 | if logger.getEffectiveLevel() == logging.DEBUG: 822 | for download in 
incomplete_downloads: 823 | is_stalled, stall_reason, check_count = download_states[download.hash] 824 | status = f"STALLED: {stall_reason}" if is_stalled else "Active" 825 | 826 | last_seen = "Never" if download.last_seen_complete == 0 else \ 827 | datetime.fromtimestamp(download.last_seen_complete).strftime('%Y-%m-%d %H:%M:%S') 828 | logger.debug("Download: %s, Status: %s, Last Seen Complete: %s, Check Count: %s", download.name, status, last_seen, check_count) 829 | 830 | # Process each download 831 | for download in incomplete_downloads: 832 | is_stalled, stall_reason, check_count = download_states[download.hash] 833 | 834 | # If it's not a special case, add to stalled or warning lists 835 | if is_stalled or check_count > Config.STALL_CHECKS: 836 | stalled_downloads.append((download, check_count, stall_reason or "Max checks reached")) 837 | elif check_count > 0: 838 | warning_downloads.append((download, check_count, stall_reason or "Approaching stall threshold")) 839 | 840 | # Show warning downloads 841 | if warning_downloads: 842 | logger.debug("Warning downloads (%s/%s):", len(warning_downloads), len(incomplete_downloads)) 843 | for download, count, warning_reason in warning_downloads: 844 | logger.info("%s -> Warning (%s/%s) - %s", download.name, count, Config.STALL_CHECKS, warning_reason) 845 | # Add the current hash to those in warnings 846 | current_warning_hashes.add(download.hash) 847 | else: # If warning is empty 848 | logger.debug("No warning downloads") 849 | 850 | # Show stalled downloads 851 | if stalled_downloads: 852 | logger.debug("Stalled downloads (%s/%s):", len(stalled_downloads), len(incomplete_downloads)) 853 | for download, check_count, stall_reason in stalled_downloads: 854 | logger.info("%s -> Stalled (%s/%s warnings) - %s", download.name, check_count, Config.STALL_CHECKS, stall_reason) 855 | 856 | send_pushover_notification(f"Download {download.name} marked as stalled: {stall_reason}. 
Will be removed", dry_run=Config.DRY_RUN) 857 | 858 | if Config.RADARR_CATEGORY is not None and download.category == Config.RADARR_CATEGORY: 859 | host = Config.RADARR_HOST 860 | api_key = Config.RADARR_API_KEY 861 | # Look for the RadarrDownload in the global queue. 862 | matching_item = next( 863 | (item for item in radarr_queue 864 | if (item.downloadId[:-8] == download.hash)), 865 | None 866 | ) 867 | elif Config.SONARR_CATEGORY is not None and download.category == Config.SONARR_CATEGORY: 868 | host = Config.SONARR_HOST 869 | api_key = Config.SONARR_API_KEY 870 | # Search for SonarrDownload in the global queue. 871 | matching_item = next( 872 | (item for item in sonarr_queue 873 | if (item.downloadId[:-8] == download.hash)), 874 | None 875 | ) 876 | else: 877 | logger.debug("Category not recognized for %s: %s", download.name, download.category) 878 | return 879 | 880 | if not matching_item: 881 | logger.error("Queue item not found for %s (hash: %s)", download.name, download.hash) 882 | return 883 | 884 | # Extracts the id to be used for removal (e.g., RadarrDownload.id or SonarrDownload.id). 885 | queue_id = matching_item.id 886 | 887 | # Invokes the function that removes the download from the server. 888 | emulerr_remove_download(download.hash, download.name, Config.DRY_RUN) 889 | 890 | # We immediately remove the download from the previous warnings. 891 | # so that it does not appear as "no longer in warning state" 892 | if download.hash in stall_checker.previous_warnings: 893 | stall_checker.previous_warnings.remove(download.hash) 894 | 895 | # We also remove downloading from current warnings. 896 | if download.hash in current_warning_hashes: 897 | current_warning_hashes.remove(download.hash) 898 | 899 | # Add the hash of the download to the set of stalled downloads. 
900 | if not hasattr(stall_checker, 'stalled_hashes'): 901 | stall_checker.stalled_hashes = set() 902 | stall_checker.stalled_hashes.add(download.hash) 903 | 904 | handle_stalled_download(download.name, queue_id, host, api_key, Config.DRY_RUN) 905 | 906 | else: # If stalled is empty 907 | logger.debug("No stalled downloads") 908 | 909 | # Check which downloads were previously in warnings but are no longer in warnings 910 | resolved_warnings = stall_checker.previous_warnings - current_warning_hashes 911 | for hash_value in resolved_warnings: 912 | # Search for the corresponding download to get the name 913 | matching_download = next((d for d in incomplete_downloads if d.hash == hash_value), None) 914 | if matching_download: 915 | logger.info("%s -> No longer in warning state", matching_download.name) 916 | 917 | # Update the set of downloads in warning for the next cycle 918 | stall_checker.previous_warnings = current_warning_hashes 919 | 920 | else: # If incomplete_downloads is empty 921 | logger.debug("No incomplete downloads to check.") 922 | 923 | # Handle completed downloads 924 | if completed_downloads: 925 | logger.debug("Checking %s completed file%s", len(completed_downloads), 's' if len(completed_downloads) != 1 else '') 926 | for download in completed_downloads: 927 | logger.debug("Completed download: %s", download.name) 928 | else: 929 | logger.debug("No completed downloads to check.") 930 | 931 | logger.debug("Waiting %s minute(s) before next check...", Config.CHECK_INTERVAL) 932 | time.sleep(Config.CHECK_INTERVAL * 60) 933 | 934 | except KeyboardInterrupt: 935 | logger.debug("Interrupted by user") 936 | break 937 | except Exception as e: 938 | logger.error("Error in main loop: %s", e) 939 | time.sleep(Config.CHECK_INTERVAL * 60) 940 | 941 | # Call the validation function at the beginning 942 | if __name__ == "__main__": 943 | try: 944 | Config.validate() 945 | main() 946 | except ValueError as e: 947 | logger.error("Configuration error: %s", e) 948 | 
sys.exit(1) 949 | -------------------------------------------------------------------------------- /eMulerrStalledChecker/requirements.txt: -------------------------------------------------------------------------------- 1 | requests>=2.31.0 -------------------------------------------------------------------------------- /qBittorrentHardlinksChecker/qBittorrentHardlinksChecker.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | import os 4 | import sys 5 | import argparse 6 | import json 7 | import requests 8 | import yaml 9 | import time 10 | from typing import Dict, Any, Optional, List, Tuple 11 | from urllib.parse import urljoin 12 | from pathlib import Path 13 | from dataclasses import dataclass 14 | from colorama import init, Fore, Style 15 | 16 | init() 17 | 18 | class QBittorrentManager: 19 | def __init__(self, config_file: str, dry_run: bool = False): 20 | self.dry_run = dry_run 21 | self._load_config(config_file) 22 | self._setup_session() 23 | 24 | def _load_config(self, config_file: str) -> None: 25 | """Load and validate the configuration""" 26 | with open(config_file) as f: 27 | config = yaml.safe_load(f) 28 | 29 | # Basic configuration required 30 | self.host = config['qbt_host'] 31 | self.port = config['qbt_port'] 32 | self.username = config['qbt_username'] 33 | self.password = config['qbt_password'] 34 | self.min_seeding_time = config['min_seeding_time'] 35 | 36 | # Optional configuration with default values 37 | self.categories = config.get('categories', []) 38 | self.torrent_type = config.get('torrent_type', '') 39 | self.virtual_path = config.get('virtual_path', '') 40 | self.real_path = config.get('real_path', '') 41 | self.enable_recheck = config.get('enable_recheck', True) 42 | self.enable_orphan_check = config.get('enable_orphan_check', True) 43 | self.orphan_states = config.get('orphan_states', []) 44 | self.min_peers = config.get('min_peers', 1) 45 | 46 | self.base_url = 
f"{self.host}:{self.port}" 47 | self.session = requests.Session() 48 | 49 | def _setup_session(self) -> None: 50 | """Initialize the HTTP session and log in""" 51 | self.login() 52 | 53 | def login(self) -> None: 54 | """Log in to qBittorrent""" 55 | try: 56 | response = self.session.post( 57 | urljoin(self.base_url, 'api/v2/auth/login'), 58 | data={'username': self.username, 'password': self.password} 59 | ) 60 | if response.text != 'Ok.': 61 | raise Exception("Login failed") 62 | except Exception as e: 63 | print(f"Failed to login: {str(e)}") 64 | sys.exit(1) 65 | 66 | def get_torrent_list(self) -> List[Dict[str, Any]]: 67 | """Gets the list of torrents""" 68 | try: 69 | torrent_list = [] 70 | # If "All" is specified in the categories, it takes all torrents 71 | if "All" in self.categories: 72 | response = self.session.get(urljoin(self.base_url, 'api/v2/torrents/info')) 73 | else: 74 | # Gets the torrents for each specified category 75 | for category in self.categories: 76 | if category == "Uncategorized": 77 | # For torrents without category 78 | response = self.session.get(urljoin(self.base_url, 'api/v2/torrents/info'), 79 | params={'category': ''}) 80 | else: 81 | response = self.session.get(urljoin(self.base_url, 'api/v2/torrents/info'), 82 | params={'category': category}) 83 | torrent_list.extend(response.json()) 84 | return torrent_list 85 | 86 | return response.json() 87 | except Exception as e: 88 | print(f"Failed to get torrent list: {str(e)}") 89 | return [] 90 | 91 | def get_torrent_properties(self, torrent_hash: str) -> Dict[str, Any]: 92 | """Gets the properties of a specific torrent""" 93 | try: 94 | response = self.session.get( 95 | urljoin(self.base_url, 'api/v2/torrents/properties'), 96 | params={'hash': torrent_hash} 97 | ) 98 | return response.json() 99 | except Exception as e: 100 | print(f"Failed to get torrent properties: {str(e)}") 101 | return {} 102 | 103 | def recheck_torrent(self, torrent_hash: str) -> None: 104 | """Double-check a 
torrent""" 105 | try: 106 | if self.dry_run: 107 | print(f"[DRY-RUN] Would recheck torrent with hash {torrent_hash}") 108 | return 109 | self.session.post( 110 | urljoin(self.base_url, 'api/v2/torrents/recheck'), 111 | data={'hashes': torrent_hash} 112 | ) 113 | print(f"Rechecking torrent with hash {torrent_hash}") 114 | except Exception as e: 115 | print(f"Failed to recheck torrent: {str(e)}") 116 | 117 | def reannounce_torrent(self, torrent_hash: str) -> None: 118 | """Performs reannounce of a torrent""" 119 | try: 120 | if self.dry_run: 121 | print(f"[DRY-RUN] Reannouncing torrent with hash {torrent_hash}") 122 | return 123 | self.session.post( 124 | urljoin(self.base_url, 'api/v2/torrents/reannounce'), 125 | data={'hashes': torrent_hash} 126 | ) 127 | print(f"Reannouncing torrent with hash {torrent_hash}") 128 | except Exception as e: 129 | print(f"Failed to reannounce torrent: {str(e)}") 130 | 131 | def delete_torrent(self, torrent_hash: str) -> None: 132 | """Remove a torrent""" 133 | try: 134 | if self.dry_run: 135 | print(f"[DRY-RUN] Torrent with hash {torrent_hash} deleted") 136 | return 137 | self.session.post( 138 | urljoin(self.base_url, 'api/v2/torrents/delete'), 139 | data={'hashes': torrent_hash, 'deleteFiles': True} 140 | ) 141 | print(f"Torrent with hash {torrent_hash} deleted") 142 | except Exception as e: 143 | print(f"Failed to delete torrent: {str(e)}") 144 | 145 | def check_hardlinks(self, path: str) -> bool: 146 | """Check if a file has hardlinks""" 147 | try: 148 | if os.path.isfile(path): 149 | return os.stat(path).st_nlink > 1 150 | elif os.path.isdir(path): 151 | for root, _, files in os.walk(path): 152 | for file in files: 153 | if os.stat(os.path.join(root, file)).st_nlink > 1: 154 | return True 155 | return False 156 | except Exception as e: 157 | print(f"Failed to check hardlinks: {str(e)}") 158 | return False 159 | 160 | def check_bad_trackers(self, torrent: Dict[str, Any]) -> Dict[str, str]: 161 | """Check problematic trackers""" 
162 | bad_trackers = {} 163 | try: 164 | response = self.session.get( 165 | urljoin(self.base_url, 'api/v2/torrents/trackers'), 166 | params={'hash': torrent['hash']} 167 | ) 168 | trackers = response.json() 169 | 170 | for tracker in trackers: 171 | if tracker.get('status') == 4: 172 | bad_trackers[tracker['url']] = tracker.get('msg', 'Unknown error') 173 | 174 | return bad_trackers 175 | except Exception as e: 176 | print(f"Failed to check trackers: {str(e)}") 177 | return {} 178 | 179 | def remove_trackers(self, torrent_hash: str, trackers: Dict[str, str]) -> None: 180 | """Removes specified trackers""" 181 | try: 182 | if self.dry_run: 183 | print(f"- [DRY-RUN] Bad tracker{'s' if len(trackers) > 1 else ''} removed") 184 | return 185 | for tracker in trackers: 186 | self.session.post( 187 | urljoin(self.base_url, 'api/v2/torrents/removeTrackers'), 188 | data={'hash': torrent_hash, 'urls': tracker} 189 | ) 190 | print(f"- Bad tracker{'s' if len(trackers) > 1 else ''} removed") 191 | except Exception as e: 192 | print(f"Failed to remove trackers: {str(e)}") 193 | 194 | def _print_configuration(self) -> None: 195 | """Print the current configuration""" 196 | print("\nCurrent configuration:") 197 | print(f"- Host: {self.host}:{self.port}") 198 | print(f"- Username: {self.username}") 199 | print(f"- Password: {'*' * len(self.password)}") 200 | print(f"- Processing: {'Only ' + self.torrent_type if self.torrent_type else 'Private & Public'} torrents") 201 | print(f"- Categories: {', '.join(self.categories) if self.categories else 'All'}") 202 | print(f"- Minimum seeding time: {self.min_seeding_time} seconds") 203 | print(f"- Minimum peers: {self.min_peers}") 204 | print(f"- Virtual path: {self.virtual_path if self.virtual_path else 'not set'}") 205 | print(f"- Real path: {self.real_path if self.real_path else 'not set'}") 206 | print(f"- Enable recheck: {self.enable_recheck}") 207 | print(f"- Enable orphan check: {self.enable_orphan_check}") 208 | print(f"- Orphan 
states: {self.orphan_states if self.orphan_states else 'not set'}") 209 | 210 | if self.dry_run: 211 | print(f"{Fore.GREEN}- DRY-RUN mode enabled{Style.RESET_ALL}") 212 | print("\nProcessing only selected torrents...") 213 | 214 | def process_torrents(self) -> None: 215 | """Process all torrents""" 216 | torrents = self.get_torrent_list() 217 | self._print_configuration() 218 | 219 | for torrent in torrents: 220 | 221 | properties = self.get_torrent_properties(torrent['hash']) 222 | is_private = properties.get('is_private', False) 223 | 224 | print(f"\nTorrent -> {Fore.CYAN if is_private else Fore.GREEN}{torrent['name']}{Style.RESET_ALL} ({('private' if is_private else 'public')})") 225 | 226 | if self.torrent_type == 'private' and not is_private: 227 | print("Skipping further checks: torrent is public but only private torrents are configured") 228 | continue 229 | elif self.torrent_type == 'public' and is_private: 230 | print("Skipping further checks: torrent is private but only public torrents are configured") 231 | continue 232 | 233 | # Control recheck 234 | if self.enable_recheck: 235 | print("- Checking for errors ->", end=" ") 236 | if torrent.get('state') == "error": 237 | print(f"{Fore.RED}errors found{Style.RESET_ALL}") 238 | self.recheck_torrent(torrent['hash']) 239 | else: 240 | print(f"{Fore.GREEN}no errors found{Style.RESET_ALL}") 241 | 242 | # Tracker check 243 | if not is_private: 244 | print("- Checking for bad trackers ->", end=" ") 245 | bad_trackers = self.check_bad_trackers(torrent) 246 | if bad_trackers: 247 | print(f"{Fore.YELLOW}{len(bad_trackers)} bad tracker{'s' if len(bad_trackers) > 1 else ''} found:{Style.RESET_ALL}") 248 | for tracker, error in bad_trackers.items(): 249 | print(f" {tracker} -> {Fore.RED}{error}{Style.RESET_ALL}") 250 | self.remove_trackers(torrent['hash'], bad_trackers) 251 | else: 252 | print("no bad trackers found") 253 | 254 | # Orphan check 255 | if self.enable_orphan_check and is_private: 256 | print("- Checking 
for orphan status ->", end=" ") 257 | trackers = self.session.get( 258 | urljoin(self.base_url, 'api/v2/torrents/trackers'), 259 | params={'hash': torrent['hash']} 260 | ).json() 261 | 262 | is_orphan = False 263 | for tracker in trackers: 264 | if any(state in tracker.get('msg', '').lower() for state in self.orphan_states): 265 | if torrent.get('num_leechs', 0) < self.min_peers: 266 | is_orphan = True 267 | break 268 | 269 | if is_orphan: 270 | print(f"{Fore.RED}orphan detected{Style.RESET_ALL}") 271 | self.reannounce_torrent(torrent['hash']) 272 | time.sleep(2) 273 | self.delete_torrent(torrent['hash']) 274 | else: 275 | print("no orphan detected") 276 | 277 | # Controllo hardlink 278 | content_path = torrent.get('content_path', '') 279 | if content_path: 280 | if torrent['progress'] != 1: 281 | print("- Skipping hardlink check: torrent not downloaded") 282 | continue 283 | 284 | print("- Checking for hardlinks ->", end=" ") 285 | if self.virtual_path and self.real_path: 286 | content_path = content_path.replace(self.virtual_path, self.real_path) 287 | 288 | has_hardlinks = self.check_hardlinks(content_path) 289 | seeding_time = properties['seeding_time'] 290 | 291 | if has_hardlinks: 292 | print("hardlinks found, nothing to do") 293 | continue 294 | else: 295 | if self.min_seeding_time > 0 and seeding_time < self.min_seeding_time: 296 | print(f"no hardlinks found but I can't delete this torrent, seeding time not met -> {seeding_time}/{self.min_seeding_time}") 297 | continue 298 | 299 | print(f"no hardlinks found deleting torrent...") 300 | self.reannounce_torrent(torrent['hash']) 301 | time.sleep(2) 302 | self.delete_torrent(torrent['hash']) 303 | 304 | DEFAULT_CONFIG = """# qBittorrent server configuration 305 | qbt_host: "http://localhost" # Server address (with http/https). 
306 | qbt_port: "8081" # Web UI Port 307 | qbt_username: "admin" # Web UI Username 308 | qbt_password: "adminadmin" # Web UI Password 309 | 310 | # Configuration torrent management 311 | # Minimum seeding time in seconds (ex: 259200 = 3 days). 312 | # Set to 0 if you want to disable the min_seeding_time check 313 | min_seeding_time: 864000 314 | 315 | # List of categories to be processed. 316 | # Use ["All"] for all categories. 317 | # Use ["Uncategorized"] for torrents without category. 318 | # Or specify categories: ["movies", "tv", "books"] 319 | categories: 320 | - "All" 321 | 322 | # Type of torrent to be processed 323 | # Options: "private", "public" or blank "" to process all. 324 | torrent_type: "" 325 | 326 | # Configuring paths (useful with Docker) 327 | virtual_path: "" # Examample: "/downloads" in Docker 328 | real_path: "" # Example: "/home/user/downloads" real path on the system 329 | 330 | # Automatic controls 331 | enable_recheck: true # Enable automatic recheck torrent in error. 332 | enable_orphan_check: true # Enable orphan torrent checking, works only on private torrents 333 | 334 | # States that identify a torrent as orphaned. 335 | orphan_states: 336 | - "unregistered" 337 | - "not registered" 338 | - "not found" 339 | 340 | # Minimum number of peers before considering a torrent orphaned. 
341 | # Default: 1 342 | min_peers: 1""" 343 | 344 | def create_default_config(config_path: str) -> None: 345 | """Creates a default configuration file""" 346 | if os.path.exists(config_path): 347 | raise FileExistsError(f"Configuration file already exists: {config_path}") 348 | 349 | with open(config_path, 'w') as f: 350 | f.write(DEFAULT_CONFIG) 351 | 352 | print(f"Default configuration file created: {config_path}") 353 | 354 | def get_default_config_name() -> str: 355 | """Get the default configuration file name based on the script name""" 356 | script_name = os.path.basename(sys.argv[0]) 357 | base_name = os.path.splitext(script_name)[0] 358 | return f"{base_name}_config.yaml" 359 | 360 | def validate_config_file(config_path: str) -> None: 361 | """Validates the existence and format of the configuration file""" 362 | path = Path(config_path) 363 | if not path.exists(): 364 | raise FileNotFoundError(f"Configuration file not found: {config_path}") 365 | if not path.suffix.lower() == '.yaml': 366 | raise ValueError("The configuration file must be in YAML format") 367 | 368 | def parse_arguments() -> argparse.Namespace: 369 | """Parsing of command line arguments""" 370 | parser = argparse.ArgumentParser( 371 | description='QBittorrent Manager - Automated torrent management' 372 | ) 373 | 374 | parser.add_argument( 375 | '-c', '--config', 376 | default=get_default_config_name(), 377 | help='YAML configuration file path (default: _config.yaml)' 378 | ) 379 | 380 | parser.add_argument( 381 | '--dry-run', 382 | action='store_true', 383 | help='Run in simulation mode (no actual changes)' 384 | ) 385 | 386 | parser.add_argument( 387 | '--create-config', 388 | action='store_true', 389 | help='Create a default configuration file' 390 | ) 391 | 392 | return parser.parse_args() 393 | 394 | def main() -> None: 395 | try: 396 | args = parse_arguments() 397 | 398 | if args.create_config: 399 | create_default_config(args.config) 400 | return 401 | 402 | 
validate_config_file(args.config) 403 | manager = QBittorrentManager(args.config, args.dry_run) 404 | manager.process_torrents() 405 | except FileExistsError as e: 406 | print(f"Error: {e}") 407 | sys.exit(1) 408 | except FileNotFoundError as e: 409 | print(f"Error: {e}") 410 | print("Use --create-config to create a default configuration file") 411 | sys.exit(1) 412 | except ValueError as e: 413 | print(f"Configuration error: {e}") 414 | sys.exit(1) 415 | except KeyboardInterrupt: 416 | print("\nOperation aborted by user") 417 | sys.exit(1) 418 | except Exception as e: 419 | print(f"Unexpected error: {e}") 420 | sys.exit(1) 421 | 422 | if __name__ == "__main__": 423 | main() -------------------------------------------------------------------------------- /qBittorrentHardlinksChecker/qBittorrentHardlinksChecker.py.readme.md: -------------------------------------------------------------------------------- 1 | # qBittorrentHardlinksChecker.py 2 | 3 | The idea of this script is very simple, it **checks qBittorrents Hard Links** and run some checks on the downloading torrents. 4 | 5 | In my case it helps, judge for yourself if it helps you. 6 | 7 | For managing the seed times of automatic downloads from the various `*Arr`, I normally use [autoremove-torrent](https://github.com/jerrymakesjelly/autoremove-torrents). It is a very complete and useful script that allows me to pick and choose category by category, tracker by tracker, the various torrent removal settings. This is because my space available is not infinite. So I am forced to do a regular cleanup of the various downloads. I always respect the rules of the various private trackers! 8 | 9 | **But let's come to the idea:** Very simply, if the configuration within the automatic downloading programs `*Arr` is set to generate hardlinks, then it means that _until I have deleted both the file from the torrent client and the linked file that is managed automatically_, the space occupied on the disk will be the same. 
This means that as long as I haven't watched and deleted that movie (etc), I could safely keep the shared downloaded file, because it no longer takes up disk space, being a hardlink. 10 | 11 | With this script, for the categories you set, you can check each download. If there are _two or more_ hardlinks the file will not be deleted from qBittorrent. If on the other hand the file has _only one hardlink_, then the script will consider whether or not to delete the file by checking the minimum seed time that has been set. 12 | 13 | **Here is an example of usage:** Downloads that only end up in the automatic categories, e.g. `movie` for Radarr (or whatever your category is) rather than `tv_show` for Sonarr (or whatever your category is), **before** running [autoremove-torrent](https://github.com/jerrymakesjelly/autoremove-torrents) (which is appropriately configured previously)... I run this script and by doing so I make sure that any "duplicates" are not deleted and remain in seed. This helps me with the share ratio and minimum seed time. 14 | 15 | This script runs without needing a connection to the `*Arr`! 

**Requirements:**
- Python 3.8+
- qBittorrent with WebUI enabled
- requests>=2.25.1
- PyYAML>=5.4.1
- colorama>=0.4.4
- typing>=3.7.4

**What can it do?**
- Can only work in certain categories of qBittorrent (even all of them)
- Can only work for certain types of torrents (private, public, or both)
- If the torrent is in an error state, it rechecks it
- If the torrent is "orphaned", it deletes it
- If the seed time is satisfied, deletes the torrent
- Removes failed trackers
- Checks the status of hard-links

**How to use:**
* A configuration file is required; you can create the template via the command `./qBittorrentHardlinksChecker.py --create-config`
* If you are working with only one configuration file, there is no need to specify it each time, otherwise, to specify a configuration file just run the script with `./qBittorrentHardlinksChecker.py --config config.yaml`
* Once the configuration file has been configured, if the file name is `_config.yaml`, then you will not need to call the configuration file, so you can use the script with the simple command `./qBittorrentHardlinksChecker.py`, otherwise you need to specify the configuration file
* If you want to simulate the operation of the script, but without making any changes, then run the script with `./qBittorrentHardlinksChecker.py --dry-run`

**Configuration file**
*qBittorrent server configuration*
qbt_host: "http://localhost" # Server address (with http/https).
qbt_port: "8081" # Web UI Port
qbt_username: "admin" # Web UI Username
qbt_password: "adminadmin" # Web UI Password

*Torrent management configuration
Minimum seeding time in seconds (ex: 259200 = 3 days).
Set to 0 if you want to disable the min_seeding_time check*
min_seeding_time: 864000

*List of categories to be processed.
Use ["All"] for all categories.
Use ["Uncategorized"] for torrents without category.
Or specify categories: ["movies", "tv", "books"]*
categories:
  - "All"

*Type of torrent to be processed
Options: "private", "public" or blank "" to process all.*
torrent_type: ""

*Configuring paths (useful with Docker)*
virtual_path: "" # Example: "/downloads" in Docker
real_path: "" # Example: "/home/user/downloads" real path on the system

*Automatic controls*
enable_recheck: true # Enable automatic recheck of torrents in error.
enable_orphan_check: true # Enable orphan torrent checking, works only on private torrents

*States that identify a torrent as orphaned.*
orphan_states:
- "unregistered"
- "not registered"
- "not found"

*Minimum number of peers before considering a torrent orphaned.
Default: 1*
min_peers: 1

**Note:**
* "Orphan" check is performed only on `private` torrents.
* Error tracker checking is performed only on `public` torrents.

I recommend you use this script with cron or create a timer for `systemd`.
I personally use it via timer so runs right after [autoremove-torrent](https://github.com/jerrymakesjelly/autoremove-torrents) 86 | 87 | **Cron examples:** 88 | 89 | # Hourly check 90 | 0 * * * * /usr/bin/python3 /path/to/qBittorrentHardlinksChecker.py 91 | 92 | # Daily at midnight 93 | 0 0 * * * /usr/bin/python3 /path/to/qBittorrentHardlinksChecker.py -------------------------------------------------------------------------------- /qBittorrentHardlinksChecker/qBittorrentHardlinksChecker.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | ########## CONFIGURATIONS ########## 4 | # Host on which qBittorrent runs 5 | qbt_host="http://10.0.0.100" 6 | # Port -> the same port that is inside qBittorrent option -> Web UI -> Web User Interface 7 | qbt_port="8081" 8 | # Username to access to Web UI 9 | qbt_username="admin" 10 | # Password to access to Web UI 11 | qbt_password="adminadmin" 12 | 13 | # Configure here your categories, comma separated, like -> movie,tv_show 14 | categories='Serie_Tv,Film' 15 | 16 | # Minimum seed time before deletion, expressed in seconds, for example 864000 means 10 days 17 | min_seeding_time=864000 18 | 19 | # Using docker it may happen that the path is different from the real one, this allows to replace part of qBittorrent path, turning it into the real one 20 | # In this example, if a download within qBittorren has /oldpath/film as its path, the script will interpret it as /new/path/film 21 | # This allows volumes within qBittorrent to be mounted differently from the actual path on disk 22 | # Leave empty if not needed 23 | virtual_path="oldpath" # The qBittorrent path, or the part you want to change 24 | real_path="new/path" # The new part of the path 25 | 26 | # Check only private torrents? 
# If not true (lowercase), all torrents in the given categories are checked, not only the private ones
only_private=true

# If true, only for private trackers: check the torrent and, if it is not registered, it will be deleted
private_torrents_check_orphan=true

# If true, only for public torrents: check the trackers and remove the bad ones (only the trackers, not the torrent itself)
public_torrent_check_bad_trackers=true

# If true, any torrent in an error state gets a force recheck, which tries to start the torrent again
receck_erroring_torrent=true
########## CONFIGURATIONS ##########

# Resolve required external tools; abort early with a helpful hint if missing.
jq_executable="$(command -v jq)"
curl_executable="$(command -v curl)"

if [[ -z $jq_executable ]]; then
	echo -e "\n\e[0;91;1mFail on jq. Aborting.\n\e[0m"
	echo "You can find it here: https://stedolan.github.io/jq/"
	echo "Or you can install it with -> sudo apt install jq"
	exit 1
fi

if [[ -z $curl_executable ]]; then
	echo -e "\n\e[0;91;1mFail on curl. Aborting.\n\e[0m"
	echo "You can install it with -> sudo apt install curl"
	exit 2
fi

# With an https host, skip certificate validation (self-signed certs are common on home servers).
if [[ "${qbt_host,,}" == *"https"* ]] ;then
	curl_executable="${curl_executable} --insecure"
fi

# Variable to keep track of dryrun mode
dryrun=false

# Passing "test" as the first argument enables simulation mode (no deletions).
if [ "$1" == "test" ]; then
	dryrun=true
	echo "Dryrun mode turned on."
65 | echo "" 66 | fi 67 | 68 | ########## FUNCTIONS ########## 69 | url_encode() { 70 | local string="${1}" 71 | 72 | # Check if xxd is available 73 | if command -v xxd >/dev/null 2>&1; then 74 | # If xxd is available, use xxd for encoding 75 | printf '%s' "$string" | xxd -p | sed 's/\(..\)/%\1/g' | tr -d '\n' 76 | else 77 | # If jq is available, use jq for encoding 78 | jq -nr --arg s "$string" '$s|@uri' 79 | fi 80 | } 81 | 82 | get_cookie () { 83 | encoded_username=$(url_encode "$qbt_username") 84 | encoded_password=$(url_encode "$qbt_password") 85 | 86 | # If encoding fails, exit the function 87 | if [ $? -ne 0 ]; then 88 | echo "Error during URL encoding" >&2 89 | return 1 90 | fi 91 | 92 | qbt_cookie=$($curl_executable --silent --fail --show-error \ 93 | --header "Referer: ${qbt_host}:${qbt_port}" \ 94 | --cookie-jar - \ 95 | --data "username=${encoded_username}&password=${encoded_password}" ${qbt_host}:${qbt_port}/api/v2/auth/login) 96 | } 97 | 98 | get_torrent_list () { 99 | [[ -z "$qbt_cookie" ]] && get_cookie 100 | torrent_list=$(echo "$qbt_cookie" | $curl_executable --silent --fail --show-error \ 101 | --cookie - \ 102 | --request GET "${qbt_host}:${qbt_port}/api/v2/torrents/info") 103 | } 104 | 105 | delete_torrent () { 106 | hash="$1" 107 | echo "$qbt_cookie" | $curl_executable --silent --fail --show-error \ 108 | -d "hashes=${hash}&deleteFiles=true" \ 109 | --cookie - \ 110 | --request POST "${qbt_host}:${qbt_port}/api/v2/torrents/delete" 111 | echo "Deleted" 112 | } 113 | 114 | recheck_torrent () { 115 | hash="$1" 116 | echo "$qbt_cookie" | $curl_executable --silent --fail --show-error \ 117 | -d "hashes=${hash}" \ 118 | --cookie - \ 119 | --request POST "${qbt_host}:${qbt_port}/api/v2/torrents/recheck" 120 | echo "Command executed" 121 | } 122 | 123 | reannounce_torrent () { 124 | hash="$1" 125 | echo "$qbt_cookie" | $curl_executable --silent --fail --show-error \ 126 | -d "hashes=${hash}" \ 127 | --cookie - \ 128 | --request POST 
"${qbt_host}:${qbt_port}/api/v2/torrents/reannounce" 129 | } 130 | 131 | remove_bad_tracker () { 132 | hash="$1" 133 | single_url="$2" 134 | echo "$qbt_cookie" | $curl_executable --silent --fail --show-error \ 135 | -d "hash=${hash}&urls=${single_url}" \ 136 | --cookie - \ 137 | --request POST "${qbt_host}:${qbt_port}/api/v2/torrents/removeTrackers" 138 | } 139 | 140 | unset_array () { 141 | array_element="$1" 142 | unset torrent_name_array[$array_element] 143 | unset torrent_hash_array[$array_element] 144 | unset torrent_path_array[$array_element] 145 | unset torrent_seeding_time_array[$array_element] 146 | unset torrent_progress_array[$array_element] 147 | unset private_torrent_array[$array_element] 148 | unset torrent_trackers_array[$array_element] 149 | unset torrent_category_array[$array_element] 150 | } 151 | 152 | wait() { 153 | w=$1 154 | echo "I'll wait ${w}s to be sure the reannunce going well..." 155 | while [ $w -gt 0 ]; do 156 | echo -ne "$w\033[0K\r" 157 | sleep 1 158 | w=$((w-1)) 159 | done 160 | } 161 | 162 | check_hardlinks() { 163 | local path="$1" 164 | local more_hard_links=false 165 | 166 | if [ -d "$path" ]; then 167 | # È una directory, controlla i file all'interno 168 | while IFS= read -r -d $'\0' file; do 169 | if [ "$(stat -c %h "$file")" -gt 1 ]; then 170 | more_hard_links=true 171 | break 172 | fi 173 | done < <(find "$path" -type f -print0) 174 | else 175 | # È un file 176 | if [ "$(stat -c %h "$path")" -gt 1 ]; then 177 | more_hard_links=true 178 | fi 179 | fi 180 | 181 | echo "$more_hard_links" 182 | } 183 | ########## FUNCTIONS ########## 184 | 185 | get_torrent_list 186 | 187 | if [ -z "$torrent_list" ]; then 188 | echo "No torrents founds to check" 189 | exit 190 | fi 191 | 192 | echo "Collecting data from qBittorrent, wait..." 
193 | 194 | torrent_name_array=() 195 | torrent_hash_array=() 196 | torrent_path_array=() 197 | torrent_seeding_time_array=() 198 | torrent_progress_array=() 199 | private_torrent_array=() 200 | torrent_trackers_array=() 201 | torrent_category_array=() 202 | 203 | while IFS= read -r line; do 204 | torrent_name_array+=("$line") 205 | done < <(echo $torrent_list | $jq_executable --raw-output '.[] | .name') 206 | 207 | while IFS= read -r line; do 208 | torrent_hash_array+=("$line") 209 | done < <(echo $torrent_list | $jq_executable --raw-output '.[] | .hash') 210 | 211 | while IFS= read -r line; do 212 | torrent_path_array+=("$line") 213 | done < <(echo $torrent_list | $jq_executable --raw-output '.[] | .content_path') 214 | 215 | while IFS= read -r line; do 216 | torrent_seeding_time_array+=("$line") 217 | done < <(echo $torrent_list | $jq_executable --raw-output '.[] | .seeding_time') 218 | 219 | while IFS= read -r line; do 220 | torrent_progress_array+=("$line") 221 | done < <(echo $torrent_list | $jq_executable --raw-output '.[] | .progress') 222 | 223 | while IFS= read -r line; do 224 | torrent_category_array+=("$line") 225 | done < <(echo $torrent_list | $jq_executable --raw-output '.[] | .category') 226 | 227 | while IFS= read -r line; do 228 | torrent_state_array+=("$line") 229 | done < <(echo $torrent_list | $jq_executable --raw-output '.[] | .state') 230 | 231 | for i in "${!torrent_hash_array[@]}"; do 232 | torrent_trackers_array[$i]=$(echo "$qbt_cookie" | $curl_executable --silent --fail --show-error \ 233 | --cookie - \ 234 | --request GET "${qbt_host}:${qbt_port}/api/v2/torrents/trackers?hash=${torrent_hash_array[$i]}") 235 | done 236 | 237 | for i in "${!torrent_hash_array[@]}"; do 238 | private_torrent_array[$i]=$(echo "$qbt_cookie" | $curl_executable --silent --fail --show-error \ 239 | --cookie - \ 240 | --request GET "${qbt_host}:${qbt_port}/api/v2/torrents/properties?hash=${torrent_hash_array[$i]}" | $jq_executable --raw-output '.is_private') 241 | 
done 242 | 243 | if [ -n "$categories" ]; then 244 | echo "Checking hardlinks:" 245 | for j in ${categories//,/ }; do 246 | test=$(echo "$torrent_list" | $jq_executable --raw-output --arg tosearch "$j" '.[] | select(.category == "\($tosearch)") | .name') 247 | 248 | if [[ -z "$test" ]]; then 249 | echo "There's no categories named ${j} or is empty" 250 | continue 251 | else 252 | echo "#####################################" 253 | echo "Checking category ${j}:" 254 | echo "#####################################" 255 | echo "" 256 | 257 | for i in "${!torrent_hash_array[@]}"; do 258 | if [[ $only_private == true ]]; then 259 | if [[ ${torrent_category_array[$i]} == ${j} ]] && [[ ${private_torrent_array[$i]} == true ]]; then 260 | echo "Analyzing torrent -> ${torrent_name_array[$i]}" 261 | 262 | if awk "BEGIN {exit !(${torrent_progress_array[$i]} < 1)}rent"; then 263 | printf "Torrent incomplete, nothing to do -> %0.3g%%\n" $(awk -v var="${torrent_progress_array[$i]}" 'BEGIN{print var * 100}') 264 | else 265 | 266 | if [ -z "$virtual_path" ] || [ -z "$real_path" ]; then 267 | result="${torrent_path_array[$i]}" 268 | else 269 | result=$(echo "${torrent_path_array[$i]/$virtual_path/"$real_path"}") 270 | fi 271 | 272 | more_hard_links=$(check_hardlinks "$result") 273 | 274 | if [ "$more_hard_links" = true ]; then 275 | echo "More than 1 hardlinks found in $result" 276 | else 277 | echo "No additional hardlinks found in $result" 278 | fi 279 | 280 | if [[ $more_hard_links == false ]]; then 281 | echo "Found 1 hardlinks, checking seeding time:" 282 | if [ ${torrent_seeding_time_array[$i]} -gt $min_seeding_time ]; then 283 | echo "I can delete this torrent, seeding time more than $min_seeding_time seconds" 284 | 285 | if [[ $dryrun == true ]]; then 286 | echo "Simulation (dryrun)..." 287 | echo "reannounce torrent" 288 | echo "wait 15 seconds..." 
289 | echo "delete torrent ${torrent_name_array[$i]}" 290 | unset_array $i 291 | else 292 | reannounce_torrent ${torrent_hash_array[$i]} 293 | wait 15 294 | delete_torrent ${torrent_hash_array[$i]} 295 | unset_array $i 296 | fi 297 | 298 | else 299 | echo "I can't delete this torrent, seeding time not meet -> ${torrent_seeding_time_array[$i]}/${min_seeding_time}" 300 | fi 301 | else 302 | echo "More than 1 hardlinks found, nothing to do" 303 | fi 304 | fi 305 | echo "------------------------------" 306 | fi 307 | else 308 | if [[ ${torrent_category_array[$i]} == ${j} ]]; then 309 | echo "Analyzing torrent -> ${torrent_name_array[$i]}" 310 | 311 | if awk "BEGIN {exit !(${torrent_progress_array[$i]} < 1)}rent"; then 312 | printf "Torrent incomplete, nothing to do -> %0.3g%%\n" $(awk -v var="${torrent_progress_array[$i]}" 'BEGIN{print var * 100}') 313 | else 314 | 315 | if [ -z "$virtual_path" ] || [ -z "$real_path" ]; then 316 | result="${torrent_path_array[$i]}" 317 | else 318 | result=$(echo "${torrent_path_array[$i]/$virtual_path/"$real_path"}") 319 | fi 320 | 321 | more_hard_links=$(check_hardlinks "$result") 322 | 323 | if [ "$more_hard_links" = true ]; then 324 | echo "More than 1 hardlinks found in $result" 325 | else 326 | echo "No additional hardlinks found in $result" 327 | fi 328 | 329 | if [[ $more_hard_links == false ]]; then 330 | echo "Found 1 hardlinks, checking seeding time:" 331 | if [ ${torrent_seeding_time_array[$i]} -gt $min_seeding_time ]; then 332 | echo "I can delete this torrent, seeding time more than $min_seeding_time seconds" 333 | 334 | if [[ $dryrun == true ]]; then 335 | echo "Simulation (dryrun)..." 336 | echo "reannounce torrent" 337 | echo "wait 15 seconds..." 
338 | echo "delete torrent ${torrent_name_array[$i]}" 339 | unset_array $i 340 | else 341 | reannounce_torrent ${torrent_hash_array[$i]} 342 | wait 15 343 | delete_torrent ${torrent_hash_array[$i]} 344 | unset_array $i 345 | fi 346 | else 347 | echo "I can't delete this torrent, seeding time not meet -> ${torrent_seeding_time_array[$i]}/${min_seeding_time}" 348 | fi 349 | else 350 | echo "More than 1 hardlinks found, nothing to do" 351 | fi 352 | fi 353 | echo "------------------------------" 354 | fi 355 | fi 356 | done 357 | fi 358 | done 359 | echo "Harklinks check completed" 360 | echo "------------------------------" 361 | else 362 | echo "Categories list empty" 363 | echo "------------------------------" 364 | fi 365 | 366 | if [[ $private_torrents_check_orphan == true ]]; then 367 | echo "Checking for orphan torrents:" 368 | 369 | for i in "${!torrent_hash_array[@]}"; do 370 | orphan_torrent=$(echo ${torrent_trackers_array[$i]} | $jq_executable --raw-output '.[] | select((.status == 4) and (.num_peers < 1) and ((.msg|test("unregistered"; "i")) or (.msg|test("not registered"; "i")))) | any') 371 | if [[ $orphan_torrent == true ]]; then 372 | echo "Found orphan torrent -> ${torrent_name_array[$i]}, deleting" 373 | 374 | if [[ $dryrun == true ]]; then 375 | echo "Simulation (dryrun)..." 
376 | echo "delete torrent ${torrent_name_array[$i]}" 377 | unset_array $i 378 | else 379 | delete_torrent ${torrent_hash_array[$i]} 380 | unset_array $i 381 | fi 382 | 383 | echo "------------------------------" 384 | fi 385 | done 386 | echo "Orphan check completed" 387 | echo "------------------------------" 388 | fi 389 | 390 | if [[ $public_torrent_check_bad_trackers == true ]]; then 391 | echo "Checking for bad trackers:" 392 | 393 | for i in "${!torrent_hash_array[@]}"; do 394 | if [[ ${private_torrent_array[$i]} != true ]]; then 395 | url_list=$(echo ${torrent_trackers_array[$i]} | $jq_executable --raw-output '.[] | select((.status == 4) and (.num_peers < 1)) .url') 396 | 397 | if [[ ! -z "$url_list" ]]; then 398 | echo "Some problem found on -> ${torrent_name_array[$i]}" 399 | echo "fixing..." 400 | 401 | if [[ $dryrun == true ]]; then 402 | echo "Simulation (dryrun)..." 403 | echo "removing bad tracker for torrent ${torrent_name_array[$i]}" 404 | else 405 | remove_bad_tracker ${torrent_hash_array[$i]} $(echo $url_list | tr '\n' ' ' | tr ' ' '|' | rev | cut -c2- | rev) 406 | fi 407 | 408 | echo "------------------------------" 409 | else 410 | continue 411 | fi 412 | fi 413 | done 414 | echo "Bad trackers check completed" 415 | echo "------------------------------" 416 | fi 417 | 418 | if [[ $receck_erroring_torrent == true ]]; then 419 | echo "Checking for errored torrent:" 420 | 421 | for i in "${!torrent_hash_array[@]}"; do 422 | if [[ ${torrent_state_array[$i]} == "error" ]]; then 423 | echo "Found erroring torrent -> ${torrent_name_array[$i]}, I'll recheck it" 424 | 425 | if [[ $dryrun == true ]]; then 426 | echo "Simulation (dryrun)..." 
427 | echo "checking torrent ${torrent_name_array[$i]}" 428 | else 429 | recheck_torrent ${torrent_hash_array[$i]} 430 | fi 431 | 432 | echo "------------------------------" 433 | fi 434 | done 435 | echo "Error check completed" 436 | echo "------------------------------" 437 | fi -------------------------------------------------------------------------------- /qBittorrentHardlinksChecker/qBittorrentHardlinksChecker.sh.readme.md: -------------------------------------------------------------------------------- 1 | # qBittorrentHardlinksChecker.sh 2 | 3 | The idea of this script is very simple, it **checks qBittorrents Hard Links**. 4 | 5 | In my case it helps, judge for yourself if it helps you. 6 | 7 | For managing the seed times of automatic downloads from the various `*Arr`, I normally use [autoremove-torrent](https://github.com/jerrymakesjelly/autoremove-torrents). It is a very complete and useful script that allows me to pick and choose category by category, tracker by tracker, the various torrent removal settings. This is because my space available is not infinite. So I am forced to do a regular cleanup of the various downloads. I always respect the rules of the various private trackers! 8 | 9 | **But let's come to the idea:** Very simply, if the configuration within the automatic downloading programs `*Arr` is set to generate hardlinks, then it means that _until I have deleted both the file from the torrent client and the linked file that is managed automatically_, the space occupied on the disk will be the same. This means that as long as I haven't watched and deleted that movie (etc), I could safely keep the shared downloaded file, because it no longer takes up disk space, being a hardlink. 10 | 11 | With this script, for the categories you set, you can check each download. If there are _two or more_ hardlinks the file will not be deleted from qBittorrent. 
If on the other hand the file has _only one hardlink_, then the script will consider whether or not to delete the file by checking the minimum seed time that has been set.

**Here is an example of usage:** Downloads that only end up in the automatic categories, e.g. `movie` for Radarr (or whatever your category is) rather than `tv_show` for Sonarr (or whatever your category is), **before** running [autoremove-torrent](https://github.com/jerrymakesjelly/autoremove-torrents) (which is appropriately configured previously)... I run this script and by doing so I make sure that any "duplicates" are not deleted and remain in seed. This helps me with the share ratio and minimum seed time.


**How to use:**
* First make sure your Radarr/Sonarr user can execute the script with something like this:
  * `chown USER:GROUP qBittorrentHardlinksChecker.sh` where `USER:GROUP` is the user and group of Radarr/Sonarr.
  * Then be sure it is executable: `chmod +x qBittorrentHardlinksChecker.sh`

**Note:** not being a script that is called from `*Arr` it's not strictly necessary to change user and group, just make sure that the script can be executed by the user concerned.

* Modify the scripts `########## CONFIGURATIONS ##########` section:
  * `qbt_username` -> username to access to qBittorrent Web UI.
  * `qbt_password` -> password to access to qBittorrent Web UI.
  * Note that if the script runs on the same device that runs qBittorrent, you can set `Bypass authentication for clients on localhost`. When the script executes, the username and password are not required.
  * `qbt_host` -> if the script is on the same device as qBittorrent use `http://localhost`, otherwise, set this to the remote device.
  * `qbt_port` -> is the Web UI port.
  * `category_list` -> is the list of categories upon which the script performs the check.
  * `min_seeding_time` -> is the minimum seed time expressed in seconds.
31 | * `only_private` -> if true, the script will only check the torrents that are from private trackers. In this way you can set [autoremove-torrent](https://github.com/jerrymakesjelly/autoremove-torrents) in order to remove only the remaining public trackers. This help the share ratio and helps you to find and remove torrents from public trackers with your own rules. 32 | * `private_torrents_check_orphan` -> This is only for private trackers. If `true`, check the torrent and if is not registered, it will be deleted. 33 | * `public_torrent_check_bad_trackers` -> Only for public torrents. If `true`, check the trackers and the bad one/s will be eliminated, but _not_ the torrent itself, _only_ the trackers. Be patient, this can be a "slow" function during the deleting/ion phase. 34 | 35 | I recommend you use this script with cron or create a timer for `systemd`. I personally use it via timer so runs right after [autoremove-torrent](https://github.com/jerrymakesjelly/autoremove-torrents) 36 | -------------------------------------------------------------------------------- /radarr_cleanup_packed_torrent.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Examples for testing 4 | # radarr_moviefile_sourcefolder="/data/torrent/movies/Justice.League.2017.1080p.HDRip.X264.AAC-m2g" radarr_moviefile_sourcepath="/data/torrent/movies/Justice.League.2017.1080p.HDRip.X264.AAC-m2g/Justice.League.2017.1080p.HDRip.X264.AAC-m2g.mkv" 5 | 6 | # Instructions 7 | # Put this script somewhere on your file system like /usr/local/bin and make it executable. 8 | # 9 | # In Radarr, Settings -> Connect add a Custom Script 10 | # On Grab: No 11 | # On Download: Yes 12 | # On Upgrade: Yes 13 | # On Rename: No 14 | # Path: /path/to/where/script/is/radarr_cleanup_packed_torrent.sh 15 | # Arguments: 16 | 17 | # Tune values below to protect your torrents w/ small rar files or non-torrent download client. 

# In *bytes*, the biggest rar file size limit to prevent video deletion from torrents with unrelated rar files (like subs)
# 25 * 1024 * 1024
rar_min_size=26214400

# Seconds to wait between size checks for in progress unpack
unpack_time=5

# The final base directory torrents end up in, for example "movies" from /data/torrents/movies
radarr_final_dir="Film"

# Identifiable portion of path to torrents, so it will only run on torrents.
# For example, with a path of "/data/torrents/movies", "torrents" is a good choice.
torrent_path_portion="Automatici"

# Test that this is a download event, so we don't run on grab or rename.
# shellcheck disable=SC2154
if [[ "${radarr_eventtype}" != "Download" ]]; then
  # Fix: message previously said "Sonarr" in the Radarr script (copy-paste leftover).
  echo "[Torrent Cleanup] Radarr Event Type is NOT Download, exiting."
  exit
fi

# Test this file exists, no point running on a file that isn't there.
# shellcheck disable=SC2154
if ! [[ -f "${radarr_moviefile_sourcepath}" ]]; then
  echo "[Torrent Cleanup] File ${radarr_moviefile_sourcepath} does not exist, exiting."
  exit
fi

# Test that this is a torrent, so we don't run on usenet downloads.
# shellcheck disable=SC2154
if ! [[ "${radarr_moviefile_sourcepath}" =~ ${torrent_path_portion} ]]; then
  # Fix: report the configured marker instead of the hard-coded word "torrent".
  echo "[Torrent Cleanup] Path ${radarr_moviefile_sourcepath} does not contain \"${torrent_path_portion}\", exiting."
  exit
fi

# Test that this is a multi-file torrent, so we don't run on single file torrents.
# shellcheck disable=SC2154
base_dir=$( basename "${radarr_moviefile_sourcefolder}" )
if [ "${base_dir}" == "${radarr_final_dir}" ]; then
  echo "[Torrent Cleanup] Single file torrent, exiting."
  exit
fi

# We might run while the unpack is still happening, so wait for that before removing.
echo "[Torrent Cleanup] Starting wait for ${radarr_moviefile_sourcepath} unpacking..."
64 | file_size_start=$( stat --printf="%s" "${radarr_moviefile_sourcepath}" ) 65 | sleep ${unpack_time} 66 | file_size_end=$( stat --printf="%s" "${radarr_moviefile_sourcepath}" ) 67 | until [[ ${file_size_start} -eq ${file_size_end} ]]; do 68 | file_size_start=$( stat --printf="%s" "${radarr_moviefile_sourcepath}" ) 69 | sleep ${unpack_time} 70 | file_size_end=$( stat --printf="%s" "${radarr_moviefile_sourcepath}" ) 71 | done 72 | echo "[Torrent Cleanup] Finished wait for ${radarr_moviefile_sourcepath} unpacking..." 73 | 74 | # Test for rar and r## files and check the *size* of the biggest one so we don't run due to packed subs or something. 75 | # shellcheck disable=SC2154 76 | if find "${radarr_moviefile_sourcefolder}" -type f -iregex '.*\.r[0-9a][0-9r]$' | grep -Eq '.*'; then 77 | # shellcheck disable=SC2154 78 | rar_size="$( find "${radarr_moviefile_sourcefolder}" -type f -iregex '.*\.r[0-9a][0-9r]$' -ls | sort -nk 7 | tail -1 | awk '{ print $7 }' )" 79 | if [[ ${rar_size} -gt ${rar_min_size} ]]; then 80 | echo "[Torrent Cleanup] Rar file size ${rar_size} exceeds minimum of ${rar_min_size}, deleting video file." 81 | rm "${radarr_moviefile_sourcepath}" 82 | else 83 | echo "[Torrent Cleanup] Rar file size ${rar_size} DOES NOT MEET minimum of ${rar_min_size}, skipping deletion of video file." 84 | fi 85 | else 86 | echo "[Torrent Cleanup] No rar files, exiting." 87 | fi 88 | -------------------------------------------------------------------------------- /sonarr_cleanup_packed_torrent.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Examples for testing 4 | # sonarr_episodefile_sourcefolder="/data/torrent/tv/Penny.Dreadful.S01E01.720p.HDTV.x264-2HD" sonarr_episodefile_sourcepath="/data/torrent/tv/Penny.Dreadful.S01E01.720p.HDTV.x264-2HD/penny.dreadful.s01e01.720p.hdtv.x264-2hd.mkv" 5 | 6 | # Instructions 7 | # Put this script somewhere on your file system like /usr/local/bin and make it executable. 
#
# In Sonarr, Settings -> Connect add a Custom Script
# On Grab: No
# On Download: Yes
# On Upgrade: Yes
# On Rename: No
# Path: /path/to/where/script/is/sonarr_cleanup_packed_torrent.sh
# Arguments:

# Tune values below to protect your torrents w/ small rar files or non-torrent download client.

# In *bytes*, the biggest rar file size limit to prevent video deletion from torrents with unrelated rar files (like subs)
# 25 * 1024 * 1024
rar_min_size=26214400

# Seconds to wait between size checks for in progress unpack
unpack_time=5

# The final base directory torrents end up in, for example "tv" from /data/torrents/tv
sonarr_final_dir="Serie_Tv"

# Identifiable portion of path to torrents, so it will only run on torrents.
# For example, a path of "/data/torrents/tv", "torrents" is a good choice.
torrent_path_portion="Automatici"

# Test that this is a download event, so we don't run on grab or rename.
# shellcheck disable=SC2154
if [[ "${sonarr_eventtype}" != "Download" ]]; then
  echo "[Torrent Cleanup] Sonarr Event Type is NOT Download, exiting."
  exit
fi

# Test this file exists, no point running on a file that isn't there.
# shellcheck disable=SC2154
if ! [[ -f "${sonarr_episodefile_sourcepath}" ]]; then
  echo "[Torrent Cleanup] File ${sonarr_episodefile_sourcepath} does not exist, exiting."
  exit
fi

# Test that this is a torrent, so we don't run on usenet downloads.
# shellcheck disable=SC2154
if ! [[ "${sonarr_episodefile_sourcepath}" =~ ${torrent_path_portion} ]]; then
  # Fix: report the configured marker instead of the hard-coded word "torrent".
  echo "[Torrent Cleanup] Path ${sonarr_episodefile_sourcepath} does not contain \"${torrent_path_portion}\", exiting."
  exit
fi

# Test that this is a multi-file torrent, so we don't run on single file torrents.
55 | # shellcheck disable=SC2154 56 | base_dir=$( basename "${sonarr_episodefile_sourcefolder}" ) 57 | if [[ "${base_dir}" == "${sonarr_final_dir}" ]]; then 58 | echo "[Torrent Cleanup] Single file torrent, exiting." 59 | exit 60 | fi 61 | 62 | # We might run while the unpack is still happening, so wait for that before removing. 63 | echo "[Torrent Cleanup] Starting wait for ${sonarr_episodefile_sourcepath} unpacking..." 64 | file_size_start=$( stat --printf="%s" "${sonarr_episodefile_sourcepath}" ) 65 | sleep ${unpack_time} 66 | file_size_end=$( stat --printf="%s" "${sonarr_episodefile_sourcepath}" ) 67 | until [[ ${file_size_start} -eq ${file_size_end} ]]; do 68 | file_size_start=$( stat --printf="%s" "${sonarr_episodefile_sourcepath}" ) 69 | sleep ${unpack_time} 70 | file_size_end=$( stat --printf="%s" "${sonarr_episodefile_sourcepath}" ) 71 | done 72 | echo "[Torrent Cleanup] Finished wait for ${sonarr_episodefile_sourcepath} unpacking..." 73 | 74 | # Test for rar and r## files and check the *size* of the biggest one so we don't run due to packed subs or something. 75 | # shellcheck disable=SC2154 76 | if find "${sonarr_episodefile_sourcefolder}" -type f -iregex '.*\.r[0-9a][0-9r]$' | grep -Eq '.*'; then 77 | # shellcheck disable=SC2154 78 | rar_size="$( find "${sonarr_episodefile_sourcefolder}" -type f -iregex '.*\.r[0-9a][0-9r]$' -ls | sort -nk 7 | tail -1 | awk '{ print $7 }' )" 79 | if [[ ${rar_size} -gt ${rar_min_size} ]]; then 80 | echo "[Torrent Cleanup] Rar file size ${rar_size} exceeds minimum of ${rar_min_size}, deleting video file." 81 | rm "${sonarr_episodefile_sourcepath}" 82 | else 83 | echo "[Torrent Cleanup] Rar file size ${rar_size} DOES NOT MEET minimum of ${rar_min_size}, skipping deletion of video file." 84 | fi 85 | else 86 | echo "[Torrent Cleanup] No rar files, exiting." 87 | fi 88 | --------------------------------------------------------------------------------