├── .env.sample ├── .github └── workflows │ └── shellcheck.yml ├── .gitignore ├── .vscode └── settings.json ├── LICENSE ├── README.md ├── backup_config.sh ├── dupe.sh ├── f2b-dump.sh ├── merge_folders.py ├── notifiarr-branch-builder.sh ├── omegabrr_upgrade.sh ├── pic-update.sh ├── pmm-update.sh ├── qbm-qbit.sh ├── qbm-update.sh ├── radarr_dupefinder.sh ├── servarr └── servarr_bot_merge.sh ├── sonarr_dupefinder.sh └── zfsburn.sh /.env.sample: -------------------------------------------------------------------------------- 1 | # .env 2 | # Environment Configuration StarrScripts Sample 2024-04 3 | # Rename this file to .env and fill in the values accordingly. 4 | # Xseed 5 | ## Download Client Names 6 | ### For multiple clients, use format: "client1,client2,client3" 7 | ### Do not include extra spaces around commas! 8 | TORRENT_CLIENTS="" # Examples: "Qbit", "Qbit,Deluge" 9 | USENET_CLIENTS="" # Examples: "SABnzbd", "SABnzbd,SABnzbd Anime" 10 | ## Cross Seed API configuration 11 | XSEED_HOST="" # Example: "crossseed" 12 | XSEED_PORT="" # Example: "2468" 13 | ## API Key for Cross Seed, if applicable 14 | XSEED_APIKEY="" # Example: "your-api-key" 15 | ## Path to store the script's database of prior searches 16 | LOG_FILE="" # Example: "/config/xseed_db.log" 17 | LOGID_FILE="" # Example: "/config/xseed-id.log" 18 | # ZFS Destory 19 | VERBOSE=0 20 | MAX_FREQ=2 21 | MAX_HOURLY=2 22 | MAX_DAILY=1 23 | MAX_WEEKLY=0 24 | MAX_MONTHLY=0 25 | # Jdupes 26 | JDUPES_OUTPUT_LOG="" # Example: "/.config/jdupes.log" 27 | JDUPES_SOURCE_DIR="" # Example: "/mnt/data/media/" 28 | JDUPES_DESTINATION_DIR="" # Example: "/mnt/data/torrents/" 29 | JDUPES_HASH_DB="" # Example: "/.config/jdupes_hashdb" 30 | # Qbittorrent Manage 31 | QBIT_MANAGE_LOCK_FILE_PATH="" # Example: "/var/lock/qbm-qbit.lock" 32 | QBIT_MANAGE_PATH="" # Example: "/opt/qbit-manage" 33 | QBIT_MANAGE_VENV_PATH="" # Example: "/opt/qbit-manage/.venv" 34 | QBIT_MANAGE_CONFIG_PATH="" # Example: "/opt/qbit-manage/config.yml" 35 | QBIT_MANAGE_OPTIONS="" # Example: "-cs -re -cu -tu -ru -sl -r" 36 | -------------------------------------------------------------------------------- /.github/workflows/shellcheck.yml: -------------------------------------------------------------------------------- 1 | name: ShellCheck 2 | 3 | on: [push, pull_request] 4 | 5 | jobs: 6 | shellcheck: 7 | name: Run ShellCheck 8 | runs-on: ubuntu-latest 9 | steps: 10 | - name: Checkout code 11 | uses: actions/checkout@v4 12 | 13 | - name: Install ShellCheck 14 | run: sudo apt-get install -y shellcheck 15 | 16 | - name: Run ShellCheck 17 | run: find . -type f -name "*.sh" -exec shellcheck {} + 18 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | ## Ignore Visual Studio temporary files, build results, and 2 | ## files generated by popular Visual Studio add-ons. 
3 | ## 4 | ## Get latest from https://github.com/github/gitignore/blob/master/VisualStudio.gitignore 5 | 6 | # User-specific files 7 | *.rsuser 8 | *.suo 9 | *.user 10 | *.userosscache 11 | *.sln.docstates 12 | 13 | # User-specific files (MonoDevelop/Xamarin Studio) 14 | *.userprefs 15 | 16 | # Mono auto generated files 17 | mono_crash.* 18 | 19 | # Build results 20 | [Dd]ebug/ 21 | [Dd]ebugPublic/ 22 | [Rr]elease/ 23 | [Rr]eleases/ 24 | x64/ 25 | x86/ 26 | [Aa][Rr][Mm]/ 27 | [Aa][Rr][Mm]64/ 28 | bld/ 29 | [Bb]in/ 30 | [Oo]bj/ 31 | [Ll]og/ 32 | [Ll]ogs/ 33 | 34 | # Visual Studio 2015/2017 cache/options directory 35 | .vs/ 36 | # Uncomment if you have tasks that create the project's static files in wwwroot 37 | #wwwroot/ 38 | 39 | # Visual Studio 2017 auto generated files 40 | Generated\ Files/ 41 | 42 | # MSTest test Results 43 | [Tt]est[Rr]esult*/ 44 | [Bb]uild[Ll]og.* 45 | 46 | # NUnit 47 | *.VisualState.xml 48 | TestResult.xml 49 | nunit-*.xml 50 | 51 | # Build Results of an ATL Project 52 | [Dd]ebugPS/ 53 | [Rr]eleasePS/ 54 | dlldata.c 55 | 56 | # Benchmark Results 57 | BenchmarkDotNet.Artifacts/ 58 | 59 | # .NET Core 60 | project.lock.json 61 | project.fragment.lock.json 62 | artifacts/ 63 | 64 | # StyleCop 65 | StyleCopReport.xml 66 | 67 | # Files built by Visual Studio 68 | *_i.c 69 | *_p.c 70 | *_h.h 71 | *.ilk 72 | *.meta 73 | *.obj 74 | *.iobj 75 | *.pch 76 | *.pdb 77 | *.ipdb 78 | *.pgc 79 | *.pgd 80 | *.rsp 81 | *.sbr 82 | *.tlb 83 | *.tli 84 | *.tlh 85 | *.tmp 86 | *.tmp_proj 87 | *_wpftmp.csproj 88 | *.log 89 | *.vspscc 90 | *.vssscc 91 | .builds 92 | *.pidb 93 | *.svclog 94 | *.scc 95 | 96 | # Chutzpah Test files 97 | _Chutzpah* 98 | 99 | # Visual C++ cache files 100 | ipch/ 101 | *.aps 102 | *.ncb 103 | *.opendb 104 | *.opensdf 105 | *.sdf 106 | *.cachefile 107 | *.VC.db 108 | *.VC.VC.opendb 109 | 110 | # Visual Studio profiler 111 | *.psess 112 | *.vsp 113 | *.vspx 114 | *.sap 115 | 116 | # Visual Studio Trace Files 117 | *.e2e 118 | 119 | # TFS 2012 Local Workspace 120 | $tf/ 121 | 122 | # Guidance Automation Toolkit 123 | *.gpState 124 | 125 | # ReSharper is a .NET coding add-in 126 | _ReSharper*/ 127 | *.[Rr]e[Ss]harper 128 | *.DotSettings.user 129 | 130 | # TeamCity is a build add-in 131 | _TeamCity* 132 | 133 | # DotCover is a Code Coverage Tool 134 | *.dotCover 135 | 136 | # AxoCover is a Code Coverage Tool 137 | .axoCover/* 138 | !.axoCover/settings.json 139 | 140 | # Visual Studio code coverage results 141 | *.coverage 142 | *.coveragexml 143 | 144 | # NCrunch 145 | _NCrunch_* 146 | .*crunch*.local.xml 147 | nCrunchTemp_* 148 | 149 | # MightyMoose 150 | *.mm.* 151 | AutoTest.Net/ 152 | 153 | # Web workbench (sass) 154 | .sass-cache/ 155 | 156 | # Installshield output folder 157 | [Ee]xpress/ 158 | 159 | # DocProject is a documentation generator add-in 160 | DocProject/buildhelp/ 161 | DocProject/Help/*.HxT 162 | DocProject/Help/*.HxC 163 | DocProject/Help/*.hhc 164 | DocProject/Help/*.hhk 165 | DocProject/Help/*.hhp 166 | DocProject/Help/Html2 167 | DocProject/Help/html 168 | 169 | # Click-Once directory 170 | publish/ 171 | 172 | # Publish Web Output 173 | *.[Pp]ublish.xml 174 | *.azurePubxml 175 | # Note: Comment the next line if you want to checkin your web deploy settings, 176 | # but database connection strings (with potential passwords) will be unencrypted 177 | *.pubxml 178 | *.publishproj 179 | 180 | # Microsoft Azure Web App publish settings. 
Comment the next line if you want to 181 | # checkin your Azure Web App publish settings, but sensitive information contained 182 | # in these scripts will be unencrypted 183 | PublishScripts/ 184 | 185 | # NuGet Packages 186 | *.nupkg 187 | # NuGet Symbol Packages 188 | *.snupkg 189 | # The packages folder can be ignored because of Package Restore 190 | **/[Pp]ackages/* 191 | # except build/, which is used as an MSBuild target. 192 | !**/[Pp]ackages/build/ 193 | # Uncomment if necessary however generally it will be regenerated when needed 194 | #!**/[Pp]ackages/repositories.config 195 | # NuGet v3's project.json files produces more ignorable files 196 | *.nuget.props 197 | *.nuget.targets 198 | 199 | # Microsoft Azure Build Output 200 | csx/ 201 | *.build.csdef 202 | 203 | # Microsoft Azure Emulator 204 | ecf/ 205 | rcf/ 206 | 207 | # Windows Store app package directories and files 208 | AppPackages/ 209 | BundleArtifacts/ 210 | Package.StoreAssociation.xml 211 | _pkginfo.txt 212 | *.appx 213 | *.appxbundle 214 | *.appxupload 215 | 216 | # Visual Studio cache files 217 | # files ending in .cache can be ignored 218 | *.[Cc]ache 219 | # but keep track of directories ending in .cache 220 | !?*.[Cc]ache/ 221 | 222 | # Others 223 | ClientBin/ 224 | ~$* 225 | *~ 226 | *.dbmdl 227 | *.dbproj.schemaview 228 | *.jfm 229 | *.pfx 230 | *.publishsettings 231 | orleans.codegen.cs 232 | 233 | # Including strong name files can present a security risk 234 | # (https://github.com/github/gitignore/pull/2483#issue-259490424) 235 | #*.snk 236 | 237 | # Since there are multiple workflows, uncomment next line to ignore bower_components 238 | # (https://github.com/github/gitignore/pull/1529#issuecomment-104372622) 239 | #bower_components/ 240 | 241 | # RIA/Silverlight projects 242 | Generated_Code/ 243 | 244 | # Backup & report files from converting an old project file 245 | # to a newer Visual Studio version. Backup files are not needed, 246 | # because we have git ;-) 247 | _UpgradeReport_Files/ 248 | Backup*/ 249 | UpgradeLog*.XML 250 | UpgradeLog*.htm 251 | ServiceFabricBackup/ 252 | *.rptproj.bak 253 | 254 | # SQL Server files 255 | *.mdf 256 | *.ldf 257 | *.ndf 258 | 259 | # Business Intelligence projects 260 | *.rdl.data 261 | *.bim.layout 262 | *.bim_*.settings 263 | *.rptproj.rsuser 264 | *- [Bb]ackup.rdl 265 | *- [Bb]ackup ([0-9]).rdl 266 | *- [Bb]ackup ([0-9][0-9]).rdl 267 | 268 | # Microsoft Fakes 269 | FakesAssemblies/ 270 | 271 | # GhostDoc plugin setting file 272 | *.GhostDoc.xml 273 | 274 | # Node.js Tools for Visual Studio 275 | .ntvs_analysis.dat 276 | node_modules/ 277 | 278 | # Visual Studio 6 build log 279 | *.plg 280 | 281 | # Visual Studio 6 workspace options file 282 | *.opt 283 | 284 | # Visual Studio 6 auto-generated workspace file (contains which files were open etc.) 
285 | *.vbw 286 | 287 | # Visual Studio LightSwitch build output 288 | **/*.HTMLClient/GeneratedArtifacts 289 | **/*.DesktopClient/GeneratedArtifacts 290 | **/*.DesktopClient/ModelManifest.xml 291 | **/*.Server/GeneratedArtifacts 292 | **/*.Server/ModelManifest.xml 293 | _Pvt_Extensions 294 | 295 | # Paket dependency manager 296 | .paket/paket.exe 297 | paket-files/ 298 | 299 | # FAKE - F# Make 300 | .fake/ 301 | 302 | # CodeRush personal settings 303 | .cr/personal 304 | 305 | # Python Tools for Visual Studio (PTVS) 306 | __pycache__/ 307 | *.pyc 308 | 309 | # Cake - Uncomment if you are using it 310 | # tools/** 311 | # !tools/packages.config 312 | 313 | # Tabs Studio 314 | *.tss 315 | 316 | # Telerik's JustMock configuration file 317 | *.jmconfig 318 | 319 | # BizTalk build output 320 | *.btp.cs 321 | *.btm.cs 322 | *.odx.cs 323 | *.xsd.cs 324 | 325 | # OpenCover UI analysis results 326 | OpenCover/ 327 | 328 | # Azure Stream Analytics local run output 329 | ASALocalRun/ 330 | 331 | # MSBuild Binary and Structured Log 332 | *.binlog 333 | 334 | # NVidia Nsight GPU debugger configuration file 335 | *.nvuser 336 | 337 | # MFractors (Xamarin productivity tool) working folder 338 | .mfractor/ 339 | 340 | # Local History for Visual Studio 341 | .localhistory/ 342 | 343 | # BeatPulse healthcheck temp database 344 | healthchecksdb 345 | 346 | # Backup folder for Package Reference Convert tool in Visual Studio 2017 347 | MigrationBackup/ 348 | 349 | # Ionide (cross platform F# VS Code tools) working folder 350 | .ionide/ 351 | 352 | # Ignore .env 353 | .env 354 | -------------------------------------------------------------------------------- /.vscode/settings.json: -------------------------------------------------------------------------------- 1 | { 2 | "cSpell.words": [ 3 | "hashdb", 4 | "zfsburn" 5 | ] 6 | } -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2022 bakerboy448 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # StarrScripts 2 | 3 | A curated collection of scripts to optimize and manage various functions related to Starr applications and associated tools. 
4 | Occasionally holds random useful scripts as well. 5 | These scripts are designed to enhance functionality, improve management, and automate routine tasks. 6 | All scripts are **Created By: [Bakerboy448](https://github.com/bakerboy448/) unless otherwise noted.** 7 | 8 | **Warning**: Do not edit files on Windows that will be executed on Linux without ensuring the line-endings are set to `LF`. 9 | This means that `CRLF` cannot be used in .sh scripts. Bash scripts will not execute properly and you will receive an 10 | error. 11 | 12 | ## Scripts Overview 13 | 14 | ### Cross-Seed Category Filter for Qbittorrent 15 | 16 | > This script has been removed and is no longer supported as part of Bakerboy448's v4.0.0 purge. 17 | > Please see the [v4.0.0 Release Notes](https://github.com/bakerboy448/StarrScripts/releases/tag/v4.0.0) 18 | 19 | - **Script:** `xseed_qbit_cat_filter.sh` 20 | - **Description:** Filters cross-seed requests by categories or trackers for execution by Qbittorrent. 21 | - **Instructions:** Open the file in a text editor and modify the variable definitions at the top of the script, then replace the "Execute on completion" command in qBittorrent with the one given in the notes at the top of the script. 22 | - **Creator:** [zakkarry](https://github.com/zakkarry) 23 | - **Usage:** Execute the script to filter categories or trackers as needed in Qbittorrent setups. 24 | 25 | ### Cross-Seed Trigger for Starr Apps 26 | 27 | > This script has been removed and is no longer supported. 28 | > Please see the [v4.0.0 Release Notes](https://github.com/bakerboy448/StarrScripts/releases/tag/v4.0.0) 29 | 30 | - **Script:** `xseed.sh` 31 | - **Description:** Triggers a cross-seed search post-import or post-upgrade in Starr applications. 32 | - **Creator:** [Bakerboy448](https://github.com/bakerboy448/) with assistance and many improvements from [zakkarry](https://github.com/zakkarry) 33 | - **Instructions** 34 | 1. If using environmental variables file, copy `.env.sample` to `.env`. 35 | 2. If not using `.env`, open the script in a text editor and modify the required values under "# Xseed" header. 36 | 3. Ensure that your download client's _NAME_ in the Download Client section of the respective \*arr match the variables in the script. 37 | 4. Docker Users: Mount `.env` and `xseed.sh` to your Starr's `/config` mount. 38 | 5. In your \*arr, navigate to `Settings` -> `Connect` and add a "Custom Script" for the "On Import Complete" for Sonarr and "On File Import" and "On File Upgrade" for Radarr. 39 | 6. Test and Save. 40 | 41 | ### Cross-Seed Updater 42 | 43 | > This script has been removed and is no longer supported. 44 | > Please see the [v4.0.0 Release Notes](https://github.com/bakerboy448/StarrScripts/releases/tag/v4.0.0) 45 | 46 | - **Script:** `xseed-update.sh` 47 | - **Description:** Updates the [Cross-Seed](https://github.com/cross-seed/cross-seed) tool to its latest version. 48 | 49 | ### Duplicate File Manager 50 | 51 | - **Script:** `dupe.sh` 52 | - **Description:** Executes `jdupes` to find and manage duplicate files in the specified directory. 53 | - **Instructions:** 54 | 1. Copy `.env.sample` to `.env`. 55 | 2. Populate required values under "# Jdupes" header. 56 | 3. Review and adjust script parameters to fit your use case. 57 | - **Output:** Results are saved to a file as specified in the script. 
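For reference, a populated "# Jdupes" block of the `.env` might look like the following sketch (the values simply mirror the examples shipped in `.env.sample`; adjust them to your own paths):

```bash
# Jdupes
JDUPES_OUTPUT_LOG="/.config/jdupes.log"
JDUPES_SOURCE_DIR="/mnt/data/media/"
JDUPES_DESTINATION_DIR="/mnt/data/torrents/"
JDUPES_HASH_DB="/.config/jdupes_hashdb"
```

`dupe.sh` picks these values up automatically when the `.env` file sits in the same directory as the script.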
58 | 59 | ### Merge Folders Utility 60 | 61 | - **Script:** `merge_folders.py` 62 | - **Description:** A robust utility designed for merging multiple directories into a single target directory, ensuring that no existing files are overwritten in the process. This script uses a recursive function to efficiently merge content, while providing detailed logging of each step to monitor the creation, movement, and skipping of files and directories. 63 | - **Features:** 64 | - **Recursive Merging:** Seamlessly combines contents of source directories into a target directory. 65 | - **Non-destructive:** Preserves existing files by not overwriting them. 66 | - **Error Handling:** Captures and logs errors during the merging process, ensuring reliable operations. 67 | - **Detailed Logging:** Tracks and logs every file and directory operation, providing clear visibility into the process. 68 | - **Usage Case:** Ideal for consolidating data in scenarios like organizing media libraries, merging data backups, or simplifying file system structures. 69 | - **Instructions:** 70 | 1. Update `source_dirs` and uncomment the variable 71 | 2. Update `target_dir` and uncomment the variable 72 | 3. Uncomment `atomic_moves` to engage the movement operation 73 | 4. Run the script with `python3 merge_folders.py` 74 | 75 | ### Notifiarr Branch Builder 76 | 77 | - **Script:** `notifiarr-branch-builder.sh` 78 | - **Description:** Reinstalls [Notifiarr](https://github.com/Notifiarr/notifiarr) and allows selection of the installation branch, from apt or built from source. 79 | - **Review:** Validate script parameters to ensure compatibility with your system. 80 | 81 | ### Plex Image Cleanup Updater 82 | 83 | - **Script:** `pic-update.sh` 84 | - **Description:** Updates [Plex-Image-Cleanup](https://github.com/meisnate12/Plex-Image-Cleanup) to the latest branch. 85 | - **Review:** Check that script parameters are suitable for your environment. 86 | 87 | ### Plex Meta Manager Updater 88 | 89 | - **Script:** `pmm-update.sh` 90 | - **Description:** Updates [Plex Meta Manager](https://github.com/meisnate12/Plex-Meta-Manager) to the latest branch. 91 | - **Review:** Confirm script parameters align with your configuration. 92 | 93 | ### Qbittorrent Management Trigger 94 | 95 | - **Script:** `qbm-qbit.sh` 96 | - **Description:** Executes [QbitManage](https://github.com/StuffAnThings/qbit_manage) upon download completion in Qbittorrent. 97 | - **Instructions:** 98 | 1. Copy `.env.sample` to `.env`. 99 | 2. Populate required values under "# Qbittorrent Manage" header. 100 | 101 | ### QbitManage Updater 102 | 103 | - **Script:** `qbm-update.sh` 104 | - **Description:** Updates [QbitManage](https://github.com/StuffAnThings/qbit_manage) to the latest branch. 105 | - **Review:** Ensure script parameters match your setup before execution. 106 | 107 | ### Servarr Bot Merger 108 | 109 | - **Script:** `servarr/servarr_bot_merge.sh` 110 | - **Description:** Merges the latest changes from the Servarr Wiki Bot Branch into the Prowlarr Indexers Wiki Master. 111 | 112 | ### ZFS Snapshot Cleanup 113 | 114 | - **Script:** `zfsburn.sh` 115 | - **Description:** Deletes ZFS autosnapshots older than a specified number of days. 116 | - **Instructions:** 117 | 1. Copy `.env.sample` to `.env`. 118 | 2. Fill in the required values under "# ZFS Destroy" header. 119 | 120 | ## Contributions 121 | 122 | Contributions to improve or expand the scripts collection are welcome. 
Please refer to the [contribution guidelines](https://github.com/bakerboy448/StarrScripts/blob/main/CONTRIBUTING.md) for more information. 123 | 124 | ## License 125 | 126 | This project is licensed under the MIT License - see the [LICENSE](https://github.com/bakerboy448/StarrScripts/blob/main/LICENSE) file for details. 127 | -------------------------------------------------------------------------------- /backup_config.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | # Set variables 4 | SOURCE_DIR="/.config" 5 | TEMP_BACKUP_DIR="/tmp" 6 | REMOTE_BACKUP_DIR="/mnt/backup-server/.config" 7 | LOG_FILE="/var/log/rsync-config-backup.log" 8 | TIMESTAMP=$(date +'%Y-%m-%d_%H%M%S') 9 | ARCHIVE_NAME="config_backup_$TIMESTAMP.tar.gz" 10 | EXCLUDE_PATTERNS=( 11 | '--exclude=*.jpg' 12 | '--exclude=*.jpeg' 13 | '--exclude=*.png' 14 | '--exclude=*.gif' 15 | '--exclude=*.mp3' 16 | '--exclude=*.mp4' 17 | '--exclude=*.avi' 18 | '--exclude=*.mkv' 19 | '--exclude=*.flac' 20 | '--exclude=plexmediaserver/*' 21 | ) 22 | 23 | # Create the archive in /tmp and check if the archive was created successfully 24 | if tar -czvf "$TEMP_BACKUP_DIR/$ARCHIVE_NAME" "${EXCLUDE_PATTERNS[@]}" -C "$SOURCE_DIR" . >"$LOG_FILE" 2>&1; then 25 | echo "Archive created successfully: $TEMP_BACKUP_DIR/$ARCHIVE_NAME" >>"$LOG_FILE" 26 | 27 | # Sync the archive to the remote backup directory 28 | if rsync -av "$TEMP_BACKUP_DIR/$ARCHIVE_NAME" "$REMOTE_BACKUP_DIR" >>"$LOG_FILE" 2>&1; then 29 | echo "Backup synced successfully: $REMOTE_BACKUP_DIR/$ARCHIVE_NAME" >>"$LOG_FILE" 30 | # Optionally, remove the local archive after successful sync 31 | rm "$TEMP_BACKUP_DIR/$ARCHIVE_NAME" 32 | else 33 | echo "Failed to sync the backup to the remote server" >>"$LOG_FILE" 34 | fi 35 | else 36 | echo "Failed to create the archive" >>"$LOG_FILE" 37 | fi 38 | -------------------------------------------------------------------------------- /dupe.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | # Load environment variables from .env file if it exists 4 | # in the same directory as this bash script 5 | 6 | SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" 7 | ENV_PATH="$SCRIPT_DIR/.env" 8 | if [ -f "$ENV_PATH" ]; then 9 | # shellcheck source=.env 10 | echo "Loading environment variables from $ENV_PATH file" 11 | # shellcheck disable=SC1090 # shellcheck sucks 12 | if source "$ENV_PATH"; then 13 | echo "Environment variables loaded successfully" 14 | else 15 | echo "Error loading environment variables" >&2 16 | exit 1 17 | fi 18 | else 19 | echo ".env file not found in script directory ($ENV_PATH)" 20 | fi 21 | 22 | # Default Variables 23 | JDUPES_OUTPUT_LOG=${JDUPES_OUTPUT_LOG:-"/mnt/data/jdupes.log"} 24 | JDUPES_SOURCE_DIR=${JDUPES_SOURCE_DIR:-"/mnt/data/media/"} 25 | JDUPES_DESTINATION_DIR=${JDUPES_DESTINATION_DIR:-"/mnt/data/torrents/"} 26 | JDUPES_HASH_DB=${JDUPES_HASH_DB:-"/.config/jdupes_hashdb"} 27 | JDUPES_COMMAND=${JDUPES_COMMAND:-"/usr/bin/jdupes"} 28 | JDUPES_EXCLUDE_DIRS=${JDUPES_EXCLUDE_DIRS:-"-X nostr:.RecycleBin -X nostr:.trash"} 29 | JDUPES_INCLUDE_EXT=${JDUPES_INCLUDE_EXT:-"mp4,mkv,avi"} 30 | DEBUG=${DEBUG:-"false"} 31 | 32 | find_duplicates() { 33 | local log_file="$JDUPES_OUTPUT_LOG" 34 | local start_time=$(date +%s) 35 | echo "[$(date +"%Y-%m-%d %H:%M:%S")] Duplicate search started" | tee -a "$log_file" 36 | 37 | if [ "$DEBUG" == "true" ]; then 38 | echo "Running jdupes with:" | tee -a "$log_file" 
39 | echo "$JDUPES_COMMAND $JDUPES_EXCLUDE_DIRS -X onlyext:$JDUPES_INCLUDE_EXT -r -M -y $JDUPES_HASH_DB $JDUPES_SOURCE_DIR $JDUPES_DESTINATION_DIR" | tee -a "$log_file" 40 | fi 41 | 42 | local results 43 | results=$("$JDUPES_COMMAND" $JDUPES_EXCLUDE_DIRS -X onlyext:"$JDUPES_INCLUDE_EXT" -r -M -y "$JDUPES_HASH_DB" "$JDUPES_SOURCE_DIR" "$JDUPES_DESTINATION_DIR") 44 | 45 | if [[ $results != *"No duplicates found."* ]]; then 46 | "$JDUPES_COMMAND" $JDUPES_EXCLUDE_DIRS -X onlyext:"$JDUPES_INCLUDE_EXT" -r -L -y "$JDUPES_HASH_DB" "$JDUPES_SOURCE_DIR" "$JDUPES_DESTINATION_DIR" >>"$log_file" 47 | fi 48 | 49 | if [ "$DEBUG" == "true" ]; then 50 | echo -e "jdupes output: ${results}" | tee -a "$log_file" 51 | fi 52 | 53 | parse_jdupes_output "$results" "$log_file" 54 | local finish_time=$(date +%s) 55 | local run_time=$((finish_time - start_time)) 56 | echo "[$(date +"%Y-%m-%d %H:%M:%S")] Duplicate search completed in ${run_time}s" | tee -a "$log_file" 57 | } 58 | 59 | parse_jdupes_output() { 60 | local results="$1" 61 | local log_file="$2" 62 | 63 | if [[ $results != *"No duplicates found."* ]]; then 64 | field_message="❌ Unlinked files discovered..." 65 | parsed_log=$(echo "$results" | awk -F/ '{print $NF}' | sort -u) 66 | else 67 | field_message="✅ No unlinked files discovered..." 68 | parsed_log="No hardlinks created" 69 | fi 70 | 71 | if [ "$DEBUG" == "true" ]; then 72 | echo -e "$field_message" | tee -a "$log_file" 73 | echo -e "Parsed log: ${parsed_log}" | tee -a "$log_file" 74 | fi 75 | } 76 | 77 | find_duplicates 78 | -------------------------------------------------------------------------------- /f2b-dump.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | # Temporary file to hold the Fail2Ban configuration details 4 | temp_file=$(mktemp) 5 | 6 | # Function to add content to the temporary file 7 | add_content() { 8 | # shellcheck disable=SC2129 9 | echo -e "\n$1\n" >>"$temp_file" 10 | cat "$2" >>"$temp_file" 2>/dev/null 11 | echo -e "\n" >>"$temp_file" 12 | } 13 | 14 | # List all active jails 15 | echo "Listing all active jails:" >>"$temp_file" 16 | fail2ban-client status >>"$temp_file" 17 | 18 | # Get status for each jail 19 | jails=$(fail2ban-client status | grep 'Jail list:' | sed -E 's/^[^:]+:\s+//;s/,//g') 20 | for jail in $jails; do 21 | echo -e "\nStatus of $jail jail:\n" >>"$temp_file" 22 | fail2ban-client status "$jail" >>"$temp_file" 23 | done 24 | 25 | # Global configurations 26 | add_content "Global Fail2Ban Configuration (/etc/fail2ban/jail.conf):" "/etc/fail2ban/jail.conf" 27 | add_content "Custom Global Configuration (/etc/fail2ban/jail.local):" "/etc/fail2ban/jail.local" 28 | 29 | # Custom jail configurations in jail.d/ 30 | echo "Custom Jail Configurations in /etc/fail2ban/jail.d/:" >>"$temp_file" 31 | for config_file in /etc/fail2ban/jail.d/*; do 32 | [ -e "$config_file" ] || continue 33 | add_content "Custom Jail Configuration ($config_file):" "$config_file" 34 | done 35 | 36 | # Upload to termbin 37 | echo "Uploading to Termbin..." 
>>"$temp_file" 38 | nc termbin.com 9999 <"$temp_file" 39 | 40 | # Cleanup 41 | rm "$temp_file" 42 | -------------------------------------------------------------------------------- /merge_folders.py: -------------------------------------------------------------------------------- 1 | import os 2 | import shutil 3 | import logging 4 | 5 | # Setup logging 6 | logging.basicConfig( 7 | level=logging.INFO, format="%(asctime)s - %(levelname)s - %(message)s" 8 | ) 9 | 10 | 11 | def merge_directories(src, dst): 12 | """ 13 | Recursively merges directories from src to dst without overwriting existing files. 14 | """ 15 | for item in os.listdir(src): 16 | src_path = os.path.join(src, item) 17 | dst_path = os.path.join(dst, item) 18 | 19 | if os.path.isdir(src_path): 20 | # If it's a directory, recurse into it 21 | if not os.path.exists(dst_path): 22 | os.makedirs(dst_path) 23 | logging.info(f"Directory created: {dst_path}") 24 | merge_directories(src_path, dst_path) 25 | else: 26 | # It's a file, check if it exists in the destination 27 | if not os.path.exists(dst_path): 28 | # Move the file atomically if on the same filesystem 29 | shutil.move(src_path, dst_path) 30 | logging.info(f"File moved: {src_path} to {dst_path}") 31 | else: 32 | logging.info(f"File skipped (already exists): {dst_path}") 33 | 34 | 35 | def atomic_moves(source_directories, target_directory): 36 | """ 37 | Handles atomic moving from multiple source directories to a single target directory. 38 | Ensures that source_directories is a list. 39 | """ 40 | if not isinstance(source_directories, list): 41 | raise TypeError("source_directories must be a list of strings.") 42 | for src in source_directories: 43 | logging.info(f"Processing source directory: {src}") 44 | try: 45 | # Start the merging process for each source directory 46 | merge_directories(src, target_directory) 47 | except Exception as e: 48 | logging.error(f"Error during moving process from {src}: {e}") 49 | 50 | 51 | # Example use case (commented out for safety - remove "# " to uncomment): 52 | # source_dirs = ['/mnt/data/media/tv-slade', '/mnt/data/media/tv-tmp'] 53 | # target_dir = '/mnt/data/media/tv' 54 | # atomic_moves(source_dirs, target_dir) 55 | -------------------------------------------------------------------------------- /notifiarr-branch-builder.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | # Extend the PATH to include the go binary directory 4 | export PATH=$PATH:/usr/local/go/bin 5 | 6 | # Function to display error messages and exit with status 1 7 | handle_error() { 8 | echo "Error: $1" >&2 9 | exit 1 10 | } 11 | 12 | # Function to display usage information 13 | display_help() { 14 | echo "Usage: $0 [options]" 15 | echo "Options:" 16 | echo " -h, --help Display this help message" 17 | echo " --repo-url URL Set the repository URL (default: https://github.com/Notifiarr/notifiarr.git)" 18 | echo " --repo-dir DIR Set the repository directory (default: /opt/notifiarr-repo)" 19 | echo " --bin-path PATH Set the binary path (default: /usr/bin/notifiarr)" 20 | echo " --branch BRANCH Set the branch (default: master)" 21 | echo " --reinstall-apt Reinstall Notifiarr using apt without prompting." 22 | exit 0 23 | } 24 | 25 | # Function to check and prompt for installation of a required tool 26 | ensure_tool_installed() { 27 | local tool=$1 28 | local install_cmd=$2 29 | if ! command -v "$tool" &>/dev/null; then 30 | read -r -p "$tool is not installed. Do you want to install it? 
[Y/n] " response 31 | if [[ "$response" =~ ^[Yy] ]]; then 32 | eval "$install_cmd" || handle_error "Failed to install $tool." 33 | else 34 | echo "$tool is required for this script. Exiting." 35 | exit 1 36 | fi 37 | fi 38 | } 39 | 40 | # Default parameters 41 | repo_url="https://github.com/Notifiarr/notifiarr.git" 42 | repo_dir="/opt/notifiarr-repo" 43 | bin_path="/usr/bin/notifiarr" 44 | branch="master" 45 | apt_reinstall=false 46 | 47 | # Parse command line options 48 | while [[ $# -gt 0 ]]; do 49 | case "$1" in 50 | -h | --help) 51 | display_help 52 | ;; 53 | --repo-url) 54 | repo_url="$2" 55 | shift 56 | ;; 57 | --repo-dir) 58 | repo_dir="$2" 59 | shift 60 | ;; 61 | --bin-path) 62 | bin_path="$2" 63 | shift 64 | ;; 65 | --branch) 66 | branch="$2" 67 | shift 68 | ;; 69 | --reinstall-apt) 70 | apt_reinstall=true 71 | ;; 72 | *) 73 | echo "Invalid option: $1. Use -h for help." 74 | exit 1 75 | ;; 76 | esac 77 | shift 78 | done 79 | 80 | # Ensure required tools are installed 81 | ensure_tool_installed "make" "sudo apt update && sudo apt install -y make" 82 | 83 | # Reinstallation condition handling 84 | reinstall_notifiarr() { 85 | # shellcheck disable=SC2015 86 | sudo apt update && sudo apt install --reinstall notifiarr || handle_error "Failed to reinstall Notifiarr using apt." 87 | } 88 | 89 | [[ $apt_reinstall == true ]] && reinstall_notifiarr 90 | 91 | # Repository management 92 | if [[ ! -d "$repo_dir" ]]; then 93 | git clone "$repo_url" "$repo_dir" || handle_error "Failed to clone repository." 94 | else 95 | git -C "$repo_dir" fetch --all --prune || handle_error "Failed to fetch updates from remote." 96 | fi 97 | 98 | # Branch handling and updating 99 | current_branch=$(git -C "$repo_dir" rev-parse --abbrev-ref HEAD) 100 | read -r -p "Do you want to use the current branch ($current_branch)? [Y/n] " choice 101 | if [[ "$choice" =~ ^[Nn] ]]; then 102 | branches=$(git -C "$repo_dir" branch -r | sed 's/origin\///;s/* //') 103 | echo "Available branches:" 104 | echo "$branches" 105 | while true; do 106 | read -r -p "Enter the branch name you want to use: " branch 107 | if [[ $branches =~ $branch ]]; then 108 | git -C "$repo_dir" checkout "$branch" || handle_error "Failed to checkout branch $branch." 109 | break 110 | else 111 | echo "Invalid choice. Please select a valid branch." 112 | fi 113 | done 114 | fi 115 | 116 | git -C "$repo_dir" pull || handle_error "Failed to pull latest changes." 117 | make --directory="$repo_dir" || handle_error "Failed to compile." 118 | 119 | # Service management 120 | echo "Stopping notifiarr..." 121 | sudo systemctl stop notifiarr 122 | 123 | if [[ -f "$bin_path" ]]; then 124 | sudo mv "$bin_path" "$repo_dir".old && echo "Old binary moved to $repo_dir.old" 125 | fi 126 | 127 | sudo mv "$repo_dir/notifiarr" "$bin_path" && echo "New binary moved to $bin_path" 128 | sudo chown root:root "$bin_path" 129 | 130 | echo "Starting Notifiarr..." 
131 | sudo systemctl start notifiarr 132 | 133 | if sudo systemctl is-active –quiet notifiarr; then 134 | echo "Notifiarr service started and is currently running" 135 | else 136 | handle_error "Failed to start Notifiarr service" 137 | fi 138 | 139 | exit 0 140 | -------------------------------------------------------------------------------- /omegabrr_upgrade.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | # Define service name as a variable 4 | service_name="omegabrr@bakerboy448" 5 | 6 | # Function to handle errors and exit 7 | handle_error() { 8 | echo "Error: $1" >&2 9 | exit 1 10 | } 11 | 12 | # Get the old version of omegabrr 13 | old_version=$(omegabrr version) 14 | 15 | # Fetch the URL of the latest release for linux_x86_64 16 | dlurl=$(curl -s https://api.github.com/repos/autobrr/omegabrr/releases/latest | 17 | grep -E 'browser_download_url.*linux_x86_64' | cut -d\" -f4) 18 | 19 | # Validate the download URL 20 | if [ -z "$dlurl" ]; then 21 | handle_error "Failed to fetch download URL." 22 | fi 23 | 24 | # Download the latest release 25 | wget "$dlurl" -O omegabrr_latest.tar.gz || handle_error "Failed to download the latest version." 26 | 27 | # Extract the downloaded archive 28 | sudo tar -xzf omegabrr_latest.tar.gz -C /usr/bin/ || handle_error "Failed to extract files." 29 | 30 | # Clean up downloaded files 31 | rm omegabrr_latest.tar.gz 32 | 33 | # Display old and new versions 34 | new_version=$(omegabrr version) 35 | echo "Omegabrr updated from $old_version to $new_version" 36 | 37 | # Restart the specified service 38 | sudo systemctl restart $service_name || handle_error "Failed to restart the service $service_name." 39 | 40 | echo "Update and restart successful!" 41 | -------------------------------------------------------------------------------- /pic-update.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | set -e 3 | set -o pipefail 4 | 5 | force_update=${1:-false} 6 | 7 | # Constants 8 | PIC_PATH="/opt/Plex-Image-Cleanup" 9 | PIC_VENV_PATH="/opt/.venv/pmm-image" 10 | PIC_SERVICE_NAME="pmm-image" 11 | PIC_UPSTREAM_GIT_REMOTE="origin" 12 | PIC_VERSION_FILE="$PIC_PATH/VERSION" 13 | PIC_REQUIREMENTS_FILE="$PIC_PATH/requirements.txt" 14 | CURRENT_UID=$(id -u) 15 | 16 | # Check if Plex-Image-Cleanup is installed and the current user owns it 17 | check_pic_installation() { 18 | if [ -d "$PIC_PATH" ]; then 19 | local pic_repo_owner 20 | pic_repo_owner=$(stat -c '%u' "$PIC_PATH") 21 | if [ "$pic_repo_owner" != "$CURRENT_UID" ]; then 22 | echo "You do not own the Plex-Image-Cleanup repo. Please run this script as the user that owns the repo [$pic_repo_owner]." 23 | exit 1 24 | fi 25 | else 26 | echo "Plex-Image-Cleanup folder does not exist. Please install Plex-Image-Cleanup before running this script." 27 | exit 1 28 | fi 29 | } 30 | 31 | # Update Plex-Image-Cleanup if necessary 32 | update_pic() { 33 | current_branch=$(git -C "$PIC_PATH" rev-parse --abbrev-ref HEAD) 34 | echo "Current Branch: $current_branch. Checking for updates..." 
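    # Fetch upstream refs, then compare the local HEAD to its tracking branch (@{u});
    # if they match and force_update is not set, exit without making any changes.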
35 | git -C "$PIC_PATH" fetch 36 | if [ "$(git -C "$PIC_PATH" rev-parse HEAD)" = "$(git -C "$PIC_PATH" rev-parse @'{u}')" ] && [ "$force_update" != true ]; then 37 | current_version=$(cat "$PIC_VERSION_FILE") 38 | echo "=== Already up to date $current_version on $current_branch ===" 39 | exit 0 40 | fi 41 | git -C "$PIC_PATH" reset --hard "$PIC_UPSTREAM_GIT_REMOTE/$current_branch" 42 | } 43 | 44 | # Update venv if necessary 45 | update_venv() { 46 | current_requirements=$(sha1sum "$PIC_REQUIREMENTS_FILE" | awk '{print $1}') 47 | new_requirements=$(sha1sum "$PIC_REQUIREMENTS_FILE" | awk '{print $1}') 48 | if [ "$current_requirements" != "$new_requirements" ] || [ "$force_update" = true ]; then 49 | echo "=== Requirements changed, updating venv ===" 50 | "$PIC_VENV_PATH/bin/python3" "$PIC_VENV_PATH/bin/pip" install -r "$PIC_REQUIREMENTS_FILE" 51 | fi 52 | } 53 | 54 | # Restart the Plex-Image-Cleanup service 55 | restart_service() { 56 | echo "=== Restarting Plex-Image-Cleanup Service ===" 57 | sudo systemctl restart "$PIC_SERVICE_NAME" 58 | new_version=$(cat "$PIC_VERSION_FILE") 59 | echo "=== Updated to $new_version on $current_branch ===" 60 | } 61 | 62 | # Main script execution 63 | check_pic_installation 64 | update_pic 65 | update_venv 66 | restart_service 67 | -------------------------------------------------------------------------------- /pmm-update.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | set -e 3 | set -o pipefail 4 | 5 | force_update=${1:-false} 6 | 7 | # Constants 8 | PMM_PATH="/opt/Plex-Meta-Manager" 9 | PMM_VENV_NAME="pmm-venv" 10 | PMM_SERVICE_NAME="pmm" 11 | PMM_UPSTREAM_GIT_REMOTE="origin" 12 | PMM_VERSION_FILE="$PMM_PATH/VERSION" 13 | PMM_REQUIREMENTS_FILE="$PMM_PATH/requirements.txt" 14 | PMM_VENV_PATH="/opt/.venv/$PMM_VENV_NAME" 15 | CURRENT_UID=$(id -u) 16 | 17 | # Check if PMM is installed and the current user owns it 18 | check_pmm_installation() { 19 | if [ -d "$PMM_PATH" ]; then 20 | pmm_repo_owner=$(stat -c '%u' "$PMM_PATH") 21 | if [ "$pmm_repo_owner" != "$CURRENT_UID" ]; then 22 | echo "You do not own the Plex Meta Manager repo. Please run this script as the user that owns the repo [$pmm_repo_owner]." 23 | exit 1 24 | fi 25 | else 26 | echo "Plex Meta Manager folder does not exist. Please install Plex Meta Manager before running this script." 27 | exit 1 28 | fi 29 | } 30 | 31 | # Update PMM if necessary 32 | update_pmm() { 33 | current_branch=$(git -C "$PMM_PATH" rev-parse --abbrev-ref HEAD) 34 | echo "Current Branch: $current_branch. Checking for updates..." 
35 | git -C "$PMM_PATH" fetch 36 | if [ "$(git -C "$PMM_PATH" rev-parse HEAD)" = "$(git -C "$PMM_PATH" rev-parse @'{u}')" ] && [ "$force_update" != true ]; then 37 | current_version=$(cat "$PMM_VERSION_FILE") 38 | echo "=== Already up to date $current_version on $current_branch ===" 39 | exit 0 40 | fi 41 | git -C "$PMM_PATH" reset --hard "$PMM_UPSTREAM_GIT_REMOTE/$current_branch" 42 | } 43 | 44 | # Update venv if necessary 45 | update_venv() { 46 | current_requirements=$(sha1sum "$PMM_REQUIREMENTS_FILE" | awk '{print $1}') 47 | new_requirements=$(sha1sum "$PMM_REQUIREMENTS_FILE" | awk '{print $1}') 48 | if [ "$current_requirements" != "$new_requirements" ] || [ "$force_update" = true ]; then 49 | echo "=== Requirements changed, updating venv ===" 50 | "$PMM_VENV_PATH/bin/python3" "$PMM_VENV_PATH/bin/pip" install -r "$PMM_REQUIREMENTS_FILE" 51 | fi 52 | } 53 | 54 | # Restart the PMM service 55 | restart_service() { 56 | echo "=== Restarting PMM Service ===" 57 | sudo systemctl restart "$PMM_SERVICE_NAME" 58 | new_version=$(cat "$PMM_VERSION_FILE") 59 | echo "=== Updated to $new_version on $current_branch" 60 | } 61 | 62 | # Main script execution 63 | check_pmm_installation 64 | update_pmm 65 | update_venv 66 | restart_service 67 | -------------------------------------------------------------------------------- /qbm-qbit.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | # Check if lockfile command exists 4 | if ! command -v lockfile &>/dev/null; then 5 | echo "Error: lockfile command not found. Please install the procmail package." >&2 6 | exit 1 7 | fi 8 | 9 | # Load environment variables from .env file if it exists 10 | # in the same directory as this bash script 11 | 12 | SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" 13 | ENV_PATH="$SCRIPT_DIR/.env" 14 | if [ -f "$ENV_PATH" ]; then 15 | # shellcheck source=.env 16 | echo "Loading environment variables from $ENV_PATH file" 17 | # shellcheck disable=SC1090 # shellcheck sucks 18 | if source "$ENV_PATH"; then 19 | echo "Environment variables loaded successfully" 20 | else 21 | echo "Error loading environment variables" >&2 22 | exit 1 23 | fi 24 | else 25 | echo ".env file not found in script directory ($ENV_PATH)" 26 | fi 27 | 28 | # Use environment variables with descriptive default values 29 | QBQBM_LOCK=${QBIT_MANAGE_LOCK_FILE_PATH:-/var/lock/qbm-qbit.lock} 30 | QBQBM_PATH_QBM=${QBIT_MANAGE_PATH:-/opt/qbit-manage} 31 | QBQBM_VENV_PATH=${QBIT_MANAGE_VENV_PATH:-/opt/qbit-manage/.venv} 32 | QBQBM_CONFIG_PATH=${QBIT_MANAGE_CONFIG_PATH:-/opt/qbit-manage/config.yml} 33 | QBQBM_QBIT_OPTIONS=${QBIT_MANAGE_OPTIONS:-"-re -cu -tu -ru -sl -r"} 34 | QBQBM_SLEEP_TIME=600 35 | QBQBM_LOCK_TIME=3600 36 | 37 | # Function to remove the lock file 38 | remove_lock() { 39 | rm -f "$LOCK" 40 | } 41 | 42 | # Function to handle detection of another running instance 43 | another_instance() { 44 | echo "There is another instance running, exiting." 45 | exit 1 46 | } 47 | 48 | echo "Acquiring Lock" 49 | # Acquire a lock to prevent concurrent execution, with a timeout and lease time 50 | lockfile -r 0 -l "$QBQBM_LOCK_TIME" "$QBQBM_LOCK" || another_instance 51 | 52 | # Ensure the lock is removed when the script exits 53 | trap remove_lock EXIT 54 | 55 | echo "sleeping for $QBQBM_SLEEP_TIME" 56 | # Pause the script to wait for any pending operations (i.e. 
Starr Imports) 57 | 58 | sleep $QBQBM_SLEEP_TIME 59 | 60 | # Execute qbit_manage with configurable options 61 | echo "Executing Command" 62 | "$QBQBM_VENV_PATH"/bin/python "$QBQBM_PATH_QBM"/qbit_manage.py "$QBQBM_QBIT_OPTIONS" --config-file "$QBQBM_CONFIG_PATH" 63 | -------------------------------------------------------------------------------- /qbm-update.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | set -e 3 | set -o pipefail 4 | 5 | force_update=${1:-false} 6 | 7 | # Constants 8 | QBM_PATH="/opt/qbit_manage" 9 | QBM_VENV_PATH="/opt/.venv/qbm-venv" 10 | QBM_SERVICE_NAME="qbmanage" 11 | QBM_UPSTREAM_GIT_REMOTE="origin" 12 | QBM_VERSION_FILE="$QBM_PATH/VERSION" 13 | QBM_REQUIREMENTS_FILE="$QBM_PATH/pyproject.toml" 14 | CURRENT_UID=$(id -un) 15 | 16 | # Check if QBM is installed and if the current user owns it 17 | check_qbm_installation() { 18 | if [ -d "$QBM_PATH" ]; then 19 | qbm_repo_owner=$(stat --format='%U' "$QBM_PATH") 20 | qbm_repo_group=$(stat --format='%G' "$QBM_PATH") 21 | if [ "$qbm_repo_owner" != "$CURRENT_UID" ]; then 22 | echo "You do not own the QbitManage repo. Please run this script as the user that owns the repo [$qbm_repo_owner]." 23 | echo "use 'sudo -u $qbm_repo_owner -g $qbm_repo_group qbm-update'" 24 | exit 1 25 | fi 26 | else 27 | echo "QbitManage folder does not exist. Please install QbitManage before running this script." 28 | exit 1 29 | fi 30 | } 31 | 32 | # Update QBM if necessary 33 | update_qbm() { 34 | current_branch=$(git -C "$QBM_PATH" rev-parse --abbrev-ref HEAD) 35 | echo "Current Branch: $current_branch. Checking for updates..." 36 | git -C "$QBM_PATH" fetch 37 | if [ "$(git -C "$QBM_PATH" rev-parse HEAD)" = "$(git -C "$QBM_PATH" rev-parse @'{u}')" ] && [ "$force_update" != true ]; then 38 | current_version=$(cat "$QBM_VERSION_FILE") 39 | echo "=== Already up to date $current_version on $current_branch ===" 40 | exit 0 41 | fi 42 | current_requirements=$(sha1sum "$QBM_REQUIREMENTS_FILE" | awk '{print $1}') 43 | git -C "$QBM_PATH" reset --hard "$QBM_UPSTREAM_GIT_REMOTE/$current_branch" 44 | } 45 | 46 | # Update virtual environment if requirements have changed 47 | update_venv() { 48 | new_requirements=$(sha1sum "$QBM_REQUIREMENTS_FILE" | awk '{print $1}') 49 | if [ "$current_requirements" != "$new_requirements" ] || [ "$force_update" = true ]; then 50 | echo "=== Requirements changed, updating venv ===" 51 | "$QBM_VENV_PATH/bin/python" -m pip install --upgrade "$QBM_PATH" 52 | fi 53 | } 54 | 55 | # Restart the QBM service 56 | restart_service() { 57 | echo "=== Restarting QBM Service ===" 58 | sudo systemctl restart "$QBM_SERVICE_NAME" 59 | new_version=$(cat "$QBM_VERSION_FILE") 60 | echo "=== Updated to $new_version on $current_branch" 61 | } 62 | 63 | # Main script execution 64 | check_qbm_installation 65 | update_qbm 66 | update_venv 67 | restart_service 68 | -------------------------------------------------------------------------------- /radarr_dupefinder.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | directory=${1:-.} # Use provided directory or default to current directory 4 | 5 | find "$directory" -type d | while read -r dir; do 6 | file_count=$(find "$dir" -maxdepth 1 -type f \( -iname "*.mp4" -o -iname "*.mkv" -o -iname "*.avi" -o -iname "*.mov" -o -iname "*.wmv" -o -iname "*.flv" -o -iname "*.webm" -o -iname "*.mpg" -o -iname "*.mpeg" \) | wc -l) 7 | if [[ $file_count -gt 1 ]]; then 8 | echo "$dir" 9 | fi 10 | done 
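# Example usage (illustrative path):
#   ./radarr_dupefinder.sh /mnt/data/media/movies
# This prints every movie directory that contains more than one video file.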
11 | -------------------------------------------------------------------------------- /servarr/servarr_bot_merge.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | # Define variables 4 | REPO_URL="git@github.com:Servarr/Wiki.git" # URL for the repository 5 | TARGET_BRANCH="master" 6 | COMMIT_BRANCH="update-wiki-supported-indexers" 7 | REPO_DIR="/mnt/raid/_development/servarr.wiki" 8 | 9 | # Function to log messages 10 | log() { 11 | echo "[$(date '+%Y-%m-%d %H:%M:%S')] $1" 12 | } 13 | 14 | # Check and configure git remote 15 | configure_remote() { 16 | # Check if the remote is set and set it if not 17 | if git remote | grep -q "origin"; then 18 | git remote set-url origin $REPO_URL 19 | else 20 | git remote add origin $REPO_URL 21 | fi 22 | } 23 | 24 | # Navigate to the repository's directory 25 | cd $REPO_DIR || { 26 | log "Failed to change directory to $REPO_DIR. Exiting." 27 | exit 1 28 | } 29 | 30 | # Configure git remote 31 | configure_remote 32 | 33 | # Fetch the latest updates from the repository 34 | log "fetching and purning origin" 35 | git fetch --all --prune 36 | 37 | log "checking out and pulling $COMMIT_BRANCH. Also pulling origin/$TARGET_BRANCH" 38 | git checkout -B $TARGET_BRANCH 39 | git checkout -B $COMMIT_BRANCH 40 | 41 | git_branch=$(git branch --show-current) 42 | log "git branch is $git_branch" 43 | # Rebase the commit onto the target branch 44 | log "Rebasing....on origin/$TARGET_BRANCH" 45 | if git rebase origin/$TARGET_BRANCH; then 46 | log "Rebase successful." 47 | 48 | # Switch back to the target branch 49 | git checkout $TARGET_BRANCH 50 | 51 | # Merge the commit branch into the target branch to bring the rebased commit into target 52 | # This is assuming the rebase has made commit branch ahead of target and can be fast-forwarded 53 | log "Merging into $COMMIT_BRANCH with --ff-only" 54 | LOCAL_HASH=$(git rev-parse "$COMMIT_BRANCH") 55 | REMOTE_HASH=$(git rev-parse "origin/$COMMIT_BRANCH") 56 | 57 | if [ "$LOCAL_HASH" != "$REMOTE_HASH" ]; then 58 | git merge --ff-only $COMMIT_BRANCH 59 | else 60 | echo "Local branch $COMMIT_BRANCH is the same as origin/$COMMIT_BRANCH. No action needed." 61 | fi 62 | # Now push the updated TARGET_BRANCH to the remote 63 | if [ "$LOCAL_HASH" != "$REMOTE_HASH" ] && git push origin $TARGET_BRANCH; then 64 | log "Rebase, merge, and push to $TARGET_BRANCH completed successfully." 65 | # Check if the branch exists on the remote 66 | if git ls-remote --heads origin | grep -q "refs/heads/$COMMIT_BRANCH"; then 67 | echo "Branch $COMMIT_BRANCH exists on origin. Deleting..." 68 | git push origin --delete "$COMMIT_BRANCH" 69 | echo "Branch $COMMIT_BRANCH deleted from origin." 70 | else 71 | echo "Branch $COMMIT_BRANCH does not exist on origin." 72 | fi 73 | git branch -d $COMMIT_BRANCH 74 | log "Deleted Local Branch $COMMIT_BRANCH" 75 | else 76 | log "Updates are on the target branch, no pull request needed." 77 | fi 78 | else 79 | log "Rebase encountered conflicts. Resolve them manually and then continue the rebase process." 
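    # Typical manual recovery: resolve the conflicted files, `git add` them, and then
    # run `git rebase --continue` (or `git rebase --abort` to discard the attempt).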
80 | fi 81 | -------------------------------------------------------------------------------- /sonarr_dupefinder.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | directory=${1:-.} # Use provided directory or default to current directory 4 | 5 | find "$directory" -type d | while read -r dir; do 6 | # Extract all matching filenames in the directory 7 | files=($(find "$dir" -maxdepth 1 -type f -regextype posix-extended \ 8 | \( -iname "*.mp4" -o -iname "*.mkv" -o -iname "*.avi" -o -iname "*.mov" -o -iname "*.wmv" -o -iname "*.flv" -o -iname "*.webm" -o -iname "*.mpg" -o -iname "*.mpeg" \) \ 9 | -regex ".*\([0-9]{4}\).*S[0-9]{2}E([0-9]{2}).*" | sed -E 's/.*E([0-9]{2}).*/\1/')) 10 | 11 | # Count occurrences of each episode number 12 | declare -A ep_count 13 | for ep in "${files[@]}"; do 14 | ((ep_count[$ep]++)) 15 | done 16 | 17 | # Check if any episode appears more than once 18 | matched=0 19 | for count in "${ep_count[@]}"; do 20 | if [[ $count -gt 1 ]]; then 21 | matched=1 22 | break 23 | fi 24 | done 25 | 26 | # Print the directory if it has matching files 27 | if [[ $matched -eq 1 ]]; then 28 | echo "$dir" 29 | fi 30 | 31 | # Clear the associative array for the next directory 32 | unset ep_count 33 | done 34 | -------------------------------------------------------------------------------- /zfsburn.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | # Load environment variables from .env file if it exists 4 | # in the same directory as this bash script 5 | 6 | SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" 7 | ENV_PATH="$SCRIPT_DIR/.env" 8 | if [ -f "$ENV_PATH" ]; then 9 | # shellcheck source=.env 10 | echo "Loading environment variables from $ENV_PATH file" 11 | # shellcheck disable=SC1090 # shellcheck sucks 12 | if source "$ENV_PATH"; then 13 | echo "Environment variables loaded successfully" 14 | else 15 | echo "Error loading environment variables" >&2 16 | exit 1 17 | fi 18 | else 19 | echo ".env file not found in script directory ($ENV_PATH)" 20 | fi 21 | 22 | VERBOSE=${VERBOSE:-1} 23 | MAX_FREQ=${MAX_FREQ:-4} 24 | MAX_HOURLY=${MAX_HOURLY:-2} 25 | MAX_DAILY=${MAX_DAILY:-7} 26 | MAX_WEEKLY=${MAX_WEEKLY:-4} 27 | MAX_MONTHLY=${MAX_MONTHLY:-3} 28 | 29 | # Logging function based on verbosity level 30 | log() { 31 | local level="$1" 32 | local message="$2" 33 | if ((level == 0)) || ((VERBOSE == 1 && level == 1)); then 34 | echo "$message" 35 | fi 36 | } 37 | 38 | # Bytes to Human Formatting 39 | bytes_to_human_readable() { 40 | local bytes=$1 41 | local units=('B' 'KB' 'MB' 'GB' 'TB' 'PB' 'EB' 'ZB' 'YB') 42 | local unit=0 43 | 44 | while ((bytes > 1024)); do 45 | ((bytes /= 1024)) 46 | ((unit++)) 47 | done 48 | 49 | echo "${bytes} ${units[unit]}" 50 | } 51 | 52 | # Function to retrieve snapshot counts for a specific snapshot type 53 | get_snapshot_count() { 54 | local snapshot_type="$1" 55 | local dataset="$2" 56 | local snapshot_count=0 57 | 58 | # Filter snapshots based on the snapshot type and count them 59 | snapshot_count=$(sudo zfs list -t snapshot -o name -r "$dataset" | grep -cE "$dataset@.*$snapshot_type-[0-9]{4}-[0-9]{2}-[0-9]{2}-[0-9]{4}$") 60 | # Return the snapshot count as a variable 61 | echo "$snapshot_count" 62 | } 63 | 64 | # Function to delete snapshots based on frequency limits 65 | delete_snapshots() { 66 | local dataset="$1" 67 | local snapshots=() 68 | local deleted=0 69 | local space_gained=0 70 | 71 | # Retrieve all snapshots for 
the dataset 72 | readarray -t snapshots < <(sudo zfs list -t snapshot -H -o name -r "$dataset") 73 | 74 | # Info log prior to filtering 75 | log 0 "Total snapshots before filtering: [${#snapshots[@]}]" 76 | 77 | # Loop through snapshots and delete based on frequency limits 78 | for snapshot in "${snapshots[@]}"; do 79 | log 1 "Filtering snapshot: [$snapshot]" 80 | 81 | local snapshot_name=${snapshot##*/} 82 | local snapshot_type=${snapshot_name#*_} 83 | snapshot_type=${snapshot_type%%-*} 84 | 85 | if [[ "$snapshot_type" == "frequent" || "$snapshot_type" == "hourly" || "$snapshot_type" == "daily" || "$snapshot_type" == "weekly" || "$snapshot_type" == "monthly" ]]; then 86 | log 0 "Processing snapshot: [$snapshot]" 87 | 88 | local max_count=0 89 | local current_count=0 90 | 91 | if [[ "$snapshot_type" == "frequent" ]]; then 92 | max_count=$MAX_FREQ 93 | current_count=$frequent_count 94 | elif [[ "$snapshot_type" == "hourly" ]]; then 95 | max_count=$MAX_HOURLY 96 | current_count=$hourly_count 97 | elif [[ "$snapshot_type" == "daily" ]]; then 98 | max_count=$MAX_DAILY 99 | current_count=$daily_count 100 | elif [[ "$snapshot_type" == "weekly" ]]; then 101 | max_count=$MAX_WEEKLY 102 | current_count=$weekly_count 103 | elif [[ "$snapshot_type" == "monthly" ]]; then 104 | max_count=$MAX_MONTHLY 105 | current_count=$monthly_count 106 | fi 107 | 108 | log 1 "Current snapshot count: [$current_count]" 109 | log 1 "Maximum allowed: [$max_count]" 110 | 111 | if ((current_count > max_count || max_count == 0)); then 112 | log 0 "Deleting snapshot: [$snapshot]" 113 | 114 | local snapshot_space 115 | snapshot_space=$(sudo zfs list -o used -H -p "$snapshot" | awk '{print $1}') 116 | 117 | if sudo zfs destroy "$snapshot"; then 118 | ((deleted++)) 119 | ((space_gained += snapshot_space)) 120 | snapshot_space_formatted=$(bytes_to_human_readable "$snapshot_space") 121 | log 0 "Space gained: $snapshot_space_formatted" 122 | else 123 | log 0 "Error deleting snapshot: [$snapshot]" 124 | fi 125 | fi 126 | else 127 | log 1 "Skipped processing snapshot: [$snapshot] - no match to type: [$snapshot_type]" 128 | fi 129 | done 130 | 131 | space_gained_formatted=$(bytes_to_human_readable "$space_gained") 132 | log 0 "Deleted $deleted snapshots for dataset: [$dataset]. Total space gained: $space_gained_formatted" 133 | } 134 | 135 | # Usage: ./zfsburn.sh 136 | if [[ $# -lt 1 ]]; then 137 | echo "Usage: ./zfsburn.sh " 138 | exit 1 139 | fi 140 | 141 | # Capture the dataset as a variable 142 | datasets="$1" 143 | 144 | # Capture snapshot counts as variables 145 | frequent_count=$(get_snapshot_count "frequent" "$datasets") 146 | hourly_count=$(get_snapshot_count "hourly" "$datasets") 147 | daily_count=$(get_snapshot_count "daily" "$datasets") 148 | weekly_count=$(get_snapshot_count "weekly" "$datasets") 149 | monthly_count=$(get_snapshot_count "monthly" "$datasets") 150 | 151 | # Use the snapshot counts as needed in the rest of the script 152 | log 0 "Frequent Snapshot Count: [$frequent_count]" 153 | log 0 "Hourly Snapshot Count: [$hourly_count]" 154 | log 0 "Daily Snapshot Count: [$daily_count]" 155 | log 0 "Weekly Snapshot Count: [$weekly_count]" 156 | log 0 "Monthly Snapshot Count: [$monthly_count]" 157 | 158 | delete_snapshots "$datasets" 159 | --------------------------------------------------------------------------------