# Repository index (flattened dump header):
#   monitor_uptime.sh, website_health_check.sh, system_update_cleanup.sh,
#   resize_image.sh, check_internet.sh, delete_files.sh, service_status_check.sh,
#   check_ssl_expiry.sh, auto_git_commit.sh, find_top_cpu_processes.sh,
#   find_top_memory_processes.sh, disk_usage_alert.sh, cpu_usage_alert.sh,
#   backup.sh, top_largest_files.sh, publish.sh, send_email.sh, parse_log.sh,
#   install_docker.sh, publish.ps1, README.md

# ---------------------------------------------------------------- /monitor_uptime.sh
#!/bin/bash
# Appends a timestamped, human-readable uptime line to LOG_FILE.
# Intended to be run periodically (e.g. from cron). Requires write
# access to /var/log — run as root or adjust LOG_FILE.

LOG_FILE="/var/log/uptime.log"

UPTIME=$(uptime -p)

echo "$(date): $UPTIME" >> "$LOG_FILE"

# ---------------------------------------------------------------- /website_health_check.sh
#!/bin/bash
# Reports whether URL answers with HTTP status 200.
#
# FIX: the original piped `curl --head` into `grep "200 OK"`, which fails
# for HTTP/2 responses (status line is "HTTP/2 200" — no reason phrase)
# and for any server that omits "OK". Compare the numeric status code
# via curl's --write-out instead.

URL="https://example.com"

STATUS=$(curl -s -o /dev/null --head -w '%{http_code}' -- "$URL")

if [ "$STATUS" = "200" ]; then
    echo "$URL is up."
else
    echo "$URL is down!"
fi

# ---------------------------------------------------------------- /system_update_cleanup.sh
#!/bin/bash
# Updates APT package lists, upgrades installed packages, clears the
# package cache, removes unused dependencies, and logs the run.
# Requires sudo privileges; log path needs root write access.

sudo apt update && sudo apt upgrade -y

sudo apt autoclean

sudo apt autoremove -y

echo "System updated and cleaned on $(date)" >> /var/log/sys_update.log

# ---------------------------------------------------------------- /resize_image.sh
#!/bin/bash
# Resizes every .jpg/.jpeg/.png in DIR *in place* to WIDTH pixels wide
# (height scaled proportionally by ImageMagick's `convert`).
#
# FIX: quote "$DIR" (paths with spaces) and skip unmatched glob
# patterns — without the existence check, an extension with no files
# would pass the literal pattern (e.g. "/path/*.png") to convert.

DIR="/path/to/images"
WIDTH=800

for IMAGE in "$DIR"/*.{jpg,jpeg,png}; do
    [ -e "$IMAGE" ] || continue   # unmatched brace/glob alternative — skip
    convert "$IMAGE" -resize "${WIDTH}" "$IMAGE"
done

echo "All images resized to $WIDTH px width."
-------------------------------------------------------------------------------- /check_internet.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # check_internet.sh 3 | # 4 | # Description: 5 | # Checks for internet connectivity by pinging Google's DNS (8.8.8.8). 6 | # 7 | # Usage: 8 | # ./check_internet.sh 9 | # 10 | # Output: 11 | # "Internet is working" or "No internet" 12 | 13 | ping -q -c 1 -W 1 8.8.8.8 > /dev/null && echo "✅ Internet is working" || echo "❌ No internet" 14 | -------------------------------------------------------------------------------- /delete_files.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # delete_files.sh 3 | # 4 | # Description: 5 | # Deletes files older than a month in a directory. 6 | # 7 | # Usage: 8 | # ./delete_files.sh /path/to/folder 7 9 | # 10 | # Example: 11 | # ./delete_files.sh /tmp 30 12 | 13 | DIR="/path/to/directory" 14 | 15 | find $DIR -type f -mtime +30 -exec rm {} \; 16 | 17 | echo "Deleted files older than 30 days in $DIR." 18 | -------------------------------------------------------------------------------- /service_status_check.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Variables 4 | SERVICE="nginx" 5 | EMAIL="admin@example.com" 6 | 7 | # Check service status 8 | if ! systemctl is-active --quiet "$SERVICE"; then 9 | # Restart service 10 | systemctl restart "$SERVICE" 11 | 12 | # Send alert 13 | echo "$SERVICE was down and has been restarted on $(hostname)" | mail -s "$SERVICE Restart Alert" "$EMAIL" 14 | fi -------------------------------------------------------------------------------- /check_ssl_expiry.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # check_ssl_expiry.sh 3 | # 4 | # Description: 5 | # Checks the expiration date of an SSL certificate for a domain. 
6 | # 7 | # Usage: 8 | # ./check_ssl_expiry.sh example.com 9 | # 10 | # Output: 11 | # Shows the certificate's "notBefore" and "notAfter" dates. 12 | 13 | DOMAIN=$1 14 | echo | openssl s_client -servername "$DOMAIN" -connect "$DOMAIN:443" 2>/dev/null | openssl x509 -noout -dates 15 | -------------------------------------------------------------------------------- /auto_git_commit.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # auto_git_commit.sh 3 | # 4 | # Description: 5 | # Automatically commits and push all changes in the current Git repo. 6 | # 7 | # Usage: 8 | # ./auto_git_commit.sh 9 | # 10 | # Example: 11 | # ./auto_git_commit.sh 12 | 13 | MESSAGE=$1 14 | 15 | if [ -z "$MESSAGE" ]; then 16 | MESSAGE="Auto-commit on $(date)" 17 | fi 18 | 19 | git add . 20 | git commit -m "$MESSAGE" 21 | git push origin main 22 | 23 | echo "Changes pushed to the repository with message: $MESSAGE" 24 | -------------------------------------------------------------------------------- /find_top_cpu_processes.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | 4 | LIMIT=10 5 | 6 | usage() { 7 | echo "Usage: $0 [-n ]" 8 | echo " -n: Number of top CPU-consuming processes to display (default is 10)" 9 | exit 1 10 | } 11 | 12 | while getopts "n:" opt; do 13 | case "$opt" in 14 | n) LIMIT="$OPTARG" ;; 15 | *) usage ;; 16 | esac 17 | done 18 | 19 | echo "Finding the top $LIMIT CPU-consuming processes..." 
20 | 21 | 22 | ps -eo pid,ppid,cmd,%cpu,%mem --sort=-%cpu | head -n "$((LIMIT + 1))" 23 | -------------------------------------------------------------------------------- /find_top_memory_processes.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | 4 | LIMIT=10 5 | 6 | 7 | usage() { 8 | echo "Usage: $0 [-n ]" 9 | echo " -n: Number of top memory-consuming processes to display (default is 10)" 10 | exit 1 11 | } 12 | 13 | 14 | while getopts "n:" opt; do 15 | case "$opt" in 16 | n) LIMIT="$OPTARG" ;; 17 | *) usage ;; 18 | esac 19 | done 20 | 21 | echo "Finding the top $LIMIT memory-consuming processes..." 22 | 23 | # Fetch and display processes sorted by memory usage 24 | ps -eo pid,ppid,cmd,%mem,%cpu --sort=-%mem | head -n "$((LIMIT + 1))" 25 | -------------------------------------------------------------------------------- /disk_usage_alert.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # disk_usage_alert.sh 3 | # 4 | # Description: 5 | # Monitors disk space and alerts when usage exceeds a specified percentage. 6 | # Change Email value with your email. 7 | # 8 | # Usage: 9 | # ./disk_usage_alert.sh 10 | # 11 | # Example: 12 | # ./disk_usage_alert.sh 13 | 14 | THRESHOLD=80 15 | EMAIL="nikoo.a.14@gmail.com" 16 | 17 | DISK_USAGE=$(df / | grep / | awk '{print $5}' | sed 's/%//g') 18 | if [ "$DISK_USAGE" -gt "$THRESHOLD" ]; then 19 | echo "Disk usage is above $THRESHOLD%. Current usage: $DISK_USAGE%" | mail -s "Disk Usage Alert" "$EMAIL" 20 | fi 21 | -------------------------------------------------------------------------------- /cpu_usage_alert.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # cpu_usage_alert.sh 3 | # 4 | # Description: 5 | # Sends an alert if CPU usage exceeds a defined threshold. 
6 | # 7 | # Usage: 8 | # ./cpu_usage_alert.sh 9 | # 10 | # Example: 11 | # ./cpu_usage_alert.sh 12 | 13 | THRESHOLD=80 14 | 15 | CPU_USAGE=$(top -bn1 | grep "Cpu(s)" | sed "s/.*, *\([0-9.]*\)%* id.*/\1/" | awk '{print 100 - $1}') 16 | 17 | if (( $(echo "$CPU_USAGE > $THRESHOLD" |bc -l) )); then 18 | echo "High CPU usage detected: $CPU_USAGE%" | mail -s "CPU Usage Alert" nikoo.a.14@gmail.com 19 | echo "Alert sent! CPU usage is $CPU_USAGE%." 20 | else 21 | echo "CPU usage is normal: $CPU_USAGE%." 22 | fi 23 | -------------------------------------------------------------------------------- /backup.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # backup.sh 3 | # 4 | # Description: 5 | # Takes source and destination directories as command-line arguments, 6 | # then creates a compressed backup archive with the current date. 7 | # 8 | # Usage: 9 | # ./backup.sh /path/to/source /path/to/backup 10 | # 11 | # Example: 12 | # ./backup.sh ~/Documents /mnt/backups 13 | 14 | if [ "$#" -ne 2 ]; then 15 | echo "❌ Usage: $0 " 16 | exit 1 17 | fi 18 | 19 | SOURCE_DIR="$1" 20 | BACKUP_DIR="$2" 21 | DATE=$(date +%Y-%m-%d) 22 | BACKUP_NAME="backup-$DATE.tar.gz" 23 | 24 | # Create the backup 25 | tar -czf "$BACKUP_DIR/$BACKUP_NAME" "$SOURCE_DIR" 26 | 27 | # Log the backup 28 | echo "Backup of $SOURCE_DIR completed at $DATE" >> "$BACKUP_DIR/backup.log" 29 | 30 | echo "Backup completed: $BACKUP_DIR/$BACKUP_NAME" 31 | -------------------------------------------------------------------------------- /top_largest_files.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | 4 | DIRECTORY="/" 5 | LIMIT=10 6 | 7 | 8 | usage() { 9 | echo "Usage: $0 [-d ] [-n ]" 10 | echo " -d: Directory to search (default is /)" 11 | echo " -n: Number of largest files to display (default is 10)" 12 | exit 1 13 | } 14 | 15 | 16 | while getopts "d:n:" opt; do 17 | case "$opt" in 18 | d) 
DIRECTORY="$OPTARG" ;; 19 | n) LIMIT="$OPTARG" ;; 20 | *) usage ;; 21 | esac 22 | done 23 | 24 | # Check if the directory exists 25 | if [ ! -d "$DIRECTORY" ]; then 26 | echo "Error: Directory '$DIRECTORY' not found." 27 | exit 1 28 | fi 29 | 30 | echo "Searching for the $LIMIT largest files in '$DIRECTORY'..." 31 | 32 | # Find and display the largest files 33 | find "$DIRECTORY" -type f -exec du -h {} + 2>/dev/null | sort -rh | head -n "$LIMIT" 34 | -------------------------------------------------------------------------------- /publish.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | ZIP_FILE="publish.zip" 4 | SERVER_USER="your_username" 5 | SERVER_ADDRESS="your_server_address" 6 | REMOTE_PORT=22 7 | REMOTE_PATH="/path/on/server" 8 | 9 | 10 | zip -r $ZIP_FILE . -x "appsettings.*.json" "web.config" 11 | 12 | if [ $? -eq 0 ]; then 13 | echo "Files successfully zipped as $ZIP_FILE." 14 | 15 | scp -P $REMOTE_PORT $ZIP_FILE $SERVER_USER@$SERVER_ADDRESS:$REMOTE_PATH 16 | 17 | if [ $? -eq 0 ]; then 18 | echo "File successfully copied to $SERVER_USER@$SERVER_ADDRESS:$REMOTE_PATH." 19 | 20 | ssh -p $REMOTE_PORT $SERVER_USER@$SERVER_ADDRESS "cd $REMOTE_PATH && unzip -o $ZIP_FILE && echo 'File unzipped successfully on the server.'" 21 | 22 | if [ $? -eq 0 ]; then 23 | echo "File unzipped successfully on the server." 24 | else 25 | echo "Failed to unzip the file on the server." 26 | fi 27 | else 28 | echo "Failed to copy the file to the server." 29 | fi 30 | else 31 | echo "Failed to create the zip file." 
32 | fi 33 | -------------------------------------------------------------------------------- /send_email.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | 4 | usage() { 5 | echo "Usage: $0 -t -s -b " 6 | echo " -t: Recipient's email address" 7 | echo " -s: Subject of the email" 8 | echo " -b: File containing the body of the email" 9 | exit 1 10 | } 11 | 12 | 13 | while getopts "t:s:b:" opt; do 14 | case "$opt" in 15 | t) TO_EMAIL="$OPTARG" ;; 16 | s) SUBJECT="$OPTARG" ;; 17 | b) BODY_FILE="$OPTARG" ;; 18 | *) usage ;; 19 | esac 20 | done 21 | 22 | 23 | if [ -z "$TO_EMAIL" ] || [ -z "$SUBJECT" ] || [ -z "$BODY_FILE" ]; then 24 | echo "Error: Missing required arguments." 25 | usage 26 | fi 27 | 28 | if [ ! -f "$BODY_FILE" ]; then 29 | echo "Error: Body file '$BODY_FILE' not found." 30 | exit 1 31 | fi 32 | 33 | 34 | cat "$BODY_FILE" | mail -s "$SUBJECT" "$TO_EMAIL" 35 | 36 | if [ $? -eq 0 ]; then 37 | echo "Email sent successfully to $TO_EMAIL" 38 | else 39 | echo "Failed to send email." 
40 | exit 1 41 | fi 42 | -------------------------------------------------------------------------------- /parse_log.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | usage() { 4 | echo "Usage: $0 -f [-d ] [-k ] [-l ] [-o ]" 5 | echo " -f: Path to the log file" 6 | echo " -d: Filter logs by date (e.g., 2024-12-09)" 7 | echo " -k: Filter logs by keyword (e.g., 'ERROR')" 8 | echo " -l: Filter logs by log level (e.g., 'INFO', 'DEBUG', 'ERROR')" 9 | echo " -o: Save filtered logs to output file (optional)" 10 | exit 1 11 | } 12 | 13 | while getopts "f:d:k:l:o:" opt; do 14 | case "$opt" in 15 | f) LOGFILE="$OPTARG" ;; 16 | d) DATE="$OPTARG" ;; 17 | k) KEYWORD="$OPTARG" ;; 18 | l) LOGLEVEL="$OPTARG" ;; 19 | o) OUTPUTFILE="$OPTARG" ;; 20 | *) usage ;; 21 | esac 22 | done 23 | 24 | # Ensure logfile is provided 25 | if [ -z "$LOGFILE" ]; then 26 | echo "Error: Log file is required." 27 | usage 28 | fi 29 | 30 | # Check if the log file exists 31 | if [ ! -f "$LOGFILE" ]; then 32 | echo "Error: Log file '$LOGFILE' not found." 33 | exit 1 34 | fi 35 | 36 | FILTER_CMD="cat $LOGFILE" 37 | 38 | if [ -n "$DATE" ]; then 39 | FILTER_CMD="$FILTER_CMD | grep '$DATE'" 40 | fi 41 | 42 | 43 | if [ -n "$KEYWORD" ]; then 44 | FILTER_CMD="$FILTER_CMD | grep '$KEYWORD'" 45 | fi 46 | 47 | 48 | if [ -n "$LOGLEVEL" ]; then 49 | FILTER_CMD="$FILTER_CMD | grep '$LOGLEVEL'" 50 | fi 51 | 52 | 53 | echo "Running filter command: $FILTER_CMD" 54 | FILTERED_LOGS=$(eval "$FILTER_CMD") 55 | 56 | 57 | if [ -n "$OUTPUTFILE" ]; then 58 | echo "$FILTERED_LOGS" > "$OUTPUTFILE" 59 | echo "Filtered logs saved to '$OUTPUTFILE'." 
60 | else 61 | echo "Filtered logs:" 62 | echo "$FILTERED_LOGS" 63 | fi 64 | -------------------------------------------------------------------------------- /install_docker.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | error_exit() { 4 | echo "Error: $1" 5 | exit 1 6 | } 7 | 8 | 9 | echo "Updating package index..." 10 | sudo apt-get update || error_exit "Failed to update package index." 11 | 12 | echo "Installing required packages..." 13 | sudo apt-get install -y apt-transport-https ca-certificates curl software-properties-common || error_exit "Failed to install prerequisites." 14 | 15 | echo "Adding Docker’s official GPG key..." 16 | curl -fsSL https://download.docker.com/linux/ubuntu/gpg | sudo gpg --dearmor -o /usr/share/keyrings/docker-archive-keyring.gpg || error_exit "Failed to add Docker's GPG key." 17 | 18 | echo "Adding Docker repository..." 19 | echo "deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/docker-archive-keyring.gpg] https://download.docker.com/linux/ubuntu $(lsb_release -cs) stable" | sudo tee /etc/apt/sources.list.d/docker.list > /dev/null || error_exit "Failed to add Docker repository." 20 | 21 | echo "Updating package index for Docker repository..." 22 | sudo apt-get update || error_exit "Failed to update package index after adding Docker repository." 23 | 24 | 25 | echo "Installing Docker..." 26 | sudo apt-get install -y docker-ce docker-ce-cli containerd.io || error_exit "Failed to install Docker." 27 | 28 | echo "Starting and enabling Docker service..." 29 | sudo systemctl start docker || error_exit "Failed to start Docker service." 30 | sudo systemctl enable docker || error_exit "Failed to enable Docker service." 31 | 32 | echo "Adding the current user to the Docker group..." 33 | sudo usermod -aG docker $USER || error_exit "Failed to add user to the Docker group." 34 | 35 | 36 | echo "Verifying Docker installation..." 
37 | docker --version || error_exit "Docker is not installed correctly." 38 | sudo docker run hello-world || error_exit "Failed to run Docker test container." 39 | 40 | echo "Docker installation and setup complete! Please log out and log back in for group changes to take effect." 41 | -------------------------------------------------------------------------------- /publish.ps1: -------------------------------------------------------------------------------- 1 | # Variables 2 | $LocalPath = "." 3 | $ZipFileName = "publish.zip" 4 | $ServerUser = "your_username" 5 | $ServerAddress = "your_server_address" 6 | $RemotePort = 22 7 | $RemotePath = "/path/on/server" 8 | $AppPoolName = "YourAppPoolName" 9 | $SSHKeyPath = "~/.ssh/id_rsa" # Path to your private key for SSH 10 | 11 | 12 | Write-Host "Creating ZIP file..." 13 | $ExcludePatterns = @("appsettings.*.json", "web.config") 14 | Get-ChildItem -Path $LocalPath -Recurse | Where-Object { 15 | $_.FullName -notmatch "appsettings\..*\.json" -and $_.FullName -notmatch "web\.config" 16 | } | Compress-Archive -DestinationPath $ZipFileName -Force 17 | 18 | if (-Not (Test-Path $ZipFileName)) { 19 | Write-Error "Failed to create ZIP file." 20 | exit 1 21 | } 22 | 23 | Write-Host "ZIP file created: $ZipFileName" 24 | 25 | # Copy the ZIP file to the remote server using SCP 26 | Write-Host "Copying ZIP file to remote server..." 27 | scp -P $RemotePort -i $SSHKeyPath $ZipFileName "$ServerUser@$ServerAddress:$RemotePath" 28 | 29 | if ($LASTEXITCODE -ne 0) { 30 | Write-Error "Failed to copy ZIP file to remote server." 31 | exit 1 32 | } 33 | 34 | Write-Host "ZIP file copied successfully." 35 | 36 | Write-Host "Executing remote commands on server..." 37 | $RemoteCommands = @" 38 | echo "Stopping IIS application pool: $AppPoolName" 39 | powershell -Command "Import-Module WebAdministration; Stop-WebAppPool -Name '$AppPoolName'" 40 | echo "Unzipping the file..." 
41 | cd $RemotePath && unzip -o $ZipFileName 42 | echo "Starting IIS application pool: $AppPoolName" 43 | powershell -Command "Import-Module WebAdministration; Start-WebAppPool -Name '$AppPoolName'" 44 | echo "Deployment completed successfully." 45 | "@ 46 | 47 | ssh -p $RemotePort -i $SSHKeyPath "$ServerUser@$ServerAddress" $RemoteCommands 48 | 49 | if ($LASTEXITCODE -ne 0) { 50 | Write-Error "Failed during remote operations on the server." 51 | exit 1 52 | } 53 | 54 | Write-Host "Deployment completed successfully!" 55 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | 2 | # Bash Scripts Collection 3 | 4 | This repository contains a collection of useful Bash scripts for Linux systems. Each script serves a specific purpose, 5 | ranging from system maintenance to monitoring and automation tasks. 6 | Below is a description of each script along with instructions on how to use them. 7 | 8 | ## 🔧 Scripts Overview 9 | 10 | | Script Name | Purpose | 11 | |----------------------------------|----------------------------------------------------------------------------------------------| 12 | | `auto_git_commit.sh` | Automatically commits staged changes with a standardized commit message. | 13 | | `backup.sh` | Backs up a specified directory into a dated folder. | 14 | | `cpu_usage_alert.sh` | Monitors CPU usage and sends an alert (e.g., via email) when usage exceeds a threshold. | 15 | | `disk_usage_alert.sh` | Checks disk usage and alerts if it passes a set limit. | 16 | | `find_top_cpu_processes.sh` | Lists processes consuming the most CPU. | 17 | | `find_top_memory_processes.sh` | Lists processes consuming the most memory. | 18 | | `install_docker.sh` | Automates Docker installation on the host. | 19 | | `monitor_uptime.sh` | Records system uptime periodically (ideal via cron). 
| 20 | | `parse_log.sh` | Extracts and summarizes key data from log files. | 21 | | `publish.sh` & `publish.ps1` | Automates publishing tasks; supports both Bash and PowerShell environments. | 22 | | `resize_image.sh` | Resizes images to predefined dimensions. | 23 | | `send_email.sh` | Sends an email with attachments or notifications. | 24 | | `service_status_check.sh` | Monitors and restarts a service if it’s down. | 25 | | `system_update_cleanup.sh` | Updates the system and cleans up unused packages. | 26 | | `top_largest_files.sh` | Identifies and lists the largest files in a directory tree. | 27 | | `website_health_check.sh` | Tests website availability and alerts on downtime. | 28 | 29 | --- 30 | 31 | ## Ensure executables: 32 | ```bash 33 | chmod +x *.sh 34 | ``` 35 | 36 | ## Run: 37 | ```bash 38 | ./script_name.sh 39 | ``` 40 | 41 | ## 1. Backup Script (`backup.sh`) 42 | 43 | This script creates a backup of a specified directory and stores it in a backup directory with the current date. 44 | 45 | - **Usage**: Update the `SOURCE_DIR` and `BACKUP_DIR` variables with the paths you want to back up and where you want to store the backup. 46 | - **Command**: 47 | ```bash 48 | ./backup.sh 49 | 50 | ## 2. Disk Usage Alert (disk_usage_alert.sh) 51 | 52 | This script checks the disk usage of the root partition and sends an alert email if it exceeds a specified threshold. 53 | - **Usage**: Set the THRESHOLD variable to the desired disk usage limit (in percentage) and update the EMAIL variable with the recipient’s email address. 54 | - **Command**: 55 | ```bash 56 | ./disk_usage_alert.sh 57 | 58 | ## 3. System Update and Cleanup (system_update_cleanup.sh) 59 | This script updates the system, cleans up unnecessary files, and removes unused packages to free up space. 60 | 61 | - **Usage**: Run the script with root privileges to update and clean the system. 62 | - **Command**: 63 | ```bash 64 | sudo ./system_update_cleanup.sh 65 | 66 | ## 4. 
Monitor Server Uptime (monitor_uptime.sh) 67 | This script logs the server uptime to a file. It can be set to run at regular intervals using a cron job. 68 | - **Usage**: Run the script to log the uptime periodically. 69 | - **Command**: 70 | ```bash 71 | ./monitor_uptime.sh 72 | 73 | ## 5. Service Status Check (service_status_check.sh) 74 | This script checks the status of a specified service and restarts it if it is not running. 75 | - **Usage**: Set the SERVICE variable to the service you want to monitor (e.g., nginx) and update the EMAIL variable with the recipient’s email address. 76 | - **Command**: 77 | ```bash 78 | sudo ./service_status_check.sh 79 | 80 |
81 | 82 | - Ensure you have the required permissions to run the scripts (use chmod +x script_name.sh to make them executable). 83 | - Some scripts may require root privileges; use sudo where applicable. 84 | 85 | 86 |
87 | 88 | # Setting Up Bash Scripts as Cron Jobs : 89 | 90 | This guide provides step-by-step instructions to add various Bash scripts as cron jobs on Linux systems. 91 | Cron jobs allow you to schedule scripts to run automatically at specified intervals, making it easier to manage system maintenance, monitoring, and automation tasks. 92 | 93 | ## Prerequisites 94 | 95 | - Ensure the scripts are executable: 96 | ```bash 97 | chmod +x /path/to/your_script.sh 98 | 99 | - Some scripts may require root privileges. Use sudo where applicable. 100 | 101 | - Add Scripts as Cron Jobs: 102 | 103 | ```bash 104 | crontab -e 105 | 106 | - Add entries to the crontab file in the following format: 107 |

* * * * * /path/to/your_script.sh

108 | 109 | Cron Scheduling Patterns: 110 | -

* * * * *: Runs every minute.

111 | -

0 * * * *: Runs at the start of every hour.

112 | -

0 0 * * *: Runs daily at midnight.

113 | -

0 2 * * 0: Runs every Sunday at 2:00 AM.

114 | -

*/5 * * * *: Runs every 5 minutes.

115 | --------------------------------------------------------------------------------