├── COPYING ├── README.md ├── authdns.sh ├── backup_report.sh ├── chkservd_errors.pl ├── chkservd_grep.pl ├── download_file_from_cpanel_user_homedir ├── ea-precheck.sh ├── ns_record_report.sh ├── spam_check.sh └── sslhunter.sh /COPYING: -------------------------------------------------------------------------------- 1 | Copyright (c) 2012, cPanel, Inc. 2 | All rights reserved. 3 | http://cpanel.net 4 | 5 | Redistribution and use in source and binary forms, with or without 6 | modification, are permitted provided that the following conditions are met: 7 | 8 | * Redistributions of source code must retain the above copyright 9 | notice, this list of conditions and the following disclaimer. 10 | 11 | * Redistributions in binary form must reproduce the above copyright 12 | notice, this list of conditions and the following disclaimer in the 13 | documentation and/or other materials provided with the distribution. 14 | 15 | * Neither the name of cPanel, Inc. nor the 16 | names of its contributors may be used to endorse or promote products 17 | derived from this software without specific prior written permission. 18 | 19 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND 20 | ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED 21 | WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 22 | IN NO EVENT SHALL cPanel, Inc BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, 23 | SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, 24 | PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; 25 | OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, 26 | WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) 27 | ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
28 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | TechScripts 2 | =========== 3 | 4 | A Repository for Miscellaneous Scripts 5 | 6 | | Script | Description | 7 | | ------------- |:-------------:| 8 | | authdns.sh | Show authoritative DNS server information | 9 | | backup_report.sh | Show if backups are enabled/successful | 10 | | chkservd_errors.pl | Show only errors from chkservd log file | 11 | | chkservd_grep.pl | Show only errors from chkservd log file | 12 | | download_file_from_cpanel_user_homedir | Use this on workstation to download files | 13 | | ea-precheck.sh | EasyApache Preflight Check Script | 14 | | spam_check.sh | Verify spam in overloaded servers | 15 | | sslhunter.sh | Find matching SSL key for a certificate | 16 | -------------------------------------------------------------------------------- /authdns.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | # Get nameservers for a domain name from the TLD servers. 3 | # Also get the GLUE records if they exist. 4 | # If glue records do not exist, find them manually. 5 | # 6 | # This script is meant to be run from the local Desktop as a quick reference. 7 | # 8 | # Description: 9 | # https://staffwiki.cpanel.net/LinuxSupport/GuideToDns#Bash_script_to_show_authoritative_nameservers_and_GLUE_records 10 | # 11 | # How to download and use: 12 | # curl -O https://raw.githubusercontent.com/cPanelTechs/TechScripts/master/authdns.sh; chmod u+x authdns.sh 13 | # ./authdns.sh cpanel.net 14 | # 15 | # Todo: check for two-part tlds, like .xx.co or .com.br (3753229) 16 | # need to check if 2nd to last is legit tld, then run it.
17 | # http://stackoverflow.com/questions/14460680/how-to-get-a-list-of-tlds-using-bash-for-building-a-regex 18 | # http://data.iana.org/TLD/tlds-alpha-by-domain.txt 19 | # http://mxr.mozilla.org/mozilla-central/source/netwerk/dns/effective_tld_names.dat?raw=1 20 | # 21 | # Todo#2: check for responses from all the auth ns's, instead of just the top one 22 | 23 | function debug() { 24 | debug="off" 25 | if [ "$debug" = "on" ]; then 26 | echo $1 27 | fi 28 | } 29 | # example: 30 | # debug "variable_name is ${variable_name}" 31 | 32 | # Check for dig commannd 33 | verify_tools() { 34 | command -v dig >/dev/null 2>&1 || { echo >&2 "Oops! The dig command is necessary for this script, but was not found on this system :( Aborting."; exit 1; } 35 | } 36 | 37 | # Check input 38 | check_input() { 39 | if [ -z ${dom} ]; then 40 | echo 'Please specify a domain.'; exit 1; 41 | fi 42 | } 43 | 44 | # Get input, initial variables 45 | dom=${1} 46 | #tld=${dom#*.} 47 | tld=$(echo $dom | awk -F. '{print $NF}') 48 | debug "tld is ${tld}" 49 | options="+noall +authority +additional +comments" 50 | multi_check_done=0 51 | 52 | # Functions 53 | try_sec_level_domain() { 54 | num_parts=$(echo $dom | awk -F"." '{print NF}') 55 | if [ $num_parts > 2 ]; then 56 | debug "Starting multi part domain check" 57 | regex=$(curl -s http://data.iana.org/TLD/tlds-alpha-by-domain.txt | sed '1d; s/^ *//; s/ *$//; /^$/d' | awk '{print length" "$0}' | sort -rn | cut -d' ' -f2- | tr '[:upper:]' '[:lower:]' | awk '{print "^"$0"$"}' | tr '\n' '|' | sed 's/\|$//') 58 | let sec_lev_tld_pos=$num_parts-1 59 | debug "sec_lev_tld_pos is $sec_lev_tld_pos, num_parts is $num_parts" 60 | sec_lev_tld=$(echo $dom | cut -d. -f$sec_lev_tld_pos) 61 | debug "sec_lev_tld is $sec_lev_tld" 62 | is_legit=$(echo $sec_lev_tld | awk -v reg=$regex '$0~reg'); 63 | if [ "$is_legit" ]; then tld=$(echo $dom | cut -d. -f$sec_lev_tld_pos,$num_parts); fi 64 | multi_check_done=1 65 | debug "multicheck is done. 
the new tld is $tld" 66 | fi 67 | } 68 | 69 | create_dig_oneliner() { 70 | tld_server=$(dig NS ${tld}. +short | head -n1) 71 | dig_oneliner="dig @${tld_server} ${dom}. ${options}" 72 | } 73 | 74 | get_result() { 75 | dig_result=`${dig_oneliner}` 76 | } 77 | 78 | set_colors() { 79 | # Colors and formatting 80 | greenbold='\033[1;32m' 81 | clroff="\033[0m"; 82 | } 83 | 84 | get_nameservers() { 85 | # nameserver names and possibly IP's from TLD servers 86 | auth_ns=$(${dig_oneliner} | awk '/AUTHORITY SECTION/,/^[ ]*$/' | awk '{print $NF}' | sed -e 1d -e 's/.$//') 87 | debug "auth_ns is ${auth_ns} multi_check_done is $multi_check_done" 88 | ns_check_ip=$(echo $auth_ns | egrep '([[:digit:]]{1,3}\.){3}[[:digit:]]{1,3}') 89 | ns_check_name=$(echo $auth_ns | egrep [a-zA-Z]) 90 | debug "ns_check_ip is ${ns_check_ip}" 91 | debug "ns_check_name is ${ns_check_name}" 92 | if [ ! "$ns_check_name" -a ! "$ns_check_ip" -a $multi_check_done -lt 1 ]; then 93 | try_sec_level_domain 94 | create_dig_oneliner 95 | get_result 96 | get_nameservers 97 | fi 98 | additional_ips=$(${dig_oneliner} | awk '/ADDITIONAL SECTION/,0' | awk '{print $NF}' | sed 1d) 99 | } 100 | 101 | get_nameserver_ips() { 102 | # get bare IP's of nameservers 103 | if [ "$additional_ips" ]; 104 | then bare_result=$additional_ips; 105 | else bare_result=` 106 | for auth_ips in "${auth_ns[@]}"; do 107 | dig +short $auth_ips 108 | echo "(Warning: these IP's had to be resolved manually, so glue records are bad)" 109 | done;` 110 | fi; 111 | } 112 | 113 | print_results() { 114 | printf "%b\n" "${greenbold}\n# dig NS ${tld}. 
+short | head -n1${clroff}" 115 | printf "%b\n" "$tld_server" 116 | printf "%b\n" "${greenbold}\n# ${dig_oneliner}${clroff}" 117 | printf "%b\n" "${dig_result}\n" 118 | printf "%b\n" "${greenbold}authoritative nameserver names:\n${clroff}${auth_ns}\n" 119 | printf "%b\n" "${greenbold}authoritative nameserver IPs:\n${clroff}${bare_result}\n" 120 | } 121 | 122 | 123 | 124 | # Run code 125 | verify_tools 126 | check_input 127 | create_dig_oneliner 128 | get_result 129 | set_colors 130 | get_nameservers 131 | # get_nameservers also includes a check that tries 2nd level domains 132 | get_nameserver_ips 133 | print_results 134 | -------------------------------------------------------------------------------- /backup_report.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # Quick backup report script written by: Marco Ferrufino 3 | # 4 | # Description: https://staffwiki.cpanel.net/bin/view/LinuxSupport/CPanelBackups 5 | # 6 | # How to run this script: 7 | # curl -s --insecure https://raw.githubusercontent.com/cPanelTechs/TechScripts/master/backup_report.sh | sh 8 | 9 | # this shows backups enabled or disabled but i need to return the value to the check functions 10 | 11 | backlogdir=/usr/local/cpanel/logs/cpbackup; 12 | 13 | 14 | # check if new backups are enabled 15 | function check_new_backups() { 16 | echo -e "\n\n\033[36m[ cPTech Backup Report v2.1 ]\033[0m"; 17 | new_enabled=$(grep BACKUPENABLE /var/cpanel/backups/config 2>/dev/null | awk -F"'" '{print $2}') 18 | new_cron=$(crontab -l | grep bin\/backup | awk '{print $1,$2,$3,$4,$5}') 19 | if [ "$new_enabled" = "yes" ]; then new_status='\033[1;32m'Enabled'\033[0m' 20 | else new_status='\033[1;31m'Disabled'\033[0m' 21 | fi 22 | echo -e "New Backups = $new_status\t\t(cron time: $new_cron)\t\t/var/cpanel/backups/config" 23 | } 24 | 25 | # check if legacy or new backups are enabled. 
if each one is, then show how many users are skipped 26 | function check_legacy_backups() { 27 | legacy_enabled=$(grep BACKUPENABLE /etc/cpbackup.conf | awk '{print $2'}) 28 | legacy_cron=$(crontab -l | grep cpbackup | awk '{print $1,$2,$3,$4,$5}') 29 | if [ $legacy_enabled = "yes" ]; then legacy_status='\033[1;32m'Enabled'\033[0m' 30 | else legacy_status='\033[1;31m'Disabled'\033[0m' 31 | fi 32 | echo -e "Legacy Backups = $legacy_status\t(cron time: $legacy_cron)\t\t/etc/cpbackup.conf" 33 | } 34 | 35 | # For the ftp backup server checks. I couldn't do this with normal arrays, so using this eval hack 36 | hput () { 37 | eval hash"$1"='$2' 38 | } 39 | hget () { 40 | eval echo '${hash'"$1"'#hash}' 41 | } 42 | 43 | # Check if any active FTP backups 44 | function check_new_ftp_backups() { 45 | any_ftp_backups=$(\grep 'disabled: 0' /var/cpanel/backups/*backup_destination 2>/dev/null) 46 | if [ -n "$any_ftp_backups" ]; then ftp_backup_status='Enabled' 47 | else ftp_backup_status='Disabled' 48 | fi 49 | echo -e "\nNew FTP Backups = $ftp_backup_status\t(as of v2.0, this script only checks for new ftp backups, not legacy)" 50 | 51 | # Normal arrays 52 | declare -a ftp_server_files=($(\ls /var/cpanel/backups/*backup_destination)); 53 | declare -a ftp_server_names=($(for i in ${ftp_server_files[@]}; do echo $i | cut -d/ -f5 | rev | cut -d_ -f4,5,6,7,8 | rev; done)); 54 | # Array hack is storing 'Disabled' status in $srvr_SERVER_NAME 55 | for i in ${ftp_server_files[@]}; do hput srvr_$(echo $i | cut -d/ -f5 | rev | cut -d_ -f4,5,6,7,8 | rev) $(\grep disabled $i | awk '{print $2}'); done 56 | 57 | # Print 58 | for i in ${ftp_server_names[@]}; do 59 | echo -n "Backup FTP Server: "$i" = " 60 | srvr_status=$(hget srvr_$i) 61 | if [ $srvr_status = 0 ]; then 62 | echo -e '\033[1;32m'Enabled'\033[0m'; 63 | else echo -e '\033[1;31m'Disabled'\033[0m'; 64 | fi 65 | done 66 | } 67 | 68 | # look at start, end times. 
print number of users where backup was attempted 69 | function print_start_end_times () { 70 | echo -e "\n\033[36m[ Current Backup Logs in "$backlogdir" ]\033[0m"; 71 | if [ -e $backlogdir ]; then 72 | cd $backlogdir; 73 | for i in `\ls`; do 74 | echo -n $i": "; grep "Started" $i; echo -n "Ended "; 75 | \ls -lrth | grep $i | awk '{print $6" "$7" "$8}'; 76 | echo -ne " Number of users backed up:\t"; grep "user :" $i | wc -l; 77 | done; 78 | fi; 79 | } 80 | 81 | function print_num_expected_users () { 82 | echo -e "\n\033[36m[ Expected Number of Users ]\033[0m"; 83 | wc -l /etc/trueuserdomains; 84 | } 85 | 86 | function exceptions_heading() { 87 | echo -e "\n\033[36m[ A count of users enabled/disabled ]\033[0m"; 88 | } 89 | 90 | function list_legacy_exceptions() { 91 | legacy_users=$(grep "LEGACY_BACKUP=1" /var/cpanel/users/* | wc -l); 92 | if [ $legacy_enabled == "yes" ]; then 93 | oldxs=$(egrep "LEGACY_BACKUP=0" /var/cpanel/users/* | wc -l); 94 | skip_file_ct=$(wc -l /etc/cpbackup-userskip.conf 2>/dev/null) 95 | if [ $oldxs -gt 0 -o "$skip_file_ct" ]; then 96 | echo -e "Legacy Backups:"; 97 | fi 98 | if [ $oldxs -gt 0 ]; then echo -e "Number of real Legacy backup users disabled: \033[1;31m$oldxs\033[0m\n"; fi; 99 | if [ -n "$skip_file_ct" ]; then echo -e "Extra Information: This skip file should no longer be used\n"$skip_file_ct"\n"; fi 100 | elif [ $legacy_users -gt 0 -a $legacy_status == "Disabled" ]; then 101 | echo -e "\nExtra Information: Legacy Backups are disabled as a whole, but there are $legacy_users users ready to use them." 
102 | echo 103 | fi 104 | } 105 | 106 | function list_new_exceptions() { 107 | # TODO: math 108 | newsuspended=$(egrep "=1" /var/cpanel/users/* | grep "SUSPENDED" | wc -l); 109 | if [ "$newsuspended" != 0 ]; then 110 | echo -e "Users suspended: \033[1;31m$newsuspended\033[0m"; 111 | fi 112 | 113 | if [ "$new_enabled" == "yes" ]; then 114 | newxs=$(egrep "BACKUP=0" /var/cpanel/users/* | grep ":BACK" | wc -l); 115 | echo -e "New Backup users disabled: \033[1;31m$newxs\033[0m"; 116 | newen=$(egrep "BACKUP=1" /var/cpanel/users/* | grep ":BACK" | wc -l); 117 | echo -e "New Backup users enabled: \033[1;32m$newen\033[0m" 118 | fi 119 | } 120 | 121 | function count_local_new_backups() { 122 | echo -e "\n\033[36m[ A count of the backup files on local disk currently ]\033[0m"; 123 | new_backup_dir=$(awk '/BACKUPDIR/ {print $2}' /var/cpanel/backups/config 2>/dev/null) 124 | if [ -n "$new_backup_dir" ]; then 125 | number_new_backups=$(\ls /backup/*/accounts 2>/dev/null | egrep -v ":$" | awk NF | wc -l) 126 | echo -e "New backups in $new_backup_dir/*/accounts: "$number_new_backups 127 | else echo "0 - No new backup directory configured" 128 | fi 129 | } 130 | 131 | function count_local_legacy_backups() { 132 | legacy_backup_dir=$(awk '/BACKUPDIR/ {print $2}' /etc/cpbackup.conf) 133 | echo -e "\nLegacy backups in $legacy_backup_dir/cpbackup: " 134 | for freq in daily weekly monthly; do 135 | echo -n $freq": "; 136 | \ls $legacy_backup_dir/cpbackup/$freq | egrep -v "^dirs$|^files$|cpbackup|status" | sed 's/\.tar.*//g' | sort | uniq | wc -l; 137 | done 138 | } 139 | 140 | function show_recent_errors() { 141 | # Errors from backup log directory 142 | echo -e "\n\033[36m[ Count of Recent Errors ]\033[0m"; 143 | for i in `\ls $backlogdir`; do 144 | echo -n $backlogdir"/"$i" Ended "; 145 | \ls -lrth $backlogdir | grep $i | awk '{print $6" "$7" "$8}'; 146 | \egrep -i "failed|error|load to go down|Unable" $backlogdir/$i | cut -c -180 | sort | uniq -c ; 147 | done | tail; 148 | # Errors 
from cPanel error log 149 | echo -e "\n/usr/local/cpanel/logs/error_log:" 150 | egrep "(warn|die|panic) \[backup" /usr/local/cpanel/logs/error_log | awk '{printf $1"] "; for (i=4;i<=20;i=i+1) {printf $i" "}; print ""}' | uniq -c | tail -3 151 | 152 | #any_ftp_backups=$(\grep 'disabled: 0' /var/cpanel/backups/*backup_destination 2>/dev/null) 153 | if [ -n "$any_ftp_backups" ]; then 154 | # Errors from FTP backups 155 | echo -e "\n/usr/local/cpanel/logs/cpbackup_transporter.log:" 156 | egrep '] warn|] err' /usr/local/cpanel/logs/cpbackup_transporter.log | tail -5 157 | fi 158 | } 159 | 160 | # Run all functions 161 | check_new_backups 162 | check_legacy_backups 163 | check_new_ftp_backups 164 | print_start_end_times 165 | print_num_expected_users 166 | exceptions_heading 167 | list_legacy_exceptions 168 | list_new_exceptions 169 | count_local_new_backups 170 | count_local_legacy_backups 171 | show_recent_errors 172 | echo; echo 173 | -------------------------------------------------------------------------------- /chkservd_errors.pl: -------------------------------------------------------------------------------- 1 | #!/usr/local/cpanel/3rdparty/perl/514/bin/perl 2 | 3 | use strict; 4 | use warnings; 5 | use Time::Piece; 6 | use Time::Seconds; 7 | use File::ReadBackwards; 8 | 9 | # Variables 10 | my $verbose = 1; 11 | my $file = '/var/log/chkservd.log'; 12 | my $checks_per_day; 13 | chomp(my $every_n_sec = `grep chkservd_check_interval /var/cpanel/cpanel.config | cut -d= -f2`); 14 | my $every_n_min; 15 | my @lines; 16 | my $started=0; 17 | my $current_record = ''; 18 | my $line_has_date = 2; 19 | my $lastdate = ''; 20 | my $curdate; 21 | my $tz; 22 | my $tz_num; 23 | my $curdate_printable; 24 | my $duration; 25 | my $duration_min; 26 | my $duration_reported; 27 | my $regex_error_bucket; 28 | my $regex_known_full_lines; 29 | 30 | # Set search time for 'system too slow' check 31 | # IDK why this didn't work: 32 | #if ( !$every_n_sec =~ /\D/ ) \{ 33 | #if ( 
!looks_like_number $every_n_sec || $every_n_sec < 1 ) \{ 34 | if ( $every_n_sec < 1 ) { 35 | &debug("every_n_sec is not an acceptable digit, using default 300 = 10 min"); 36 | $every_n_sec = 300; 37 | $checks_per_day = ( 24*(60/($every_n_sec/60)) ); 38 | } 39 | else { 40 | &debug("every_n_sec is a digit, using it"); 41 | $checks_per_day = ( 24*(60/($every_n_sec/60)) ); 42 | &debug("checks_per_day is: $checks_per_day"); 43 | } 44 | # Add a 5 minute cushion to lower number of reports 45 | $every_n_min = (($every_n_sec/60)+5); 46 | 47 | ## Open log file 48 | # Get number of days to check 49 | my $days = shift or die "Please enter number of previous days (this is juat an estimate) as an argument.\n"; 50 | # Get number of lines. This is a guessed average (#lines per check seem to be ~5-8, so lets use 6.5) 51 | my $lines_to_check = ($days*$checks_per_day*6.5); 52 | &debug("lines_to_check is: $lines_to_check"); 53 | 54 | # Tail the file (opeing the whole thing is ridonculous time-wise) 55 | @lines = &tail_file(); 56 | 57 | # 58 | # While loop reads the file 59 | # This is the main section of code 60 | # 61 | while (@lines) { 62 | &debug("While loop started"); 63 | my $line = shift(@lines); 64 | # Look for line with a date 65 | if ($line =~ /\[(\d{4}(-\d{2}){2} \d{2}(:\d{2}){2} [+-]\d{4})\].*/) { 66 | $line_has_date = 1; 67 | &debug("line_has_date is now on: $line_has_date"); 68 | $duration_reported = 0; 69 | &debug("Date string found, one is $1"); 70 | 71 | &debug("Before check, started is $started"); 72 | if ($started == 1) { 73 | &check_record($current_record); 74 | $started = 0; 75 | } 76 | &debug("After 1st check, started is $started"); 77 | if ($started == 0) { 78 | &set_date($1); 79 | $started = 1; 80 | $current_record = $line; 81 | } 82 | } 83 | else { 84 | chomp($current_record); 85 | $current_record .= " " . 
$line; 86 | } 87 | 88 | &debug("While loop finished\n"); 89 | } 90 | 91 | # 92 | # Debug lines print (debug) before output 93 | # 94 | sub debug { 95 | my $debug_toggle = "no"; 96 | # not sure why, but these checks silences warnings 97 | #if( ($debug_toggle eq "yes") && (defined $debug_toggle) && $_[1] ) { 98 | if( ($debug_toggle eq "yes") && (defined $debug_toggle) ) { 99 | print "(debug) @_\n"; 100 | } 101 | } 102 | 103 | # 104 | # Tail the file only so many lines. Saves time. 105 | # Right now, it's not completely accurate. 106 | # 107 | sub tail_file { 108 | my $lim = $lines_to_check; 109 | my $bw = File::ReadBackwards->new( $file ) or die "can't read $file: $!\n" ; 110 | 111 | my $line; 112 | my @lines; 113 | while( defined( my $line = $bw->readline ) ) { 114 | push @lines, $line; 115 | last if --$lim <= 0; 116 | } 117 | reverse @lines; 118 | } 119 | 120 | # 121 | # Use Time::Piece object to set date 122 | # 123 | sub set_date { 124 | # very manually adjusting timezone 125 | $curdate = Time::Piece->strptime($1, "%Y-%m-%d %H:%M:%S %z"); 126 | &debug("curdate is now $curdate"); 127 | &debug("lastdate is $lastdate"); 128 | $tz = $curdate->strftime("%z"); 129 | &debug("tz is $tz"); 130 | $tz_num = ($tz + 0)/100; 131 | &debug("tz_num is $tz_num"); 132 | $curdate += $tz_num*ONE_HOUR; 133 | &debug("after tz adjustment, curdate is now $curdate"); 134 | $curdate_printable=$curdate->strftime("%Y-%m-%d %H:%M:%S $tz"); 135 | &debug("curdate_printable is $curdate_printable"); 136 | 137 | # Calculate time difference between this & last check 138 | # If this is the first time run, establish the starting values 139 | # note to self: the cPanel way (although I'd lose my debug): $lastdate ||= $curdate; 140 | if (!$lastdate) { 141 | $lastdate = $curdate; 142 | &debug ("after setting first occurence, lastdate is ", $lastdate, "\n"); 143 | } 144 | else { 145 | $duration = $curdate - $lastdate; 146 | &debug("duration is $duration"); 147 | &debug ("duration is ", $duration->minutes, 
" minutes"); 148 | &debug ("duration is ", $duration->hours, " hours"); 149 | $duration_min=$duration->minutes; 150 | &debug ("duration_min is ", $duration_min); 151 | } 152 | 153 | &debug("line_has_date, after if loop, is $line_has_date"); 154 | } 155 | 156 | # 157 | # The main check 158 | # 159 | sub check_record { 160 | # Regex for errors 161 | $regex_error_bucket = 'Restarting|nable|\*\*|imeout|ailure|terrupt|100%|9[89]%|second'; 162 | 163 | # If these are seen, something needs to be added to the error_bucket 164 | if ( ($current_record !~ /$regex_error_bucket/) && ($current_record =~ /:-]/) ){ 165 | print "[$curdate_printable] ....\n"; 166 | } 167 | # Main search 168 | &debug ("just before error regex, current_record is ", $current_record); 169 | if ($current_record =~ /$regex_error_bucket/){ 170 | &debug ("line is ", $current_record); 171 | my @array_fields = split /(\.){2,}/,$current_record; 172 | &debug ("num fields is ", scalar(@array_fields)); 173 | if (scalar(@array_fields) > 0){ 174 | foreach (@array_fields) { 175 | # This is main search. Every thing else is exceptions. If happy face can't find it, it's weird. 
176 | &debug("line_has_date, in foreach, is $line_has_date"); 177 | if ( /:-]/ ) { 178 | chomp; 179 | print "[$curdate_printable] ", substr($_,0,100), "...\n"; 180 | &debug("line_has_date, in if_foreach, is $line_has_date"); 181 | } 182 | # Output for broken lines 183 | elsif ( ($_ =~ /$regex_error_bucket/) && ($line_has_date == 1) ){ 184 | chomp; 185 | print "[$curdate_printable] ", substr($_,0,100), "...\n"; 186 | } 187 | # This should no longer be necessary 188 | elsif ( (/$regex_error_bucket/) && ($verbose == 1) ){ 189 | &debug("line_has_date, in if_error_bucket & verbose, is $line_has_date"); 190 | chomp; 191 | print "[ ] ", substr($_,0,100), "...\n"; 192 | } 193 | } 194 | } 195 | } 196 | 197 | # check if duration is too long 198 | &debug ("duration_min is ", $duration_min); 199 | &debug ("duration_reported is ", $duration_reported); 200 | if( (defined $duration_min) && ($duration_reported == 0) ){ 201 | if($duration_min > $every_n_min) { 202 | printf "[$curdate_printable] %.0f minutes since last check\n", $duration_min; 203 | $duration_reported = 1; 204 | &debug ("duration_reported is ", $duration_reported); 205 | } 206 | } 207 | 208 | # Set lastdate for next round 209 | if ($line_has_date == 1) { 210 | $lastdate = $curdate; 211 | } 212 | # Reset so we can check again 213 | $line_has_date = 2; 214 | &debug("line_has_date is now off: $line_has_date"); 215 | } 216 | -------------------------------------------------------------------------------- /chkservd_grep.pl: -------------------------------------------------------------------------------- 1 | #!/usr/local/cpanel/3rdparty/bin/perl 2 | 3 | use strict; 4 | use warnings; 5 | 6 | # #!/usr/local/cpanel/3rdparty/perl/514/bin/perl 7 | # TIP: better to use the symlink to perl instead of a specific version since 8 | # perl version will change when cPanel switches to a newer version 9 | # 10 | 11 | # 12 | #TODO: convert this into a module that allows for parsing most any log file 13 | # 14 | 15 | # 16 | # Conventions 
and general tips 17 | # 18 | # CONVENTION: constrain the width of code to 80 columns 19 | # 20 | # CONVENTION: put spaces before and after equal signs 21 | # 22 | # CONVENTION: the preferred way to format else statements is to place the word 23 | # on its own line like so: 24 | # if () { 25 | # } 26 | # else { 27 | # } 28 | # 29 | # TIP: use verbose comments that fully explain any complexity or ambiguity so 30 | # that someone unfamiliar with the code can understand what is going on 31 | # 32 | # TIP: for readability and to reduce ambiguity, err on the side of verbose 33 | # variable names. Ex: $current_date instead of $curdate 34 | # 35 | # TIP: subroutines should all appear in the same section of your code, most 36 | # commonly after the main-line code. 37 | # 38 | # TIP: Using the '&' for function calls (i.e. &debug) should be avoided as there 39 | # are a few instances where it can cause unexpected results. On the other 40 | # hand, such issues are rare, so you are usuall fine using it to visually 41 | # differentiate user-defined subroutine calls from built-in functions. 42 | # 43 | # TIP: You can leave off parentheses if doing so does not introduce any 44 | # ambiguity, such as debug ''; instead of debug(''); 45 | # 46 | # TIP: It is dangerous to rely on global variables. Better to pass an argument 47 | # and then use a local variable inside the subroutine 48 | # 49 | # TIP: Name your functions carefully to avoid confusion. For example, if the 50 | # function, reverse_lines(), reads a file backwards in order to grab the 51 | # end of a file, its purpose is actually to tail the log--the reversal of 52 | # lines was simply one step in achieving that end. Therefore, a better name# would be tail(). 53 | # 54 | # TIP: Avoid running shell commands for portability and speed (and fun!) 55 | # 56 | # TIP: Sometimes a problem can be more efficiently solved by writing a short 57 | # subroutine rather than requiring an external module. 
58 | # 59 | 60 | # cPanel-specific tips 61 | # 62 | # Tip: When using external modules, try to use on that cpanel installs for 63 | # its version of perl. For example, Time::Piece appears to already be 64 | # installed on cpanel servers, while Date::Parse is not always installed. 65 | # 66 | use Time::Local; 67 | #use Time::Piece; 68 | #use Time::Seconds; 69 | #use File::ReadBackwards; 70 | #use Date::Parse; 71 | 72 | use Data::Dumper; # For easier debugging 73 | 74 | # 75 | # TIP: Normally, you should avoid global variables that are used inside 76 | # subroutines, but toggling debugging on/off globally is probably ok. 77 | # 78 | # Global variables 79 | # 80 | # Toggle debugging messages 81 | my $DEBUG = 1; 82 | 83 | # 84 | # Usage: 85 | # chkservd_grep days [search_string] 86 | # 87 | my ($days, $filter_pattern) = @ARGV; 88 | #debug($days, $filter_pattern);exit; 89 | 90 | die "usage: chkservd_grep number_of_days [search_string]" unless $days; 91 | 92 | # 93 | # Main program 94 | # 95 | 96 | # 97 | # TIP: This script does not use the 'chkservd_check_interval' value from 98 | # /var/cpanel/cpanel.config. However, I'm including the the get_config_value() 99 | # function to illustrate how you would want to go about extracting a value from 100 | # a configuration file using perl. 101 | # 102 | # TIP: Instead of hard-coding a particular variable and filename, generalize the 103 | # operation with a subroutine that accepts arguments. This will make your code 104 | # more scalable, readable and recycle-able 105 | # 106 | # 107 | #chomp(my $interval = `grep chkservd_check_interval /var/cpanel/cpanel.config | cut -d= -f2`); 108 | my $interval = get_config_value('/var/cpanel/cpanel.config', 109 | 'chkservd_check_interval', 110 | '='); 111 | #debug('Value of chkservd_check_interval: ' . 
$interval); 112 | 113 | # 114 | # PERFORMANCE CONSIDERATIONS 115 | # 116 | # TIP: Instead of tailing the log and then re-looping through it again, it 117 | # would be more efficient to perform necessary filtering and time calculation 118 | # as you read it the first time (easier said than done!). This script 119 | # uses the functions seek() and read() to work backwards fromt he end of the 120 | # log file and perform the necessary filtering and time calculations in one 121 | # pass. 122 | # 123 | # TIP: Avoid running expensive operations multiple times. Obviously, don't 124 | # loop within a loop if there is no real need to do so. Less obviously, 125 | # run just one regex and extract the needed info in one pass. 126 | # 127 | # TIP: With regular expressions, you should generally follow the "simpler is 128 | # better" rule. Instead of using a regex that maps the timestamp parts to 129 | # date, time and timezone, you can just look for lines matching 130 | # [xxxx-xx-xx ? ?] and then split on the space character to get the time and 131 | # timezone. I'm assuming that splitting on a single character is significantly 132 | # faster than doing an extract-by-regex operation, but I've never tested that 133 | # theory. 
134 | # 135 | # Pattern that matches the beginning of each log entry in /var/log/chkservd.log 136 | # The timestamp portion must be enclosed in () to permit extraction 137 | #my $chkservd_entry_start_pattern = '^\[([\d]{4}\-[\d]{2}\-[\d]{2} [^\]]*)\]'; 138 | my $chkservd_entry_start_pattern = '^\[([\d]{4}\-[\d]{2}\-[\d]{2} [0-9\:\+ ]{14})\]'; 139 | 140 | # 141 | # Call main function that grabs chkservd errors going back X days 142 | # 143 | my @entries = chkservd_grep($days); 144 | debug(@entries); 145 | exit; 146 | foreach my $entry (@entries) { 147 | #debug($entry); 148 | print "$entry->{'text'}\n"; 149 | if ($entry->{'last_seen'}) { 150 | keys %{$entry->{'last_seen'}}; 151 | while (my ($label, $time) = each %{$entry->{'last_seen'}}) { 152 | next unless $time; 153 | printf( 154 | "Time since previous %s: %s\n", 155 | $label, 156 | seconds_to_human_readable_time( 157 | $entry->{'time'} - $time 158 | ) 159 | ); 160 | } 161 | } 162 | print "\n"; 163 | } 164 | # foreach my $last_seen_event (@{$entry->{'last_seen'}}) { 165 | #debug($last_seen_event); 166 | # my $label = (keys %{$last_seen_event})[0]; 167 | # printf("Time since last $label: %s\n", 168 | # seconds_to_human_readable_time( 169 | # $entry->{'time'} - $last_seen_event->{'time'} 170 | # )); 171 | # } 172 | # } 173 | # print "\n"; 174 | #} 175 | 176 | # 177 | # Subroutines 178 | # 179 | 180 | # 181 | # TIP: write a general-purpose subroutine, such as get_entries_from_log(), that 182 | # accomplishes the general task (in this case, dividing a log into entries while 183 | # keeping track of the time elapsed between matching entries). Then, use a 184 | # more user-friendly function that accepts fewer arguments to accomplish a 185 | # more specific task (like parsing chkservd). 
186 | # 187 | # This has various benefits some of which are: 188 | # - Code is more recyclable 189 | # - Code is more flexible 190 | # - Generalization forces you to think conceptually and thus more powerfully 191 | # 192 | # Chkservd-specific function that calls the main log parser with the appropriate 193 | # arguments 194 | sub chkservd_grep { 195 | my $days = shift; 196 | my $log_file = '/var/log/chkservd.log'; 197 | 198 | # 199 | # TODO: Figure out why '**' part of search pattern is not working 200 | # 201 | 202 | # If no search pattern, use default one that looks for common problems 203 | my $filter_pattern = shift || 204 | 'Restart|nable|\*\*|imeout|ailure|terrupt|100%|9[89]%|second'; 205 | 206 | return get_entries_from_log($log_file, 207 | $chkservd_entry_start_pattern, 208 | $days, 209 | $filter_pattern, 210 | # 500, 211 | 1000, 212 | # 5000, 213 | # 10000, 214 | # 20000, 215 | # 40000, 216 | # 80000, 217 | 'check', 'Service check'); 218 | #'tmp warning', '/tmp'); 219 | #'socket', 'socket'); 220 | } 221 | # my $filename = shift; 222 | # my $entry_start_pattern = shift; 223 | # my $days = shift || 1; 224 | # my $filter_pattern = shift || '.*'; 225 | # my $bytes = shift || 200; # For maximum efficiency, should be a little 226 | # # greater than the average log entry length 227 | # 228 | ## Input for evnet types should be either of the following 229 | ## - a string, which should be a regex pattern to watch for 230 | ## - an array containing event labels and respective regex patterns to 231 | ## identify the event: 232 | ## (label1, pattern1, [label2, pattern2], ...) 
#
# Get epoch time (unix time) from a timestamp string of the form
# "YYYY-MM-DD HH:MM:SS +ZZZZ" (date, time and numeric UTC offset separated by
# whitespace). Dies on malformed input.
#
sub epoch_time_from_timestamp {
    my $timestamp = shift;

    my ($date, $time, $offset) = split(' ', $timestamp);
    die "Invalid timestamp!\n" unless $date and $time and $offset;

    my ($year, $month, $day) = split('-', $date);
    die "Invalid date!\n" unless $year and $month and $day;

    # Interpret two-digit years: 00-69 => 2000s, 70-99 => 1900s
    if ($year < 100) {
        if ($year < 70) {
            $year = $year + 2000;
        }
        else {
            $year = $year + 1900;
        }
    }

    my ($hour, $minute, $second) = split(':', $time);
    die "Invalid time!\n" unless $hour and $minute and $second;

    # timegm() treats the broken-down time as UTC; the zone offset is applied
    # manually below. (timegm comes from Time::Local, imported at the top of
    # the file -- outside this excerpt.)
    my $uncorrected_epoch_time = timegm($second, $minute, $hour,
                                        $day, $month - 1, $year);

    # Offset looks like "+0500": sign, two hour digits, two minute digits
    my $offset_multiplier = substr($offset, 0, 1) eq '+' ? 1 : -1;
    my $offset_hours = substr($offset, 1, 2);
    my $offset_minutes = substr($offset, 3, 2);
    my $offset_seconds = $offset_hours * 60 * 60 + $offset_minutes * 60;
    $offset_seconds = $offset_seconds * $offset_multiplier;

    return $uncorrected_epoch_time - $offset_seconds;
}

#
# TIP: This script doesn't make use of the following function,
# get_config_value(), but I'm including it to illustrate what you would want
# to do instead of calling the 'grep' shell command.
#
# Return the value of a particular variable in a configuration file. Dies if
# the file cannot be read, or if the variable appears zero or multiple times.
#
sub get_config_value {

    my $filename = shift;
    my $variable = shift;
    my $delimiter = shift || '=';
    my $value;

    # Throw error if required input not present
    die "Missing input!\n" if !$filename or !$variable;

    my $fh;
    my @matching_lines;

    open $fh, $filename or die "Could not open $filename: $!";
    @matching_lines = grep /^$variable\s?$delimiter/, <$fh>;

    # Get rid of trailing newline
    chomp @matching_lines;

    # Throw error if no matches or multiple matches
    if (!scalar @matching_lines) {
        die "No entry for '$variable' found in $filename\n";
    }

    # Scalar context of array equal to number of entries
    elsif (scalar @matching_lines > 1) {
        die "More than one entry for '$variable' in $filename\n"
    }

    # Grab value, trimming any whitespace
    $value = (split($delimiter, $matching_lines[0]))[1];
    $value =~ s/^\s+|\s+$//g;

    return $value;
}

#
# Get array of log entries going back to a particular date.
#
# Reads the log backwards from EOF in chunks of $bytes, splits it into
# entries on $entry_start_pattern, keeps entries matching $filter_pattern
# (plus any watched-event patterns), and stops once entries older than
# $days days are reached.
#
# Remaining arguments describe "last seen" events and should be either:
#   - a single string: a regex pattern to watch for, or
#   - an even-sized list of label/pattern pairs:
#       (label1, pattern1, label2, pattern2, ...)
#
sub get_entries_from_log {
    my $filename = shift;
    my $entry_start_pattern = shift;
    my $days = shift || 1;
    my $filter_pattern = shift || '.*';
    my $bytes = shift || 200;  # For maximum efficiency, should be a little
                               # greater than the average log entry length
    my $last_seen_events_input = \@_;

    # Watched events are repackaged into an array of hashes, each hash with
    # the following structure:
    #   { $label => { 'pattern' => $pattern, 'time' => $time } }
    my @last_seen_events;

    # Always make the filter match the first event type to watch for
    push(@last_seen_events, {
        'filter match' => {
            'pattern' => $filter_pattern,
            'time' => undef
        }
    });

    # If watching for entries matching other patterns, add them to the array
    if ($last_seen_events_input) {

        # If just one element, the input was just a pattern, so add it with a
        # generic label
        if (scalar @$last_seen_events_input < 2) {
            push(@last_seen_events, {
                'watched event' => {
                    'pattern' => @$last_seen_events_input[0],
                    'time' => undef
                }
            });
        }
        else {
            # Valid input has an even number of elements (label, pattern)
            if (scalar @$last_seen_events_input % 2) {
                die "Invalid 'last seen' input!\n";
            }
            # Convert event input into array of name/value pairs
            my %h = @$last_seen_events_input;

            # Load each event into the array (keys() resets the iterator)
            keys %h;
            while (my ($label, $pattern) = each %h) {
                push(@last_seen_events, {
                    $label => {
                        'pattern' => $pattern,
                        'time' => undef
                    }
                });
            }
        }
    }

    # Throw error if required input not present
    if (!$filename || !$entry_start_pattern) {
        die "Missing input!\n";
    }

    #
    # Prepare variables
    #

    # How often the date should be parsed out and converted to epoch time in
    # order to determine if we've read back to the cutoff date yet. Doing
    # this periodically (rather than per entry) speeds up read times.
    my $check_date_every_x_entries = 100;

    my @entries;

    my $fh;
    my $final_seek = 0;
    my $lower_datecheck_offset = 0;
    my $pointer_position_after_seek;

    my $start_epoch_time;
    my $tail_of_chopped_entry = '';
    my $this_chunk = '';
    my $timestamp_pattern;

    my $todays_epoch_time = time;
    if ($days) {
        $start_epoch_time = $todays_epoch_time - $days * 24 * 60 * 60;
    }

    # Ensure sign makes sense. Should always be negative when tailing so as
    # to count backwards from the end of the file.
    $bytes = -$bytes if ($bytes > 0);

    # If the line pattern regex contains the '^' start-of-line anchor, remove
    # it (entries are matched after an explicit "\n" below)
    $entry_start_pattern =~ s/^\^//;

    # Extract the timestamp sub-pattern; this doubles as validation that the
    # pattern contains a parenthesized timestamp group
    $timestamp_pattern = $entry_start_pattern;
    $timestamp_pattern =~ s/^.*(\(.*\)).*$/$1/ or die "Couldn't extract date",
        "from pattern. Timestamp should be enclosed in parentheses.\n";

    # Open log file
    open $fh, $filename or die "Could not open $filename: $!\n";

    #
    # The following loop reads chunks of the file backwards from EOF. Each
    # chunk is scanned for entries, which are pushed onto @entries as found.
    #
    # seek FILEHANDLE,POSITION,WHENCE -- values for WHENCE:
    #   0 - SEEK_SET - new position in bytes
    #   1 - SEEK_CUR - current position + POSITION
    #   2 - SEEK_END - EOF + POSITION (negative POSITION backtracks from
    #       end of file)
    #
    seek $fh, $bytes, 2;
    {
        my $buffer;
        my $characters_parsed;
        my $leftmost_match_offset;
        my @offsets;
        my $this_chunk_length;

        # On the first pass, check whether the whole file is shorter than one
        # chunk; if so this is also the final seek
        unless ($pointer_position_after_seek) {
            $pointer_position_after_seek = tell($fh) - $bytes;
        }
        if ($pointer_position_after_seek <= 2 * -$bytes) {
            $final_seek = 1;
        }

        # Read chunk of data of size $bytes (read() returns the number of
        # bytes actually read, which may be fewer near the top of the file)
        $bytes = -(read $fh, $buffer, -$bytes);

        # This shouldn't happen, but no need to continue if nothing to read
        last unless $bytes;

        # Data read plus any leftover partial entry from the previous pass
        $this_chunk = $buffer . $this_chunk . $tail_of_chopped_entry;

        # Artificially prepend a newline to handle the special case that this
        # chunk starts with the very first line in the file, which the
        # /\n(...)/ match below would otherwise miss. The newline is stripped
        # again before text is carried over to the next iteration.
        $this_chunk = "\n" . $this_chunk;

        # Find offsets of all entry starts contained within this chunk
        while ($this_chunk =~ /\n($entry_start_pattern)/g) {
            push @offsets, $-[0];
        }

        # If matches found, extract them and save the leftover for next loop
        if (scalar @offsets) {

            # Iterate in reverse order, extracting entries from this chunk
            # and saving those that match any watched pattern. This is a loop
            # within a loop, but the inner list holds only a handful of
            # patterns at most.
            $this_chunk_length = length($this_chunk);
            $characters_parsed = 0;
            ENTRIES_IN_CHUNK:
            foreach my $offset (reverse @offsets) {

                # Length of the part of the chunk we're looking at now
                my $newline_plus_entry_length =
                    $this_chunk_length - $offset - $characters_parsed;

                # The log entry itself (minus the leading newline)
                my $entry = substr($this_chunk,
                                   $offset + 1,
                                   $newline_plus_entry_length - 1);

                # Account for the characters consumed by this entry
                $characters_parsed = $characters_parsed +
                    $newline_plus_entry_length;

                # The very last entry in the log has a hanging newline, so
                # chomp the first entry extracted overall in case it is also
                # the last entry in the file
                chomp($entry) unless scalar @entries;

                # Remember offset of start of first entry for later use
                $leftmost_match_offset = $offset;

                # Keep the entry if it matches the filter or any watched
                # "last seen" pattern (the latter are needed to compute
                # elapsed times and are separated out later)
                LAST_SEEN_EVENTS:
                foreach my $last_seen_event (@last_seen_events) {
                    my $label = (keys %{$last_seen_event})[0];
                    if ($entry =~ /$last_seen_event->{$label}->{'pattern'}/) {
                        chomp $entry;
                        push(@entries, $entry);
                        last LAST_SEEN_EVENTS;
                    }
                }
            } # END ENTRIES_IN_CHUNK foreach

            # Save the partial entry preceding the first match so it can be
            # completed on the next pass; strip the artificial newline
            $tail_of_chopped_entry = substr(substr($this_chunk, 1),
                                            0,
                                            $leftmost_match_offset);

            # Entries were consumed from the chunk; empty it for next pass
            $this_chunk = '';
        }
        else {
            # If nothing matched, no remaining piece of log needs saving.
            # NOTE(review): $this_chunk is deliberately left intact here so
            # an entry spanning multiple chunks keeps accumulating, but the
            # artificial "\n" prepended above is not stripped in this branch
            # -- confirm whether stray newlines can accumulate inside an
            # oversized entry.
            $tail_of_chopped_entry = '';
        }

        # If only going back to a certain date, periodically check whether
        # we've reached the cutoff; if so, prune entries past it and stop.
        # Also prune on the final pass so the last batch is not forgotten.
        if ($start_epoch_time and scalar @entries) {
            if (!(scalar @entries % $check_date_every_x_entries) or
                $final_seek) {

                # @entries is passed by reference so it can be trimmed in
                # place; a true return means old entries were found and
                # we're done
                last if prune_old_entries(\@entries,
                                          $lower_datecheck_offset,
                                          $start_epoch_time,
                                          $entry_start_pattern);

                # Remember the offset to start on for the next date check
                $lower_datecheck_offset = scalar @entries;
            }
        }

        # Set pointer position after seek
        $pointer_position_after_seek = tell($fh);

        # Seek upwards to grab the next chunk
        seek $fh, $bytes * 2, 1;

        # Loop until the top of the file has been reached
        redo unless $final_seek;
    }

    # Entries were collected newest-first; restore chronological order
    @entries = reverse @entries;

    # NOTE(review): the original code called debug(@entries) followed by an
    # unconditional exit here -- leftover instrumentation that terminated the
    # program before this sub could return. Removed so the parsed entries
    # actually reach the caller.
    return filter_entries(\@entries, \@last_seen_events, $entry_start_pattern);
}

#
# Separate "filter match" entries from watched-event entries. Returns a list
# of hashes, one per filter match:
#   { 'entry' => $text, 'last_seen_events' => [ { $label => $epoch }, ... ] }
# where the listed events are those seen since the previous filter match.
#
sub filter_entries {

    my $entries = shift;
    my $events = shift;
    my $entry_start_pattern = shift;

    my @filtered_entries;
    my @last_seen_events;

    foreach my $entry (@{ $entries }) {
        foreach my $event (@{ $events }) {
            my $label = (keys %{$event})[0];
            my $this_epoch_time;
            my $this_timestamp;
            if ($entry =~ /$event->{$label}->{'pattern'}/) {
                if ($label eq 'filter match') {
                    # Store a *copy* of the accumulated events. The original
                    # code stored \@last_seen_events directly and then
                    # assigned () to that same array, which emptied the list
                    # inside every previously saved entry as well.
                    push(@filtered_entries, {
                        'entry' => $entry,
                        'last_seen_events' => [ @last_seen_events ]
                    });
                    @last_seen_events = ();
                }
                else {
                    # Record the event's timestamp (as epoch time) so elapsed
                    # times can be computed later
                    if ($entry =~ /^$entry_start_pattern/) {
                        $this_timestamp = $1;
                        $this_epoch_time =
                            epoch_time_from_timestamp($this_timestamp);
                    }
                    if ($this_epoch_time) {
                        push(@last_seen_events, {
                            $label => $this_epoch_time
                        });
                    }
                }
            }
        }
    }
    return @filtered_entries;
}

#
# Trim entries older than $start_epoch_time from the tail of the array (the
# array holds newest entries first, so the oldest sit at the end). Scans
# backwards from the end, stopping at $start_offset. Returns 1 if anything
# was pruned (meaning the cutoff date has been reached), else 0.
#
# TODO: it might be more efficient to binary-search for the cutoff instead of
# scanning linearly, at the cost of considerably hairier logic.
#
sub prune_old_entries {
    my $reversed_entries = shift;
    my $start_offset = shift;
    my $start_epoch_time = shift;
    my $pattern = shift;

    my $i = scalar @{ $reversed_entries } - 1;

    for ($i = scalar @{ $reversed_entries } - 1; $i >= $start_offset; $i--) {
        # Other variables specific to this iteration
        my $this_timestamp;
        my $this_epoch_time;

        # Extract timestamp from entry
        if (${ $reversed_entries }[$i] =~ /^$pattern/) {
            $this_timestamp = $1;
        }

        $this_epoch_time = epoch_time_from_timestamp($this_timestamp);

        # Stop scanning once we find an entry new enough to keep
        if ($this_epoch_time >= $start_epoch_time) {
            if (${ $reversed_entries }[$i + 1]) {
                # Everything after position $i is too old: prune it
                splice(@{ $reversed_entries }, $i + 1,
                       scalar @{ $reversed_entries } - ($i + 1));
                return 1;
            }
            return 0;
        }
    }

    # If we haven't returned yet, it means all scanned entries are too old
    splice(@{ $reversed_entries }, $i + 1,
           scalar @{ $reversed_entries } - ($i + 1));
    return 1;
}

#
# Fill in the 'last_seen' slot of trailing entries (those not yet stamped)
# with the current per-label event times.
#
sub populate_last_seen_times {
    my $log_entries = shift;
    my $last_seen_events = shift;

    my @entries = @{$log_entries};
    my @events = @{$last_seen_events};

    return unless scalar @entries and scalar @events;

    my $last_seen_hash = {};

    foreach my $event (@events) {
        my $label = (keys %{$event})[0];
        $last_seen_hash->{$label} = $event->{$label}->{'time'};
    }

    # Walk backwards, stopping at the first entry already stamped
    for (my $i = scalar @entries; $i--; ) {
        last if ($entries[$i]->{'last_seen'});
        $entries[$i]->{'last_seen'} = $last_seen_hash;
    }
}

#
# TIP: It is almost always better to store raw values until they are displayed
# to the user. For example, the time elapsed between logs should be stored
# as an integer value representing the number of seconds between entries.
# When you need to render the time to a human, then format it using
# a subroutine such as the following
#

#
# Convert a number of seconds into a short human-readable duration such as
# "1.5y", "3.2d", "12.0h", "5.0m" or "42.0s", using the largest fitting unit.
#
sub seconds_to_human_readable_time {
    my $secs = shift;
    if ($secs >= 365 * 24 * 60 * 60) {
        return sprintf '%.1fy', $secs / (365 * 24 * 60 * 60);
    }
    elsif ($secs >= 24 * 60 * 60) {
        return sprintf '%.1fd', $secs / (24 * 60 * 60);
    }
    elsif ($secs >= 60 * 60) {
        return sprintf '%.1fh', $secs / (60 * 60);
    }
    elsif ($secs >= 60) {
        return sprintf '%.1fm', $secs / 60;
    }
    else {
        return sprintf '%.1fs', $secs;
    }
}

# Dump arguments with Data::Dumper when the global $DEBUG flag is set.
sub debug {
    print Dumper(@_) if $DEBUG;
}
--------------------------------------------------------------------------------
/download_file_from_cpanel_user_homedir:
--------------------------------------------------------------------------------
#!/usr/bin/perl

use strict;
use warnings;

# Term::ReadKey is optional; without it the password prompt simply echoes.
eval { local $SIG{__DIE__}; local $SIG{__WARN__}; require Term::ReadKey; };

our $VERSION = '1.1';

print "cPanel File Downloader v$VERSION.\n";
print "Please place the file in ~USER/ on the server\n";
print "Example: Put test.txt in /home/cptkt3806761_nick/test.txt\n";
print "** Make sure that cptkt3806761_nick can read the file as no error will be given **\n";
print "\n";

# Prompt until we get something that at least looks like a hostname or IP
my $host;
while ( !length $host || $host !~ m/\./ ) {
    print "Enter hostname or ip of server? ";
    $host = <>;
    chomp($host);
}

my $user;
while ( !length $user ) {
    print "Enter username? ";
    $user = <>;
    chomp($user);
}

# Read the password with terminal echo disabled when Term::ReadKey is loaded
my $pass;
while ( !length $pass ) {
    print "Enter password? ";
    Term::ReadKey::ReadMode('noecho') if $INC{'Term/ReadKey.pm'};
    $pass = <>;
    chomp($pass);
    Term::ReadKey::ReadMode('normal') if $INC{'Term/ReadKey.pm'};
    print "\n";
}

my $file;
while ( !length $file ) {
    print "Enter filename? ";
    $file = <>;
    chomp($file);
}

# Build the command once and log a copy with the password masked, so the real
# credential never reaches the terminal. (The original code spelled the
# argument list out twice, which invites the two copies drifting apart.)
my @cmd = ( 'curl', '-k', '-u', "$user:$pass", '-o', $file, "https://$host:2083/download/$file" );
my @display_cmd = @cmd;
$display_cmd[3] = "$user:XXXXXXX";

print "Running @display_cmd\n";

system(@cmd);
--------------------------------------------------------------------------------
/ea-precheck.sh:
--------------------------------------------------------------------------------
#!/bin/sh
# EasyApache preflight check: print the current Apache/PHP configuration and
# back up php.ini and httpd.conf before an EasyApache run.

PHPINI='/usr/local/lib/php.ini'
HTTPDCONF='/usr/local/apache/conf/httpd.conf'

clear

echo -------------------------
echo 'Apache version'
echo -------------------------
/usr/local/apache/bin/httpd -v
echo ; echo
echo -------------------------
echo 'PHP version'
echo -------------------------
php -v
echo ; echo
echo -------------------------
echo 'PHP configuration'
echo -------------------------
/usr/local/cpanel/bin/rebuild_phpconf --current
echo ; echo
echo -------------------------
echo 'Apache modules'
echo -------------------------
/usr/local/apache/bin/httpd -l
echo ; echo
echo -------------------------
echo 'PHP modules'
echo -------------------------
php -m
echo ; echo


echo -------------------------
echo Backing up $PHPINI
echo -------------------------
if [ -e "$PHPINI" ] ; then
    # Timestamped copy, preserving ownership and permissions
    cp -p "$PHPINI" "$PHPINI.backup.cpanel.$(date +%s)"
    ls -l "$PHPINI" "$PHPINI".backup.cpanel.*
fi

echo ; echo

echo -------------------------
echo Backing up $HTTPDCONF
echo -------------------------
if [ -e "$HTTPDCONF" ] ; then
    cp -p "$HTTPDCONF" "$HTTPDCONF.backup.cpanel.$(date +%s)"
    ls -l "$HTTPDCONF" "$HTTPDCONF".backup.cpanel.*
fi
--------------------------------------------------------------------------------
/ns_record_report.sh:
--------------------------------------------------------------------------------
#!/bin/sh
# Report every nameserver named in NS records under /var/named, the IP it
# resolves to, and how many zones reference it.

# Print column headers
tput bold
printf "%-24s %-16s %6s\n" "Nameserver" "ResolvedIP" "Zones"
tput sgr0

# Find all unique NS records in all zones and loop through their names
for nameserver in $(grep -h '\bNS\b' /var/named/*.db | awk '{print $NF}' | sed 's/\.$//' | sort -u); do

    # Do an A lookup on nameserver name
    resolved_ip=$(dig A "$nameserver." +short | xargs echo -n)
    # NOTE(review): this script runs under /bin/sh but originally used the
    # bash-only [[ ]] test, which fails on dash; replaced with POSIX [ ].
    if [ -z "$resolved_ip" ]; then
        resolved_ip='no IP found'
    fi

    # Count appearances in zones
    zones=$(grep "\bNS.*$nameserver" /var/named/*.db | wc -l)

    # Print row
    printf "%-24s %-16s %6s\n" "$nameserver" "$resolved_ip" "$zones"
done
--------------------------------------------------------------------------------
/spam_check.sh:
--------------------------------------------------------------------------------
#!/bin/bash
# This script creates a few summaries from the mail queue to help you decide if
# the server is sending spam or not.
#
# Description:
# http://staffwiki.cpanel.net/LinuxSupport/EximSpamOneLiners
# for a summary of the code, the main code block is at the bottom
#
# To run:
# curl -s --insecure https://raw.githubusercontent.com/cPanelTechs/TechScripts/master/spam_check.sh > spam_check.sh; sh spam_check.sh
#
#todo: check that there's some mail in the queue vs printing empty

# Print a debug message when the hard-coded switch below is set to "on".
function debug() {
    debug="off"
    if [ "$debug" = "on" ]; then
        # Quoted so multi-word messages keep their exact spacing
        echo "$1"
    fi
}
# example:
# debug "variable_name is ${variable_name}"

use_current=0
backup_current=0
remove_current=0
temp_dir=/root

# Ask the user where to store the temporary 'exim -bp' output file.
function get_temp_file_dir () {
    clear;
    read -p "
Choose a directory to store the temporary file cptemp_eximbp. This will store the output of exim -bp (default /root): " input_dir
    debug "input_dir is ${input_dir}"
    input_dir=${input_dir:-/root}
    debug "input_dir is ${input_dir}"
    # Strip any trailing slash
    temp_dir=$(echo "$input_dir" | sed 's/\/$//')
    debug "temp_dir is ${temp_dir}"
    if [ -e "$temp_dir" ]; then
        if [ -e "$temp_dir/cptemp_eximbp" ]; then
            get_output_decision
        fi
    else
        echo "There was a problem, or that directory does not exist. Please try again."
        get_temp_file_dir
    fi

    echo -e "\nThank you.\nThis file can later be used again to run commands (like 'cat $temp_dir/cptemp_eximbp | exiqsumm').\nThis script will not delete this temp file upon completion."
    debug "temp_dir is ${temp_dir}"
}

# If the temp output file already exists, user must choose (this will go back to get_temp_file_dir when complete)
function get_output_decision () {
    echo
    read -p "Output file ($temp_dir/cptemp_eximbp) already exists. Please enter a number 1-3
1) Run diagnosis on the existing output file
2) Move to backup ($temp_dir/cptemp_eximbp.1), and create a new output file
3) Delete the existing output file, and create a new one (default): " file_choice
    file_choice=${file_choice:-3}
    case $file_choice in
        1) use_current=1
            ;;
        2) backup_current=1
            ;;
        3) remove_current=1
            # \rm bypasses any 'rm -i' shell alias
            \rm -v "$temp_dir/cptemp_eximbp"
            ;;
        *)
            echo -e "\nPlease enter a valid choice: 1 to 3."
            get_output_decision
            ;;
    esac
}

# Capture the queue listing with 'exim -bp' unless reusing an existing file.
function run_eximbp () {
    debug "starting run_eximbp, backup_current is ${backup_current}\n use_current is ${use_current}"
    if [ $use_current -eq 0 ]; then
        echo -e "\nNow, beginning to run the command 'exim -bp'. If this takes an excruciatingly long time, you can cancel (control-c) this script.\n You can then run this script again using the same target directory and existing 'exim -bp' output file (using option 1 of this script).\n Often, all that's needed is 30s worth of gathering the oldest messages in the queue."
        if [ $backup_current -eq 1 ]; then
            echo; mv -v "$temp_dir/cptemp_eximbp" "$temp_dir/cptemp_eximbp.1"
            exim -bp > "$temp_dir/cptemp_eximbp"
            debug "exim -bp >> $temp_dir/cptemp_eximbp"
        else
            exim -bp > "$temp_dir/cptemp_eximbp"
            debug "exim -bp > $temp_dir/cptemp_eximbp"
        fi
    fi
}

#todo: put this in a printf statement, report if domain is local/remote at the end:
# Are they local?
# for i in $doms; do echo -n $i": "; grep $i /etc/localdomains; done

# Summarize the queue and record the five busiest destination domains in $doms.
function exiqsumm_to_get_top_domains () {
    echo -e "\nDomains stopping up the queue:";
    cat "$temp_dir/cptemp_eximbp" | exiqsumm | sort -n | tail -5;

    # Get domains from Exim queue
    doms=$(cat "$temp_dir/cptemp_eximbp" | exiqsumm | sort -n | egrep -v "\-\-\-|TOTAL|Domain" | tail -5 | awk '{print $5}')
}

# Report which of the top domains are hosted locally (or match the hostname).
function check_if_local () {
    echo -e "\nDomains from above that are local:"
    for onedomain in $doms; do
        islocal=$(grep "$onedomain" /etc/localdomains)
        ishostname=$(hostname | grep "$onedomain")
        if [ "$islocal" -o "$ishostname" ]; then
            echo $onedomain;
        fi
    done
}

# Show the most common Subject lines for mail queued to each top domain.
# The embedded awk script decodes UTF-8 base64-encoded subjects.
function get_subjects_of_top_domains () {
    for onedomain_of_five in $doms; do
        dom=$onedomain_of_five;
        echo -e "\n\n Count / Subjects for domain = $onedomain_of_five:";
        for email_id in `cat $temp_dir/cptemp_eximbp | grep -B1 $dom | awk '{print $3}'`; do
            exim -Mvh $email_id | grep Subject;
        done | sort | uniq -c | sort -n | tail;
    done | awk '{
        split($4,encdata,"?");
        command = (" base64 -d -i;echo");
        if ($0~/(UTF|utf)-8\?(B|b)/) {
            printf " "$1" "$2" "$3" ";
            print encdata[4] | command;
            close(command);
        }
        else {print}
    }
    END {printf "\n"}'
}

# Domains sending:
function find_addresses_sending_out () {
    declare -a sendingaddys=($(egrep "<" $temp_dir/cptemp_eximbp | awk '{print $4}' | sort | uniq -c | sort -n | sed 's/<>/bounce_email/g' | tail -4));
    echo -e "\nAddresses sending out: " ${sendingaddys[@]} "\n"| sed 's/ \([0-9]*\) /\n\1 /g'
    bigsender=$(echo ${sendingaddys[@]} | awk '{print $NF}');
    echo -e "So the big sender is:\n"$bigsender
}

# For each top destination domain, show which senders are mailing it.
function find_addresses_sending_to_top_domains () {
    echo;
    for onedomain_of_five in $doms; do
        echo "Mails attempting to be sent to domain [$onedomain_of_five], from:";
        cat $temp_dir/cptemp_eximbp | grep -B1 $onedomain_of_five | egrep -v "\-\-|$onedomain_of_five" | awk '{print $4}' | sort | uniq -c | sort -n | tail -5;
        echo;
    done
}

# Run all functions
get_temp_file_dir
run_eximbp
exiqsumm_to_get_top_domains
check_if_local
get_subjects_of_top_domains
find_addresses_sending_out
find_addresses_sending_to_top_domains
--------------------------------------------------------------------------------
/sslhunter.sh:
--------------------------------------------------------------------------------
#!/bin/bash
# Hunt for the certificate/key that matches a given SSL key/certificate by
# comparing RSA moduli across the supplied (or default) search paths.

if [ "$#" == "0" ]
then
    echo "This program hunts for the matching cert/key in an SSL pair."
    echo "Usage: sslhunter.sh [file.key|file.cert] (search_path1 search_path2 ... search_pathN)"
    exit 1
fi

if [ -f "$1" ]
then
    file="$1"
    shift
fi


# Classify a PEM file as 'rsa' (private key), 'x509' (certificate) or 'other';
# result is left in the global $filetype.
getfiletype () {
    if fgrep -q "BEGIN RSA PRIVATE KEY" "$1"
    then
        filetype=rsa
        return 0
    elif fgrep -q "BEGIN CERTIFICATE" "$1"
    then
        filetype=x509
        return 0
    fi
    filetype=other
    return 1
}

# Set $Modulus by eval'ing openssl's "Modulus=..." output for the given file.
# NOTE(review): eval of command output is fragile in general; it works here
# only because openssl's -modulus output is a single hex assignment.
getmodulus(){
    echo "Scanning $filetype $2"
    eval `openssl $1 -noout -modulus -in "$2"`
}

getfiletype "$file"

getmodulus "$filetype" "$file"

targetmodulus=$Modulus

# Split words on newline only, so paths with spaces survive the loops below
IFS=$'\012'

# Remaining arguments are search directories; skip anything that isn't one
while [ "$1" ]
do

    if [ -d "$1" ]
    then
        searchdirs="$1
$searchdirs"
    else
        echo "$1 is not a valid directory... Skipping."
    fi
    shift

done

if [ ! "$searchdirs" ]
then
    searchdirs="/etc/ssl
/var/cpanel/ssl
/home/*/ssl"
fi


echo "
Searching for matching modulus in the following directories:
$searchdirs
"

files=`find -L $searchdirs -type f -print`

for testfile in $files
do
    getfiletype "$testfile"
    if [ $filetype != "other" ]
    then
        getmodulus $filetype "$testfile"

        if [ "$Modulus" = "$targetmodulus" ]
        then
            echo " Matches!"
            matches="$matches
$testfile type $filetype"
        fi
    fi
done

echo "

"

if [ "$matches" ]
then
    echo "These files have the same modulus as $file: $matches
"
else
    echo "No matching files found in your search path(s)."
fi
--------------------------------------------------------------------------------