├── Capture.JPG
├── README.md
├── allowed-methods.py
└── requirements.txt
--------------------------------------------------------------------------------
/Capture.JPG:
--------------------------------------------------------------------------------
 https://raw.githubusercontent.com/smackerdodi/allowed-methods/0fd3c29d5e4557c3c06bd30c8252953b67f71562/Capture.JPG
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
# allowed-methods
Get the allowed HTTP methods for a list of URLs.
# Description
Takes a list of URLs and reports the allowed HTTP methods (GET, POST, PUT, PATCH, etc.) for each one. It is fast because it is multi-threaded, and the output is coloured for easy reading.
# Install
1. git clone https://github.com/smackerdodi/allowed-methods.git

2. cd allowed-methods

3. pip3 install -r requirements.txt

# Usage

python3 allowed-methods.py url.txt output.txt

url.txt: file containing the list of URLs to enumerate

output.txt: file that will contain each URL and its allowed methods (see the example sketched after this listing)

# Screenshot

![screenshot](https://github.com/smackerdodi/allowed-methods/blob/main/Capture.JPG)
--------------------------------------------------------------------------------
/allowed-methods.py:
--------------------------------------------------------------------------------
import concurrent.futures
import requests
import threading
import sys
import time
import urllib3
from colorama import Fore, Style

# Suppress TLS certificate warnings for hosts with self-signed or invalid certificates.
urllib3.disable_warnings()

inputfile = sys.argv[1]
outputfile = sys.argv[2]
output = open(outputfile, "a")

# Read the URL list, one URL per line.
with open(inputfile, "r") as f:
    inputurl = [line.rstrip() for line in f]

# Each worker thread keeps its own requests.Session so connections can be reused.
threadLocal = threading.local()
# Serialise writes to the shared output file across threads.
write_lock = threading.Lock()

count = len(inputurl)
print("number of urls = " + str(count))

def get_session():
    if not hasattr(threadLocal, "session"):
        threadLocal.session = requests.Session()
    return threadLocal.session

def check_allowed_methods(url):
    try:
        session = get_session()
        # An OPTIONS request asks the server which methods it allows for this URL.
        res = session.options(url, timeout=1, allow_redirects=False)
        if "Allow" in res.headers:
            methods = url + " : " + res.headers["Allow"]
            print(Style.BRIGHT + Fore.WHITE + url + " : " + Fore.YELLOW + res.headers["Allow"])
            with write_lock:
                output.write(methods + "\n")
        else:
            # No Allow header in the response: report the status code instead.
            print(Style.BRIGHT + Fore.WHITE + url + " : " + Fore.RED + str(res.status_code))
    except requests.exceptions.RequestException:
        pass

def iterate_url(url):
    check_allowed_methods(url)

if __name__ == "__main__":
    start_time = time.time()
    with concurrent.futures.ThreadPoolExecutor(max_workers=20) as executor:
        executor.map(iterate_url, inputurl)
    output.close()
    duration = time.time() - start_time
    print("finished in : " + str(duration) + " sec")
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
requests
urllib3
colorama
--------------------------------------------------------------------------------
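
A minimal illustration of the input and output file formats, inferred from the write format in allowed-methods.py (`url + " : " + res.headers["Allow"]`). The hostnames and method lists below are hypothetical; the actual methods depend entirely on each server's `Allow` header.

url.txt:

    https://example.com
    https://example.org

output.txt after a run:

    https://example.com : GET, HEAD, OPTIONS, POST
    https://example.org : GET, HEAD, OPTIONS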