├── .dockerignore ├── .gitignore ├── Cargo.toml ├── Dockerfile ├── LICENSE ├── README.md ├── docker-compose.debug.yml ├── docker-compose.yml ├── install.sh └── src └── main.rs /.dockerignore: -------------------------------------------------------------------------------- 1 | **/.classpath 2 | **/.dockerignore 3 | **/.env 4 | **/.git 5 | **/.gitignore 6 | **/.project 7 | **/.settings 8 | **/.toolstarget 9 | **/.vs 10 | **/.vscode 11 | **/*.*proj.user 12 | **/*.dbmdl 13 | **/*.jfm 14 | **/bin 15 | **/charts 16 | **/docker-compose* 17 | **/compose* 18 | **/Dockerfile* 19 | **/node_modules 20 | **/npm-debug.log 21 | **/obj 22 | **/secrets.dev.yaml 23 | **/values.dev.yaml 24 | LICENSE 25 | README.md 26 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Generated by Cargo 2 | # will have compiled files and executables 3 | debug/ 4 | target/ 5 | 6 | # Remove Cargo.lock from gitignore if creating an executable, leave it for libraries 7 | # More information here https://doc.rust-lang.org/cargo/guide/cargo-toml-vs-cargo-lock.html 8 | Cargo.lock 9 | 10 | # These are backup files generated by rustfmt 11 | **/*.rs.bk 12 | 13 | # MSVC Windows builds of rustc generate these, which store debugging information 14 | *.pdb 15 | -------------------------------------------------------------------------------- /Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "hrekt" 3 | version = "0.1.6" 4 | edition = "2021" 5 | authors = ["zoid", ""] 6 | description = "A really fast http prober." 7 | license = "MIT" 8 | repository = "https://github.com/ethicalhackingplayground/hrekt" 9 | 10 | # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html 11 | 12 | [dependencies] 13 | governor = "0.5.1" 14 | colored = { version = "2.0.0" } 15 | futures = "0.3.26" 16 | futures-util = "0.3.26" 17 | openssl = { version = "0.10.45", features = ["vendored"] } 18 | tokio = { version = "1.28.2", features = ["full"] } 19 | spmc = "0.3.0" 20 | clap = "4.3.3" 21 | regex = "1.7.3" 22 | tokio-dns-unofficial = "0.4.0" 23 | async-std = "1.12.0" 24 | port-selector = "0.1.6" 25 | reqwest = { version = "0.11.14", features = ["native-tls", "blocking"] } 26 | wappalyzer = { git = "https://github.com/ethicalhackingplayground/wappalyzer", rev = "a2c1a8fc" } 27 | headless_chrome = {git = "https://github.com/ethicalhackingplayground/rust-headless-chrome", rev = "3c7488b5", features = ["fetch"]} 28 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | FROM rust:latest 2 | 3 | WORKDIR /usr/src/myapp 4 | COPY . . 5 | 6 | RUN cargo install --path . 
7 | 8 | CMD ["hrekt"] -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2023 zoidsec 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 |

hrekt 2 | 3 | 4 | 5 | A really fast http prober. 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | --- 17 | 18 | 19 | Install • 20 | Usage • 21 | Examples • 22 | FYI • 23 | Contributing • 24 | License • 25 | Join Discord 26 |
27 | 28 | --- 29 | 30 | ## Installation 31 | 32 | #### Fresh Installs 33 | ```bash 34 | cargo build -r 35 | mv target/release/hrekt /bin/hrekt 36 | ``` 37 | 38 | #### Already installed 39 | ```bash 40 | cargo build -r 41 | mv target/release/hrekt /<username>/.cargo/bin/ 42 | ``` 43 | 44 | 45 | Make sure to replace `<username>` with your username. 46 | 47 | or 48 | 49 | #### Installer 50 | ```bash 51 | chmod +x install.sh ; ./install.sh 52 | ``` 53 | 54 | ## For Windows Users 55 | 56 | If you want to install hrekt on Windows, you need to install `perl` and `rust`. 57 | 58 | #### Dependencies 59 | - Perl: [https://strawberryperl.com/](https://strawberryperl.com/) 60 | - Rust: [https://www.rust-lang.org/learn/get-started](https://www.rust-lang.org/learn/get-started) 61 | 62 | Then you should be able to run 63 | 64 | ```bash 65 | cargo install --path . 66 | ``` 67 | 68 | which will then build and compile the binary to `target/release/hrekt.exe`. 69 | 70 | Windows builds can only be compiled locally right now. 71 | 72 | 73 | ## Usage 74 | 75 | ```bash 76 | Usage: hrekt [OPTIONS] 77 | 78 | Options: 79 | -r, --rate <rate> 80 | Maximum in-flight requests per second 81 | 82 | [default: 1000] 83 | 84 | -c, --concurrency <concurrency> 85 | The amount of concurrent requests 86 | 87 | [default: 100] 88 | 89 | -t, --timeout <timeout> 90 | The delay between each request 91 | 92 | [default: 3] 93 | 94 | -w, --workers <workers> 95 | The amount of workers 96 | 97 | [default: 1] 98 | 99 | -p, --ports <ports> 100 | the ports to probe default ports are (80,443) 101 | 102 | [default: 80,443] 103 | 104 | --title 105 | display the page titles 106 | 107 | --tech-detect 108 | display the technology used 109 | 110 | --status-code 111 | display the status-codes 112 | 113 | --server 114 | displays the server 115 | 116 | --content-type 117 | displays the content type 118 | 119 | --content-length 120 | displays the content length 121 | 122 | -x, --path <path> 123 | probe the specified path 124 | 125 | [default: ] 126 | 127 | --body-regex <body-regex> 128 | regex to be used to match a specific pattern in the response 129 | 130 | [default: ] 131 | 132 | --header-regex <header-regex> 133 | regex to be used to match a specific pattern in the header 134 | 135 | [default: ] 136 | 137 | -l, --follow-redirects 138 | follow http redirects 139 | 140 | -q, --silent 141 | suppress output 142 | 143 | -h, --help 144 | Print help (see a summary with '-h') 145 | 146 | -V, --version 147 | Print version 148 | ``` 149 | 150 | --- 151 | 152 | 153 | ## Demonstration 154 | 155 | [![asciicast](https://asciinema.org/a/XekxthZdgxO5luq7wTDvOxamq.svg)](https://asciinema.org/a/XekxthZdgxO5luq7wTDvOxamq) 156 | 157 | --- 158 | 159 | ## Examples 160 | 161 | #### Display titles 162 | 163 | ```bash 164 | cat subs.txt | hrekt --title 165 | ``` 166 | 167 | #### Probe ports 168 | 169 | ```bash 170 | cat subs.txt | hrekt --ports 443,80,9200 171 | ``` 172 | 173 | #### Display technologies 174 | 175 | ```bash 176 | cat subs.txt | hrekt --tech-detect 177 | ``` 178 | 179 | #### Probe the response body 180 | 181 | ```bash 182 | cat subs.txt | hrekt --body-regex 'href="\/content\/dam.*' 183 | ``` 184 | 185 | #### Probe the headers 186 | 187 | ```bash 188 | cat subs.txt | hrekt --header-regex 'Server:.*' 189 | ``` 190 | 191 | #### Probe the path 192 | 193 | ```bash 194 | cat subs.txt | hrekt --path /v1/api 195 | ``` 196 | 197 | #### Multiple Flags 198 | 199 | ```bash 200 | cat subs.txt | hrekt --path /etc.clientlibs --tech-detect --title --body-regex 'href="\/content\/dam.*' 201 | ``` 202 | 203 | ## FYI 204 | It's advisable to only use tech detection when needed, as it tends to slow discovery because detection is Chromium-based.
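For example, one possible workflow (the file names below are just placeholders) is to do a cheap first pass over the full list and save Chromium-backed detection for a smaller, hand-picked shortlist:

```bash
# Cheap first pass over the full list: no Chromium involved
cat subs.txt | hrekt --title --status-code > probe-results.txt

# Slower, Chromium-backed technology detection only on a short list of interesting hosts
cat shortlist.txt | hrekt --tech-detect --title
```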
205 | 206 | --- 207 | 208 | If you find any cool bugs, some appreciation would be nice, such as a shout-out on Twitter, buying me a coffee, or donating to my PayPal. 209 | 210 | [![BuyMeACoffee](https://img.shields.io/badge/Buy%20Me%20a%20Coffee-ffdd00?style=for-the-badge&logo=buy-me-a-coffee&logoColor=black)](https://www.buymeacoffee.com/SBhULWm) [![PayPal](https://img.shields.io/badge/PayPal-00457C?style=for-the-badge&logo=paypal&logoColor=white)](https://www.paypal.com/paypalme/cyberlixpty) 211 | 212 | I hope you enjoy it. 213 | 214 | ## Contributing 215 | 216 | Pull requests are welcome. For major changes, please open an issue first 217 | to discuss what you would like to change. 218 | 219 | Please make sure to update tests as appropriate. 220 | 221 | 222 | ## License 223 | 224 | Hrekt is distributed under the [MIT License](https://github.com/ethicalhackingplayground/hrekt/blob/main/LICENSE). 225 | -------------------------------------------------------------------------------- /docker-compose.debug.yml: -------------------------------------------------------------------------------- 1 | version: '3.4' 2 | 3 | services: 4 | rekt: 5 | image: rekt 6 | build: 7 | context: . 8 | dockerfile: ./Dockerfile 9 | -------------------------------------------------------------------------------- /docker-compose.yml: -------------------------------------------------------------------------------- 1 | version: '3.4' 2 | 3 | services: 4 | rekt: 5 | image: rekt 6 | build: 7 | context: . 8 | dockerfile: ./Dockerfile 9 | -------------------------------------------------------------------------------- /install.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Colours 4 | RED="31" 5 | GREEN="32" 6 | BOLDGREEN="\e[1;${GREEN}m" 7 | BOLDRED="\e[1;${RED}m" 8 | ENDCOLOR="\e[0m" 9 | 10 | # Installing rust 11 | rust=$(which cargo) 12 | if [ -z "$rust" ]; then 13 | echo -e "[$BOLDRED!$ENDCOLOR] Rust is not installed" 14 | sleep 1 15 | echo -e "[$BOLDGREEN+$ENDCOLOR] Installing rust..." 16 | curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh 17 | else 18 | echo -e "[$BOLDGREEN+$ENDCOLOR] Rust is installed" 19 | fi 20 | 21 | # Install the binary 22 | echo -e "[$BOLDGREEN+$ENDCOLOR] Setting up everything as $(whoami) user..." 23 | sleep 1 24 | # Build the binary in the target/release directory 25 | echo -e "[$BOLDGREEN+$ENDCOLOR] Compiling the binary..." 26 | cargo build -r 27 | # Copy the binary to /bin and chmod it with the appropriate permissions 28 | echo -e "[$BOLDGREEN+$ENDCOLOR] Copying the binary to /bin" 29 | sudo cp target/release/hrekt /bin/hrekt ; sudo chmod +x /bin/hrekt 30 | sleep 1 31 | echo -e "[$BOLDGREEN+$ENDCOLOR] Copying the binary to /usr/bin" 32 | sudo cp target/release/hrekt /usr/bin/hrekt ; sudo chmod +x /usr/bin/hrekt 33 | sleep 1 34 | echo -e "[$BOLDGREEN+$ENDCOLOR] Copying the binary to ~/.cargo/bin/" 35 | sudo cp target/release/hrekt ~/.cargo/bin/hrekt ; sudo chmod +x ~/.cargo/bin/hrekt 36 | sleep 1 37 | # Print end message 38 | sleep 1 39 | echo "hrekt has been successfully built." 40 | echo "Happy hacking..."
41 | 42 | -------------------------------------------------------------------------------- /src/main.rs: -------------------------------------------------------------------------------- 1 | use async_std::io; 2 | use async_std::io::prelude::*; 3 | use clap::{Arg, ArgAction, Command}; 4 | use colored::Colorize; 5 | use futures::{stream::FuturesUnordered, StreamExt}; 6 | use governor::{Quota, RateLimiter}; 7 | use headless_chrome::Browser; 8 | use regex; 9 | use regex::Regex; 10 | use reqwest::redirect; 11 | use std::{error::Error, time::Duration}; 12 | use tokio::{net, runtime::Builder, task}; 13 | use wappalyzer::{self}; 14 | 15 | #[derive(Clone, Debug)] 16 | pub struct Job { 17 | host: Option, 18 | body_regex: Option, 19 | header_regex: Option, 20 | ports: Option, 21 | display_title: Option, 22 | display_tech: Option, 23 | status_codes: Option, 24 | content_length: Option, 25 | content_type: Option, 26 | server: Option, 27 | path: Option, 28 | } 29 | 30 | /** 31 | * Print the ascii banner 32 | */ 33 | fn print_banner() { 34 | const BANNER: &str = r#" 35 | __ __ __ 36 | / /_ ________ / /__/ /_ 37 | / __ \/ ___/ _ \/ //_/ __/ 38 | / / / / / / __/ ,< / /_ 39 | /_/ /_/_/ \___/_/|_|\__/ 40 | 41 | v0.1.6 42 | "#; 43 | eprintln!("{}", BANNER.white()); 44 | } 45 | 46 | /** 47 | * The main entry point 48 | */ 49 | #[tokio::main] 50 | async fn main() -> Result<(), Box> { 51 | // parse the cli arguments 52 | let matches = Command::new("hrekt") 53 | .version("0.1.6") 54 | .author("Blake Jacobs ") 55 | .about("really fast http prober") 56 | .arg( 57 | Arg::new("rate") 58 | .short('r') 59 | .long("rate") 60 | .default_value("1000") 61 | .display_order(1) 62 | .help("Maximum in-flight requests per second"), 63 | ) 64 | .arg( 65 | Arg::new("concurrency") 66 | .short('c') 67 | .long("concurrency") 68 | .default_value("100") 69 | .display_order(2) 70 | .help("The amount of concurrent requests"), 71 | ) 72 | .arg( 73 | Arg::new("timeout") 74 | .short('t') 75 | .long("timeout") 76 | .default_value("3") 77 | .display_order(3) 78 | .help("The delay between each request"), 79 | ) 80 | .arg( 81 | Arg::new("workers") 82 | .short('w') 83 | .long("workers") 84 | .default_value("1") 85 | .display_order(4) 86 | .help("The amount of workers"), 87 | ) 88 | .arg( 89 | Arg::new("ports") 90 | .short('p') 91 | .long("ports") 92 | .default_value("80,443") 93 | .display_order(5) 94 | .help("the ports to probe default ports are (80,443)"), 95 | ) 96 | .arg( 97 | Arg::new("title") 98 | .long("title") 99 | .hide_short_help(true) 100 | .action(ArgAction::SetTrue) 101 | .display_order(6) 102 | .help("display the page titles"), 103 | ) 104 | .arg( 105 | Arg::new("tech-detect") 106 | .long("tech-detect") 107 | .hide_short_help(true) 108 | .action(ArgAction::SetTrue) 109 | .display_order(7) 110 | .help("display the technology used"), 111 | ) 112 | .arg( 113 | Arg::new("status-code") 114 | .long("status-code") 115 | .hide_short_help(true) 116 | .action(ArgAction::SetTrue) 117 | .display_order(8) 118 | .help("display the status-codes"), 119 | ) 120 | .arg( 121 | Arg::new("server") 122 | .long("server") 123 | .action(ArgAction::SetTrue) 124 | .display_order(9) 125 | .help("displays the server"), 126 | ) 127 | .arg( 128 | Arg::new("content-type") 129 | .long("content-type") 130 | .hide_short_help(true) 131 | .action(ArgAction::SetTrue) 132 | .display_order(10) 133 | .help("displays the content type"), 134 | ) 135 | .arg( 136 | Arg::new("content-length") 137 | .long("content-length") 138 | .hide_short_help(true) 139 | 
.action(ArgAction::SetTrue) 140 | .display_order(11) 141 | .help("displays the content length"), 142 | ) 143 | .arg( 144 | Arg::new("path") 145 | .long("path") 146 | .short('x') 147 | .default_value("") 148 | .display_order(12) 149 | .help("probe the specified path"), 150 | ) 151 | .arg( 152 | Arg::new("body-regex") 153 | .long("body-regex") 154 | .hide_short_help(true) 155 | .default_value("") 156 | .display_order(13) 157 | .help("regex to be used to match a specific pattern in the response"), 158 | ) 159 | .arg( 160 | Arg::new("header-regex") 161 | .long("header-regex") 162 | .hide_short_help(true) 163 | .default_value("") 164 | .display_order(14) 165 | .help("regex to be used to match a specific pattern in the header"), 166 | ) 167 | .arg( 168 | Arg::new("follow-redirects") 169 | .short('l') 170 | .long("follow-redirects") 171 | .action(ArgAction::SetTrue) 172 | .display_order(15) 173 | .help("follow http redirects"), 174 | ) 175 | .arg( 176 | Arg::new("silent") 177 | .short('q') 178 | .long("silent") 179 | .action(ArgAction::SetTrue) 180 | .display_order(16) 181 | .help("suppress output"), 182 | ) 183 | .get_matches(); 184 | 185 | let silent = matches.get_flag("silent"); 186 | if !silent { 187 | print_banner(); 188 | } 189 | 190 | let status_codes = matches.get_flag("status-code"); 191 | 192 | let rate = match matches.get_one::("rate").unwrap().parse::() { 193 | Ok(n) => n.parse::().unwrap(), 194 | Err(_) => { 195 | println!("{}", "could not parse rate, using default of 1000"); 196 | 1000 197 | } 198 | }; 199 | 200 | let body_regex = match matches 201 | .get_one::("body-regex") 202 | .unwrap() 203 | .parse::() 204 | { 205 | Ok(body_regex) => body_regex, 206 | Err(_) => "".to_string(), 207 | }; 208 | 209 | let header_regex = match matches 210 | .get_one::("header-regex") 211 | .unwrap() 212 | .parse::() 213 | { 214 | Ok(header_regex) => header_regex, 215 | Err(_) => "".to_string(), 216 | }; 217 | 218 | let ports = match matches 219 | .get_one::("ports") 220 | .unwrap() 221 | .parse::() 222 | { 223 | Ok(ports) => ports, 224 | Err(_) => "".to_string(), 225 | }; 226 | 227 | let path = match matches.get_one::("path").unwrap().parse::() { 228 | Ok(path) => path, 229 | Err(_) => "".to_string(), 230 | }; 231 | 232 | let display_title = matches.get_flag("title"); 233 | let display_tech = matches.get_flag("tech-detect"); 234 | let follow_redirects = matches.get_flag("follow-redirects"); 235 | let content_length = matches.get_flag("content-length"); 236 | let content_type = matches.get_flag("content-type"); 237 | let server = matches.get_flag("server"); 238 | 239 | let concurrency = match matches 240 | .get_one::("concurrency") 241 | .map(|s| s.to_string()) 242 | { 243 | Some(n) => match n.parse::() { 244 | Ok(n) => n, 245 | Err(_) => 100, 246 | }, 247 | None => { 248 | println!("{}", "could not parse concurrency, using default of 100"); 249 | 100 250 | } 251 | }; 252 | 253 | let timeout = match matches.get_one::("timeout").map(|s| s.to_string()) { 254 | Some(timeout) => match timeout.parse::() { 255 | Ok(timeout) => timeout, 256 | Err(_) => 3, 257 | }, 258 | None => 3, 259 | }; 260 | 261 | let w: usize = match matches.get_one::("workers").map(|s| s.to_string()) { 262 | Some(w) => match w.parse::() { 263 | Ok(w) => w, 264 | Err(_) => 1, 265 | }, 266 | None => { 267 | println!("{}", "could not parse workers, using default of 1"); 268 | 1 269 | } 270 | }; 271 | 272 | // collect hosts from stdin 273 | let mut hosts = vec![]; 274 | let stdin = io::BufReader::new(io::stdin()); 275 | let mut lines = 
stdin.lines(); 276 | while let Some(line) = lines.next().await { 277 | let host = match line { 278 | Ok(host) => host, 279 | Err(_) => "".to_string(), 280 | }; 281 | hosts.push(host); 282 | } 283 | 284 | // Set up a worker pool with the number of threads specified from the arguments 285 | let rt = Builder::new_multi_thread() 286 | .enable_all() 287 | .worker_threads(w) 288 | .build() 289 | .unwrap(); 290 | 291 | // job channels 292 | let (job_tx, job_rx) = spmc::channel::(); 293 | rt.spawn(async move { 294 | send_url( 295 | job_tx, 296 | hosts, 297 | body_regex, 298 | header_regex, 299 | ports, 300 | display_title, 301 | display_tech, 302 | status_codes, 303 | content_type, 304 | content_length, 305 | server, 306 | path, 307 | rate, 308 | ) 309 | .await 310 | }); 311 | 312 | // process the jobs 313 | let workers = FuturesUnordered::new(); 314 | 315 | // process the jobs for scanning. 316 | for _ in 0..concurrency { 317 | let jrx = job_rx.clone(); 318 | // initialize the new chromium browser instance 319 | let port = match port_selector::random_free_tcp_port() { 320 | Some(port) => port, 321 | None => { 322 | continue; 323 | } 324 | }; 325 | let browser = wappalyzer::new_browser(port); 326 | let browser_instance = browser.clone(); 327 | workers.push(task::spawn(async move { 328 | // run the detector 329 | run_detector(jrx, follow_redirects, browser_instance, timeout).await 330 | })); 331 | } 332 | let _: Vec<_> = workers.collect().await; 333 | rt.shutdown_background(); 334 | 335 | Ok(()) 336 | } 337 | 338 | /** 339 | * Send the urls to be processed by the workers 340 | */ 341 | async fn send_url( 342 | mut tx: spmc::Sender, 343 | hosts: Vec, 344 | body_regex: String, 345 | header_regex: String, 346 | ports: String, 347 | display_title: bool, 348 | display_tech: bool, 349 | status_codes: bool, 350 | content_type: bool, 351 | content_length: bool, 352 | server: bool, 353 | path: String, 354 | rate: u32, 355 | ) -> Result<(), Box> { 356 | //set rate limit 357 | let lim = RateLimiter::direct(Quota::per_second(std::num::NonZeroU32::new(rate).unwrap())); 358 | 359 | for host in hosts.iter() { 360 | // send the jobs 361 | lim.until_ready().await; 362 | let msg = Job { 363 | host: Some(host.to_string().clone()), 364 | body_regex: Some(body_regex.clone()), 365 | header_regex: Some(header_regex.clone()), 366 | ports: Some(ports.to_string()), 367 | display_title: Some(display_title.clone()), 368 | display_tech: Some(display_tech.clone()), 369 | path: Some(path.clone()), 370 | status_codes: Some(status_codes.clone()), 371 | content_length: Some(content_length.clone()), 372 | content_type: Some(content_type.clone()), 373 | server: Some(server.clone()), 374 | }; 375 | if let Err(err) = tx.send(msg) { 376 | eprintln!("{}", err.to_string()); 377 | } 378 | } 379 | Ok(()) 380 | } 381 | 382 | /** 383 | * Perform the HTTP probing operation. 
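 * Each worker builds its own reqwest client (redirect policy, timeout and relaxed TLS checks come from the CLI flags), then pulls jobs off the spmc channel, probes every host/port combination, and prints whichever fields were requested (title, status code, technologies, content type, content length, server).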
384 | */ 385 | pub async fn run_detector( 386 | rx: spmc::Receiver, 387 | follow_redirects: bool, 388 | browser: Browser, 389 | timeout: usize, 390 | ) { 391 | let mut headers = reqwest::header::HeaderMap::new(); 392 | headers.insert( 393 | reqwest::header::USER_AGENT, 394 | reqwest::header::HeaderValue::from_static( 395 | "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.15; rv:95.0) Gecko/20100101 Firefox/95.0", 396 | ), 397 | ); 398 | 399 | let client; 400 | if follow_redirects { 401 | //no certs 402 | client = reqwest::Client::builder() 403 | .default_headers(headers) 404 | .redirect(redirect::Policy::limited(10)) 405 | .timeout(Duration::from_secs(timeout.try_into().unwrap())) 406 | .danger_accept_invalid_hostnames(true) 407 | .danger_accept_invalid_certs(true) 408 | .build() 409 | .unwrap(); 410 | } else { 411 | //no certs 412 | client = reqwest::Client::builder() 413 | .default_headers(headers) 414 | .redirect(redirect::Policy::none()) 415 | .timeout(Duration::from_secs(timeout.try_into().unwrap())) 416 | .danger_accept_invalid_hostnames(true) 417 | .danger_accept_invalid_certs(true) 418 | .build() 419 | .unwrap(); 420 | } 421 | 422 | while let Ok(job) = rx.recv() { 423 | let job_host: String = job.host.unwrap(); 424 | let job_status_codes = job.status_codes.unwrap(); 425 | let job_content_length = job.content_length.unwrap(); 426 | let job_content_type = job.content_type.unwrap(); 427 | let job_server = job.server.unwrap(); 428 | let job_body_regex = job.body_regex.unwrap(); 429 | let job_header_regex = job.header_regex.unwrap(); 430 | let job_path = job.path.unwrap(); 431 | let job_ports = job.ports.unwrap(); 432 | let job_title = job.display_title.unwrap(); 433 | let job_tech = job.display_tech.unwrap(); 434 | let mut resolved_domains: Vec = vec![String::from("")]; 435 | 436 | // probe for open ports and perform dns resolution 437 | let ports_array = job_ports.split(","); 438 | for (_, port) in ports_array.enumerate() { 439 | let job_host_http = job_host.clone(); 440 | let job_host_https = job_host_http.clone(); 441 | let http_port = port.to_string(); 442 | let https_port = http_port.to_string(); 443 | if port == "80" { 444 | let http = http_resolver(job_host_http, "http://".to_owned(), http_port).await; 445 | resolved_domains.push(http); 446 | } else if port == "443" { 447 | let https = http_resolver(job_host_https, "https://".to_owned(), https_port).await; 448 | resolved_domains.push(https); 449 | } else { 450 | let https = 451 | http_resolver(job_host_https, "https://".to_owned(), https_port.to_owned()) 452 | .await; 453 | resolved_domains.push(https); 454 | 455 | let http = http_resolver(job_host_http, "http://".to_owned(), http_port).await; 456 | resolved_domains.push(http); 457 | } 458 | } 459 | 460 | // Iterate over the resolved IP addresses and send HTTP requests 461 | for domain in &resolved_domains { 462 | let domain_cp = domain.clone(); 463 | if job_path != "" { 464 | let path_url = String::from(format!("{}{}", domain, job_path)); 465 | let url = path_url.clone(); 466 | let mut domain_result_url = String::from(""); 467 | 468 | let path_resp_get = client.get(path_url); 469 | let path_resp_req = match path_resp_get.build() { 470 | Ok(path_resp_req) => path_resp_req, 471 | Err(_) => { 472 | continue; 473 | } 474 | }; 475 | let path_resp = match client.execute(path_resp_req).await { 476 | Ok(path_resp) => path_resp, 477 | Err(_) => { 478 | continue; 479 | } 480 | }; 481 | 482 | // check if a valid path has been found 483 | if path_resp.status().as_u16() != 404 && 
path_resp.status().as_u16() != 400 { 484 | let browser_instance = browser.clone(); 485 | domain_result_url.push_str(&url); 486 | let domain_result = domain_result_url.clone(); 487 | let domain_result_cloned = domain_result.clone(); 488 | let get = client.get(domain_result_url); 489 | let req = match get.build() { 490 | Ok(req) => req, 491 | Err(_) => { 492 | continue; 493 | } 494 | }; 495 | let resp = match client.execute(req).await { 496 | Ok(resp) => resp, 497 | Err(_) => { 498 | continue; 499 | } 500 | }; 501 | 502 | let mut content_length = String::from(""); 503 | 504 | if job_content_length { 505 | let domain_result_2 = domain_result_cloned.clone(); 506 | let get_request = client.get(domain_result_2); 507 | let request = match get_request.build() { 508 | Ok(req) => req, 509 | Err(_) => { 510 | continue; 511 | } 512 | }; 513 | let response = match client.execute(request).await { 514 | Ok(resp) => resp, 515 | Err(_) => { 516 | continue; 517 | } 518 | }; 519 | content_length.push_str("["); 520 | let cl = match response.content_length() { 521 | Some(cl) => cl.to_string(), 522 | None => "".to_string(), 523 | }; 524 | content_length.push_str(&cl); 525 | content_length.push_str("]"); 526 | } 527 | 528 | let mut content_type = String::from(""); 529 | 530 | if job_content_type { 531 | let domain_result_2 = domain_result_cloned.clone(); 532 | let get_request = client.get(domain_result_2); 533 | let request = match get_request.build() { 534 | Ok(req) => req, 535 | Err(_) => { 536 | continue; 537 | } 538 | }; 539 | let response = match client.execute(request).await { 540 | Ok(resp) => resp, 541 | Err(_) => { 542 | continue; 543 | } 544 | }; 545 | let ct = match response.headers().get("Content-Type") { 546 | Some(ct) => match ct.to_str() { 547 | Ok(ct) => ct.to_string(), 548 | Err(_) => continue, 549 | }, 550 | None => "".to_string(), 551 | }; 552 | if !ct.is_empty() { 553 | content_type.push_str("["); 554 | content_type.push_str(&ct); 555 | content_type.push_str("]"); 556 | } 557 | } 558 | 559 | let mut server = String::from(""); 560 | 561 | if job_server { 562 | let domain_result_2 = domain_result_cloned.clone(); 563 | let get_request = client.get(domain_result_2); 564 | let request = match get_request.build() { 565 | Ok(req) => req, 566 | Err(_) => { 567 | continue; 568 | } 569 | }; 570 | let response = match client.execute(request).await { 571 | Ok(resp) => resp, 572 | Err(_) => { 573 | continue; 574 | } 575 | }; 576 | let s = match response.headers().get("Server") { 577 | Some(s) => match s.to_str() { 578 | Ok(s) => s.to_string(), 579 | Err(_) => continue, 580 | }, 581 | None => "".to_string(), 582 | }; 583 | if !server.is_empty() { 584 | server.push_str("["); 585 | server.push_str(&s); 586 | server.push_str("]"); 587 | } 588 | } 589 | 590 | let get_request = client.get(domain_result_cloned); 591 | let request = match get_request.build() { 592 | Ok(req) => req, 593 | Err(_) => { 594 | continue; 595 | } 596 | }; 597 | let response = match client.execute(request).await { 598 | Ok(resp) => resp, 599 | Err(_) => { 600 | continue; 601 | } 602 | }; 603 | 604 | // perform the regex on the headers 605 | if !job_header_regex.is_empty() { 606 | let headers = resp.headers(); 607 | for (k, v) in headers.iter() { 608 | let header_value = match v.to_str() { 609 | Ok(header_value) => header_value, 610 | Err(_) => "", 611 | }; 612 | let header_str = String::from(format!( 613 | "{}:{}", 614 | k.as_str().to_string(), 615 | header_value 616 | )); 617 | let re = match regex::Regex::new(&job_header_regex) { 618 | 
Ok(re) => re, 619 | Err(_) => continue, 620 | }; 621 | if !re.is_match(&header_str) { 622 | continue; 623 | } 624 | } 625 | } 626 | 627 | let body = match resp.text().await { 628 | Ok(body) => body, 629 | Err(_) => { 630 | continue; 631 | } 632 | }; 633 | 634 | // extract the page title 635 | let mut title = String::from(""); 636 | if job_title { 637 | let re = match Regex::new("(.*)") { 638 | Ok(re) => re, 639 | Err(_) => continue, 640 | }; 641 | for cap in re.captures_iter(&body) { 642 | if cap.len() > 0 { 643 | if !cap[1].to_string().is_empty() { 644 | title.push_str("["); 645 | title.push_str(&cap[1].to_string()); 646 | title.push_str("]"); 647 | break; 648 | } 649 | } 650 | } 651 | } 652 | 653 | // perform the regex on the response body 654 | let re = match regex::Regex::new(&job_body_regex) { 655 | Ok(re) => re, 656 | Err(_) => continue, 657 | }; 658 | 659 | let url = match reqwest::Url::parse(&domain_result) { 660 | Ok(url) => url, 661 | Err(_) => continue, 662 | }; 663 | 664 | // extract the technologies 665 | let mut tech_str = String::from(""); 666 | if job_tech { 667 | let tech_analysis = wappalyzer::scan(url, &browser_instance).await; 668 | let tech_result = match tech_analysis.result { 669 | Ok(tech_result) => tech_result, 670 | Err(_) => continue, 671 | }; 672 | let mut tech_name = String::from(""); 673 | for tech in tech_result.iter() { 674 | tech_name.push_str(&tech.name); 675 | tech_name.push_str(","); 676 | } 677 | if !tech_name.is_empty() { 678 | tech_str.push_str("["); 679 | let tech = match tech_name.strip_suffix(",") { 680 | Some(tech) => tech.to_string(), 681 | None => "".to_string(), 682 | }; 683 | tech_str.push_str(&tech.to_string()); 684 | tech_str.push_str("]"); 685 | } 686 | } 687 | 688 | if !job_body_regex.is_empty() { 689 | if !re.is_match(&body) { 690 | continue; 691 | } 692 | } 693 | 694 | let mut status_code = String::from(""); 695 | if job_status_codes { 696 | let sc = response.status().as_u16(); 697 | status_code.push_str("["); 698 | status_code.push_str(&sc.to_string()); 699 | status_code.push_str("]"); 700 | if sc >= 100 && sc < 200 { 701 | // print the final results 702 | println!( 703 | "{} {} {} {} {} {} {}", 704 | domain_result, 705 | title.cyan(), 706 | status_code.white(), 707 | tech_str.white().bold(), 708 | content_type, 709 | content_length, 710 | server 711 | ); 712 | } 713 | if sc >= 200 && sc < 300 { 714 | // print the final results 715 | println!( 716 | "{} {} {} {} {} {} {}", 717 | domain_result, 718 | title.cyan(), 719 | status_code.green(), 720 | tech_str.white().bold(), 721 | content_type, 722 | content_length, 723 | server 724 | ); 725 | } 726 | if sc >= 300 && sc < 400 { 727 | // print the final results 728 | println!( 729 | "{} {} {} {} {} {} {}", 730 | domain_result, 731 | title.cyan(), 732 | status_code.blue(), 733 | tech_str.white().bold(), 734 | content_type, 735 | content_length, 736 | server 737 | ); 738 | } 739 | if sc >= 400 && sc < 500 { 740 | // print the final results 741 | println!( 742 | "{} {} {} {} {} {} {}", 743 | domain_result, 744 | title.cyan(), 745 | status_code.magenta(), 746 | tech_str.white().bold(), 747 | content_type, 748 | content_length, 749 | server 750 | ); 751 | } 752 | if sc >= 500 && sc < 600 { 753 | // print the final results 754 | println!( 755 | "{} {} {} {} {} {} {}", 756 | domain_result, 757 | title.cyan(), 758 | status_code.red(), 759 | tech_str.white().bold(), 760 | content_type, 761 | content_length, 762 | server 763 | ); 764 | } 765 | } else { 766 | // print the final results 767 | println!( 
768 | "{} {} {} {} {} {} {}", 769 | domain_result, 770 | title.cyan(), 771 | status_code.red(), 772 | tech_str.white().bold(), 773 | content_type, 774 | content_length, 775 | server 776 | ); 777 | } 778 | } 779 | } else { 780 | let browser_instance = browser.clone(); 781 | let url = String::from(domain_cp); 782 | let url_cloned = url.clone(); 783 | let domain_result = url.clone(); 784 | let domain_result_cloned = domain_result.clone(); 785 | let get = client.get(url); 786 | let req = match get.build() { 787 | Ok(req) => req, 788 | Err(_) => { 789 | continue; 790 | } 791 | }; 792 | let resp = match client.execute(req).await { 793 | Ok(resp) => resp, 794 | Err(_) => { 795 | continue; 796 | } 797 | }; 798 | 799 | let get_request = client.get(url_cloned); 800 | let request = match get_request.build() { 801 | Ok(req) => req, 802 | Err(_) => { 803 | continue; 804 | } 805 | }; 806 | let response = match client.execute(request).await { 807 | Ok(resp) => resp, 808 | Err(_) => { 809 | continue; 810 | } 811 | }; 812 | 813 | let mut content_length = String::from(""); 814 | 815 | if job_content_length { 816 | let domain_result_cloned_2 = domain_result_cloned.clone(); 817 | let get_request = client.get(domain_result_cloned_2); 818 | let request = match get_request.build() { 819 | Ok(req) => req, 820 | Err(_) => { 821 | continue; 822 | } 823 | }; 824 | let response = match client.execute(request).await { 825 | Ok(resp) => resp, 826 | Err(_) => { 827 | continue; 828 | } 829 | }; 830 | content_length.push_str("["); 831 | let cl = match response.content_length() { 832 | Some(cl) => cl.to_string(), 833 | None => "".to_string(), 834 | }; 835 | content_length.push_str(&cl); 836 | content_length.push_str("]"); 837 | } 838 | 839 | let mut content_type = String::from(""); 840 | 841 | if job_content_type { 842 | let domain_result_cloned_2 = domain_result_cloned.clone(); 843 | let get_request = client.get(domain_result_cloned_2); 844 | let request = match get_request.build() { 845 | Ok(req) => req, 846 | Err(_) => { 847 | continue; 848 | } 849 | }; 850 | let response = match client.execute(request).await { 851 | Ok(resp) => resp, 852 | Err(_) => { 853 | continue; 854 | } 855 | }; 856 | 857 | let ct = match response.headers().get("Content-Type") { 858 | Some(ct) => match ct.to_str() { 859 | Ok(ct) => ct.to_string(), 860 | Err(_) => continue, 861 | }, 862 | None => "".to_string(), 863 | }; 864 | if !ct.is_empty() { 865 | content_type.push_str("["); 866 | content_type.push_str(&ct); 867 | content_type.push_str("]"); 868 | } 869 | } 870 | let mut server = String::from(""); 871 | if job_server { 872 | let domain_result_cloned_2 = domain_result_cloned.clone(); 873 | let get_request = client.get(domain_result_cloned_2); 874 | let request = match get_request.build() { 875 | Ok(req) => req, 876 | Err(_) => { 877 | continue; 878 | } 879 | }; 880 | let response = match client.execute(request).await { 881 | Ok(resp) => resp, 882 | Err(_) => { 883 | continue; 884 | } 885 | }; 886 | let s = match response.headers().get("Server") { 887 | Some(s) => match s.to_str() { 888 | Ok(s) => s.to_string(), 889 | Err(_) => continue, 890 | }, 891 | None => "".to_string(), 892 | }; 893 | if !s.is_empty() { 894 | server.push_str("["); 895 | server.push_str(&s); 896 | server.push_str("]"); 897 | } 898 | } 899 | 900 | if !job_header_regex.is_empty() { 901 | let headers = resp.headers(); 902 | for (k, v) in headers.iter() { 903 | let header_value = match v.to_str() { 904 | Ok(header_value) => header_value, 905 | Err(_) => "", 906 | }; 907 | let 
header_str = 908 | String::from(format!("{}:{}", k.as_str().to_string(), header_value)); 909 | let re = match regex::Regex::new(&job_header_regex) { 910 | Ok(re) => re, 911 | Err(_) => continue, 912 | }; 913 | if !re.is_match(&header_str) { 914 | continue; 915 | } 916 | } 917 | } 918 | 919 | let body = match resp.text().await { 920 | Ok(body) => body, 921 | Err(_) => { 922 | continue; 923 | } 924 | }; 925 | 926 | let mut title = String::from(""); 927 | if job_title { 928 | let re = match Regex::new("(.*)") { 929 | Ok(re) => re, 930 | Err(_) => continue, 931 | }; 932 | for cap in re.captures_iter(&body) { 933 | if cap.len() > 0 { 934 | if !cap[1].to_string().is_empty() { 935 | title.push_str("["); 936 | title.push_str(&cap[1].to_string()); 937 | title.push_str("]"); 938 | break; 939 | } 940 | } 941 | } 942 | } 943 | 944 | let re = match regex::Regex::new(&job_body_regex) { 945 | Ok(re) => re, 946 | Err(_) => continue, 947 | }; 948 | 949 | let url = match reqwest::Url::parse(&domain_result) { 950 | Ok(url) => url, 951 | Err(_) => continue, 952 | }; 953 | 954 | let mut tech_str = String::from(""); 955 | if job_tech { 956 | let tech_analysis = wappalyzer::scan(url, &browser_instance).await; 957 | let tech_result = match tech_analysis.result { 958 | Ok(tech_result) => tech_result, 959 | Err(_) => continue, 960 | }; 961 | let mut tech_name = String::from(""); 962 | for tech in tech_result.iter() { 963 | tech_name.push_str(&tech.name); 964 | tech_name.push_str(","); 965 | } 966 | if !tech_name.is_empty() { 967 | tech_str.push_str("["); 968 | let tech = match tech_name.strip_suffix(",") { 969 | Some(tech) => tech.to_string(), 970 | None => "".to_string(), 971 | }; 972 | tech_str.push_str(&tech.to_string()); 973 | tech_str.push_str("]"); 974 | } 975 | } 976 | 977 | if !job_body_regex.is_empty() { 978 | if !re.is_match(&body) { 979 | continue; 980 | } 981 | } 982 | 983 | let mut status_code = String::from(""); 984 | if job_status_codes { 985 | let sc = response.status().as_u16(); 986 | status_code.push_str("["); 987 | status_code.push_str(&sc.to_string()); 988 | status_code.push_str("]"); 989 | if sc >= 100 && sc < 200 { 990 | // print the final results 991 | println!( 992 | "{} {} {} {} {} {} {}", 993 | domain_result, 994 | title.cyan(), 995 | status_code.white(), 996 | tech_str.white().bold(), 997 | content_type, 998 | content_length, 999 | server 1000 | ); 1001 | } 1002 | if sc >= 200 && sc < 300 { 1003 | // print the final results 1004 | println!( 1005 | "{} {} {} {} {} {} {}", 1006 | domain_result, 1007 | title.cyan(), 1008 | status_code.green(), 1009 | tech_str.white().bold(), 1010 | content_type, 1011 | content_length, 1012 | server 1013 | ); 1014 | } 1015 | if sc >= 300 && sc < 400 { 1016 | // print the final results 1017 | println!( 1018 | "{} {} {} {} {} {} {}", 1019 | domain_result, 1020 | title.cyan(), 1021 | status_code.blue(), 1022 | tech_str.white().bold(), 1023 | content_type, 1024 | content_length, 1025 | server 1026 | ); 1027 | } 1028 | if sc >= 400 && sc < 500 { 1029 | // print the final results 1030 | println!( 1031 | "{} {} {} {} {} {} {}", 1032 | domain_result, 1033 | title.cyan(), 1034 | status_code.magenta(), 1035 | tech_str.white().bold(), 1036 | content_type, 1037 | content_length, 1038 | server 1039 | ); 1040 | } 1041 | if sc >= 500 && sc < 600 { 1042 | // print the final results 1043 | println!( 1044 | "{} {} {} {} {} {} {}", 1045 | domain_result, 1046 | title.cyan(), 1047 | status_code.red(), 1048 | tech_str.white().bold(), 1049 | content_type, 1050 | content_length, 
1051 | server 1052 | ); 1053 | } 1054 | } else { 1055 | // print the final results 1056 | println!( 1057 | "{} {} {} {} {} {} {}", 1058 | domain_result, 1059 | title.cyan(), 1060 | status_code.white(), 1061 | tech_str.white().bold(), 1062 | content_type, 1063 | content_length, 1064 | server 1065 | ); 1066 | } 1067 | } 1068 | } 1069 | } 1070 | } 1071 | 1072 | /** 1073 | * Resolve the subdomains and return the host 1074 | */ 1075 | async fn http_resolver(host: String, schema: String, port: String) -> String { 1076 | let mut host_str = String::from(schema); 1077 | let domain = String::from(format!("{}:{}", host, port)); 1078 | let lookup = match net::lookup_host(domain).await { 1079 | Ok(lookup) => lookup, 1080 | Err(_) => return "".to_string(), 1081 | }; 1082 | 1083 | // Perform DNS resolution to get IP addresses for the hostname 1084 | for addr in lookup { 1085 | if addr.is_ipv4() { 1086 | host_str.push_str(&host); 1087 | host_str.push_str(":"); 1088 | host_str.push_str(&port.to_string()); 1089 | break; 1090 | } 1091 | } 1092 | return host_str; 1093 | } 1094 | --------------------------------------------------------------------------------