├── .gitignore ├── Cargo.toml ├── README.md └── src ├── main.rs ├── types ├── mod.rs └── types.rs └── utils ├── arguments.rs ├── file.rs └── mod.rs /.gitignore: -------------------------------------------------------------------------------- 1 | # Generated by Cargo 2 | # will have compiled files and executables 3 | debug/ 4 | target/ 5 | 6 | # Remove Cargo.lock from gitignore if creating an executable, leave it for libraries 7 | # More information here https://doc.rust-lang.org/cargo/guide/cargo-toml-vs-cargo-lock.html 8 | Cargo.lock 9 | 10 | # These are backup files generated by rustfmt 11 | **/*.rs.bk 12 | 13 | # MSVC Windows builds of rustc generate these, which store debugging information 14 | *.pdb 15 | 16 | # RustRover 17 | # JetBrains specific template is maintained in a separate JetBrains.gitignore that can 18 | # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore 19 | # and can be added to the global gitignore or merged into this file. For a more nuclear 20 | # option (not recommended) you can uncomment the following to ignore the entire idea folder. 21 | .idea/ -------------------------------------------------------------------------------- /Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "adidns_parser" 3 | version = "0.1.0" 4 | edition = "2021" 5 | 6 | [dependencies] 7 | serde = { version = "1.0.217", features = ["derive"] } 8 | csv = "1.3.1" 9 | 10 | [dependencies.clap] 11 | version = "4.5.28" 12 | features = ["derive"] 13 | 14 | [profile.release] 15 | opt-level = 3 16 | lto = true 17 | panic = "abort" 18 | 19 | [profile.dev] 20 | opt-level = 3 21 | lto = true 22 | panic = "abort" 23 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # 🔭 Bottom-Line Up Front 2 | 3 | `ADIDNS_Parser` is an illustratation of the extensibility of graph databases within common tooling used by security assessment teams. 4 | 5 | Specifically, this generates files that can reconcile ADIDNS dumped data to existing `Computer` objects in `Bloodhound` databases. 6 | 7 | 8 | ## 🤔 Why? 9 | 10 | Fundamentally, professional security assessments are inelastic in duration. 11 | 12 | Compoundingly, they are data synthesis problems more often than purely technical. The method of "winning" is enrichment of data, as quickly as possible, and acting upon it. 13 | 14 | Arbitrary properties add additional contraints to filtering. The more sieves one has, the better their lens for finding what's desired. 15 | 16 | 17 | ## 🚀 Usage 18 | ``` 19 | .\adidns_parser.exe --help 20 | Usage: adidns_parser.exe --dns-data --known-computers 21 | 22 | Options: 23 | -a, --dns-data 24 | The formatted Active Directory Integrated DNS data file, with format: HOSTNAME IP_ADDRESS per newline 25 | -c, --known-computers 26 | The known computers file, with format: HOSTNAME per newline 27 | -h, --help 28 | Print help 29 | -V, --version 30 | Print version 31 | ``` 32 | 33 | ## 🌱 Outputs 34 | 35 | A single CSV file is generated upon execution: `resolved_records.csv` 36 | 37 | 38 | ## 👷 Building 39 | Assuming you have rust installed: `cargo build --release` 40 | 41 | 42 | ## 🐇 Anticipated Throughput 43 | 44 | ```powershell 45 | GCI *.txt | % { Get-Content $_ | Measure-Object | Select Count }; .\adidns_parser.exe --dns-data .\adidns_records.txt --known-computers .\full_systems.txt 46 | 47 | Count 48 | ----- 49 | 216772 50 | 135110 51 | 52 | Found 83120 matching records in 96.727ms 53 | ``` 54 | 55 | ## Quick Importation of Data 56 | To be very brrr, one should do a few things: 57 | - Enable `APOC` for your `Neo4j` database 58 | - Ensure a the browser interface is enabled 59 | 60 | 61 | If you'd prefer to do so via `Docker`, the following will accomplish both: 62 | ```yaml 63 | --- 64 | services: 65 | graph-db: 66 | image: docker.io/library/neo4j:4.4 67 | environment: 68 | NEO4J_AUTH: "neo4j/CHANGE_ME" 69 | NEO4JLABS_PLUGINS: '["apoc"]' 70 | NEO4J_dbms_allow__upgrade: true 71 | NEO4J_dbms_security_procedures_unrestricted: "apoc.*" 72 | NEO4J_dbms_security_procedures_allowlist: "apoc.*" 73 | NEO4J_apoc_import_file_enabled: "true" 74 | NEO4J_apoc_import_file_use__neo4j__config: "true" 75 | ports: 76 | - 7474:7474 77 | - 7687:7687 78 | volumes: 79 | - ./neo4j:/data 80 | ``` 81 | 82 | Execute via: `docker-compose up -d` 83 | 84 | Upload your `resolved_records.csv` file to the `/var/lib/neo4j/import` directory: `docker cp resolved_records.csv CONTAINER_NAME /var/lib/neo4j/import/` 85 | 86 | Within the browser interface (default of `http://localhost:7474/browser`): 87 | ```cypher 88 | CALL apoc.periodic.iterate( 89 | "LOAD CSV WITH HEADERS FROM 'file:///resolved_records.csv' AS Row Return Row", 90 | "MATCH (c:Computer {name: Row.system}) SET c.ipaddress = Row.ipaddress", 91 | {batchSize: 1000, parallel: true} 92 | ); 93 | ``` 94 | 95 | ## Example Filtering 96 | Using `Pythonic` bindings to our database, we can now add additional filters to include/omit entire address ranges. 97 | 98 | No more hand-jamming required! 99 | 100 | ```python 101 | #!/usr/bin/env python3 102 | from neo4j import GraphDatabase 103 | from ipaddress import IPv4Network 104 | 105 | class GraphDB(object): 106 | def __init__(self, ip: str = "", port: int = 0, username:str = "", password: str = "") -> None: 107 | self.driver = GraphDatabase.driver(f'bolt://{ip}:{port}', auth=(username, password)) 108 | self.session = self.driver.session() 109 | self._computers = None 110 | 111 | def close(self): 112 | self.driver.close() 113 | 114 | def getSystemsWithCriteria(self, splat: list = None): 115 | ''' 116 | In this example, we assume the ipaddress is already set. 117 | We will abuse how Cypher interpolates arrays (they are the same in Python's str representation) 118 | ''' 119 | query = f''' 120 | MATCH (c:Computer) 121 | WHERE NOT c.ipaddress IS NULL 122 | AND c.ipaddress in {str(splat)} 123 | RETURN c as Computer 124 | ''' 125 | return list(self.session.run(query=query)) 126 | 127 | 128 | def main(): 129 | neo4jdb = GraphDB(ip='localhost', port=0, username='neo4j', password='REPLACE_ME') 130 | candidateRange = [str(host) for host in IPv4Network(address='192.168.1.0/24')] 131 | 132 | if (data := neo4jdb.getSystemsWithCriteria(candidateRange)): 133 | for system in data: 134 | print(system['Computer']['name'], system['Computer']['ipaddress']) 135 | 136 | 137 | if __name__ == '__main__': 138 | main() 139 | ``` -------------------------------------------------------------------------------- /src/main.rs: -------------------------------------------------------------------------------- 1 | mod types; 2 | mod utils; 3 | 4 | use crate::types::types::SystemKeyValues; 5 | use crate::utils::file::read_lines; 6 | use clap::Parser; 7 | 8 | fn main() -> Result<(), Box> { 9 | let arguments = utils::arguments::DNSStruct::parse(); 10 | 11 | let mut system_keyvalues = SystemKeyValues::new(); 12 | 13 | system_keyvalues 14 | .ingest_known_computer_data(read_lines(arguments.known_computers)) 15 | .sort_all_pairs(); 16 | 17 | system_keyvalues.compare_dns_information(read_lines(arguments.dns_data)) 18 | } 19 | -------------------------------------------------------------------------------- /src/types/mod.rs: -------------------------------------------------------------------------------- 1 | pub mod types; 2 | -------------------------------------------------------------------------------- /src/types/types.rs: -------------------------------------------------------------------------------- 1 | use csv::Writer; 2 | use serde::ser::SerializeStruct; 3 | use serde::{Serialize, Serializer}; 4 | use std::collections::HashMap; 5 | use std::error::Error; 6 | use std::fs::File; 7 | use std::io; 8 | use std::time::Instant; 9 | 10 | #[derive(Debug)] 11 | struct SystemRecord { 12 | system: String, 13 | ipaddress: String, 14 | } 15 | 16 | impl Serialize for SystemRecord { 17 | fn serialize(&self, serializer: S) -> Result 18 | where 19 | S: Serializer, 20 | { 21 | let mut state = serializer.serialize_struct("CurrentRecord", 2)?; 22 | state.serialize_field("system", &self.system)?; 23 | state.serialize_field("ipaddress", &self.ipaddress)?; 24 | state.end() 25 | } 26 | } 27 | 28 | #[derive(Debug)] 29 | pub struct SystemKeyValues { 30 | known_computers: HashMap>, 31 | system_metadata: Writer, 32 | } 33 | 34 | impl<'a> SystemKeyValues { 35 | pub fn new() -> SystemKeyValues { 36 | Self { 37 | known_computers: HashMap::>::new(), 38 | system_metadata: Writer::from_path("resolved_records.csv").unwrap(), 39 | } 40 | } 41 | 42 | pub fn sort_all_pairs(&mut self) -> &SystemKeyValues { 43 | for pair in self.known_computers.values_mut() { 44 | pair.sort_unstable(); 45 | } 46 | 47 | self 48 | } 49 | 50 | pub fn ingest_known_computer_data( 51 | &mut self, 52 | lines: io::Result>>, 53 | ) -> &mut SystemKeyValues { 54 | if let Ok(data) = lines { 55 | for line in data { 56 | if let Ok(line) = line { 57 | let prefix = line 58 | .clone() 59 | .chars() 60 | .nth(0) 61 | .unwrap() 62 | .to_ascii_uppercase() 63 | .to_string(); 64 | 65 | let system_name = line; 66 | 67 | let _ = self.known_computers.entry(prefix.clone()).or_insert(vec![]); 68 | let _ = self 69 | .known_computers 70 | .entry(prefix.clone()) 71 | .and_modify(|e| e.push(system_name)); 72 | } 73 | } 74 | } 75 | self 76 | } 77 | 78 | pub fn compare_dns_information( 79 | &mut self, 80 | lines: io::Result>>, 81 | ) -> Result<(), Box> /*-> &SystemKeyValues*/ { 82 | let start = Instant::now(); 83 | 84 | let mut count = 0; 85 | 86 | if let Ok(data) = lines { 87 | for line in data { 88 | if let Ok(line) = line { 89 | let parsed_line: Vec = 90 | line.split_whitespace().map(|s| s.to_string()).collect(); 91 | 92 | match parsed_line.len() { 93 | 2 => { 94 | let prefix = parsed_line[0] 95 | .chars() 96 | .nth(0) 97 | .unwrap() 98 | .to_ascii_uppercase() 99 | .to_string(); 100 | 101 | if self.known_computers.contains_key(&prefix) { 102 | match self.known_computers.get(&prefix) { 103 | Some(computers) => { 104 | if computers.binary_search(&parsed_line[0]).is_ok() { 105 | self.system_metadata.serialize(SystemRecord { 106 | system: parsed_line[0] 107 | .clone() 108 | .to_uppercase() 109 | .to_string(), 110 | ipaddress: parsed_line[1].to_string(), 111 | })?; 112 | 113 | count += 1; 114 | } 115 | } 116 | None => {} 117 | } 118 | } 119 | } 120 | _ => {} 121 | } 122 | } 123 | } 124 | } 125 | 126 | println!("Found {} matching records in {:?}", count, start.elapsed()); 127 | 128 | self.system_metadata.flush()?; 129 | 130 | Ok(()) 131 | } 132 | } 133 | -------------------------------------------------------------------------------- /src/utils/arguments.rs: -------------------------------------------------------------------------------- 1 | use clap::Parser; 2 | 3 | #[derive(Parser)] 4 | #[clap(name="ADIDNS_Comparator", author="EspressoCake", version="0.1.0", long_about = None)] 5 | pub struct DNSStruct { 6 | #[clap( 7 | long, 8 | short = 'a', 9 | help = "The formatted Active Directory Integrated DNS data file, with format: HOSTNAME IP_ADDRESS per newline", 10 | required = true 11 | )] 12 | pub dns_data: String, 13 | 14 | #[clap( 15 | long, 16 | short = 'c', 17 | help = "The known computers file, with format: HOSTNAME per newline", 18 | required = true 19 | )] 20 | pub known_computers: String, 21 | } 22 | -------------------------------------------------------------------------------- /src/utils/file.rs: -------------------------------------------------------------------------------- 1 | use std::fs::File; 2 | use std::io::{self, BufRead}; 3 | use std::path::Path; 4 | 5 | // Helper functions 6 | pub fn read_lines

(filename: P) -> io::Result>> 7 | where 8 | P: AsRef, 9 | { 10 | let file = File::open(filename)?; 11 | 12 | Ok(io::BufReader::new(file).lines()) 13 | } 14 | -------------------------------------------------------------------------------- /src/utils/mod.rs: -------------------------------------------------------------------------------- 1 | pub mod arguments; 2 | pub mod file; 3 | --------------------------------------------------------------------------------