├── tests
│   ├── unit
│   │   ├── __init__.py
│   │   └── test_core.py
│   ├── integration
│   │   ├── __init__.py
│   │   ├── test_sample_data.py
│   │   └── test_integration.py
│   ├── test_setup_validation.py
│   ├── conftest.py
│   ├── run_sample_data_tests.py
│   ├── verify_tests.py
│   ├── run_tests.py
│   ├── __init__.py
│   ├── fixtures
│   │   └── test_data.py
│   └── README.md
├── img
│   ├── charts.png
│   ├── DPAT icon.png
│   ├── dpat summary.png
│   └── group statistics.png
├── .vscode
│   ├── settings.json
│   └── launch.json
├── sample_data
│   ├── history
│   │   ├── registry
│   │   │   ├── SYSTEM
│   │   │   └── SECURITY
│   │   ├── Active Directory
│   │   │   └── ntds.dit
│   │   ├── john-customer-small.pot
│   │   └── customer-small.ntds
│   ├── Enterprise Admins PowerView Output.txt
│   ├── Enterprise Admins.txt
│   ├── umlauts
│   │   ├── hashcat.potfile
│   │   ├── john.pot
│   │   └── customer.ntds
│   ├── base
│   │   ├── password history demo example.csv
│   │   ├── add-domain-users-with-history.ps1
│   │   ├── gendata.py
│   │   └── first.txt
│   └── Domain Admins.txt
├── requirements-dev.txt
├── .devcontainer
│   └── devcontainer.json
├── requirements.txt
├── pytest.ini
├── .gitignore
├── LICENSE
├── pyproject.toml
├── report.css
├── README.md
└── poetry.lock

/tests/unit/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/tests/integration/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/img/charts.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/clr2of8/DPAT/HEAD/img/charts.png
--------------------------------------------------------------------------------
/img/DPAT icon.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/clr2of8/DPAT/HEAD/img/DPAT icon.png
--------------------------------------------------------------------------------
/.vscode/settings.json:
--------------------------------------------------------------------------------
1 | {
2 |     "python.pythonPath": "C:\\Python27\\python.exe"
3 | }
--------------------------------------------------------------------------------
/img/dpat summary.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/clr2of8/DPAT/HEAD/img/dpat summary.png
--------------------------------------------------------------------------------
/img/group statistics.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/clr2of8/DPAT/HEAD/img/group statistics.png
--------------------------------------------------------------------------------
/sample_data/history/registry/SYSTEM:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/clr2of8/DPAT/HEAD/sample_data/history/registry/SYSTEM
--------------------------------------------------------------------------------
/sample_data/history/registry/SECURITY:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/clr2of8/DPAT/HEAD/sample_data/history/registry/SECURITY
--------------------------------------------------------------------------------
/sample_data/history/Active Directory/ntds.dit:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/clr2of8/DPAT/HEAD/sample_data/history/Active
Directory/ntds.dit -------------------------------------------------------------------------------- /sample_data/Enterprise Admins PowerView Output.txt: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/clr2of8/DPAT/HEAD/sample_data/Enterprise Admins PowerView Output.txt -------------------------------------------------------------------------------- /requirements-dev.txt: -------------------------------------------------------------------------------- 1 | # Development and Testing Dependencies for DPAT 2 | 3 | # Include base requirements 4 | -r requirements.txt 5 | 6 | # Testing dependencies 7 | pytest>=8.0.0 8 | pytest-cov>=5.0.0 9 | pytest-mock>=3.14.0 10 | -------------------------------------------------------------------------------- /.devcontainer/devcontainer.json: -------------------------------------------------------------------------------- 1 | { 2 | "image": "mcr.microsoft.com/devcontainers/python:3.12-bookworm", 3 | "features": { 4 | "ghcr.io/devcontainers-extra/features/live-server:1": {} 5 | }, 6 | "postCreateCommand": "pip install -r requirements-dev.txt" 7 | } 8 | -------------------------------------------------------------------------------- /sample_data/Enterprise Admins.txt: -------------------------------------------------------------------------------- 1 | child.domain.com\Dayna.Wade-admin 2 | child.domain.com\Dustin.Wahlund-admin 3 | child.domain.com\Earnestine.Waiau-admin 4 | child.domain.com\Emerson.Wala-admin 5 | child.domain.com\Roman.Zurek-admin 6 | child.domain.com\Rosalinda.Zusman-admin 7 | child.domain.com\Sallie.Zych-admin 8 | child.domain.com\Samuel.Zysk-admin 9 | -------------------------------------------------------------------------------- /sample_data/umlauts/hashcat.potfile: -------------------------------------------------------------------------------- 1 | ecd382f6949d712f7f81982242755cc3:$HEX[4df66d2052fc6c657a21] 2 | 0c01c4fd959e29dd499cb0c31dfc6a75:$HEX[314e65706869333a37] 3 | a3ba63b79dab674ad4575ec9f605aebe:$hex[4be4747a6c65] 4 | 0c0009fe6efce0cc20c7aef8f4b54bb4:[nothexN!] 5 | 12ac2f50d6666a408470649b8cf58aa8:Hey Hey Hey! 6 | 4472910b89492aef53ceb6b420b15f52:Fall2019 7 | a8bfc015bdbf6d0cf77396692ca5fc11:DomainAdmin77 8 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | # Domain Password Audit Tool (DPAT) Requirements 2 | # Python 3.8 or higher required 3 | 4 | # Core dependencies 5 | pycryptodome>=3.23.0 6 | 7 | # Optional dependencies for enhanced functionality 8 | # Uncomment any of the following if you need additional capabilities: 9 | # passlib>=1.7.4 # For additional password hashing support 10 | # impacket>=0.11.0 # For additional NT hash cracking support 11 | -------------------------------------------------------------------------------- /sample_data/umlauts/john.pot: -------------------------------------------------------------------------------- 1 | $NT$ecd382f6949d712f7f81982242755cc3:Möm Rülez! 2 | $NT$0c01c4fd959e29dd499cb0c31dfc6a75:1Nephi3:7 3 | $NT$a86f8d1ed0156db9e86d7c586894c196:$HEX[a4cf23] 4 | $NT$aaeb7b36057ea55e2b5bccf0fb3a4992:$HEX[4be4747a6c65] 5 | $NT$a3ba63b79dab674ad4575ec9f605aebe:$hex[4be4747a6c65] 6 | $NT$0c0009fe6efce0cc20c7aef8f4b54bb4:[nothexN!] 7 | $NT$12ac2f50d6666a408470649b8cf58aa8:Hey Hey Hey! 
8 | $NT$4472910b89492aef53ceb6b420b15f52:Fall2019 9 | $NT$a8bfc015bdbf6d0cf77396692ca5fc11:DomainAdmin77 10 | -------------------------------------------------------------------------------- /sample_data/base/password history demo example.csv: -------------------------------------------------------------------------------- 1 | Username,Current Password,History 0,History 1,History 2,History 3,History 4 2 | Larry,Fall2019,Summer2019,Spring2019,Winter2018,Fall2018,Spring2018 3 | Curly,Baseball77,Baseball76,Baseball75,Baseball74,Baseball73,Baseball72 4 | Carrie,PringlesSalt!,!EatPringles,Friday fun!,I luv my kids!,New Job!,Something! 5 | Darin,Black^Hills,Black%Hills,Black$Hills,Black#Hills,Black@Hills,Black!Hills 6 | Mo,Zodiac-leo,Zodiak-Cancer,Zodiac-Gemini,Zodiak-Taurus,Zodiak-Pisces,Zodiak-Aquarius 7 | cjordan,6cjordan-pw,5cjordan-pw,4cjordan-pw,3cjordan-pw,2cjordan-pw,1cjordan-pw 8 | pope,Proverbs 3:5,Philippians 4:6,Romans 8:28,Philippians 4:13,Jeremiah 29:11,John 3:16 9 | -------------------------------------------------------------------------------- /sample_data/base/add-domain-users-with-history.ps1: -------------------------------------------------------------------------------- 1 | Import-Module ActiveDirectory 2 | $csv = Import-Csv -Path '.\password history demo example.csv' 3 | 4 | foreach ($user_info in $csv){ 5 | $Name = $user_info.Username 6 | $Passwords = @($user_info.'History 3',$user_info.'History 2',$user_info.'History 1',$user_info.'History 0', $user_info.'Current Password') 7 | $password = $user_info.'History 4' 8 | New-ADUser -Name $Name -SamAccountName $Name -AccountPassword(ConvertTo-SecureString -AsPlainText "$password" -Force) -Enabled $true 9 | foreach ($password in $Passwords) { 10 | Set-ADAccountPassword -Identity $Name -Reset -NewPassword (ConvertTo-SecureString -AsPlainText "$password" -Force) 11 | } 12 | } -------------------------------------------------------------------------------- /pytest.ini: -------------------------------------------------------------------------------- 1 | [pytest] 2 | # Pytest configuration for DPAT test suite 3 | 4 | # Test discovery 5 | testpaths = tests 6 | python_files = test_*.py 7 | python_classes = Test* 8 | python_functions = test_* 9 | 10 | # Output options 11 | addopts = -v --tb=short --strict-markers --disable-warnings 12 | 13 | # Markers 14 | markers = 15 | unit: Unit tests 16 | integration: Integration tests 17 | slow: Slow running tests 18 | requires_files: Tests that require external files 19 | 20 | # Logging 21 | log_cli = true 22 | log_cli_level = WARNING 23 | log_cli_format = %(asctime)s [%(levelname)8s] %(name)s: %(message)s 24 | log_cli_date_format = %Y-%m-%d %H:%M:%S 25 | 26 | # Minimum version 27 | minversion = 6.0 28 | 29 | # Test timeout (in seconds) 30 | timeout = 300 31 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Python 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | *.so 6 | .Python 7 | build/ 8 | develop-eggs/ 9 | dist/ 10 | downloads/ 11 | eggs/ 12 | .eggs/ 13 | lib/ 14 | lib64/ 15 | parts/ 16 | sdist/ 17 | var/ 18 | wheels/ 19 | *.egg-info/ 20 | .installed.cfg 21 | *.egg 22 | MANIFEST 23 | 24 | # Virtual environments 25 | venv/ 26 | .venv/ 27 | ENV/ 28 | env/ 29 | .env 30 | 31 | # Testing 32 | .pytest_cache/ 33 | .coverage 34 | htmlcov/ 35 | coverage.xml 36 | *.cover 37 | .hypothesis/ 38 | .tox/ 39 | .nox/ 40 | 41 | # IDE 42 | .vscode/ 43 | .idea/ 44 | *.swp 45 | 
*.swo 46 | *~ 47 | .project 48 | .pydevproject 49 | .settings/ 50 | 51 | # OS 52 | .DS_Store 53 | Thumbs.db 54 | 55 | # Project specific 56 | pass_audit.db 57 | DPAT Report/ 58 | DPAT Report - Sanitized/ 59 | Sanitized - DPAT Report/ 60 | *.html 61 | **/DPAT icon.png 62 | 63 | # Claude 64 | .claude/* 65 | 66 | # Poetry - DO NOT ignore lock file 67 | # poetry.lock is intentionally not ignored 68 | DPAT Report - Sanitized/report.css 69 | groups_test/Domain Admins.txt 70 | groups_test/Enterprise Admins.txt 71 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2016 Carrie Roberts 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
22 | -------------------------------------------------------------------------------- /sample_data/umlauts/customer.ntds: -------------------------------------------------------------------------------- 1 | Administrator:500:aad3b435b51404eeaad3b435b51404ee:a8bfc015bdbf6d0cf77396692ca5fc11::: 2 | Guest:501:aad3b435b51404eeaad3b435b51404ee:31d6cfe0d16ae931b73c59d7e0c089c0::: 3 | dpat:1001:aad3b435b51404eeaad3b435b51404ee:4472910b89492aef53ceb6b420b15f52::: 4 | WIN-M2HC5DF3QHT$:1002:aad3b435b51404eeaad3b435b51404ee:bffd9b2883d322e0b95f859e14fa4879::: 5 | krbtgt:502:aad3b435b51404eeaad3b435b51404ee:d79e8ab4d16989228127fcbf6c4e5967::: 6 | dpatdomain.local\larry:1603:aad3b435b51404eeaad3b435b51404ee:ecd382f6949d712f7f81982242755cc3::: 7 | dpatdomain.local\curly:1604:aad3b435b51404eeaad3b435b51404ee:0c01c4fd959e29dd499cb0c31dfc6a75::: 8 | dpatdomain.local\mo:1605:aad3b435b51404eeaad3b435b51404ee:a86f8d1ed0156db9e86d7c586894c196::: 9 | dpatdomain.local\bob:1606:aad3b435b51404eeaad3b435b51404ee:aaeb7b36057ea55e2b5bccf0fb3a4992::: 10 | dpatdomain.local\frank:1607:aad3b435b51404eeaad3b435b51404ee:a3ba63b79dab674ad4575ec9f605aebe::: 11 | dpatdomain.local\nancy:1608:aad3b435b51404eeaad3b435b51404ee:0c0009fe6efce0cc20c7aef8f4b54bb4::: 12 | dpatdomain.local\harry:1609:aad3b435b51404eeaad3b435b51404ee:cac3a73c02d89fd62392800815e0f425::: 13 | dpatdomain.local\albert:1610:aad3b435b51404eeaad3b435b51404ee:12ac2f50d6666a408470649b8cf58aa8::: 14 | -------------------------------------------------------------------------------- /sample_data/Domain Admins.txt: -------------------------------------------------------------------------------- 1 | child.domain.com\Agnes.Aarons-admin 2 | child.domain.com\Cliff.Adames-admin 3 | child.domain.com\Pete.Baysmore-admin 4 | child.domain.com\Raul.Beaber-admin 5 | child.domain.com\Reggie.Beacher-admin 6 | child.domain.com\Rex.Beadling-admin 7 | child.domain.com\Hazel.Dillman-admin 8 | child.domain.com\Herbert.Dils-admin 9 | child.domain.com\Horace.Dimarco-admin 10 | child.domain.com\Isabella.Dimitroff-admin 11 | child.domain.com\Jackie.Dimodica-admin 12 | child.domain.com\Clint.Hollifield-admin 13 | child.domain.com\Coleen.Hollinghead-admin 14 | child.domain.com\August.Mcginnis-admin 15 | child.domain.com\Bobbie.Mcgrane-admin 16 | child.domain.com\Booker.Mcgraph-admin 17 | child.domain.com\Brendan.Mcgriff-admin 18 | child.domain.com\Cecil.Mcinnis-admin 19 | child.domain.com\Celeste.Mcintire-admin 20 | child.domain.com\Celia.Mcintosh-admin 21 | child.domain.com\Alex.Revis-admin 22 | child.domain.com\Cory.Ruhoff-admin 23 | child.domain.com\Damian.Scarver-admin 24 | child.domain.com\Gilbert.Settle-admin 25 | child.domain.com\Oscar.Veyna-admin 26 | child.domain.com\Rex.Vidot-admin 27 | child.domain.com\Scot.Viles-admin 28 | child.domain.com\Burton.Vonner-admin 29 | child.domain.com\Dayna.Wade-admin 30 | child.domain.com\Dustin.Wahlund-admin 31 | child.domain.com\Earnestine.Waiau-admin 32 | child.domain.com\Emerson.Wala-admin 33 | child.domain.com\Roman.Zurek-admin 34 | child.domain.com\Rosalinda.Zusman-admin 35 | child.domain.com\Sallie.Zych-admin 36 | child.domain.com\Samuel.Zysk-admin 37 | -------------------------------------------------------------------------------- /sample_data/history/john-customer-small.pot: -------------------------------------------------------------------------------- 1 | $NT$4472910b89492aef53ceb6b420b15f52:Fall2019 2 | $NT$d097e876365525e9c26f844d4891575a:Baseball77 3 | $NT$8489422dd6d0e84d463275ec53061fbc:PringlesSalt! 
4 | $NT$df3d54d782e7b609b96a8b851a98c3a0:Black^Hills 5 | $NT$0c59de500c535e5423787020d807389d:Proverbs 3:5 6 | $NT$e62830daed8dbea4acd0b99d682946bb:Summer2019 7 | $NT$664a1aa376be470f7362ed264821e1c8:Baseball76 8 | $NT$e0396fea67867f981621cad6dc3155a6:!EatPringles 9 | $NT$eda2d14fae261e6c53f6f8c502e03882:Black%Hills 10 | $NT$19f32aa71bd564e318d738fa85508e4c:Zodiak-Cancer 11 | $NT$127d5d2635d0dc7c371c2cb77f3b3631:Philippians 4:6 12 | $NT$535baf9cf1c3067f9e952cc093f47cea:Spring2019 13 | $NT$c8139c6da49a7b9f179aa27dd5acd508:Baseball75 14 | $NT$3d0354c73796887bd88ee9e632cfe033:Black$Hills 15 | $NT$96d8b91c8f5136fc64dc7291b23cc2cf:Romans 8:28 16 | $NT$1e93978b2a94c18e34427bc97236d042:Baseball74 17 | $NT$a69cf337a3a2280be96847210ac11a03:I luv my kids! 18 | $NT$e15f90a648602e63599bb1c5aee97154:Black#Hills 19 | $NT$3fba9839b9256db212248d87b1000b02:Zodiak-Taurus 20 | $NT$4cdd9c320acdb6afb818880192834ad8:Philippians 4:13 21 | $NT$4b1008e316a3d0668824bdfb24dd8ae6:Fall2018 22 | $NT$5bb4ae91cf3644f735c3b04cd4a0c499:Baseball73 23 | $NT$05fdb9b0f73186b91470556c34300e52:New Job! 24 | $NT$b37dd19d3746319788e786c99fd6d9a4:Black@Hills 25 | $NT$10e824a030bb0deb2f05d51ebbcf507a:Zodiak-Pisces 26 | $NT$1f5d5b79f097c41235b573c7d7917c8d:Jeremiah 29:11 27 | $NT$5f6b883d42910065a60fbdbfbfa27caa:Spring2018 28 | $NT$7fda55e03dca701ff32fcb83e9ddcad9:Baseball72 29 | $NT$ab566ac3eb41a3aed66ff8f553817ea1:Black!Hills 30 | $NT$ee5549b777ad18e52ac98247a26873fa:John 3:16 31 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [tool.poetry] 2 | name = "dpat" 3 | version = "0.1.0" 4 | description = "Domain Password Audit Tool - A tool for auditing domain passwords based on extracted NTDS files" 5 | authors = ["Your Name "] 6 | readme = "README.md" 7 | license = "MIT" 8 | packages = [{include = "dpat.py"}] 9 | 10 | [tool.poetry.dependencies] 11 | python = "^3.8" 12 | pycryptodome = "^3.23.0" 13 | 14 | [tool.poetry.group.dev.dependencies] 15 | pytest = "^8.0.0" 16 | pytest-cov = "^5.0.0" 17 | pytest-mock = "^3.14.0" 18 | 19 | [tool.poetry.scripts] 20 | test = "pytest:main" 21 | tests = "pytest:main" 22 | 23 | [tool.pytest.ini_options] 24 | minversion = "8.0" 25 | testpaths = ["tests"] 26 | python_files = ["test_*.py", "*_test.py"] 27 | python_classes = ["Test*"] 28 | python_functions = ["test_*"] 29 | addopts = [ 30 | "--strict-markers", 31 | "--verbose", 32 | "--cov=.", 33 | "--cov-branch", 34 | "--cov-report=term-missing:skip-covered", 35 | "--cov-report=html:htmlcov", 36 | "--cov-report=xml:coverage.xml", 37 | "--cov-fail-under=80", 38 | "-ra", 39 | "--showlocals", 40 | "--tb=short", 41 | ] 42 | markers = [ 43 | "unit: Unit tests", 44 | "integration: Integration tests", 45 | "slow: Tests that take a long time to run", 46 | ] 47 | filterwarnings = [ 48 | "error", 49 | "ignore::UserWarning", 50 | "ignore::DeprecationWarning", 51 | ] 52 | 53 | [tool.coverage.run] 54 | source = ["."] 55 | omit = [ 56 | "*/tests/*", 57 | "*/test_*", 58 | "*/__pycache__/*", 59 | "*/site-packages/*", 60 | "*/distutils/*", 61 | "*/venv/*", 62 | "*/.venv/*", 63 | "*/htmlcov/*", 64 | "setup.py", 65 | "conftest.py", 66 | ] 67 | branch = true 68 | 69 | [tool.coverage.report] 70 | precision = 2 71 | show_missing = true 72 | skip_covered = false 73 | fail_under = 80 74 | exclude_lines = [ 75 | "pragma: no cover", 76 | "def __repr__", 77 | "if self.debug:", 78 | "if settings.DEBUG", 79 | "raise AssertionError", 80 | "raise 
NotImplementedError", 81 | "if 0:", 82 | "if __name__ == .__main__.:", 83 | "if TYPE_CHECKING:", 84 | "class .*\\bProtocol\\):", 85 | "@(abc\\.)?abstractmethod", 86 | ] 87 | 88 | [tool.coverage.html] 89 | directory = "htmlcov" 90 | 91 | [tool.coverage.xml] 92 | output = "coverage.xml" 93 | 94 | [build-system] 95 | requires = ["poetry-core"] 96 | build-backend = "poetry.core.masonry.api" -------------------------------------------------------------------------------- /tests/test_setup_validation.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | import sys 3 | import os 4 | from pathlib import Path 5 | 6 | 7 | class TestSetupValidation: 8 | """Validation tests to ensure the testing infrastructure is properly configured.""" 9 | 10 | def test_pytest_is_installed(self): 11 | """Verify pytest is available.""" 12 | assert "pytest" in sys.modules or True # Will be true after poetry install 13 | 14 | def test_project_structure_exists(self): 15 | """Verify the expected project structure is in place.""" 16 | project_root = Path(__file__).parent.parent 17 | 18 | assert project_root.exists() 19 | assert (project_root / "tests").exists() 20 | assert (project_root / "tests" / "__init__.py").exists() 21 | assert (project_root / "tests" / "unit").exists() 22 | assert (project_root / "tests" / "integration").exists() 23 | assert (project_root / "tests" / "conftest.py").exists() 24 | assert (project_root / "pyproject.toml").exists() 25 | 26 | def test_main_module_exists(self): 27 | """Verify the main dpat module file exists.""" 28 | project_root = Path(__file__).parent.parent 29 | dpat_file = project_root / "dpat.py" 30 | 31 | assert dpat_file.exists() 32 | assert dpat_file.is_file() 33 | 34 | # Verify it's a valid Python file by checking it has content 35 | content = dpat_file.read_text() 36 | assert "#!/usr/bin/env python" in content 37 | assert "argparse" in content 38 | 39 | def test_conftest_fixtures_available(self, temp_dir, mock_config): 40 | """Verify conftest fixtures are accessible.""" 41 | assert temp_dir.exists() 42 | assert temp_dir.is_dir() 43 | assert mock_config is not None 44 | assert hasattr(mock_config, 'debug') 45 | 46 | @pytest.mark.unit 47 | def test_unit_marker_works(self): 48 | """Verify the unit test marker is recognized.""" 49 | assert True 50 | 51 | @pytest.mark.integration 52 | def test_integration_marker_works(self): 53 | """Verify the integration test marker is recognized.""" 54 | assert True 55 | 56 | @pytest.mark.slow 57 | def test_slow_marker_works(self): 58 | """Verify the slow test marker is recognized.""" 59 | assert True 60 | 61 | def test_temp_file_fixture(self, temp_file): 62 | """Verify the temp_file fixture works correctly.""" 63 | test_content = "Hello, World!" 
64 | temp_path = temp_file("test.txt", test_content) 65 | 66 | assert temp_path.exists() 67 | assert temp_path.read_text() == test_content 68 | 69 | def test_coverage_configured(self): 70 | """Verify coverage is properly configured.""" 71 | # This test will pass when run with coverage 72 | assert True 73 | 74 | def test_mock_fixtures_work(self, mock_database, mock_webbrowser): 75 | """Verify mock fixtures are properly set up.""" 76 | assert mock_database is not None 77 | assert hasattr(mock_database, 'cursor') 78 | assert mock_webbrowser is not None -------------------------------------------------------------------------------- /.vscode/launch.json: -------------------------------------------------------------------------------- 1 | { 2 | // Use IntelliSense to learn about possible attributes. 3 | // Hover to view descriptions of existing attributes. 4 | // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387 5 | "version": "0.2.0", 6 | "configurations": [ 7 | { 8 | "name": "DPAT basic", 9 | "type": "python", 10 | "request": "launch", 11 | "program": "${file}", 12 | "console": "integratedTerminal", 13 | "args": ["-n", "sample_data\\customer.ntds", "-c","sample_data\\oclHashcat.pot","-g","sample_data\\Domain Admins.txt", "sample_data\\Enterprise Admins.txt", "-w"] 14 | }, 15 | { 16 | "name": "DPAT basic Umlauts", 17 | "type": "python", 18 | "request": "launch", 19 | "program": "${file}", 20 | "console": "integratedTerminal", 21 | "args": ["-n", "sample_data\\umlauts\\customer.ntds", "-c","sample_data\\umlauts\\hashcat.potfile"] 22 | }, 23 | { 24 | "name": "DPAT basic History", 25 | "type": "python", 26 | "request": "launch", 27 | "program": "${file}", 28 | "console": "integratedTerminal", 29 | "args": ["-n", "sample_data\\history\\customer.ntds", "-c","sample_data\\history\\hashcat.potfile"] 30 | }, 31 | { 32 | "name": "DPAT basic History small", 33 | "type": "python", 34 | "request": "launch", 35 | "program": "${file}", 36 | "console": "integratedTerminal", 37 | "args": ["-n", "sample_data\\history\\customer-small.ntds", "-c","sample_data\\history\\john-customer-small.pot", "-w"] 38 | }, 39 | { 40 | "name": "DPAT basic History - Sanitized", 41 | "type": "python", 42 | "request": "launch", 43 | "program": "${file}", 44 | "console": "integratedTerminal", 45 | "args": ["-n", "sample_data\\history\\customer.ntds", "-c","sample_data\\history\\hashcat.potfile", "-s"] 46 | }, 47 | { 48 | "name": "DPAT basic Umlauts JtR", 49 | "type": "python", 50 | "request": "launch", 51 | "program": "${file}", 52 | "console": "integratedTerminal", 53 | "args": ["-n", "sample_data\\umlauts\\customer.ntds", "-c","sample_data\\umlauts\\john.pot"] 54 | }, 55 | { 56 | "name": "DPAT sanitized", 57 | "type": "python", 58 | "request": "launch", 59 | "program": "${file}", 60 | "console": "integratedTerminal", 61 | "args": ["-n", "sample_data\\customer.ntds", "-c","sample_data\\oclHashcat.pot", "-s"] 62 | }, 63 | { 64 | "name": "DPAT All", 65 | "type": "python", 66 | "request": "launch", 67 | "program": "${file}", 68 | "console": "integratedTerminal", 69 | "args": ["-n", "sample_data\\customer.ntds", "-c","sample_data\\oclHashcat.pot","-g","sample_data\\Domain Admins.txt", "sample_data\\Enterprise Admins.txt","-s","-w"] 70 | }, 71 | { 72 | "name": "gen data", 73 | "type": "python", 74 | "request": "launch", 75 | "program": "${file}", 76 | "console": "integratedTerminal" 77 | } 78 | ] 79 | } -------------------------------------------------------------------------------- /tests/conftest.py: 
-------------------------------------------------------------------------------- 1 | import pytest 2 | import tempfile 3 | import os 4 | import shutil 5 | from pathlib import Path 6 | from unittest.mock import Mock, MagicMock 7 | 8 | 9 | @pytest.fixture 10 | def temp_dir(): 11 | """Create a temporary directory for test files.""" 12 | temp_path = tempfile.mkdtemp() 13 | yield Path(temp_path) 14 | shutil.rmtree(temp_path) 15 | 16 | 17 | @pytest.fixture 18 | def temp_file(temp_dir): 19 | """Create a temporary file in the temp directory.""" 20 | def _create_temp_file(filename="test_file.txt", content=""): 21 | file_path = temp_dir / filename 22 | file_path.write_text(content) 23 | return file_path 24 | return _create_temp_file 25 | 26 | 27 | @pytest.fixture 28 | def mock_config(): 29 | """Provide a mock configuration object.""" 30 | config = Mock() 31 | config.debug = False 32 | config.output_dir = "/tmp/test_output" 33 | config.input_file = "test_input.txt" 34 | return config 35 | 36 | 37 | @pytest.fixture 38 | def sample_ntds_data(): 39 | """Provide sample NTDS data for testing.""" 40 | return [ 41 | "domain.com\\user1:1001:aad3b435b51404eeaad3b435b51404ee:31d6cfe0d16ae931b73c59d7e0c089c0:::", 42 | "domain.com\\user2:1002:aad3b435b51404eeaad3b435b51404ee:e19ccf75ee54e06b06a5907af13cef42:::", 43 | "domain.com\\admin:1003:aad3b435b51404eeaad3b435b51404ee:8846f7eaee8fb117ad06bdd830b7586c:::", 44 | ] 45 | 46 | 47 | @pytest.fixture 48 | def sample_hashcat_output(): 49 | """Provide sample hashcat potfile data.""" 50 | return [ 51 | "31d6cfe0d16ae931b73c59d7e0c089c0:", 52 | "e19ccf75ee54e06b06a5907af13cef42:P@ssw0rd", 53 | "8846f7eaee8fb117ad06bdd830b7586c:password123", 54 | ] 55 | 56 | 57 | @pytest.fixture 58 | def mock_database(): 59 | """Provide a mock database connection.""" 60 | db = MagicMock() 61 | db.cursor.return_value = MagicMock() 62 | return db 63 | 64 | 65 | @pytest.fixture(autouse=True) 66 | def reset_environment(): 67 | """Reset environment variables before and after each test.""" 68 | original_env = os.environ.copy() 69 | yield 70 | os.environ.clear() 71 | os.environ.update(original_env) 72 | 73 | 74 | @pytest.fixture 75 | def capture_stdout(monkeypatch): 76 | """Capture stdout for testing print statements.""" 77 | import io 78 | import sys 79 | 80 | captured_output = io.StringIO() 81 | monkeypatch.setattr(sys, 'stdout', captured_output) 82 | 83 | def get_output(): 84 | return captured_output.getvalue() 85 | 86 | return get_output 87 | 88 | 89 | @pytest.fixture 90 | def mock_webbrowser(monkeypatch): 91 | """Mock webbrowser module to prevent opening actual browser windows.""" 92 | mock_browser = Mock() 93 | monkeypatch.setattr('webbrowser.open', mock_browser) 94 | return mock_browser 95 | 96 | 97 | @pytest.fixture 98 | def mock_argparse_args(): 99 | """Provide mock command line arguments.""" 100 | args = Mock() 101 | args.ntdsfile = "test_ntds.txt" 102 | args.crackfile = "test_crack.txt" 103 | args.outputfile = "test_output.html" 104 | args.reportdirectory = "test_report" 105 | args.writedb = False 106 | args.sanitize = False 107 | args.grouplists = None 108 | args.machineaccts = False 109 | args.krbtgt = False 110 | return args -------------------------------------------------------------------------------- /tests/run_sample_data_tests.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | """ 3 | Sample data test runner for DPAT. 
4 | 5 | This script runs integration tests using the actual sample data files 6 | from the sample_data directory. 7 | """ 8 | 9 | import unittest 10 | import sys 11 | from pathlib import Path 12 | 13 | # Add the parent directory to the path so we can import dpat 14 | sys.path.insert(0, str(Path(__file__).parent.parent.parent)) 15 | 16 | def run_sample_data_tests(): 17 | """Run sample data integration tests.""" 18 | # Add the project root to the path 19 | project_root = Path(__file__).parent.parent 20 | sys.path.insert(0, str(project_root)) 21 | 22 | # Discover sample data tests 23 | loader = unittest.TestLoader() 24 | suite = loader.loadTestsFromName('tests.integration.test_sample_data') 25 | 26 | # Create test runner 27 | runner = unittest.TextTestRunner( 28 | verbosity=2, 29 | stream=sys.stdout 30 | ) 31 | 32 | # Run tests 33 | result = runner.run(suite) 34 | 35 | return result 36 | 37 | def check_sample_data_files(): 38 | """Check if sample data files exist.""" 39 | # Get the project root directory (DPAT directory) 40 | project_root = Path(__file__).parent.parent 41 | sample_data_dir = project_root / "sample_data" 42 | 43 | required_files = [ 44 | "customer.ntds", 45 | "oclHashcat.pot", 46 | "Domain Admins.txt", 47 | "Enterprise Admins.txt", 48 | "Enterprise Admins PowerView Output.txt" 49 | ] 50 | 51 | missing_files = [] 52 | for filename in required_files: 53 | file_path = sample_data_dir / filename 54 | if not file_path.exists(): 55 | missing_files.append(str(file_path)) 56 | 57 | if missing_files: 58 | print("❌ Missing sample data files:") 59 | for file_path in missing_files: 60 | print(f" - {file_path}") 61 | print("\nPlease ensure all sample data files are present before running tests.") 62 | return False 63 | 64 | print("✅ All sample data files found:") 65 | for filename in required_files: 66 | file_path = sample_data_dir / filename 67 | print(f" - {file_path}") 68 | 69 | return True 70 | 71 | def main(): 72 | """Main test runner function.""" 73 | print("🧪 DPAT Sample Data Integration Tests") 74 | print("=" * 50) 75 | 76 | # Check if sample data files exist 77 | if not check_sample_data_files(): 78 | return 1 79 | 80 | print("\n🔍 Running sample data integration tests...") 81 | 82 | # Run tests 83 | result = run_sample_data_tests() 84 | 85 | # Print summary 86 | print(f"\n{'='*50}") 87 | print(f"Test Summary:") 88 | print(f" Tests run: {result.testsRun}") 89 | print(f" Failures: {len(result.failures)}") 90 | print(f" Errors: {len(result.errors)}") 91 | print(f" Skipped: {len(result.skipped) if hasattr(result, 'skipped') else 0}") 92 | 93 | if result.failures: 94 | print(f"\nFailures:") 95 | for test, traceback in result.failures: 96 | print(f" {test}: {traceback}") 97 | 98 | if result.errors: 99 | print(f"\nErrors:") 100 | for test, traceback in result.errors: 101 | print(f" {test}: {traceback}") 102 | 103 | # Return exit code 104 | return 0 if result.wasSuccessful() else 1 105 | 106 | if __name__ == '__main__': 107 | sys.exit(main()) 108 | -------------------------------------------------------------------------------- /sample_data/history/customer-small.ntds: -------------------------------------------------------------------------------- 1 | Administrator:500:aad3b435b51404eeaad3b435b51404ee:a8bfc015bdbf6d0cf77396692ca5fc11::: 2 | Guest:501:aad3b435b51404eeaad3b435b51404ee:31d6cfe0d16ae931b73c59d7e0c089c0::: 3 | dpat:1001:aad3b435b51404eeaad3b435b51404ee:4472910b89492aef53ceb6b420b15f52::: 4 | 
WIN-M2HC5DF3QHT$:1002:aad3b435b51404eeaad3b435b51404ee:bffd9b2883d322e0b95f859e14fa4879::: 5 | krbtgt:502:aad3b435b51404eeaad3b435b51404ee:d79e8ab4d16989228127fcbf6c4e5967::: 6 | Larry:1603:aad3b435b51404eeaad3b435b51404ee:4472910b89492aef53ceb6b420b15f52::: 7 | Larry_history0:1603:aad3b435b51404eeaad3b435b51404ee:e62830daed8dbea4acd0b99d682946bb::: 8 | Larry_history1:1603:aad3b435b51404eeaad3b435b51404ee:535baf9cf1c3067f9e952cc093f47cea::: 9 | Larry_history2:1603:aad3b435b51404eeaad3b435b51404ee:89cdf00b5bd63aba309f7c3ee8d5dd50::: 10 | Larry_history3:1603:aad3b435b51404eeaad3b435b51404ee:4b1008e316a3d0668824bdfb24dd8ae6::: 11 | Larry_history4:1603:aad3b435b51404eeaad3b435b51404ee:5f6b883d42910065a60fbdbfbfa27caa::: 12 | Curly:1604:aad3b435b51404eeaad3b435b51404ee:d097e876365525e9c26f844d4891575a::: 13 | Curly_history0:1604:aad3b435b51404eeaad3b435b51404ee:664a1aa376be470f7362ed264821e1c8::: 14 | Curly_history1:1604:aad3b435b51404eeaad3b435b51404ee:c8139c6da49a7b9f179aa27dd5acd508::: 15 | Curly_history2:1604:aad3b435b51404eeaad3b435b51404ee:1e93978b2a94c18e34427bc97236d042::: 16 | Curly_history3:1604:aad3b435b51404eeaad3b435b51404ee:5bb4ae91cf3644f735c3b04cd4a0c499::: 17 | Curly_history4:1604:aad3b435b51404eeaad3b435b51404ee:7fda55e03dca701ff32fcb83e9ddcad9::: 18 | Carrie:1605:aad3b435b51404eeaad3b435b51404ee:8489422dd6d0e84d463275ec53061fbc::: 19 | Carrie_history0:1605:aad3b435b51404eeaad3b435b51404ee:e0396fea67867f981621cad6dc3155a6::: 20 | Carrie_history1:1605:aad3b435b51404eeaad3b435b51404ee:60d07a7555b279dbad7689404bff7d84::: 21 | Carrie_history2:1605:aad3b435b51404eeaad3b435b51404ee:a69cf337a3a2280be96847210ac11a03::: 22 | Carrie_history3:1605:aad3b435b51404eeaad3b435b51404ee:05fdb9b0f73186b91470556c34300e52::: 23 | Carrie_history4:1605:aad3b435b51404eeaad3b435b51404ee:a7424d4de7a71b0a3fc83b7c9943de17::: 24 | Darin:1606:aad3b435b51404eeaad3b435b51404ee:df3d54d782e7b609b96a8b851a98c3a0::: 25 | Darin_history0:1606:aad3b435b51404eeaad3b435b51404ee:eda2d14fae261e6c53f6f8c502e03882::: 26 | Darin_history1:1606:aad3b435b51404eeaad3b435b51404ee:3d0354c73796887bd88ee9e632cfe033::: 27 | Darin_history2:1606:aad3b435b51404eeaad3b435b51404ee:e15f90a648602e63599bb1c5aee97154::: 28 | Darin_history3:1606:aad3b435b51404eeaad3b435b51404ee:b37dd19d3746319788e786c99fd6d9a4::: 29 | Darin_history4:1606:aad3b435b51404eeaad3b435b51404ee:ab566ac3eb41a3aed66ff8f553817ea1::: 30 | Mo:1607:aad3b435b51404eeaad3b435b51404ee:2438971f2f7369f60255114bd5bdac4f::: 31 | Mo_history0:1607:aad3b435b51404eeaad3b435b51404ee:19f32aa71bd564e318d738fa85508e4c::: 32 | Mo_history1:1607:aad3b435b51404eeaad3b435b51404ee:ec120459cb0069aa7b0b4594e4a099ba::: 33 | Mo_history2:1607:aad3b435b51404eeaad3b435b51404ee:3fba9839b9256db212248d87b1000b02::: 34 | Mo_history3:1607:aad3b435b51404eeaad3b435b51404ee:10e824a030bb0deb2f05d51ebbcf507a::: 35 | Mo_history4:1607:aad3b435b51404eeaad3b435b51404ee:a15765a7f0315175a6f31ebc128c76bc::: 36 | cjordan:1608:aad3b435b51404eeaad3b435b51404ee:31d6cfe0d16ae931b73c59d7e0c089c0::: 37 | pope:1609:aad3b435b51404eeaad3b435b51404ee:0c59de500c535e5423787020d807389d::: 38 | pope_history0:1609:aad3b435b51404eeaad3b435b51404ee:127d5d2635d0dc7c371c2cb77f3b3631::: 39 | pope_history1:1609:aad3b435b51404eeaad3b435b51404ee:96d8b91c8f5136fc64dc7291b23cc2cf::: 40 | pope_history2:1609:aad3b435b51404eeaad3b435b51404ee:4cdd9c320acdb6afb818880192834ad8::: 41 | pope_history3:1609:aad3b435b51404eeaad3b435b51404ee:1f5d5b79f097c41235b573c7d7917c8d::: 42 | 
pope_history4:1609:aad3b435b51404eeaad3b435b51404ee:ee5549b777ad18e52ac98247a26873fa::: 43 | -------------------------------------------------------------------------------- /tests/verify_tests.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | """ 3 | Simple test verification script for DPAT test suite. 4 | 5 | This script runs a basic verification to ensure the test suite is properly 6 | configured and can discover and run tests. 7 | """ 8 | 9 | import sys 10 | import unittest 11 | from pathlib import Path 12 | 13 | # Add the parent directory to the path 14 | sys.path.insert(0, str(Path(__file__).parent.parent)) 15 | 16 | def test_imports(): 17 | """Test that all required modules can be imported.""" 18 | try: 19 | from tests import TestConfig, TestDataGenerator, DPATTestCase 20 | from tests.unit.test_core import TestConfig as UnitTestConfig 21 | from tests.integration.test_integration import TestNTDSProcessingIntegration 22 | print("✅ All test modules imported successfully") 23 | return True 24 | except ImportError as e: 25 | print(f"❌ Import error: {e}") 26 | return False 27 | 28 | def test_discovery(): 29 | """Test that tests can be discovered.""" 30 | try: 31 | loader = unittest.TestLoader() 32 | suite = loader.discover('tests', pattern='test_*.py') 33 | test_count = suite.countTestCases() 34 | print(f"✅ Discovered {test_count} test cases") 35 | return test_count > 0 36 | except Exception as e: 37 | print(f"❌ Test discovery error: {e}") 38 | return False 39 | 40 | def test_sample_data(): 41 | """Test that sample data can be generated.""" 42 | try: 43 | from tests import TestDataGenerator 44 | generator = TestDataGenerator() 45 | 46 | ntds_data = generator.create_sample_ntds_data() 47 | cracked_data = generator.create_sample_cracked_data() 48 | group_data = generator.create_sample_group_data() 49 | 50 | print(f"✅ Generated sample data:") 51 | print(f" - NTDS entries: {len(ntds_data)}") 52 | print(f" - Cracked entries: {len(cracked_data)}") 53 | print(f" - Groups: {len(group_data)}") 54 | 55 | return len(ntds_data) > 0 and len(cracked_data) > 0 and len(group_data) > 0 56 | except Exception as e: 57 | print(f"❌ Sample data generation error: {e}") 58 | return False 59 | 60 | def test_basic_functionality(): 61 | """Test basic DPAT functionality.""" 62 | try: 63 | from dpat import Config, calculate_percentage, strtobool 64 | 65 | # Test Config 66 | config = Config( 67 | ntds_file="test.ntds", 68 | cracked_file="test.pot", 69 | min_password_length=8 70 | ) 71 | print("✅ Config creation works") 72 | 73 | # Test utility functions 74 | percentage = calculate_percentage(25, 100) 75 | assert percentage == 25.0, f"Expected 25.0, got {percentage}" 76 | print("✅ Percentage calculation works") 77 | 78 | bool_val = strtobool("true") 79 | assert bool_val == True, f"Expected True, got {bool_val}" 80 | print("✅ String to boolean conversion works") 81 | 82 | return True 83 | except Exception as e: 84 | print(f"❌ Basic functionality test error: {e}") 85 | return False 86 | 87 | def main(): 88 | """Run all verification tests.""" 89 | print("🧪 DPAT Test Suite Verification") 90 | print("=" * 40) 91 | 92 | tests = [ 93 | ("Import Tests", test_imports), 94 | ("Test Discovery", test_discovery), 95 | ("Sample Data Generation", test_sample_data), 96 | ("Basic Functionality", test_basic_functionality), 97 | ] 98 | 99 | passed = 0 100 | total = len(tests) 101 | 102 | for test_name, test_func in tests: 103 | print(f"\n🔍 Running {test_name}...") 104 | if 
test_func(): 105 | passed += 1 106 | else: 107 | print(f"❌ {test_name} failed") 108 | 109 | print("\n" + "=" * 40) 110 | print(f"📊 Results: {passed}/{total} tests passed") 111 | 112 | if passed == total: 113 | print("🎉 All verification tests passed! Test suite is ready.") 114 | return 0 115 | else: 116 | print("⚠️ Some verification tests failed. Check the errors above.") 117 | return 1 118 | 119 | if __name__ == '__main__': 120 | sys.exit(main()) 121 | -------------------------------------------------------------------------------- /tests/run_tests.py: -------------------------------------------------------------------------------- 1 | """ 2 | Test runner and configuration for DPAT test suite. 3 | 4 | This module provides the main test runner and configuration for running 5 | the complete DPAT test suite. 6 | """ 7 | 8 | import unittest 9 | import sys 10 | import os 11 | from pathlib import Path 12 | import logging 13 | 14 | # Add the parent directory to the path so we can import dpat 15 | sys.path.insert(0, str(Path(__file__).parent.parent)) 16 | 17 | # Configure test logging 18 | logging.basicConfig( 19 | level=logging.WARNING, 20 | format='%(asctime)s - %(name)s - %(levelname)s - %(message)s' 21 | ) 22 | 23 | def discover_tests(): 24 | """Discover all test modules.""" 25 | # Get the tests directory 26 | tests_dir = Path(__file__).parent 27 | 28 | # Discover unit tests 29 | unit_loader = unittest.TestLoader() 30 | unit_suite = unit_loader.discover( 31 | start_dir=str(tests_dir / 'unit'), 32 | pattern='test_*.py', 33 | top_level_dir=str(tests_dir) 34 | ) 35 | 36 | # Discover integration tests 37 | integration_loader = unittest.TestLoader() 38 | integration_suite = integration_loader.discover( 39 | start_dir=str(tests_dir / 'integration'), 40 | pattern='test_*.py', 41 | top_level_dir=str(tests_dir) 42 | ) 43 | 44 | # Combine all test suites 45 | all_tests = unittest.TestSuite() 46 | all_tests.addTest(unit_suite) 47 | all_tests.addTest(integration_suite) 48 | 49 | return all_tests 50 | 51 | def run_tests(verbosity=2, failfast=False): 52 | """Run all tests with the specified verbosity.""" 53 | # Discover tests 54 | test_suite = discover_tests() 55 | 56 | # Create test runner 57 | runner = unittest.TextTestRunner( 58 | verbosity=verbosity, 59 | failfast=failfast, 60 | stream=sys.stdout 61 | ) 62 | 63 | # Run tests 64 | result = runner.run(test_suite) 65 | 66 | return result 67 | 68 | def run_unit_tests_only(verbosity=2, failfast=False): 69 | """Run only unit tests.""" 70 | tests_dir = Path(__file__).parent 71 | 72 | loader = unittest.TestLoader() 73 | suite = loader.discover( 74 | start_dir=str(tests_dir / 'unit'), 75 | pattern='test_*.py', 76 | top_level_dir=str(tests_dir) 77 | ) 78 | 79 | runner = unittest.TextTestRunner( 80 | verbosity=verbosity, 81 | failfast=failfast, 82 | stream=sys.stdout 83 | ) 84 | 85 | result = runner.run(suite) 86 | return result 87 | 88 | def run_sample_data_tests_only(verbosity=2, failfast=False): 89 | """Run only sample data integration tests.""" 90 | tests_dir = Path(__file__).parent 91 | 92 | loader = unittest.TestLoader() 93 | suite = loader.loadTestsFromName('tests.integration.test_sample_data') 94 | 95 | runner = unittest.TextTestRunner( 96 | verbosity=verbosity, 97 | failfast=failfast, 98 | stream=sys.stdout 99 | ) 100 | 101 | result = runner.run(suite) 102 | return result 103 | 104 | def run_specific_test(test_module, verbosity=2, failfast=False): 105 | """Run a specific test module.""" 106 | loader = unittest.TestLoader() 107 | suite = 
loader.loadTestsFromName(test_module)
108 |
109 |     runner = unittest.TextTestRunner(
110 |         verbosity=verbosity,
111 |         failfast=failfast,
112 |         stream=sys.stdout
113 |     )
114 |
115 |     result = runner.run(suite)
116 |     return result
117 |

def run_integration_tests_only(verbosity=2, failfast=False):
    """Run only integration tests."""
    # Added so the --integration-only branch in main() has a backing
    # implementation; mirrors run_unit_tests_only above.
    tests_dir = Path(__file__).parent

    loader = unittest.TestLoader()
    suite = loader.discover(
        start_dir=str(tests_dir / 'integration'),
        pattern='test_*.py',
        top_level_dir=str(tests_dir)
    )

    runner = unittest.TextTestRunner(
        verbosity=verbosity,
        failfast=failfast,
        stream=sys.stdout
    )

    result = runner.run(suite)
    return result

118 | def main():
119 |     """Main test runner function."""
120 |     import argparse
121 |
122 |     parser = argparse.ArgumentParser(description='Run DPAT test suite')
123 |     parser.add_argument(
124 |         '--unit-only',
125 |         action='store_true',
126 |         help='Run only unit tests'
127 |     )
128 |     parser.add_argument(
129 |         '--sample-data-only',
130 |         action='store_true',
131 |         help='Run only sample data integration tests'
132 |     )
    # Added so the integration-only dispatch branch below can be selected.
    parser.add_argument(
        '--integration-only',
        action='store_true',
        help='Run only integration tests'
    )
133 |     parser.add_argument(
134 |         '--module',
135 |         type=str,
136 |         help='Run specific test module (e.g., tests.unit.test_core)'
137 |     )
138 |     parser.add_argument(
139 |         '--verbosity',
140 |         type=int,
141 |         default=2,
142 |         choices=[0, 1, 2],
143 |         help='Test output verbosity (0=quiet, 1=normal, 2=verbose)'
144 |     )
145 |     parser.add_argument(
146 |         '--failfast',
147 |         action='store_true',
148 |         help='Stop on first failure'
149 |     )
150 |     parser.add_argument(
151 |         '--list-tests',
152 |         action='store_true',
153 |         help='List all available tests without running them'
154 |     )
155 |
156 |     args = parser.parse_args()
157 |
158 |     if args.list_tests:
159 |         # List all available tests
160 |         test_suite = discover_tests()
161 |         print(f"Found {test_suite.countTestCases()} test cases:")
162 |
163 |         def print_tests(suite, indent=0):
164 |             for test in suite:
165 |                 if hasattr(test, '_tests'):
166 |                     print_tests(test, indent + 1)
167 |                 else:
168 |                     print(" " * indent + str(test))
169 |
170 |         print_tests(test_suite)
171 |         return 0
172 |
173 |     # Run tests based on arguments
174 |     if args.module:
175 |         result = run_specific_test(args.module, args.verbosity, args.failfast)
176 |     elif args.unit_only:
177 |         result = run_unit_tests_only(args.verbosity, args.failfast)
178 |     elif args.sample_data_only:
179 |         result = run_sample_data_tests_only(args.verbosity, args.failfast)
180 |     elif args.integration_only:
181 |         result = run_integration_tests_only(args.verbosity, args.failfast)
182 |     else:
183 |         result = run_tests(args.verbosity, args.failfast)
184 |
185 |     # Print summary
186 |     print(f"\n{'='*50}")
187 |     print(f"Test Summary:")
188 |     print(f" Tests run: {result.testsRun}")
189 |     print(f" Failures: {len(result.failures)}")
190 |     print(f" Errors: {len(result.errors)}")
191 |     print(f" Skipped: {len(result.skipped) if hasattr(result, 'skipped') else 0}")
192 |
193 |     if result.failures:
194 |         print(f"\nFailures:")
195 |         for test, traceback in result.failures:
196 |             print(f" {test}: {traceback}")
197 |
198 |     if result.errors:
199 |         print(f"\nErrors:")
200 |         for test, traceback in result.errors:
201 |             print(f" {test}: {traceback}")
202 |
203 |     # Return exit code
204 |     return 0 if result.wasSuccessful() else 1
205 |
206 | if __name__ == '__main__':
207 |     sys.exit(main())
208 |
--------------------------------------------------------------------------------
/report.css:
--------------------------------------------------------------------------------
1 | /* ---------- Base / Theme ---------- */
2 | :root {
3 |   --bg: #f9fafb;
4 |   --fg: #1f2937;
5 |   --accent: #2563eb;
6 |   --accent-light: #dbeafe;
7 |   --border: #e5e7eb;
8 |   --border-strong: #9ca3af;
9 |   --row-alt: #f3f4f6;
10 |   --row-hover: #e0e7ff;
11 |   --shadow-1: 0 7px 8px -4px rgba(0,0,0,.1),0 12px 17px 2px rgba(0,0,0,.06),0 5px 22px 4px rgba(0,0,0,.04);
12 |   --font-main: "Inter", "Segoe UI", Roboto, Helvetica, Arial, sans-serif;
13 |
--font-mono: ui-monospace, SFMono-Regular, "SF Mono", Consolas, "Liberation Mono", Menlo, monospace; 14 | } 15 | 16 | @media (prefers-color-scheme: dark) { 17 | :root { 18 | --bg: #111827; 19 | --fg: #e5e7eb; 20 | --accent: #60a5fa; 21 | --accent-light: #1e3a8a; 22 | --border: #374151; 23 | --border-strong: #6b7280; 24 | --row-alt: #1f2937; 25 | --row-hover: #374151; 26 | --shadow-1: 0 7px 8px -4px rgba(0,0,0,.6),0 12px 17px 2px rgba(0,0,0,.4),0 5px 22px 4px rgba(0,0,0,.3); 27 | } 28 | } 29 | 30 | /* Manual dark mode override */ 31 | html.dark-theme { 32 | --bg: #111827; 33 | --fg: #e5e7eb; 34 | --accent: #60a5fa; 35 | --accent-light: #1e3a8a; 36 | --border: #374151; 37 | --border-strong: #6b7280; 38 | --row-alt: #374151; 39 | --row-hover: #4b5563; 40 | --shadow-1: 0 7px 8px -4px rgba(0,0,0,.6),0 12px 17px 2px rgba(0,0,0,.4),0 5px 22px 4px rgba(0,0,0,.3); 41 | } 42 | 43 | /* Manual light mode override */ 44 | html.light-theme { 45 | --bg: #f9fafb; 46 | --fg: #1f2937; 47 | --accent: #2563eb; 48 | --accent-light: #dbeafe; 49 | --border: #e5e7eb; 50 | --border-strong: #9ca3af; 51 | --row-alt: #f3f4f6; 52 | --row-hover: #e0e7ff; 53 | --shadow-1: 0 7px 8px -4px rgba(0,0,0,.1),0 12px 17px 2px rgba(0,0,0,.06),0 5px 22px 4px rgba(0,0,0,.04); 54 | } 55 | 56 | /* ---------- Global ---------- */ 57 | html, body { 58 | margin: 0; 59 | padding: 0; 60 | background: var(--bg); 61 | color: var(--fg); 62 | font-family: var(--font-main); 63 | font-size: 16px; 64 | line-height: 1.4; 65 | } 66 | 67 | body { 68 | padding-top: 76px; /* Account for fixed navbar */ 69 | } 70 | 71 | .main-content { 72 | padding: 32px 16px 120px; 73 | max-width: 1400px; 74 | margin: 0 auto; 75 | } 76 | 77 | /* optional global link style */ 78 | a { 79 | color: var(--accent); 80 | text-decoration: none; 81 | } 82 | a:hover, 83 | a:focus { 84 | text-decoration: underline; 85 | } 86 | 87 | /* spacing breaker instead of
spam */ 88 | .section-space { 89 | height: 32px; 90 | } 91 | 92 | /* ---------- Table styles ---------- */ 93 | .table-wrap { 94 | width: 100%; 95 | overflow-x: auto; 96 | box-shadow: var(--shadow-1); 97 | border-radius: 10px; 98 | background: white; 99 | margin-bottom: 2rem; 100 | } 101 | @media (prefers-color-scheme: dark) { 102 | .table-wrap { background: #1f2937; } 103 | } 104 | html.dark-theme .table-wrap { background: #1f2937; } 105 | html.light-theme .table-wrap { background: white; } 106 | 107 | /* Override Bootstrap table styles for better integration */ 108 | .table-wrap .table { 109 | margin-bottom: 0; 110 | font-size: 0.92rem; 111 | } 112 | 113 | .table-wrap .table caption { 114 | text-align: left; 115 | font-weight: 600; 116 | padding: 12px 16px 8px; 117 | color: var(--fg); 118 | caption-side: top; 119 | } 120 | 121 | .table-wrap .table thead th { 122 | background: var(--accent-light); 123 | color: var(--fg); 124 | border-bottom: 2px solid var(--border-strong); 125 | text-align: center; 126 | font-size: 0.9rem; 127 | font-weight: 600; 128 | } 129 | 130 | .table-wrap .table tbody td { 131 | text-align: center; 132 | vertical-align: middle; 133 | word-break: break-word; 134 | } 135 | 136 | /* Dark mode overrides for Bootstrap tables */ 137 | html.dark-theme .table-wrap .table { 138 | --bs-table-bg: transparent; 139 | --bs-table-striped-bg: var(--row-alt); 140 | --bs-table-hover-bg: var(--row-hover); 141 | --bs-table-border-color: var(--border); 142 | --bs-table-color: var(--fg); 143 | } 144 | 145 | html.dark-theme .table-wrap .table tbody tr:nth-of-type(odd) { 146 | background-color: var(--row-alt) !important; 147 | color: #f3f4f6 !important; 148 | } 149 | 150 | html.dark-theme .table-wrap .table tbody tr:nth-of-type(even) { 151 | background-color: var(--bg) !important; 152 | color: #f3f4f6 !important; 153 | } 154 | 155 | html.dark-theme .table-wrap .table tbody tr:hover { 156 | background-color: var(--row-hover) !important; 157 | color: #f3f4f6 !important; 158 | } 159 | 160 | html.dark-theme .table-wrap .table tbody tr td { 161 | color: #f3f4f6 !important; 162 | } 163 | 164 | html.light-theme .table-wrap .table { 165 | --bs-table-bg: transparent; 166 | --bs-table-striped-bg: var(--row-alt); 167 | --bs-table-hover-bg: var(--row-hover); 168 | --bs-table-border-color: var(--border); 169 | --bs-table-color: var(--fg); 170 | } 171 | 172 | html.light-theme .table-wrap .table tbody tr:nth-of-type(odd) { 173 | background-color: var(--row-alt) !important; 174 | color: var(--fg) !important; 175 | } 176 | 177 | html.light-theme .table-wrap .table tbody tr:nth-of-type(even) { 178 | background-color: var(--bg) !important; 179 | color: var(--fg) !important; 180 | } 181 | 182 | html.light-theme .table-wrap .table tbody tr:hover { 183 | background-color: var(--row-hover) !important; 184 | color: var(--fg) !important; 185 | } 186 | 187 | .text-left { text-align:left; max-width:1400px; margin:0 auto 24px; } 188 | 189 | /* ---------- Pills / badges ---------- */ 190 | .badge { 191 | display: inline-block; 192 | padding: 2px 8px; 193 | border-radius: 9999px; 194 | font-size: 0.75rem; 195 | font-weight: 600; 196 | background: var(--accent-light); 197 | color: var(--accent); 198 | } 199 | .badge-gray { 200 | background: var(--border); 201 | color: var(--fg); 202 | } 203 | 204 | /* ---------- Small utilities ---------- */ 205 | .mono { font-family: var(--font-mono); font-size: 0.8rem; } 206 | .right { text-align: right; } 207 | .left { text-align: left; } 208 | .nowrap { white-space: nowrap; } 209 | 
210 | /* ---------- Bootstrap Integration ---------- */
211 | /* Navbar theme toggle button styling */
212 | #theme-toggle {
213 |   transition: all 0.3s ease;
214 | }
215 |
216 | #theme-toggle:hover {
217 |   transform: scale(1.05);
218 | }
219 |
220 | .theme-toggle-icon {
221 |   transition: transform 0.3s ease;
222 | }
223 |
224 | #theme-toggle:hover .theme-toggle-icon {
225 |   transform: rotate(180deg);
226 | }
227 |
228 | /* DataTables customization */
229 | .dataTables_wrapper {
230 |   padding-top: 1rem;
231 | }
232 |
233 | .dataTables_filter {
234 |   margin-bottom: 1rem;
235 | }
236 |
237 | .dataTables_length {
238 |   margin-bottom: 1rem;
239 | }
240 |
241 | /* DataTables dark mode styling */
242 | html.dark-theme .dataTables_wrapper .dataTables_length,
243 | html.dark-theme .dataTables_wrapper .dataTables_filter,
244 | html.dark-theme .dataTables_wrapper .dataTables_info,
245 | html.dark-theme .dataTables_wrapper .dataTables_paginate {
246 |   color: var(--fg) !important;
247 | }
248 |
249 | html.dark-theme .dataTables_wrapper .dataTables_length select,
250 | html.dark-theme .dataTables_wrapper .dataTables_filter input {
251 |   background-color: var(--bg) !important;
252 |   color: var(--fg) !important;
253 |   border-color: var(--border) !important;
254 | }
255 |
256 | html.dark-theme .dataTables_wrapper .dataTables_paginate .paginate_button {
257 |   background-color: var(--bg) !important;
258 |   color: var(--fg) !important;
259 |   border-color: var(--border) !important;
260 | }
261 |
262 | html.dark-theme .dataTables_wrapper .dataTables_paginate .paginate_button:hover {
263 |   background-color: var(--row-hover) !important;
264 |   color: var(--fg) !important;
265 | }
266 |
267 | html.dark-theme .dataTables_wrapper .dataTables_paginate .paginate_button.current {
268 |   background-color: var(--accent) !important;
269 |   color: white !important;
270 | }
271 |
272 | /* Chart container improvements */
273 | .chart-container {
274 |   background: var(--bg);
275 |   border-radius: 10px;
276 |   padding: 1rem;
277 |   box-shadow: var(--shadow-1);
278 |   margin-bottom: 2rem;
279 | }
280 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # ![DPAT Icon](img/DPAT%20icon.png) Domain Password Audit Tool (DPAT)
2 |
3 | DPAT is a Python-based security tool that analyzes password hashes extracted from Active Directory domain controllers. It generates comprehensive HTML reports showing password usage statistics, policy violations, and security insights from both NTDS dumps and password cracking results.
4 |
5 | ### Sample Reports
6 |
7 | ![DPAT Summary](img/dpat%20summary.png)
8 |
9 | ![Group Statistics](img/group%20statistics.png)
10 |
11 | ![Charts](img/charts.png)
12 |
13 | ## Features
14 |
15 | - **Comprehensive Password Analysis**: Analyze password length distributions, reuse patterns, and policy violations
16 | - **Interactive HTML Reports**: Generate detailed, clickable HTML reports with drill-down capabilities
17 | - **Password History Support**: Analyze password history data when available
18 | - **Group-Based Analysis**: Include group membership statistics for privileged accounts
19 | - **Kerberoastable Account Analysis**: Identify and analyze cracked Kerberoastable service accounts
20 | - **LM Hash Cracking**: Automatically attempt to crack NT hashes from partially cracked LM hashes
21 | - **Data Sanitization**: Option to sanitize sensitive data in reports for sharing
22 | - **Multiple Input Formats**: Support for Hashcat, John the Ripper, and other password cracking tools
23 |
24 | ## Requirements
25 |
26 | - Python 3.8 or higher
27 | - pycryptodome (for MD4 hash support)
28 |
29 | ## Installation
30 |
31 | ### Basic Installation
32 |
33 | **Install core dependencies:**
34 | ```bash
35 | pip install -r requirements.txt
36 | ```
37 |
38 | **For development and testing:**
39 | ```bash
40 | pip install -r requirements-dev.txt
41 | ```
42 |
43 | ### Manual Installation
44 |
45 | If you prefer to install dependencies individually:
46 | ```bash
47 | pip install pycryptodome
48 | ```
49 |
50 | ### Using Poetry
51 | ```bash
52 | poetry install
53 | ```
54 |
55 | ## Quick Start
56 |
57 | 1. **Extract NTDS data** from your domain controller using secretsdump.py
58 | 2. **Crack passwords** using Hashcat, John the Ripper, or similar tools
59 | 3. **Run DPAT** to generate the analysis report
60 |
61 | ```bash
62 | python dpat.py -n customer.ntds -c hashcat.potfile -p 8
63 | ```
64 |
65 | ## Usage
66 |
67 | ### Basic Command
68 | ```bash
69 | python dpat.py -n <ntds_file> -c <cracked_file> -p <min_password_length>
70 | ```
71 |
72 | ### Required Parameters
73 | - `-n, --ntdsfile`: NTDS file (output from secretsdump.py)
74 | - `-c, --crackfile`: Password cracking output file (hashcat.potfile, john.pot, etc.)
75 | - `-p, --minpasslen`: Minimum password length from domain policy
76 |
77 | ### Optional Parameters
78 | - `-o, --outputfile`: HTML report filename (default: _DomainPasswordAuditReport.html)
79 | - `-d, --reportdirectory`: Output directory (default: "DPAT Report")
80 | - `-s, --sanitize`: Sanitize passwords and hashes in reports
81 | - `-g, --groupsdirectory`: Directory containing group membership files
82 | - `-m, --machineaccts`: Include machine accounts in analysis
83 | - `-k, --krbtgt`: Include krbtgt account in analysis
84 | - `-kz, --kerbfile`: File containing Kerberoastable accounts
85 | - `-w, --writedb`: Write SQLite database to disk for inspection
86 | - `--no-prompt`: Skip browser prompt (useful for automation)
87 | - `-dbg, --debug`: Enable debug output
88 |
89 | ### Example Commands
90 |
91 | **Basic analysis:**
92 | ```bash
93 | python dpat.py -n customer.ntds -c hashcat.potfile -p 8
94 | ```
95 |
96 | **With group analysis:**
97 | ```bash
98 | python dpat.py -n customer.ntds -c hashcat.potfile -p 8 -g /path/to/groups
99 | ```
100 |
101 | **Sanitized report:**
102 | ```bash
103 | python dpat.py -n customer.ntds -c hashcat.potfile -p 8 -s
104 | ```
105 |
106 | **With Kerberoastable accounts:**
107 | ```bash
108 | python dpat.py -n customer.ntds -c hashcat.potfile -p 8 -kz kerberoastable.txt
109 | ```
110 |
111 | ## Input File Formats
112 |
113 | ### NTDS File Format
114 | The NTDS file should be in the format output by secretsdump.py:
115 | ```
116 | domain\username:RID:lmhash:nthash:::
117 | ```
118 |
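Each record is a plain colon-separated line, so it can be split directly. The sketch below is illustrative only (the helper and field names are not DPAT's internal code); it uses a record from `sample_data/umlauts/customer.ntds`:

```python
def parse_ntds_line(line: str) -> dict:
    """Split one secretsdump-style record into its fields (illustrative only)."""
    fields = line.strip().split(":")
    domain_user, rid, lm_hash, nt_hash = fields[0], fields[1], fields[2], fields[3]
    domain, _, username = domain_user.rpartition("\\")
    return {
        "domain": domain,          # empty for local accounts such as "Administrator"
        "username": username,
        "rid": int(rid),
        "lm_hash": lm_hash.lower(),
        "nt_hash": nt_hash.lower(),
    }

# Example record from sample_data/umlauts/customer.ntds
record = parse_ntds_line(
    r"dpatdomain.local\larry:1603:aad3b435b51404eeaad3b435b51404ee:ecd382f6949d712f7f81982242755cc3:::"
)
assert record["username"] == "larry" and record["rid"] == 1603
```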
119 | ### Cracked Password File Formats
120 |
121 | **Hashcat format:**
122 | ```
123 | nthash:password
124 | lmhash:password
125 | ```
126 |
127 | **John the Ripper format:**
128 | ```
129 | $NT$nthash:password
130 | $LM$lmhash:password
131 | ```
132 |
133 | **Hex encoded passwords:**
134 | ```
135 | nthash:$HEX[68656c6c6f]
136 | ```
137 |
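`$HEX[...]` entries appear when the plaintext contains bytes outside printable ASCII; the umlauts sample data, for example, maps `$HEX[4be4747a6c65]` to `Kätzle`. A minimal sketch of one way such entries can be unwrapped — the cp1252 encoding here is an assumption, and DPAT's own handling may differ:

```python
import re

# Accept both "$HEX[...]" and the lowercase "$hex[...]" seen in the sample potfiles.
HEX_WRAPPER = re.compile(r"^\$HEX\[([0-9a-f]+)\]$", re.IGNORECASE)

def unwrap_potfile_plaintext(plain: str, encoding: str = "cp1252") -> str:
    """Return the decoded password if the field is $HEX[...]-wrapped, else return it unchanged."""
    match = HEX_WRAPPER.match(plain)
    if not match:
        return plain
    return bytes.fromhex(match.group(1)).decode(encoding, errors="replace")

print(unwrap_potfile_plaintext("$HEX[4be4747a6c65]"))  # Kätzle
print(unwrap_potfile_plaintext("Fall2019"))            # Fall2019 (unchanged)
```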
75 | - `-p, --minpasslen`: Minimum password length from domain policy 76 | 77 | ### Optional Parameters 78 | - `-o, --outputfile`: HTML report filename (default: _DomainPasswordAuditReport.html) 79 | - `-d, --reportdirectory`: Output directory (default: "DPAT Report") 80 | - `-s, --sanitize`: Sanitize passwords and hashes in reports 81 | - `-g, --groupsdirectory`: Directory containing group membership files 82 | - `-m, --machineaccts`: Include machine accounts in analysis 83 | - `-k, --krbtgt`: Include krbtgt account in analysis 84 | - `-kz, --kerbfile`: File containing Kerberoastable accounts 85 | - `-w, --writedb`: Write SQLite database to disk for inspection 86 | - `--no-prompt`: Skip browser prompt (useful for automation) 87 | - `-dbg, --debug`: Enable debug output 88 | 89 | ### Example Commands 90 | 91 | **Basic analysis:** 92 | ```bash 93 | python dpat.py -n customer.ntds -c hashcat.potfile -p 8 94 | ``` 95 | 96 | **With group analysis:** 97 | ```bash 98 | python dpat.py -n customer.ntds -c hashcat.potfile -p 8 -g /path/to/groups 99 | ``` 100 | 101 | **Sanitized report:** 102 | ```bash 103 | python dpat.py -n customer.ntds -c hashcat.potfile -p 8 -s 104 | ``` 105 | 106 | **With Kerberoastable accounts:** 107 | ```bash 108 | python dpat.py -n customer.ntds -c hashcat.potfile -p 8 -kz kerberoastable.txt 109 | ``` 110 | 111 | ## Input File Formats 112 | 113 | ### NTDS File Format 114 | The NTDS file should be in the format output by secretsdump.py: 115 | ``` 116 | domain\username:RID:lmhash:nthash::: 117 | ``` 118 | 119 | ### Cracked Password File Formats 120 | 121 | **Hashcat format:** 122 | ``` 123 | nthash:password 124 | lmhash:password 125 | ``` 126 | 127 | **John the Ripper format:** 128 | ``` 129 | $NT$nthash:password 130 | $LM$lmhash:password 131 | ``` 132 | 133 | **Hex encoded passwords:** 134 | ``` 135 | nthash:$HEX[68656c6c6f] 136 | ``` 137 | 138 | ### Group Files Format 139 | Group membership files should contain one username per line: 140 | ``` 141 | domain\username 142 | ``` 143 | 144 | ## Data Extraction 145 | 146 | ### Step 1: Extract NTDS Data 147 | On a domain controller, create an IFM (Install From Media) backup: 148 | ```cmd 149 | ntdsutil "ac in ntds" "ifm" "cr fu c:\temp" q q 150 | ``` 151 | 152 | This creates: 153 | - `c:\temp\Active Directory\ntds.dit` 154 | - `c:\temp\registry\SYSTEM` 155 | 156 | ### Step 2: Convert to DPAT Format 157 | Use secretsdump.py to convert the backup: 158 | ```bash 159 | secretsdump.py -system registry/SYSTEM -ntds "Active Directory/ntds.dit" LOCAL -outputfile customer 160 | ``` 161 | 162 | For password history (if available): 163 | ```bash 164 | secretsdump.py -system registry/SYSTEM -ntds "Active Directory/ntds.dit" LOCAL -outputfile customer -history 165 | ``` 166 | 167 | ### Step 3: Crack Passwords 168 | **Using Hashcat:** 169 | ```bash 170 | hashcat -m 1000 customer.ntds /path/to/wordlist 171 | ``` 172 | 173 | **Using John the Ripper:** 174 | ```bash 175 | john --format=NT customer.ntds 176 | ``` 177 | 178 | ## Report Sections 179 | 180 | DPAT generates comprehensive reports including: 181 | 182 | - **Password Statistics**: Total hashes, unique hashes, cracked passwords 183 | - **Password Policy Violations**: Passwords shorter than policy minimum 184 | - **Username/Password Matches**: Accounts using username as password 185 | - **LM Hash Analysis**: Non-blank LM hashes and cracking statistics 186 | - **Password Length Distribution**: Detailed length analysis with drill-down 187 | - **Password Reuse**: Shared passwords across 
multiple accounts
188 | - **Top Passwords**: Most commonly used passwords
189 | - **Password History**: Historical password analysis (when available)
190 | - **Group Statistics**: Privileged group analysis (when group files provided)
191 | - **Kerberoastable Accounts**: Service account analysis (when provided)
192 | 
193 | 
194 | ## Sample Data
195 | 
196 | The repository includes sample data in the `sample_data/` directory for testing:
197 | - `customer.ntds`: Sample NTDS file
198 | - `oclHashcat.pot`: Sample cracked passwords
199 | - `history/`: Sample data with password history
200 | 
201 | Test with sample data:
202 | ```bash
203 | python dpat.py -n sample_data/customer.ntds -c sample_data/oclHashcat.pot -p 8
204 | ```
205 | 
206 | ## Advanced Features
207 | 
208 | ### Group Analysis
209 | Create group membership files using PowerShell:
210 | ```powershell
211 | Get-NetGroupMember -Recurse -GroupName "Domain Admins" > "Domain Admins.txt"
212 | ```
213 | 
214 | ### Kerberoastable Account Analysis
215 | Use CypherHound's parse-kerberoastable.py script to generate Kerberoastable account files for enhanced analysis.
216 | 
217 | ### CypherHound Integration
218 | You can feed files generated by [CypherHound](https://github.com/fin3ss3g0d/cypherhound) into DPAT to add more statistics to its reports. Currently, the extra statistics are:
219 | - Group cracking statistics for every group in a given domain
220 | - Cracked kerberoastable accounts
221 | 
222 | To produce these extra statistics, use the following scripts:
223 | - [parse-memberships.py](https://github.com/fin3ss3g0d/cypherhound/tree/main#scriptsdpatparse-membershipspy). Pass the output directory to DPAT with the `-g` flag.
224 | - [parse-kerberoastable.py](https://github.com/fin3ss3g0d/cypherhound/tree/main#scriptsdpatparse-kerberoastablepy). Pass the output file to DPAT with the `-kz` flag.
225 | 
226 | Both are simple scripts, and their usage is documented at the links above.
227 | 
228 | ### Data Sanitization
229 | Use the `-s` flag to create sanitized reports suitable for sharing:
230 | ```bash
231 | python dpat.py -n customer.ntds -c hashcat.potfile -p 8 -s
232 | ```
233 | 
234 | ## Troubleshooting
235 | 
236 | ### Common Issues
237 | 
238 | **"No module named 'distutils'"**
239 | - This is expected on Python 3.12+. DPAT handles this automatically.
240 | 
241 | **Empty reports**
242 | - Verify your NTDS file format matches the expected format
243 | - Check that your cracked password file contains matching hashes
244 | - Ensure minimum password length parameter is correct
245 | 
246 | **Missing details links**
247 | - Some report sections may not have detail pages if no data is found
248 | - This is normal behavior for empty categories
249 | 
250 | ### Debug Mode
251 | Enable debug output for troubleshooting:
252 | ```bash
253 | python dpat.py -n customer.ntds -c hashcat.potfile -p 8 -dbg
254 | ```
255 | 
256 | ## Testing
257 | 
258 | Run the test suite:
259 | ```bash
260 | python -m unittest tests.unit.test_core -v
261 | python -m unittest tests.integration.test_integration -v
262 | ```
263 | 
264 | ## Contributing
265 | 
266 | 1. Fork the repository
267 | 2. Create a feature branch
268 | 3. Make your changes
269 | 4. Add tests for new functionality
270 | 5. Submit a pull request
271 | 
272 | ## License
273 | 
274 | This project is licensed under the MIT License - see the LICENSE file for details.
275 | 276 | ## Credits 277 | 278 | - **Carrie Roberts** - Original author 279 | - **Dylan Evans** - Contributor 280 | 281 | ## Originally Sponsored by: 282 | 283 | [![Black Hills Information Security](https://www.blackhillsinfosec.com/wp-content/uploads/2018/12/BHIS-logo-L-1024x1024-221x221.png)](http://www.blackhillsinfosec.com) 284 | 285 | ## Support 286 | 287 | For issues, questions, or contributions, please use the GitHub issue tracker. -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Test configuration and setup for DPAT test suite. 3 | 4 | This module provides common test utilities, fixtures, and configuration 5 | for testing the Domain Password Audit Tool. 6 | """ 7 | 8 | import os 9 | import tempfile 10 | import unittest 11 | from pathlib import Path 12 | from typing import Dict, List, Optional 13 | import sqlite3 14 | import logging 15 | 16 | # Configure test logging 17 | logging.basicConfig(level=logging.WARNING) # Reduce noise during tests 18 | 19 | 20 | class TestConfig: 21 | """Test configuration and utilities.""" 22 | 23 | def __init__(self): 24 | self.test_dir = Path(__file__).parent 25 | self.fixtures_dir = self.test_dir / "fixtures" 26 | self.temp_dir = None 27 | 28 | def setup_temp_dir(self) -> Path: 29 | """Create a temporary directory for test files.""" 30 | self.temp_dir = Path(tempfile.mkdtemp(prefix="dpat_test_")) 31 | return self.temp_dir 32 | 33 | def cleanup_temp_dir(self): 34 | """Clean up temporary directory.""" 35 | if self.temp_dir and self.temp_dir.exists(): 36 | import shutil 37 | shutil.rmtree(self.temp_dir) 38 | self.temp_dir = None 39 | 40 | def get_fixture_path(self, filename: str) -> Path: 41 | """Get path to a test fixture file.""" 42 | return self.fixtures_dir / filename 43 | 44 | 45 | class TestDataGenerator: 46 | """Generate test data for DPAT testing.""" 47 | 48 | @staticmethod 49 | def create_sample_ntds_data() -> List[str]: 50 | """Create sample NTDS data for testing.""" 51 | return [ 52 | "DOMAIN\\user1:1001:aad3b435b51404eeaad3b435b51404ee:31d6cfe0d16ae931b73c59d7e0c089c0::::", 53 | "DOMAIN\\user2:1002:aad3b435b51404eeaad3b435b51404ee:5d41402abc4b2a76b9719d911017c592::::", 54 | "DOMAIN\\admin:1003:aad3b435b51404eeaad3b435b51404ee:098f6bcd4621d373cade4e832627b4f6::::", 55 | "DOMAIN\\machine$:1004:aad3b435b51404eeaad3b435b51404ee:31d6cfe0d16ae931b73c59d7e0c089c0::::", 56 | "DOMAIN\\krbtgt:1005:aad3b435b51404eeaad3b435b51404ee:31d6cfe0d16ae931b73c59d7e0c089c0::::", 57 | "DOMAIN\\user1_history0:1006:aad3b435b51404eeaad3b435b51404ee:31d6cfe0d16ae931b73c59d7e0c089c0::::", 58 | "DOMAIN\\user1_history1:1007:aad3b435b51404eeaad3b435b51404ee:5d41402abc4b2a76b9719d911017c592::::", 59 | ] 60 | 61 | @staticmethod 62 | def create_sample_cracked_data() -> List[str]: 63 | """Create sample cracked password data for testing.""" 64 | return [ 65 | "31d6cfe0d16ae931b73c59d7e0c089c0:", 66 | "5d41402abc4b2a76b9719d911017c592:hello", 67 | "098f6bcd4621d373cade4e832627b4f6:admin123", 68 | "aad3b435b51404eeaad3b435b51404ee:password", 69 | ] 70 | 71 | @staticmethod 72 | def create_sample_group_data() -> Dict[str, List[str]]: 73 | """Create sample group membership data for testing.""" 74 | return { 75 | "Domain Admins": [ 76 | "DOMAIN\\admin", 77 | "DOMAIN\\admin2" 78 | ], 79 | "Enterprise Admins": [ 80 | "DOMAIN\\admin", 81 | "DOMAIN\\superadmin" 82 | ], 83 | "Regular Users": [ 84 | "DOMAIN\\user1", 85 | "DOMAIN\\user2", 86 | 
"DOMAIN\\user3" 87 | ] 88 | } 89 | 90 | @staticmethod 91 | def create_sample_kerberoast_data() -> List[str]: 92 | """Create sample Kerberoast data for testing.""" 93 | return [ 94 | "DOMAIN\\service1:1008:aad3b435b51404eeaad3b435b51404ee:31d6cfe0d16ae931b73c59d7e0c089c0::::", 95 | "DOMAIN\\service2:1009:aad3b435b51404eeaad3b435b51404ee:5d41402abc4b2a76b9719d911017c592::::", 96 | ] 97 | 98 | 99 | class DatabaseTestHelper: 100 | """Helper class for database testing.""" 101 | 102 | @staticmethod 103 | def create_test_database(db_path: str, group_names: Optional[List[str]] = None) -> sqlite3.Connection: 104 | """Create a test database with the DPAT schema.""" 105 | conn = sqlite3.connect(db_path) 106 | cursor = conn.cursor() 107 | 108 | # Create main table 109 | cursor.execute(''' 110 | CREATE TABLE hash_infos ( 111 | username_full text collate nocase, 112 | username text collate nocase, 113 | lm_hash text, 114 | lm_hash_left text, 115 | lm_hash_right text, 116 | nt_hash text, 117 | password text, 118 | lm_pass_left text, 119 | lm_pass_right text, 120 | only_lm_cracked boolean, 121 | history_index int, 122 | history_base_username text 123 | ) 124 | ''') 125 | 126 | # Create indexes 127 | indexes = [ 128 | "CREATE INDEX index_nt_hash ON hash_infos (nt_hash)", 129 | "CREATE INDEX index_lm_hash_left ON hash_infos (lm_hash_left)", 130 | "CREATE INDEX index_lm_hash_right ON hash_infos (lm_hash_right)", 131 | "CREATE INDEX lm_hash ON hash_infos (lm_hash)", 132 | "CREATE INDEX username ON hash_infos (username)" 133 | ] 134 | 135 | for index_sql in indexes: 136 | cursor.execute(index_sql) 137 | 138 | # Create group columns 139 | if group_names: 140 | for group_name in group_names: 141 | sql = f'ALTER TABLE hash_infos ADD COLUMN "{group_name}" boolean' 142 | cursor.execute(sql) 143 | 144 | conn.commit() 145 | return conn 146 | 147 | @staticmethod 148 | def populate_test_database(conn: sqlite3.Connection, test_data: List[Dict]): 149 | """Populate test database with sample data.""" 150 | cursor = conn.cursor() 151 | 152 | for row in test_data: 153 | cursor.execute(''' 154 | INSERT INTO hash_infos 155 | (username_full, username, lm_hash, lm_hash_left, lm_hash_right, 156 | nt_hash, password, history_index, history_base_username) 157 | VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?) 
158 | ''', ( 159 | row.get('username_full', ''), 160 | row.get('username', ''), 161 | row.get('lm_hash', ''), 162 | row.get('lm_hash_left', ''), 163 | row.get('lm_hash_right', ''), 164 | row.get('nt_hash', ''), 165 | row.get('password', ''), 166 | row.get('history_index', -1), 167 | row.get('history_base_username', '') 168 | )) 169 | 170 | conn.commit() 171 | 172 | 173 | class TestFileManager: 174 | """Manage test files and cleanup.""" 175 | 176 | def __init__(self, temp_dir: Path): 177 | self.temp_dir = temp_dir 178 | self.created_files = [] 179 | 180 | def create_file(self, filename: str, content: List[str]) -> Path: 181 | """Create a test file with content.""" 182 | file_path = self.temp_dir / filename 183 | with open(file_path, 'w', encoding='utf-8') as f: 184 | for line in content: 185 | f.write(line + '\n') 186 | self.created_files.append(file_path) 187 | return file_path 188 | 189 | def create_group_files(self, group_data: Dict[str, List[str]]) -> Dict[str, Path]: 190 | """Create group membership files.""" 191 | group_files = {} 192 | for group_name, members in group_data.items(): 193 | filename = f"{group_name}.txt" 194 | file_path = self.create_file(filename, members) 195 | group_files[group_name] = file_path 196 | return group_files 197 | 198 | def cleanup(self): 199 | """Clean up all created files.""" 200 | for file_path in self.created_files: 201 | if file_path.exists(): 202 | file_path.unlink() 203 | 204 | 205 | class DPATTestCase(unittest.TestCase): 206 | """Base test case class for DPAT tests.""" 207 | 208 | def setUp(self): 209 | """Set up test environment.""" 210 | self.test_config = TestConfig() 211 | self.temp_dir = self.test_config.setup_temp_dir() 212 | self.file_manager = TestFileManager(self.temp_dir) 213 | self.data_generator = TestDataGenerator() 214 | 215 | def tearDown(self): 216 | """Clean up test environment.""" 217 | self.file_manager.cleanup() 218 | self.test_config.cleanup_temp_dir() 219 | 220 | def assert_file_exists(self, file_path: Path, msg: str = None): 221 | """Assert that a file exists.""" 222 | self.assertTrue(file_path.exists(), msg or f"File {file_path} does not exist") 223 | 224 | def assert_file_contains(self, file_path: Path, content: str, msg: str = None): 225 | """Assert that a file contains specific content.""" 226 | self.assert_file_exists(file_path) 227 | with open(file_path, 'r', encoding='utf-8') as f: 228 | file_content = f.read() 229 | self.assertIn(content, file_content, msg or f"File {file_path} does not contain '{content}'") 230 | 231 | def assert_database_has_records(self, conn: sqlite3.Connection, table: str, count: int, msg: str = None): 232 | """Assert that a database table has a specific number of records.""" 233 | cursor = conn.cursor() 234 | cursor.execute(f"SELECT COUNT(*) FROM {table}") 235 | actual_count = cursor.fetchone()[0] 236 | self.assertEqual(actual_count, count, msg or f"Expected {count} records in {table}, got {actual_count}") 237 | 238 | 239 | # Test fixtures and sample data 240 | SAMPLE_NTDS_DATA = TestDataGenerator.create_sample_ntds_data() 241 | SAMPLE_CRACKED_DATA = TestDataGenerator.create_sample_cracked_data() 242 | SAMPLE_GROUP_DATA = TestDataGenerator.create_sample_group_data() 243 | SAMPLE_KERBEROAST_DATA = TestDataGenerator.create_sample_kerberoast_data() 244 | -------------------------------------------------------------------------------- /tests/fixtures/test_data.py: -------------------------------------------------------------------------------- 1 | """ 2 | Test fixtures and sample data for DPAT 
testing. 3 | 4 | This module provides realistic test data that mimics real-world NTDS dumps 5 | and password cracking results for comprehensive testing. 6 | """ 7 | 8 | from pathlib import Path 9 | from typing import List, Dict 10 | 11 | # Sample NTDS data (pwdump format) 12 | SAMPLE_NTDS_DATA = [ 13 | # Regular user accounts 14 | "DOMAIN\\john.doe:1001:aad3b435b51404eeaad3b435b51404ee:31d6cfe0d16ae931b73c59d7e0c089c0::::", 15 | "DOMAIN\\jane.smith:1002:aad3b435b51404eeaad3b435b51404ee:5d41402abc4b2a76b9719d911017c592::::", 16 | "DOMAIN\\admin.user:1003:aad3b435b51404eeaad3b435b51404ee:098f6bcd4621d373cade4e832627b4f6::::", 17 | "DOMAIN\\service.account:1004:aad3b435b51404eeaad3b435b51404ee:31d6cfe0d16ae931b73c59d7e0c089c0::::", 18 | 19 | # Machine accounts (should be filtered by default) 20 | "DOMAIN\\WORKSTATION01$:1005:aad3b435b51404eeaad3b435b51404ee:31d6cfe0d16ae931b73c59d7e0c089c0::::", 21 | "DOMAIN\\SERVER01$:1006:aad3b435b51404eeaad3b435b51404ee:31d6cfe0d16ae931b73c59d7e0c089c0::::", 22 | 23 | # krbtgt account (should be filtered by default) 24 | "DOMAIN\\krbtgt:1007:aad3b435b51404eeaad3b435b51404ee:31d6cfe0d16ae931b73c59d7e0c089c0::::", 25 | 26 | # Password history entries 27 | "DOMAIN\\john.doe_history0:1008:aad3b435b51404eeaad3b435b51404ee:31d6cfe0d16ae931b73c59d7e0c089c0::::", 28 | "DOMAIN\\john.doe_history1:1009:aad3b435b51404eeaad3b435b51404ee:5d41402abc4b2a76b9719d911017c592::::", 29 | "DOMAIN\\jane.smith_history0:1010:aad3b435b51404eeaad3b435b51404ee:31d6cfe0d16ae931b73c59d7e0c089c0::::", 30 | 31 | # Empty password hashes 32 | "DOMAIN\\empty.user:1011:aad3b435b51404eeaad3b435b51404ee:31d6cfe0d16ae931b73c59d7e0c089c0::::", 33 | 34 | # LM hash disabled (empty LM hash) 35 | "DOMAIN\\lm.disabled:1012::31d6cfe0d16ae931b73c59d7e0c089c0::::", 36 | 37 | # Different domain 38 | "OTHERDOMAIN\\user1:1013:aad3b435b51404eeaad3b435b51404ee:31d6cfe0d16ae931b73c59d7e0c089c0::::", 39 | ] 40 | 41 | # Sample cracked password data (hashcat.potfile format) 42 | SAMPLE_CRACKED_DATA = [ 43 | # NT hashes 44 | "31d6cfe0d16ae931b73c59d7e0c089c0:", 45 | "5d41402abc4b2a76b9719d911017c592:hello", 46 | "098f6bcd4621d373cade4e832627b4f6:admin123", 47 | "aad3b435b51404eeaad3b435b51404ee:password", 48 | 49 | # LM hashes 50 | "aad3b435b51404ee:password", 51 | "aad3b435b51404ee:Password", 52 | "aad3b435b51404ee:PASSWORD", 53 | 54 | # Hex encoded passwords 55 | "31d6cfe0d16ae931b73c59d7e0c089c0:$HEX[68656c6c6f]", 56 | "5d41402abc4b2a76b9719d911017c592:$HEX[61646d696e313233]", 57 | 58 | # Empty passwords 59 | "31d6cfe0d16ae931b73c59d7e0c089c0:", 60 | 61 | # Special characters 62 | "098f6bcd4621d373cade4e832627b4f6:pass@word123!", 63 | "31d6cfe0d16ae931b73c59d7e0c089c0:test\\user", 64 | ] 65 | 66 | # Sample group membership data 67 | SAMPLE_GROUP_DATA = { 68 | "Domain Admins": [ 69 | "DOMAIN\\admin.user", 70 | "DOMAIN\\super.admin", 71 | "DOMAIN\\admin.user-admin", # Elevated account 72 | ], 73 | "Enterprise Admins": [ 74 | "DOMAIN\\admin.user", 75 | "DOMAIN\\enterprise.admin", 76 | "DOMAIN\\admin.user-admin", # Elevated account 77 | ], 78 | "Regular Users": [ 79 | "DOMAIN\\john.doe", 80 | "DOMAIN\\jane.smith", 81 | "DOMAIN\\regular.user", 82 | "DOMAIN\\john.doe-admin", # Elevated account 83 | ], 84 | "Service Accounts": [ 85 | "DOMAIN\\service.account", 86 | "DOMAIN\\sql.service", 87 | "DOMAIN\\web.service", 88 | ], 89 | "Power Users": [ 90 | "DOMAIN\\power.user1", 91 | "DOMAIN\\power.user2", 92 | "DOMAIN\\power.user1-admin", # Elevated account 93 | ] 94 | } 95 | 96 | # Sample Kerberoast data 97 | 
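# Entries below follow the same pwdump layout as SAMPLE_NTDS_DATA above (domain\user:RID:LM hash:NT hash::::).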
SAMPLE_KERBEROAST_DATA = [ 98 | "DOMAIN\\sql.service:1008:aad3b435b51404eeaad3b435b51404ee:31d6cfe0d16ae931b73c59d7e0c089c0::::", 99 | "DOMAIN\\web.service:1009:aad3b435b51404eeaad3b435b51404ee:5d41402abc4b2a76b9719d911017c592::::", 100 | "DOMAIN\\exchange.service:1010:aad3b435b51404eeaad3b435b51404ee:098f6bcd4621d373cade4e832627b4f6::::", 101 | "DOMAIN\\ldap.service:1011:aad3b435b51404eeaad3b435b51404ee:31d6cfe0d16ae931b73c59d7e0c089c0::::", 102 | ] 103 | 104 | # Sample account status data 105 | SAMPLE_ACCOUNT_STATUS_DATA = [ 106 | "DOMAIN\\john.doe:enabled", 107 | "DOMAIN\\jane.smith:enabled", 108 | "DOMAIN\\admin.user:enabled", 109 | "DOMAIN\\service.account:enabled", 110 | "DOMAIN\\disabled.user:disabled", 111 | "DOMAIN\\locked.user:disabled", 112 | "DOMAIN\\expired.user:disabled", 113 | ] 114 | 115 | # Complex test scenarios 116 | COMPLEX_NTDS_DATA = [ 117 | # Multiple domains 118 | "DOMAIN1\\user1:1001:aad3b435b51404eeaad3b435b51404ee:31d6cfe0d16ae931b73c59d7e0c089c0::::", 119 | "DOMAIN2\\user2:1002:aad3b435b51404eeaad3b435b51404ee:5d41402abc4b2a76b9719d911017c592::::", 120 | 121 | # Long usernames 122 | "DOMAIN\\very.long.username.that.exceeds.normal.length:1003:aad3b435b51404eeaad3b435b51404ee:31d6cfe0d16ae931b73c59d7e0c089c0::::", 123 | 124 | # Special characters in usernames 125 | "DOMAIN\\user-with-dashes:1004:aad3b435b51404eeaad3b435b51404ee:31d6cfe0d16ae931b73c59d7e0c089c0::::", 126 | "DOMAIN\\user_with_underscores:1005:aad3b435b51404eeaad3b435b51404ee:31d6cfe0d16ae931b73c59d7e0c089c0::::", 127 | "DOMAIN\\user.with.dots:1006:aad3b435b51404eeaad3b435b51404ee:31d6cfe0d16ae931b73c59d7e0c089c0::::", 128 | 129 | # Email format usernames 130 | "DOMAIN\\user@domain.com:1007:aad3b435b51404eeaad3b435b51404ee:31d6cfe0d16ae931b73c59d7e0c089c0::::", 131 | 132 | # Unicode characters (if supported) 133 | "DOMAIN\\café:1008:aad3b435b51404eeaad3b435b51404ee:31d6cfe0d16ae931b73c59d7e0c089c0::::", 134 | ] 135 | 136 | # Edge case test data 137 | EDGE_CASE_DATA = [ 138 | # Empty lines 139 | "", 140 | " ", 141 | 142 | # Invalid formats 143 | "invalid line", 144 | "user:hash", 145 | "user:rid:lm:nt:extra:fields:too:many", 146 | 147 | # Malformed hashes 148 | "DOMAIN\\user1:1001:invalid_lm_hash:invalid_nt_hash::::", 149 | "DOMAIN\\user2:1002:aad3b435b51404eeaad3b435b51404ee:invalid_nt_hash::::", 150 | 151 | # Very long lines 152 | "DOMAIN\\user1:1001:aad3b435b51404eeaad3b435b51404ee:31d6cfe0d16ae931b73c59d7e0c089c0::::" + "x" * 1000, 153 | 154 | # Special characters in hashes 155 | "DOMAIN\\user1:1001:aad3b435b51404eeaad3b435b51404ee:31d6cfe0d16ae931b73c59d7e0c089c0::::", 156 | ] 157 | 158 | # Performance test data (large datasets) 159 | def generate_large_ntds_data(count: int = 1000) -> List[str]: 160 | """Generate large NTDS dataset for performance testing.""" 161 | data = [] 162 | for i in range(count): 163 | username = f"DOMAIN\\user{i:04d}" 164 | rid = 1000 + i 165 | lm_hash = "aad3b435b51404eeaad3b435b51404ee" 166 | nt_hash = f"31d6cfe0d16ae931b73c59d7e0c089c{i:02d}" 167 | data.append(f"{username}:{rid}:{lm_hash}:{nt_hash}::::") 168 | return data 169 | 170 | def generate_large_cracked_data(count: int = 500) -> List[str]: 171 | """Generate large cracked password dataset for performance testing.""" 172 | data = [] 173 | for i in range(count): 174 | nt_hash = f"31d6cfe0d16ae931b73c59d7e0c089c{i:02d}" 175 | password = f"password{i:03d}" 176 | data.append(f"{nt_hash}:{password}") 177 | return data 178 | 179 | # Test file creation utilities 180 | def create_test_files(temp_dir: Path) -> Dict[str, 
Path]: 181 | """Create all test files in a temporary directory.""" 182 | files = {} 183 | 184 | # Create NTDS file 185 | ntds_file = temp_dir / "test.ntds" 186 | with open(ntds_file, 'w', encoding='utf-8') as f: 187 | for line in SAMPLE_NTDS_DATA: 188 | f.write(line + '\n') 189 | files['ntds'] = ntds_file 190 | 191 | # Create cracked file 192 | cracked_file = temp_dir / "test.pot" 193 | with open(cracked_file, 'w', encoding='utf-8') as f: 194 | for line in SAMPLE_CRACKED_DATA: 195 | f.write(line + '\n') 196 | files['cracked'] = cracked_file 197 | 198 | # Create group files 199 | group_dir = temp_dir / "groups" 200 | group_dir.mkdir(exist_ok=True) 201 | 202 | for group_name, members in SAMPLE_GROUP_DATA.items(): 203 | group_file = group_dir / f"{group_name}.txt" 204 | with open(group_file, 'w', encoding='utf-8') as f: 205 | for member in members: 206 | f.write(member + '\n') 207 | files[f'group_{group_name}'] = group_file 208 | 209 | # Create Kerberoast file 210 | kerberoast_file = temp_dir / "kerberoast.txt" 211 | with open(kerberoast_file, 'w', encoding='utf-8') as f: 212 | for line in SAMPLE_KERBEROAST_DATA: 213 | f.write(line + '\n') 214 | files['kerberoast'] = kerberoast_file 215 | 216 | # Create account status file 217 | status_file = temp_dir / "account_status.txt" 218 | with open(status_file, 'w', encoding='utf-8') as f: 219 | for line in SAMPLE_ACCOUNT_STATUS_DATA: 220 | f.write(line + '\n') 221 | files['account_status'] = status_file 222 | 223 | return files 224 | 225 | # Expected test results 226 | EXPECTED_RESULTS = { 227 | 'total_accounts': 4, # Regular users only (excluding machine accounts and krbtgt) 228 | 'cracked_accounts': 3, # Based on cracked data 229 | 'cracked_percentage': 75.0, # 3/4 * 100 230 | 'groups_count': 5, # Number of groups 231 | 'group_members': { 232 | 'Domain Admins': 3, 233 | 'Enterprise Admins': 3, 234 | 'Regular Users': 4, 235 | 'Service Accounts': 3, 236 | 'Power Users': 3 237 | } 238 | } 239 | 240 | # Test configuration templates 241 | TEST_CONFIGS = { 242 | 'minimal': { 243 | 'ntds_file': 'test.ntds', 244 | 'cracked_file': 'test.pot', 245 | 'min_password_length': 8 246 | }, 247 | 'with_groups': { 248 | 'ntds_file': 'test.ntds', 249 | 'cracked_file': 'test.pot', 250 | 'min_password_length': 8, 251 | 'groups_directory': 'groups/' 252 | }, 253 | 'sanitized': { 254 | 'ntds_file': 'test.ntds', 255 | 'cracked_file': 'test.pot', 256 | 'min_password_length': 8, 257 | 'sanitize_output': True 258 | }, 259 | 'full_options': { 260 | 'ntds_file': 'test.ntds', 261 | 'cracked_file': 'test.pot', 262 | 'min_password_length': 8, 263 | 'groups_directory': 'groups/', 264 | 'sanitize_output': True, 265 | 'include_machine_accounts': True, 266 | 'include_krbtgt': True, 267 | 'kerberoast_file': 'kerberoast.txt' 268 | } 269 | } 270 | -------------------------------------------------------------------------------- /tests/README.md: -------------------------------------------------------------------------------- 1 | # DPAT Test Suite Documentation 2 | 3 | ## Overview 4 | 5 | This comprehensive test suite for the Domain Password Audit Tool (DPAT) provides thorough testing of all functionality, including unit tests, integration tests, and performance tests. 
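If you just want to run everything, the following is usually sufficient from the repository root (the full set of options is covered under Running Tests below):

```bash
# install the test dependencies, then run the whole suite
pip install -r requirements-dev.txt
pytest
```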
6 | 7 | ## Test Structure 8 | 9 | ``` 10 | tests/ 11 | ├── __init__.py # Test configuration and utilities 12 | ├── run_tests.py # Main test runner 13 | ├── unit/ # Unit tests 14 | │ ├── __init__.py 15 | │ └── test_core.py # Core class unit tests 16 | ├── integration/ # Integration tests 17 | │ ├── __init__.py 18 | │ └── test_integration.py # End-to-end workflow tests 19 | └── fixtures/ # Test data and fixtures 20 | ├── __init__.py 21 | └── test_data.py # Sample data and utilities 22 | ``` 23 | 24 | ## Test Categories 25 | 26 | ### Unit Tests (`tests/unit/`) 27 | 28 | Unit tests focus on testing individual classes and methods in isolation: 29 | 30 | - **TestConfig**: Configuration dataclass testing 31 | - **TestNTDSProcessor**: NTDS file parsing and processing 32 | - **TestHashProcessor**: Password hashing and cracking logic 33 | - **TestDataSanitizer**: Data sanitization functionality 34 | - **TestHTMLReportBuilder**: HTML report generation 35 | - **TestDatabaseManager**: Database operations and schema 36 | - **TestGroupManager**: Group membership processing 37 | - **TestCrackedPasswordProcessor**: Cracked password processing 38 | - **TestUtilityFunctions**: Utility functions and helpers 39 | 40 | ### Integration Tests (`tests/integration/`) 41 | 42 | Integration tests test complete workflows and component interactions: 43 | 44 | - **TestNTDSProcessingIntegration**: Complete NTDS processing workflow 45 | - **TestReportGenerationIntegration**: HTML report generation with real data 46 | - **TestGroupProcessingIntegration**: Group membership processing workflow 47 | - **TestCommandLineIntegration**: Command-line interface testing 48 | - **TestErrorHandlingIntegration**: Error handling and edge cases 49 | - **TestSampleDataIntegration**: Tests using real sample data files 50 | 51 | ### Test Fixtures (`tests/fixtures/`) 52 | 53 | Test fixtures provide realistic test data: 54 | 55 | - **Sample NTDS Data**: Realistic NTDS dump format data 56 | - **Sample Cracked Data**: Password cracking results 57 | - **Sample Group Data**: Group membership information 58 | - **Sample Kerberoast Data**: Kerberoast account data 59 | - **Edge Case Data**: Invalid and malformed data for error testing 60 | - **Performance Data**: Large datasets for performance testing 61 | 62 | ## Running Tests 63 | 64 | ### Using the Test Runner 65 | 66 | ```bash 67 | # Run all tests 68 | python tests/run_tests.py 69 | 70 | # Run only unit tests 71 | python tests/run_tests.py --unit-only 72 | 73 | # Run only sample data tests 74 | python tests/run_tests.py --sample-data-only 75 | 76 | # Run with verbose output 77 | python tests/run_tests.py --verbosity 2 78 | 79 | # Stop on first failure 80 | python tests/run_tests.py --failfast 81 | 82 | # List all available tests 83 | python tests/run_tests.py --list-tests 84 | ``` 85 | 86 | ### Using pytest 87 | 88 | ```bash 89 | # Run all tests 90 | pytest 91 | 92 | # Run only unit tests 93 | pytest tests/unit/ 94 | 95 | # Run only integration tests 96 | pytest tests/integration/ 97 | 98 | # Run only sample data tests 99 | python tests/run_tests.py --sample-data-only 100 | 101 | # Run sample data tests with pytest 102 | pytest tests/integration/test_sample_data.py 103 | 104 | # Run specific test file 105 | pytest tests/unit/test_core.py 106 | 107 | # Run with coverage 108 | pytest --cov=dpat --cov-report=html 109 | 110 | # Run with verbose output 111 | pytest -v 112 | 113 | # Run specific test method 114 | pytest tests/unit/test_core.py::TestConfig::test_config_creation 115 | ``` 116 | 117 | 
### Using unittest directly 118 | 119 | ```bash 120 | # Run all tests 121 | python -m unittest discover tests 122 | 123 | # Run specific test module 124 | python -m unittest tests.unit.test_core 125 | 126 | # Run with verbose output 127 | python -m unittest -v tests.unit.test_core 128 | ``` 129 | 130 | ## Sample Data Tests 131 | 132 | The test suite includes comprehensive integration tests that use the actual sample data files from the `sample_data` directory: 133 | 134 | ### Required Sample Data Files 135 | - `customer.ntds` - Real NTDS dump with multiple domains 136 | - `oclHashcat.pot` - Real password cracking results 137 | - `Domain Admins.txt` - Domain Admins group membership 138 | - `Enterprise Admins.txt` - Enterprise Admins group membership 139 | - `Enterprise Admins PowerView Output.txt` - PowerView formatted group data 140 | 141 | ### Sample Data Test Features 142 | - **Real-world data processing**: Tests with actual NTDS dumps and cracked passwords 143 | - **Multi-domain analysis**: Tests with child.domain.com, parent.domain.com, and sister.domain.com 144 | - **Group membership processing**: Tests with both standard and PowerView formatted group files 145 | - **Report generation**: Tests HTML report generation with real data 146 | - **Statistics analysis**: Tests password statistics and domain-specific analysis 147 | - **Command line execution**: Tests command line interface with real files 148 | 149 | ### Running Sample Data Tests 150 | 151 | ```bash 152 | # Run sample data tests only 153 | python tests/run_tests.py --sample-data-only 154 | 155 | # Run sample data tests with pytest 156 | pytest tests/integration/test_sample_data.py 157 | 158 | # Run sample data tests with dedicated runner 159 | python tests/run_sample_data_tests.py 160 | ``` 161 | 162 | ### Sample Data Test Cases 163 | - `test_sample_data_ntds_processing` - Process customer.ntds file 164 | - `test_sample_data_with_groups` - Process with Domain Admins and Enterprise Admins groups 165 | - `test_sample_data_powerview_format` - Process PowerView formatted group file 166 | - `test_sample_data_report_generation` - Generate HTML reports with real data 167 | - `test_sample_data_sanitized_reports` - Test sanitized report generation 168 | - `test_sample_data_statistics` - Test statistics generation 169 | - `test_sample_data_domain_analysis` - Test domain-specific analysis 170 | - `test_sample_data_command_line_execution` - Test command line execution 171 | - `test_sample_data_with_groups_command_line` - Test command line with groups 172 | 173 | ## Test Data 174 | 175 | ### Sample NTDS Data 176 | 177 | The test suite includes realistic NTDS data with: 178 | 179 | - Regular user accounts 180 | - Machine accounts (for filtering tests) 181 | - krbtgt account (for filtering tests) 182 | - Password history entries 183 | - Empty password hashes 184 | - LM hash disabled accounts 185 | - Different domain accounts 186 | 187 | ### Sample Cracked Data 188 | 189 | Cracked password data includes: 190 | 191 | - NT hash cracking results 192 | - LM hash cracking results 193 | - Hex encoded passwords 194 | - Empty passwords 195 | - Special character passwords 196 | 197 | ### Sample Group Data 198 | 199 | Group membership data includes: 200 | 201 | - Domain Admins 202 | - Enterprise Admins 203 | - Regular Users 204 | - Service Accounts 205 | - Power Users 206 | 207 | ## Test Configuration 208 | 209 | ### Environment Setup 210 | 211 | Tests automatically create temporary directories and files, so no manual setup is required. 
Each test: 212 | 213 | 1. Creates a temporary directory 214 | 2. Generates test files with sample data 215 | 3. Runs the test 216 | 4. Cleans up temporary files 217 | 218 | ### Test Isolation 219 | 220 | Each test is completely isolated: 221 | 222 | - Uses separate temporary directories 223 | - Creates fresh database connections 224 | - No shared state between tests 225 | - Automatic cleanup after each test 226 | 227 | ### Mocking and Stubbing 228 | 229 | Tests use Python's `unittest.mock` for: 230 | 231 | - File system operations 232 | - Database connections 233 | - External dependencies 234 | - Error conditions 235 | 236 | ## Test Coverage 237 | 238 | The test suite aims for comprehensive coverage of: 239 | 240 | - **Core Functionality**: All main classes and methods 241 | - **Error Handling**: Invalid input, missing files, database errors 242 | - **Edge Cases**: Empty data, malformed input, special characters 243 | - **Integration**: Complete workflows from input to output 244 | - **Performance**: Large dataset handling 245 | - **Security**: Data sanitization and validation 246 | 247 | ## Adding New Tests 248 | 249 | ### Unit Tests 250 | 251 | 1. Create test methods in the appropriate test class 252 | 2. Use descriptive test method names starting with `test_` 253 | 3. Follow the Arrange-Act-Assert pattern 254 | 4. Use appropriate assertions (`assertEqual`, `assertTrue`, etc.) 255 | 5. Test both success and failure cases 256 | 257 | ### Integration Tests 258 | 259 | 1. Create test methods that test complete workflows 260 | 2. Use real test data files 261 | 3. Verify end-to-end functionality 262 | 4. Test error conditions and edge cases 263 | 5. Ensure proper cleanup 264 | 265 | ### Test Data 266 | 267 | 1. Add new sample data to `tests/fixtures/test_data.py` 268 | 2. Use realistic data that mimics real-world scenarios 269 | 3. Include edge cases and error conditions 270 | 4. Document the purpose of each dataset 271 | 272 | ## Continuous Integration 273 | 274 | The test suite is designed to work with CI/CD systems: 275 | 276 | - No external dependencies required 277 | - Automatic test discovery 278 | - Clear exit codes for success/failure 279 | - Comprehensive logging and reporting 280 | - Fast execution for quick feedback 281 | 282 | ## Troubleshooting 283 | 284 | ### Common Issues 285 | 286 | 1. **Import Errors**: Ensure the parent directory is in the Python path 287 | 2. **File Not Found**: Check that test data files are created correctly 288 | 3. **Database Errors**: Verify database connections are properly closed 289 | 4. **Permission Errors**: Ensure temporary directories can be created 290 | 291 | ### Debug Mode 292 | 293 | Run tests with debug logging: 294 | 295 | ```bash 296 | python tests/run_tests.py --verbosity 2 297 | ``` 298 | 299 | ### Test Isolation 300 | 301 | If tests are interfering with each other: 302 | 303 | 1. Check for shared state 304 | 2. Ensure proper cleanup in `tearDown` methods 305 | 3. Use unique temporary directories 306 | 4. 
Verify database connections are closed 307 | 308 | ## Performance Testing 309 | 310 | The test suite includes performance tests for: 311 | 312 | - Large NTDS file processing 313 | - Database operations with many records 314 | - HTML report generation with large datasets 315 | - Memory usage during processing 316 | 317 | Run performance tests separately: 318 | 319 | ```bash 320 | python tests/run_tests.py --module tests.performance 321 | ``` 322 | 323 | ## Security Testing 324 | 325 | Security-focused tests verify: 326 | 327 | - Data sanitization functionality 328 | - Input validation and sanitization 329 | - SQL injection prevention 330 | - File path traversal prevention 331 | - Sensitive data handling 332 | 333 | ## Future Enhancements 334 | 335 | Planned improvements to the test suite: 336 | 337 | 1. **Property-based Testing**: Using Hypothesis for property-based testing 338 | 2. **Mutation Testing**: Using mutmut for mutation testing 339 | 3. **Load Testing**: Performance testing with very large datasets 340 | 4. **Security Testing**: Automated security vulnerability testing 341 | 5. **Cross-platform Testing**: Testing on different operating systems 342 | 6. **Docker Integration**: Containerized test environments 343 | 7. **Parallel Testing**: Running tests in parallel for faster execution 344 | 8. **Test Reporting**: Enhanced HTML and XML test reports 345 | 9. **Code Coverage**: Detailed code coverage analysis 346 | 10. **Benchmarking**: Performance benchmarking and regression testing 347 | -------------------------------------------------------------------------------- /sample_data/base/gendata.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python 2 | 3 | import random,hashlib,binascii, os 4 | cwd = os.getcwd() 5 | print(cwd) 6 | f_first = open("sample_data/base/first.txt") 7 | f_last = open("sample_data/base/last_small.txt") 8 | f_passwords = open("sample_data/base/subset-rockyou.txt") 9 | f_das= open("sample_data/Domain Admins.txt","w") 10 | 11 | first_list=list(f_first) 12 | count_first = 0 13 | domains = [ "parent.domain.com","child.domain.com","sister.domain.com"] 14 | domain_admins = { 15 | 'Agnes.Aarons-admin':'reallylongone', 16 | 'Cliff.Adames-admin':'PasswordsAreHardToRemember', 17 | 'Alex.Revis-admin':'57kdhfls*%2', 18 | 'Gilbert.Settle-admin':'Ford57andGolf', 19 | 'Cory.Ruhoff-admin':'NewPassword4Work', 20 | 'Damian.Scarver-admin':'domainAdminPass', 21 | 'Oscar.Veyna-admin':'ShirtsNSkins', 22 | 'Rex.Vidot-admin':'1997hereo', 23 | 'Scot.Viles-admin':'sparklesparkleZAP', 24 | 'Burton.Vonner-admin':'VonnerPass16', 25 | 'Dayna.Wade-admin':'Winter16', 26 | 'Dustin.Wahlund-admin':'Frank-did-it', 27 | 'Earnestine.Waiau-admin':'HappyTogether16', 28 | 'Emerson.Wala-admin':'Washington87', 29 | 'Sallie.Zych-admin':'77qwerty88', 30 | 'Samuel.Zysk-admin':'Zundk8*&^', 31 | 'Rosalinda.Zusman-admin':'Making Up Passwords is Hard', 32 | 'Roman.Zurek-admin':'goFigure8', 33 | 'Celia.Mcintosh-admin':'WikiWiki4What', 34 | 'Celeste.Mcintire-admin':'2beornot2be', 35 | 'Cecil.Mcinnis-admin':';kleknklk', 36 | 'Brendan.Mcgriff-admin':'DontForget1', 37 | 'Booker.Mcgraph-admin':'DaisyMisty1', 38 | 'Bobbie.Mcgrane-admin':'P@sswo0rd16', 39 | 'Clint.Hollifield-admin':'1997Married', 40 | 'Coleen.Hollinghead-admin':'MickyMouse56', 41 | 'Jackie.Dimodica-admin':'JerermyNHanna2', 42 | 'Isabella.Dimitroff-admin':'Anastasia', 43 | 'Horace.Dimarco-admin':'LovedByYou', 44 | 'Herbert.Dils-admin':'DiamondRIO3', 45 | 'Hazel.Dillman-admin':'L1ke1T', 46 | 
'Rex.Beadling-admin':'ITdoma1n@dmin', 47 | 'Reggie.Beacher-admin':'WorldTurn@round', 48 | 'Raul.Beaber-admin':'Lovemybug2003', 49 | 'Pete.Baysmore-admin':'Hard24get', 50 | 'August.Mcginnis-admin':'W$%23eu&*!rhs0' 51 | } 52 | lm_dict = { 'NotTooHard':'5B9D1AFCC9784729ADD5B1A41F2CB2C0','GoBeavErs1997':'94068F2F1CD1EAF27F76AAABE8E8789D','W$%23eu&*!rhs0':'4CE5B0C344FDD1038930410E6B652F2C','edward!':'67449A7AB9FDFA3AAAD3B435B51404EE','edhardy':'3AF628952D3CBDADAAD3B435B51404EE','ededed':'564B1B69A0CD5B23AAD3B435B51404EE','eddie22':'E3F874C4772EDC14AAD3B435B51404EE','earth1':'01EB2BDB90E4B363AAD3B435B51404EE','eagles36':'99223CC16C15AFCDC81667E9D738C5D9','dylanc':'BA0997645137628BAAD3B435B51404EE','dylanb':'4C72E2913A32E8A4AAD3B435B51404EE','dulceteamo':'0F8C1562718B774A6905068007DD26FD','dude1234':'4107C659B6183D65FF17365FAF1FFE89','druglord':'AC1F31C2E70BCB654A3B108F3FA6CB6D','droppie':'07E0ED6353B36D0AAAD3B435B51404EE','drink':'62B7CD49704064BDAAD3B435B51404EE','dragon17':'4097A469B4C52C1D7C3113B4A1A5E3A0','dracko':'A9327557F0E7E4FDAAD3B435B51404EE','downlow':'B44617E2CA667704AAD3B435B51404EE','douloveme':'62D3291BD468609863E11CD7E7F6092C','dothack':'6EAD70E9C296822BAAD3B435B51404EE','dotaallstar':'2987CC09184A44DE2201161DEDBA27A2','dorin':'947EB89A035D9D34AAD3B435B51404EE','dontspeak':'3B4F679F335D1535F04D685C382B531C','donnalyn':'A8E4E642B0A16CB6E72C57EF50F76A05','donnabelle':'97C88C474C40FE9589AF081E0305AC84','donika':'E5BF9342F4E51BA6AAD3B435B51404EE','dominate':'8C0D796F72023FB017306D272A9441BB','dolphy':'E75827B521C31229AAD3B435B51404EE','dollars1':'D456D433BC6A41B9C2265B23734E0DAC','doitbig':'E13F262D76132450AAD3B435B51404EE','doinita':'CA217F811088BC03AAD3B435B51404EE','dogandcat':'78028473533F1F269D4D37E81433E320','dobby':'CB94142B12F47F70AAD3B435B51404EE','dmarie':'B6ECEACBE041745CAAD3B435B51404EE','djones':'976EC97B7362B073AAD3B435B51404EE','divya':'DB9BDEDFC37408AFAAD3B435B51404EE','disney365':'DA505805AB7FCF926AB0B9B4DA013120','discoball':'82D8597A58DDB87C066B9E64566C2479','diogenes':'452265562B3B49B293E28745B8BF4BA6','dingaling':'D220E3D19587CFFCDD4218F5E59DD23A','dinamovista':'DD28D12979294E0C9A7853FCD68523F6','dimsum':'14BB788F701AC833AAD3B435B51404EE','dientes':'316A62A9E5078672AAD3B435B51404EE','diegoandres':'631D8A94ACF9ADF45F6B9B201665ECFF','diego14':'AE310716574EDF80AAD3B435B51404EE','diego13':'B466FE535D0D691CAAD3B435B51404EE','dick12':'8A49C00370347B9BAAD3B435B51404EE','dianna1':'713505C5D104BA61AAD3B435B51404EE','diablo69':'A3BE40622851765909752A3293831D17','dhianne':'8D78D2735A3A4379AAD3B435B51404EE','devon7':'CC6DC07DB80BEF75AAD3B435B51404EE','devin11':'67B2EF958AF6F612AAD3B435B51404EE','deventer':'2013970F2DB4971C944E2DF489A880E4','devan1':'0ADD5FD620A482C2AAD3B435B51404EE','destined':'830BA0309744B2C24A3B108F3FA6CB6D','dessire':'975DB741FFD6D065AAD3B435B51404EE','design1':'4C1AB3AC6E05EB1DAAD3B435B51404EE','desiderio':'6A1C2D9A85892C26DF61CA35DEE5AA58','derry':'8416E3769D5FB679AAD3B435B51404EE','derrty':'28F566AF5658CCBAAAD3B435B51404EE','derek3':'9CF17DB1CB859EABAAD3B435B51404EE','derek22':'1B033D2858D542D9AAD3B435B51404EE','dennis01':'C8396E60C4987FB8C2265B23734E0DAC','denisuca':'9DC7423CB84514547584248B8D2C9F9E','denise14':'4E61A3280511BCC9FF17365FAF1FFE89','deniece':'6537F0EC75253266AAD3B435B51404EE','dementia':'75EEA72B6DE5C2EC7584248B8D2C9F9E','della1':'69666E0FCE54D3F1AAD3B435B51404EE','deliverance':'FC29BE7F95F1A281354EF550D6D616DF','delgadillo':'E77BDBF6AE2B3D930279963575FF2D48','deldel':'CDEA18537CB83F8FAAD3B435B51404EE','deion':'CE5205F9E3F1D158AAD3B435B5
1404EE','defoe18':'5811452C87D01332AAD3B435B51404EE','deeper':'A1886219150AE350AAD3B435B51404EE','deejay1':'B2DF80A944C9CB0CAAD3B435B51404EE','dee-dee':'1F17C1545FC9D496AAD3B435B51404EE','death2u':'539706F3890A934BAAD3B435B51404EE','dearly':'ACB63C873C70EDFFAAD3B435B51404EE','deadline':'D9989D2AEB2F392817306D272A9441BB','dead666':'56D719A22A00943EAAD3B435B51404EE','dayday123':'C7C9EB383B9066CCB75E0C8D76954A50','dayani':'269D8839491A9E6CAAD3B435B51404EE','day123':'76B6E26A7386D775AAD3B435B51404EE','daville':'1523F21BDB224D64AAD3B435B51404EE','davegrohl':'45B69A7B44018AB1EB90FCD89E798A49','dave21':'1173D0A5814FC7A5AAD3B435B51404EE','dario1':'684AAF0C9563CC51AAD3B435B51404EE','darin':'B29A4A5669527F0AAAD3B435B51404EE','darel':'963D8FB64DF80D69AAD3B435B51404EE','danyale':'1F1C9E6BDAF385BFAAD3B435B51404EE','dannyteamo':'C5660362EC7308AF6905068007DD26FD','danielle05':'41A5DF84E34D4393B0D866F8E2272AD6','daniel87':'D78FF2C06D3B72B27C3113B4A1A5E3A0','dani23':'F100ACF2FF2ADE44AAD3B435B51404EE','dani1':'175E69F189D25399AAD3B435B51404EE','dancok':'91824FD4185EE910AAD3B435B51404EE','dancingdiva':'81380963DA03E29697440C3488F02677','dancewithme':'C8B325EBE380D5C756496D0A2CF27A20','dancer26':'FEAE74B8E1947D9BC81667E9D738C5D9','dancer03':'894CBEC1AC35440D1AA818381E4E281B','dancechick':'449EC1BFA0F80FB272F8DA9D69F474D7','dancarter':'0F261E270277EE3E8963805A19B0ED49','dana1':'68E66D68E3AD2A0FAAD3B435B51404EE','dallas41':'73F3C9B1038B4422C2265B23734E0DAC','dallas15':'59CC3F718A14BD029C5014AE4718A7EE','dallas08':'C6FB9E6F7DB3F27036077A718CCDF409','dallas07':'C6FB9E6F7DB3F2707C3113B4A1A5E3A0','daisy!':'8EBDE0B1057D172EAAD3B435B51404EE','daine':'A6D663D5197DEA9AAAD3B435B51404EE','dailyn':'09F11E1A2F255F7FAAD3B435B51404EE','daddysboy':'69B507AAFDD2F72E655265D1314726C0','daddylove':'805D046BD8B55712B6FE535A75CB5552','daddy1234':'E42F9B3895F1F7B219F10A933D4868DC','daddy03':'82E76E96B3FCEFF3AAD3B435B51404EE','cynthia3':'BF65484F9EAA59351AA818381E4E281B','cymone':'DC9AF42629FA0778AAD3B435B51404EE','cutiepie16':'287ACD99E5A71EA619DB94ADC99423BF','cuters':'C435D7E4789C5286AAD3B435B51404EE','cupcake4':'E0009F6A725132CBFF17365FAF1FFE89','cunt69':'06AC388690048B3DAAD3B435B51404EE','cuddlebug':'375A10B1E5F43E45D17FDCAAB966EFA7','cuckoo':'E749D93A7BE08AC5AAD3B435B51404EE','cuadra':'C5D055D5402EA371AAD3B435B51404EE','cstrike':'4A3F0486D35169C7AAD3B435B51404EE','crystal11':'87650E47E4D141585D3872C04445E010','crystal01':'87650E47E4D1415873251AA2B4314B90','cristea':'64EC8CB777E8E2EDAAD3B435B51404EE','creeds':'FA0F7946A915FD06AAD3B435B51404EE','crazy4ever':'99EBBDF7699BA6D0B12FAE38C8ABEE13','crawfish':'0E4BABFD43EEC5C15ACDCD7C247FA83A','craig2':'2373F7A3D68F3C2BAAD3B435B51404EE','crackwhore':'47CA74079D90D70D5468F2AD1F3B98BA','cowgirl07':'131BE2583DE651E718FCD526FB48A829','cowboys5':'CC954BF64510840D9C5014AE4718A7EE','courtney4':'063DE5A00E0BBA01744F2D424178DE49','cotita':'F6E1814A19F0DC3DAAD3B435B51404EE','costas':'03B9455205B8464AAAD3B435B51404EE','corona12':'D60C67A63F8821691D71060D896B7A46','cornbread1':'75ED8AC86B42E2B9042370C4583C388F','corey13':'372336BE0DB2D2C8AAD3B435B51404EE','coolaid':'27F0F886CD1DE9C3AAD3B435B51404EE','cookie04':'4DBF38B0A644F62FFF17365FAF1FFE89','contagious':'496CE23950764D4A5BE30F58D2A941D5','contabilitate':'332F012EDCC53D731DAB72D6A1727041','connor02':'17A1143A6E6E42821D71060D896B7A46','conner2':'62CFCE559EC73CADAAD3B435B51404EE','comunidad':'8AD5C85747A9E2BD9C749B84168D712D','common1':'9E39676A49F99C52AAD3B435B51404EE','columbus1':'CADD68BDB2673CA30CC3EB564B0F9047','colon':'1AB94AAF9FE60AE1AAD3B43
5B51404EE','coley1':'51BD657CEB4457CFAAD3B435B51404EE','coldheart':'0532CF033F5955B35F034D624633DBF9','codylinley':'B671C9AF04D37B08F15DB3BDBAD92750','cody07':'DB455B1B86B8A058AAD3B435B51404EE','cody04':'3DC6DD72755CCFE9AAD3B435B51404EE','coco08':'F26B193B103F6AD4AAD3B435B51404EE','cocknose':'A2D4DFAFA94D7B5D17306D272A9441BB','clubber':'4027C45050D0DC04AAD3B435B51404EE','cleo':'3CAB28372DB1715EAAD3B435B51404EE','clears':'E9596D59EDCD0C22AAD3B435B51404EE','claudia12':'FDAA24DE0BCBEB854207FD0DF35A59A8','civics':'A21064406DD6C682AAD3B435B51404EE','citron':'9840ABA3424EBA78AAD3B435B51404EE','cintakamu':'9564A655D10805450BBD7D4C25A4DEFA','ciclope':'C155BC1446E5101CAAD3B435B51404EE','chute':'8DBDFBAA0CF03C76AAD3B435B51404EE','chula12':'5A4C094D73768C02AAD3B435B51404EE','chuche':'0652223D8E88744DAAD3B435B51404EE','christians':'630505E57DC5617E8E4DD189F947B5EC','chrischris':'B6760E35ED0103CD85BB7C52C41086D7','chris83':'E2F432F581BF8148AAD3B435B51404EE','chris2005':'41973827B8184CF96FB9A7EF37043CD6','chonchis':'80AAA6EB2DC0233093E28745B8BF4BA6','chocorrol':'501E06987BD244E97BB1D8438F805B5C','chocolatelover':'81D330F45391D6CDD21334332AE253C7','choclate1':'B1067D8E77DD0072A202B0A0CC08E46E','chivas15':'BA237827413B85849C5014AE4718A7EE','chimes':'7C03C765BFC529A9AAD3B435B51404EE','chikitalinda':'6A5DF180A72046188315F2B502B247DC','chikis1':'04AE55C4E6F75BB5AAD3B435B51404EE','chickenwings':'5E9019CC921146AC00F4888F92F39DF9','chicharron':'A138C2D50AC9C5063140C07381B6A165','chica15':'B3D2CC98E2BC3F63AAD3B435B51404EE','chevygirl':'725DA578E9D54C1D2DCA4431C6F3913D','cherry9':'616778FE4E03EB13AAD3B435B51404EE','cherries2':'328FB4F46E4A4EDA74F23606C66022B0','cherie1':'A30A2797D55E44E1AAD3B435B51404EE','cheetah2':'0BEBB7B76F9F09A01D71060D896B7A46','cheeseontoast':'34DC70895752909D63C3F1914814DF78','cheese23':'1B8DB7D20C6C244B1AA818381E4E281B','cheese14':'3A5F48756650F617FF17365FAF1FFE89','cheer55':'ADF3815AE0113EE6AAD3B435B51404EE','cheene':'214BA788DBF79432AAD3B435B51404EE','chean':'4A07877A1FD2D2E0AAD3B435B51404EE','chassy':'3569B3396667A9A2AAD3B435B51404EE','charlie88':'528A05A387553DC56C4691C0029EBE9F','charlie27':'528A05A387553DC5025A32A63FE04BEC','charlie20':'528A05A387553DC5143F8BD9AE9E0363','charlie18':'528A05A387553DC58347BB1E72CC9F76','chaparra1':'CEF00CDAA153648F65C4A55F32B3BF85','chaochao':'A3EB50453623B039E68AA26A841A86FA','changed1':'67B011CF3539A3E3C2265B23734E0DAC','chalie':'E80D509ABABDE9C4AAD3B435B51404EE','chakas':'5ED7F084F1839FD7AAD3B435B51404EE','chad69':'BF34878165770EEEAAD3B435B51404EE','chachie':'3091167271E136C2AAD3B435B51404EE','chabe':'BF1008C067449CFFAAD3B435B51404EE','cha123':'84126D3AA15B64D0AAD3B435B51404EE','cha-cha':'F7E38693146CED75AAD3B435B51404EE','cfc123':'B10D8883941986DCAAD3B435B51404EE','cesar2':'745B312CE52C3F88AAD3B435B51404EE','ceriwis':'D0BA7E97FC3E9290AAD3B435B51404EE','centinela':'E7D941ADA594566EB09321E47427AF3C','cena11':'4656FDC59974A241AAD3B435B51404EE','cena01':'268D5E2B37583A0AAAD3B435B51404EE','celtic08':'4467856BF476733B36077A718CCDF409','cedes':'8A337C28CE3265FFAAD3B435B51404EE','cecilita':'6D02B4EFE4C3AE017584248B8D2C9F9E','cdcdcd':'4DAB65D458627A62AAD3B435B51404EE','cbr900rr':'ADF862771AC0FA35944E2DF489A880E4','cazares':'19E925C350B4A289AAD3B435B51404EE','UpPeRlOwEr':'3630E260BC5D7049249A4622CE4C83C8','HarD222cRacK':'051B0FC10EAE6A6D22EF373F485DF6BD','resgocswit7hWQ':'90C0059BFB22BB44F657FC2061517840','lastBUTnotLEAS':'47AAF1C238FCBAB597FAF9A0F46316C1'} 53 | 54 | dontCrackWithNT = 
["W$%23eu&*!rhs0","UpPeRlOwEr","HarD222cRacK","resgocswit7hWQ","lastBUTnotLEAS"] 55 | dontNTCrackTheseUsers = ["Bobbie.Mcgrane-admin","Rosalinda.Zusman-admin","Cecil.Mcinnis-admin", "Cory.Ruhoff-admin","Cliff.Adames-admin","Samuel.Zysk-admin"] 56 | 57 | f = open("customer.ntds","w") 58 | f2 = open("hashcat.potfile","w") 59 | wroteRHS = False 60 | 61 | for last in f_last: 62 | add_admin = False 63 | if count_first < len(first_list): 64 | firstName = first_list[count_first].rstrip().title() 65 | count_first = count_first + 1 66 | else: 67 | count_first = 0 68 | firstName = first_list[count_first].rstrip().title() 69 | lastName = last.rstrip().title() 70 | userName = firstName + "." + lastName 71 | if userName + "-admin" in domain_admins: 72 | userName = userName + "-admin" 73 | password = f_passwords.readline().rstrip() 74 | if domain_admins.has_key(password): 75 | print "Warn: duplicated password for administrator: " + password 76 | rid = str(random.randint(10000,500000)) 77 | domain = domains[random.randint(0,len(domains)-1)] 78 | if domain_admins.has_key(userName): 79 | password=domain_admins[userName] 80 | domain=domains[1] 81 | f_das.write(domain + "\\" + userName + "\n") 82 | if userName in ["Agnes.Aarons-admin","Alex.Revis-admin","Burton.Vonner-admin","Pete.Baysmore-admin"]: 83 | add_admin = True 84 | nt_hash = binascii.hexlify(hashlib.new('md4', password.encode('utf-16le')).digest()) 85 | lm_hash = "aad3b435b51404eeaad3b435b51404ee" # this is the LM hash of a blank password 86 | if lm_dict.has_key(password): 87 | lm_hash = lm_dict[password] 88 | f.write(domain + "\\" + userName + ":" + rid + ":" + lm_hash.lower() + ":" + nt_hash + ":::\n") 89 | # simulating password history, up to 24 entries generated for each user, 78% of these simulated as cracked 90 | for x in range(0, 24): 91 | if (random.randrange(0,100)<5): 92 | break 93 | hist_password = password + str(24 - x) 94 | hist_nt_hash = binascii.hexlify(hashlib.new('md4', hist_password.encode('utf-16le')).digest()) 95 | f.write(domain + "\\" + userName + "_history" + str(x) + ":" + rid + ":aad3b435b51404eeaad3b435b51404ee:" + hist_nt_hash + ":::\n") 96 | if (random.randrange(1,100)<78): 97 | f2.write(hist_nt_hash + ":" + hist_password + "\n") 98 | if (password not in dontCrackWithNT) and (userName not in dontNTCrackTheseUsers) and (password in lm_dict or random.randrange(1,100)<78): 99 | f2.write(nt_hash + ":" + password + "\n") 100 | if password in dontCrackWithNT: 101 | left_pass=password[0:8].upper() 102 | right_pass=password[8:15].upper() 103 | left_hash=lm_dict[password][0:16].lower() 104 | right_hash=lm_dict[password][16:32].lower() 105 | if password != "W$%23eu&*!rhs0" or not wroteRHS: 106 | f2.write(left_hash + ":" + left_pass + "\n") 107 | f2.write(right_hash + ":" + right_pass + "\n") 108 | if password == "W$%23eu&*!rhs0": 109 | wroteRHS = True 110 | if add_admin: 111 | f.write(domain + "\\" + userName.rstrip("-admin") + ":" + rid + ":" + lm_hash.lower() + ":" + nt_hash + ":::\n") 112 | f2.write("aad3b435b51404ee:\n") 113 | f.close() 114 | f2.close() 115 | f_first.close() 116 | f_last.close() 117 | f_passwords.close() -------------------------------------------------------------------------------- /tests/unit/test_core.py: -------------------------------------------------------------------------------- 1 | """ 2 | Unit tests for DPAT core classes. 3 | 4 | This module contains unit tests for all the core classes in the DPAT tool, 5 | including configuration, data processing, and utility classes. 
6 | """ 7 | 8 | import unittest 9 | import tempfile 10 | from pathlib import Path 11 | from unittest.mock import patch, MagicMock, mock_open 12 | import sqlite3 13 | 14 | # Import the classes we're testing 15 | import sys 16 | sys.path.insert(0, str(Path(__file__).parent.parent)) 17 | 18 | from dpat import ( 19 | Config, NTDSProcessor, HashProcessor, DataSanitizer, 20 | HTMLReportBuilder, DatabaseManager, GroupManager, CrackedPasswordProcessor, 21 | calculate_percentage 22 | ) 23 | from tests import TestConfig, TestDataGenerator, DatabaseTestHelper, DPATTestCase 24 | 25 | 26 | class TestConfig(DPATTestCase): 27 | """Test the Config dataclass.""" 28 | 29 | def test_config_creation(self): 30 | """Test basic config creation.""" 31 | config = Config( 32 | ntds_file="test.ntds", 33 | cracked_file="test.pot", 34 | min_password_length=8 35 | ) 36 | 37 | self.assertEqual(config.ntds_file, "test.ntds") 38 | self.assertEqual(config.cracked_file, "test.pot") 39 | self.assertEqual(config.min_password_length, 8) 40 | self.assertFalse(config.sanitize_output) 41 | self.assertFalse(config.include_machine_accounts) 42 | 43 | def test_config_sanitize_output(self): 44 | """Test config with sanitize output enabled.""" 45 | config = Config( 46 | ntds_file="test.ntds", 47 | cracked_file="test.pot", 48 | min_password_length=8, 49 | sanitize_output=True 50 | ) 51 | 52 | self.assertTrue(config.sanitize_output) 53 | self.assertEqual(config.report_directory, "DPAT Report - Sanitized") 54 | 55 | def test_config_post_init(self): 56 | """Test config post-initialization processing.""" 57 | config = Config( 58 | ntds_file="test.ntds", 59 | cracked_file="test.pot", 60 | min_password_length=8, 61 | sanitize_output=True 62 | ) 63 | 64 | # Check that report directory was modified 65 | self.assertIn("Sanitized", config.report_directory) 66 | 67 | # Check that report directory exists 68 | self.assertTrue(Path(config.report_directory).exists()) 69 | 70 | 71 | class TestNTDSProcessor(DPATTestCase): 72 | """Test the NTDSProcessor class.""" 73 | 74 | def test_parse_ntds_line_valid(self): 75 | """Test parsing valid NTDS lines.""" 76 | # Test pwdump format 77 | line1 = "DOMAIN\\user1:1001:aad3b435b51404eeaad3b435b51404ee:31d6cfe0d16ae931b73c59d7e0c089c0::::" 78 | user, nt_hash = NTDSProcessor.parse_ntds_line(line1) 79 | self.assertEqual(user, "domain\\user1") # Method converts to lowercase 80 | self.assertEqual(nt_hash, "31d6cfe0d16ae931b73c59d7e0c089c0") 81 | 82 | # Test domain\\user format (first pattern - extracts just username) 83 | line2 = "DOMAIN\\user2:31d6cfe0d16ae931b73c59d7e0c089c0" 84 | user, nt_hash = NTDSProcessor.parse_ntds_line(line2) 85 | self.assertEqual(user, "user2") # First pattern extracts just username 86 | self.assertEqual(nt_hash, "31d6cfe0d16ae931b73c59d7e0c089c0") 87 | 88 | def test_parse_ntds_line_invalid(self): 89 | """Test parsing invalid NTDS lines.""" 90 | invalid_lines = [ 91 | "invalid line", 92 | "user:hash", 93 | "", 94 | "user:rid:lm:nt:extra:fields" 95 | ] 96 | 97 | for line in invalid_lines: 98 | user, nt_hash = NTDSProcessor.parse_ntds_line(line) 99 | self.assertIsNone(user) 100 | self.assertIsNone(nt_hash) 101 | 102 | def test_load_kerberoast_ntds(self): 103 | """Test loading Kerberoast NTDS data.""" 104 | test_data = [ 105 | "DOMAIN\\service1:1001:aad3b435b51404eeaad3b435b51404ee:31d6cfe0d16ae931b73c59d7e0c089c0::::", 106 | "DOMAIN\\service2:1002:aad3b435b51404eeaad3b435b51404ee:5d41402abc4b2a76b9719d911017c592::::", 107 | "invalid line", 108 | 
"DOMAIN\\service3:1003:aad3b435b51404eeaad3b435b51404ee:****************::::" 109 | ] 110 | 111 | # Create test file 112 | test_file = self.file_manager.create_file("kerberoast.txt", test_data) 113 | 114 | # Load data 115 | entries = NTDSProcessor.load_kerberoast_ntds(str(test_file)) 116 | 117 | # Should have 2 valid entries (service1 and service2) 118 | self.assertEqual(len(entries), 2) 119 | self.assertEqual(entries[0][0], "domain\\service1") # Method converts to lowercase 120 | self.assertEqual(entries[1][0], "domain\\service2") # Method converts to lowercase 121 | 122 | 123 | class TestHashProcessor(DPATTestCase): 124 | """Test the HashProcessor class.""" 125 | 126 | def test_generate_username_candidates(self): 127 | """Test username candidate generation.""" 128 | candidates = HashProcessor.generate_username_candidates("john", "DOMAIN\\john") 129 | 130 | # The method generates case variants for each candidate 131 | expected = {"john", "John", "JOHN", "DOMAIN\\john", "Domain\\john", "domain\\john", "DOMAIN\\JOHN"} 132 | self.assertEqual(candidates, expected) 133 | 134 | def test_generate_username_candidates_with_email(self): 135 | """Test username candidate generation with email format.""" 136 | candidates = HashProcessor.generate_username_candidates("john", "john@domain.com") 137 | 138 | self.assertIn("john@domain.com", candidates) 139 | self.assertIn("john", candidates) 140 | self.assertIn("John", candidates) 141 | 142 | def test_all_casings(self): 143 | """Test case generation.""" 144 | casings = list(HashProcessor.all_casings("ab")) 145 | expected = ["ab", "Ab", "aB", "AB"] 146 | self.assertEqual(set(casings), set(expected)) 147 | 148 | def test_all_casings_empty(self): 149 | """Test case generation with empty string.""" 150 | casings = list(HashProcessor.all_casings("")) 151 | self.assertEqual(casings, [""]) 152 | 153 | def test_all_casings_non_alpha(self): 154 | """Test case generation with non-alphabetic characters.""" 155 | casings = list(HashProcessor.all_casings("a1")) 156 | # Should only have 2 variants since '1' is not alphabetic 157 | self.assertEqual(len(casings), 2) 158 | self.assertIn("a1", casings) 159 | self.assertIn("A1", casings) 160 | 161 | def test_ntlm_hash_fallback(self): 162 | """Test NT hash generation with fallback mechanisms.""" 163 | # Test that the method exists and can be called 164 | try: 165 | result = HashProcessor.ntlm_hash("test_password") 166 | # If it succeeds, it should return a string 167 | self.assertIsInstance(result, str) 168 | self.assertEqual(len(result), 32) # MD4 hash is 32 chars 169 | except RuntimeError as e: 170 | # If no backend is available, that's expected in test environment 171 | self.assertIn("No NT hash backend available", str(e)) 172 | 173 | 174 | class TestDataSanitizer(DPATTestCase): 175 | """Test the DataSanitizer class.""" 176 | 177 | def test_sanitize_value_disabled(self): 178 | """Test sanitization when disabled.""" 179 | value = "password123" 180 | result = DataSanitizer.sanitize_value(value, should_sanitize=False) 181 | self.assertEqual(result, value) 182 | 183 | def test_sanitize_value_hash(self): 184 | """Test sanitization of hash values.""" 185 | hash_value = "31d6cfe0d16ae931b73c59d7e0c089c0" 186 | result = DataSanitizer.sanitize_value(hash_value, should_sanitize=True) 187 | self.assertEqual(result, "31d6************************89c0") 188 | 189 | def test_sanitize_value_password(self): 190 | """Test sanitization of password values.""" 191 | password = "password123" 192 | result = DataSanitizer.sanitize_value(password, 
should_sanitize=True) 193 | self.assertEqual(result, "p*********3") 194 | 195 | def test_sanitize_value_short(self): 196 | """Test sanitization of short values.""" 197 | short_value = "ab" 198 | result = DataSanitizer.sanitize_value(short_value, should_sanitize=True) 199 | self.assertEqual(result, "ab") 200 | 201 | def test_sanitize_value_empty(self): 202 | """Test sanitization of empty values.""" 203 | empty_value = "" 204 | result = DataSanitizer.sanitize_value(empty_value, should_sanitize=True) 205 | self.assertEqual(result, "") 206 | 207 | def test_sanitize_table_row(self): 208 | """Test sanitization of table rows.""" 209 | row = ("user1", "password123", "31d6cfe0d16ae931b73c59d7e0c089c0") 210 | result = DataSanitizer.sanitize_table_row(row, [1], [2], should_sanitize=True) 211 | 212 | self.assertEqual(result[0], "user1") # Username unchanged 213 | self.assertEqual(result[1], "p*********3") # Password sanitized 214 | self.assertEqual(result[2], "31d6************************89c0") # Hash sanitized 215 | 216 | 217 | class TestHTMLReportBuilder(DPATTestCase): 218 | """Test the HTMLReportBuilder class.""" 219 | 220 | def test_html_report_builder_creation(self): 221 | """Test HTML report builder creation.""" 222 | builder = HTMLReportBuilder(str(self.temp_dir)) 223 | self.assertEqual(builder.report_directory, str(self.temp_dir)) 224 | self.assertEqual(builder.body_content, "") 225 | 226 | def test_add_content(self): 227 | """Test adding content to HTML report.""" 228 | builder = HTMLReportBuilder(str(self.temp_dir)) 229 | builder.add_content("

<h2>Test</h2>") 230 | 231 | self.assertIn("<h2>Test</h2>", builder.body_content) 232 | self.assertIn("section-space", builder.body_content) 233 | 234 | def test_add_table(self): 235 | """Test adding table to HTML report.""" 236 | builder = HTMLReportBuilder(str(self.temp_dir)) 237 | rows = [("user1", "pass1"), ("user2", "pass2")] 238 | headers = ["Username", "Password"] 239 | 240 | builder.add_table(rows, headers) 241 | 242 | self.assertIn("<table", builder.body_content) 243 | self.assertIn("<tr>", builder.body_content) 244 | self.assertIn("<th>", builder.body_content) 245 | self.assertIn("Username", builder.body_content) 246 | self.assertIn("Password", builder.body_content) 247 | 248 | def test_generate_html(self): 249 | """Test HTML generation.""" 250 | builder = HTMLReportBuilder(str(self.temp_dir)) 251 | builder.add_content("<h2>Test Report</h2>") 252 | 253 | html = builder.generate_html() 254 | 255 | self.assertIn("<!DOCTYPE html>", html) 256 | self.assertIn("<html", html)  # matches <html> and other variations 257 | self.assertIn("<head>", html) 258 | self.assertIn("<body>", html) 259 | self.assertIn("<h2>Test Report</h2>
", html) 260 | self.assertIn("report.css", html) 261 | 262 | def test_navbar_link_default_filename(self): 263 | """Test navbar link uses default filename when not specified.""" 264 | builder = HTMLReportBuilder(str(self.temp_dir)) 265 | builder.add_content("

<h2>Test Report</h2>
") 266 | 267 | html = builder.generate_html() 268 | 269 | # Should use default filename in navbar link 270 | self.assertIn("href='_DomainPasswordAuditReport.html'", html) 271 | 272 | def test_navbar_link_custom_filename(self): 273 | """Test navbar link uses custom filename when specified.""" 274 | builder = HTMLReportBuilder(str(self.temp_dir), main_report_file="custom_report.html") 275 | builder.add_content("

<h2>Test Report</h2>
") 276 | 277 | html = builder.generate_html() 278 | 279 | # Should use custom filename in navbar link 280 | self.assertIn("href='custom_report.html'", html) 281 | # Should not contain default filename 282 | self.assertNotIn("href='_DomainPasswordAuditReport.html'", html) 283 | 284 | def test_write_report(self): 285 | """Test writing report to file.""" 286 | builder = HTMLReportBuilder(str(self.temp_dir)) 287 | builder.add_content("

<h2>Test Report</h2>
") 288 | 289 | filename = builder.write_report("test_report.html") 290 | 291 | self.assertEqual(filename, "test_report.html") 292 | report_path = self.temp_dir / "test_report.html" 293 | self.assert_file_exists(report_path) 294 | self.assert_file_contains(report_path, "

<h2>Test Report</h2>
") 295 | 296 | 297 | class TestDatabaseManager(DPATTestCase): 298 | """Test the DatabaseManager class.""" 299 | 300 | def test_database_manager_creation(self): 301 | """Test database manager creation.""" 302 | config = Config( 303 | ntds_file="test.ntds", 304 | cracked_file="test.pot", 305 | min_password_length=8 306 | ) 307 | 308 | db_manager = DatabaseManager(config) 309 | 310 | self.assertIsNotNone(db_manager.connection) 311 | self.assertIsNotNone(db_manager.cursor) 312 | 313 | def test_create_schema(self): 314 | """Test database schema creation.""" 315 | config = Config( 316 | ntds_file="test.ntds", 317 | cracked_file="test.pot", 318 | min_password_length=8 319 | ) 320 | 321 | db_manager = DatabaseManager(config) 322 | group_names = ["Domain Admins", "Enterprise Admins"] 323 | 324 | db_manager.create_schema(group_names) 325 | 326 | # Check that table exists 327 | cursor = db_manager.cursor 328 | cursor.execute("SELECT name FROM sqlite_master WHERE type='table' AND name='hash_infos'") 329 | result = cursor.fetchone() 330 | self.assertIsNotNone(result) 331 | 332 | # Check that group columns exist 333 | cursor.execute("PRAGMA table_info(hash_infos)") 334 | columns = [row[1] for row in cursor.fetchall()] 335 | 336 | for group_name in group_names: 337 | self.assertIn(group_name, columns) 338 | 339 | def test_close_database(self): 340 | """Test database connection closing.""" 341 | config = Config( 342 | ntds_file="test.ntds", 343 | cracked_file="test.pot", 344 | min_password_length=8 345 | ) 346 | 347 | db_manager = DatabaseManager(config) 348 | db_manager.close() 349 | 350 | # Connection should be closed 351 | with self.assertRaises(sqlite3.ProgrammingError): 352 | db_manager.cursor.execute("SELECT 1") 353 | 354 | 355 | class TestGroupManager(DPATTestCase): 356 | """Test the GroupManager class.""" 357 | 358 | def test_group_manager_creation(self): 359 | """Test group manager creation.""" 360 | config = Config( 361 | ntds_file="test.ntds", 362 | cracked_file="test.pot", 363 | min_password_length=8, 364 | groups_directory=str(self.temp_dir) 365 | ) 366 | 367 | manager = GroupManager(config) 368 | 369 | self.assertEqual(manager.groups, []) 370 | self.assertEqual(manager.group_users, {}) 371 | 372 | def test_load_groups_no_directory(self): 373 | """Test loading groups when no directory is specified.""" 374 | config = Config( 375 | ntds_file="test.ntds", 376 | cracked_file="test.pot", 377 | min_password_length=8 378 | ) 379 | 380 | manager = GroupManager(config) 381 | manager.load_groups() 382 | 383 | self.assertEqual(len(manager.groups), 0) 384 | 385 | def test_load_groups_with_files(self): 386 | """Test loading groups from files.""" 387 | # Create test group files 388 | group_files = self.file_manager.create_group_files({ 389 | "Domain Admins": ["DOMAIN\\admin1", "DOMAIN\\admin2"], 390 | "Enterprise Admins": ["DOMAIN\\admin1", "DOMAIN\\superadmin"] 391 | }) 392 | 393 | config = Config( 394 | ntds_file="test.ntds", 395 | cracked_file="test.pot", 396 | min_password_length=8, 397 | groups_directory=str(self.temp_dir) 398 | ) 399 | 400 | manager = GroupManager(config) 401 | manager.load_groups() 402 | 403 | self.assertEqual(len(manager.groups), 2) 404 | group_names = [group[0] for group in manager.groups] 405 | self.assertIn("Domain Admins", group_names) 406 | self.assertIn("Enterprise Admins", group_names) 407 | 408 | def test_load_group_members(self): 409 | """Test loading group members.""" 410 | # Create test group files 411 | group_files = self.file_manager.create_group_files({ 412 | "Domain 
Admins": ["DOMAIN\\admin1", "DOMAIN\\admin2"], 413 | "Enterprise Admins": ["DOMAIN\\admin1", "DOMAIN\\superadmin"] 414 | }) 415 | 416 | config = Config( 417 | ntds_file="test.ntds", 418 | cracked_file="test.pot", 419 | min_password_length=8, 420 | groups_directory=str(self.temp_dir) 421 | ) 422 | 423 | manager = GroupManager(config) 424 | manager.load_groups() 425 | manager.load_group_members() 426 | 427 | self.assertIn("Domain Admins", manager.group_users) 428 | self.assertIn("Enterprise Admins", manager.group_users) 429 | self.assertEqual(len(manager.group_users["Domain Admins"]), 2) 430 | self.assertEqual(len(manager.group_users["Enterprise Admins"]), 2) 431 | 432 | 433 | class TestCrackedPasswordProcessor(DPATTestCase): 434 | """Test the CrackedPasswordProcessor class.""" 435 | 436 | def test_cracked_password_processor_creation(self): 437 | """Test cracked password processor creation.""" 438 | config = Config( 439 | ntds_file="test.ntds", 440 | cracked_file="test.pot", 441 | min_password_length=8 442 | ) 443 | 444 | db_manager = DatabaseManager(config) 445 | processor = CrackedPasswordProcessor(config, db_manager) 446 | 447 | self.assertEqual(processor.config, config) 448 | self.assertEqual(processor.db_manager, db_manager) 449 | 450 | def test_process_cracked_line_nt_hash(self): 451 | """Test processing NT hash cracked line.""" 452 | config = Config( 453 | ntds_file="test.ntds", 454 | cracked_file="test.pot", 455 | min_password_length=8 456 | ) 457 | 458 | db_manager = DatabaseManager(config) 459 | db_manager.create_schema([]) 460 | 461 | # Insert test data 462 | cursor = db_manager.cursor 463 | cursor.execute(''' 464 | INSERT INTO hash_infos (username_full, username, nt_hash) 465 | VALUES (?, ?, ?) 466 | ''', ("DOMAIN\\user1", "user1", "31d6cfe0d16ae931b73c59d7e0c089c0")) 467 | 468 | processor = CrackedPasswordProcessor(config, db_manager) 469 | processor._process_cracked_line("31d6cfe0d16ae931b73c59d7e0c089c0:password123") 470 | 471 | # Check that password was updated 472 | cursor.execute("SELECT password FROM hash_infos WHERE nt_hash = ?", 473 | ("31d6cfe0d16ae931b73c59d7e0c089c0",)) 474 | result = cursor.fetchone() 475 | self.assertEqual(result[0], "password123") 476 | 477 | def test_process_cracked_line_lm_hash(self): 478 | """Test processing LM hash cracked line.""" 479 | config = Config( 480 | ntds_file="test.ntds", 481 | cracked_file="test.pot", 482 | min_password_length=8 483 | ) 484 | 485 | db_manager = DatabaseManager(config) 486 | db_manager.create_schema([]) 487 | 488 | # Insert test data 489 | cursor = db_manager.cursor 490 | cursor.execute(''' 491 | INSERT INTO hash_infos (username_full, username, lm_hash_left, lm_hash_right) 492 | VALUES (?, ?, ?, ?) 
493 | ''', ("DOMAIN\\user1", "user1", "aad3b435b51404ee", "aad3b435b51404ee")) 494 | 495 | processor = CrackedPasswordProcessor(config, db_manager) 496 | processor._process_cracked_line("aad3b435b51404ee:password") 497 | 498 | # Check that LM password was updated 499 | cursor.execute("SELECT lm_pass_left FROM hash_infos WHERE lm_hash_left = ?", 500 | ("aad3b435b51404ee",)) 501 | result = cursor.fetchone() 502 | self.assertEqual(result[0], "password") 503 | 504 | def test_decode_hex_password(self): 505 | """Test hex password decoding.""" 506 | config = Config( 507 | ntds_file="test.ntds", 508 | cracked_file="test.pot", 509 | min_password_length=8 510 | ) 511 | 512 | db_manager = DatabaseManager(config) 513 | processor = CrackedPasswordProcessor(config, db_manager) 514 | 515 | # Test hex decoding 516 | hex_password = "$HEX[68656c6c6f]" 517 | result = processor._decode_hex_password(hex_password) 518 | self.assertEqual(result, "hello") 519 | 520 | # Test non-hex password 521 | normal_password = "password123" 522 | result = processor._decode_hex_password(normal_password) 523 | self.assertEqual(result, "password123") 524 | 525 | 526 | class TestUtilityFunctions(DPATTestCase): 527 | """Test utility functions.""" 528 | 529 | def test_calculate_percentage(self): 530 | """Test percentage calculation.""" 531 | # Normal case 532 | result = calculate_percentage(25, 100) 533 | self.assertEqual(result, 25.0) 534 | 535 | # Zero division case 536 | result = calculate_percentage(10, 0) 537 | self.assertEqual(result, 0.0) 538 | 539 | # Rounding case 540 | result = calculate_percentage(1, 3) 541 | self.assertEqual(result, 33.33) 542 | 543 | 544 | 545 | if __name__ == '__main__': 546 | unittest.main() 547 | -------------------------------------------------------------------------------- /poetry.lock: -------------------------------------------------------------------------------- 1 | # This file is automatically @generated by Poetry 2.1.3 and should not be changed by hand. 2 | 3 | [[package]] 4 | name = "colorama" 5 | version = "0.4.6" 6 | description = "Cross-platform colored terminal text." 
7 | optional = false 8 | python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" 9 | groups = ["dev"] 10 | markers = "sys_platform == \"win32\"" 11 | files = [ 12 | {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, 13 | {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, 14 | ] 15 | 16 | [[package]] 17 | name = "coverage" 18 | version = "7.6.1" 19 | description = "Code coverage measurement for Python" 20 | optional = false 21 | python-versions = ">=3.8" 22 | groups = ["dev"] 23 | files = [ 24 | {file = "coverage-7.6.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b06079abebbc0e89e6163b8e8f0e16270124c154dc6e4a47b413dd538859af16"}, 25 | {file = "coverage-7.6.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:cf4b19715bccd7ee27b6b120e7e9dd56037b9c0681dcc1adc9ba9db3d417fa36"}, 26 | {file = "coverage-7.6.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61c0abb4c85b095a784ef23fdd4aede7a2628478e7baba7c5e3deba61070a02"}, 27 | {file = "coverage-7.6.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fd21f6ae3f08b41004dfb433fa895d858f3f5979e7762d052b12aef444e29afc"}, 28 | {file = "coverage-7.6.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f59d57baca39b32db42b83b2a7ba6f47ad9c394ec2076b084c3f029b7afca23"}, 29 | {file = "coverage-7.6.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a1ac0ae2b8bd743b88ed0502544847c3053d7171a3cff9228af618a068ed9c34"}, 30 | {file = "coverage-7.6.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e6a08c0be454c3b3beb105c0596ebdc2371fab6bb90c0c0297f4e58fd7e1012c"}, 31 | {file = "coverage-7.6.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f5796e664fe802da4f57a168c85359a8fbf3eab5e55cd4e4569fbacecc903959"}, 32 | {file = "coverage-7.6.1-cp310-cp310-win32.whl", hash = "sha256:7bb65125fcbef8d989fa1dd0e8a060999497629ca5b0efbca209588a73356232"}, 33 | {file = "coverage-7.6.1-cp310-cp310-win_amd64.whl", hash = "sha256:3115a95daa9bdba70aea750db7b96b37259a81a709223c8448fa97727d546fe0"}, 34 | {file = "coverage-7.6.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7dea0889685db8550f839fa202744652e87c60015029ce3f60e006f8c4462c93"}, 35 | {file = "coverage-7.6.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ed37bd3c3b063412f7620464a9ac1314d33100329f39799255fb8d3027da50d3"}, 36 | {file = "coverage-7.6.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d85f5e9a5f8b73e2350097c3756ef7e785f55bd71205defa0bfdaf96c31616ff"}, 37 | {file = "coverage-7.6.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bc572be474cafb617672c43fe989d6e48d3c83af02ce8de73fff1c6bb3c198d"}, 38 | {file = "coverage-7.6.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c0420b573964c760df9e9e86d1a9a622d0d27f417e1a949a8a66dd7bcee7bc6"}, 39 | {file = "coverage-7.6.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1f4aa8219db826ce6be7099d559f8ec311549bfc4046f7f9fe9b5cea5c581c56"}, 40 | {file = "coverage-7.6.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:fc5a77d0c516700ebad189b587de289a20a78324bc54baee03dd486f0855d234"}, 41 | {file = "coverage-7.6.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = 
"sha256:b48f312cca9621272ae49008c7f613337c53fadca647d6384cc129d2996d1133"}, 42 | {file = "coverage-7.6.1-cp311-cp311-win32.whl", hash = "sha256:1125ca0e5fd475cbbba3bb67ae20bd2c23a98fac4e32412883f9bcbaa81c314c"}, 43 | {file = "coverage-7.6.1-cp311-cp311-win_amd64.whl", hash = "sha256:8ae539519c4c040c5ffd0632784e21b2f03fc1340752af711f33e5be83a9d6c6"}, 44 | {file = "coverage-7.6.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:95cae0efeb032af8458fc27d191f85d1717b1d4e49f7cb226cf526ff28179778"}, 45 | {file = "coverage-7.6.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5621a9175cf9d0b0c84c2ef2b12e9f5f5071357c4d2ea6ca1cf01814f45d2391"}, 46 | {file = "coverage-7.6.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:260933720fdcd75340e7dbe9060655aff3af1f0c5d20f46b57f262ab6c86a5e8"}, 47 | {file = "coverage-7.6.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07e2ca0ad381b91350c0ed49d52699b625aab2b44b65e1b4e02fa9df0e92ad2d"}, 48 | {file = "coverage-7.6.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c44fee9975f04b33331cb8eb272827111efc8930cfd582e0320613263ca849ca"}, 49 | {file = "coverage-7.6.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:877abb17e6339d96bf08e7a622d05095e72b71f8afd8a9fefc82cf30ed944163"}, 50 | {file = "coverage-7.6.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:3e0cadcf6733c09154b461f1ca72d5416635e5e4ec4e536192180d34ec160f8a"}, 51 | {file = "coverage-7.6.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c3c02d12f837d9683e5ab2f3d9844dc57655b92c74e286c262e0fc54213c216d"}, 52 | {file = "coverage-7.6.1-cp312-cp312-win32.whl", hash = "sha256:e05882b70b87a18d937ca6768ff33cc3f72847cbc4de4491c8e73880766718e5"}, 53 | {file = "coverage-7.6.1-cp312-cp312-win_amd64.whl", hash = "sha256:b5d7b556859dd85f3a541db6a4e0167b86e7273e1cdc973e5b175166bb634fdb"}, 54 | {file = "coverage-7.6.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:a4acd025ecc06185ba2b801f2de85546e0b8ac787cf9d3b06e7e2a69f925b106"}, 55 | {file = "coverage-7.6.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a6d3adcf24b624a7b778533480e32434a39ad8fa30c315208f6d3e5542aeb6e9"}, 56 | {file = "coverage-7.6.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0c212c49b6c10e6951362f7c6df3329f04c2b1c28499563d4035d964ab8e08c"}, 57 | {file = "coverage-7.6.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6e81d7a3e58882450ec4186ca59a3f20a5d4440f25b1cff6f0902ad890e6748a"}, 58 | {file = "coverage-7.6.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78b260de9790fd81e69401c2dc8b17da47c8038176a79092a89cb2b7d945d060"}, 59 | {file = "coverage-7.6.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a78d169acd38300060b28d600344a803628c3fd585c912cacc9ea8790fe96862"}, 60 | {file = "coverage-7.6.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:2c09f4ce52cb99dd7505cd0fc8e0e37c77b87f46bc9c1eb03fe3bc9991085388"}, 61 | {file = "coverage-7.6.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6878ef48d4227aace338d88c48738a4258213cd7b74fd9a3d4d7582bb1d8a155"}, 62 | {file = "coverage-7.6.1-cp313-cp313-win32.whl", hash = "sha256:44df346d5215a8c0e360307d46ffaabe0f5d3502c8a1cefd700b34baf31d411a"}, 63 | {file = "coverage-7.6.1-cp313-cp313-win_amd64.whl", hash = 
"sha256:8284cf8c0dd272a247bc154eb6c95548722dce90d098c17a883ed36e67cdb129"}, 64 | {file = "coverage-7.6.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:d3296782ca4eab572a1a4eca686d8bfb00226300dcefdf43faa25b5242ab8a3e"}, 65 | {file = "coverage-7.6.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:502753043567491d3ff6d08629270127e0c31d4184c4c8d98f92c26f65019962"}, 66 | {file = "coverage-7.6.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6a89ecca80709d4076b95f89f308544ec8f7b4727e8a547913a35f16717856cb"}, 67 | {file = "coverage-7.6.1-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a318d68e92e80af8b00fa99609796fdbcdfef3629c77c6283566c6f02c6d6704"}, 68 | {file = "coverage-7.6.1-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:13b0a73a0896988f053e4fbb7de6d93388e6dd292b0d87ee51d106f2c11b465b"}, 69 | {file = "coverage-7.6.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:4421712dbfc5562150f7554f13dde997a2e932a6b5f352edcce948a815efee6f"}, 70 | {file = "coverage-7.6.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:166811d20dfea725e2e4baa71fffd6c968a958577848d2131f39b60043400223"}, 71 | {file = "coverage-7.6.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:225667980479a17db1048cb2bf8bfb39b8e5be8f164b8f6628b64f78a72cf9d3"}, 72 | {file = "coverage-7.6.1-cp313-cp313t-win32.whl", hash = "sha256:170d444ab405852903b7d04ea9ae9b98f98ab6d7e63e1115e82620807519797f"}, 73 | {file = "coverage-7.6.1-cp313-cp313t-win_amd64.whl", hash = "sha256:b9f222de8cded79c49bf184bdbc06630d4c58eec9459b939b4a690c82ed05657"}, 74 | {file = "coverage-7.6.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6db04803b6c7291985a761004e9060b2bca08da6d04f26a7f2294b8623a0c1a0"}, 75 | {file = "coverage-7.6.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:f1adfc8ac319e1a348af294106bc6a8458a0f1633cc62a1446aebc30c5fa186a"}, 76 | {file = "coverage-7.6.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a95324a9de9650a729239daea117df21f4b9868ce32e63f8b650ebe6cef5595b"}, 77 | {file = "coverage-7.6.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b43c03669dc4618ec25270b06ecd3ee4fa94c7f9b3c14bae6571ca00ef98b0d3"}, 78 | {file = "coverage-7.6.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8929543a7192c13d177b770008bc4e8119f2e1f881d563fc6b6305d2d0ebe9de"}, 79 | {file = "coverage-7.6.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:a09ece4a69cf399510c8ab25e0950d9cf2b42f7b3cb0374f95d2e2ff594478a6"}, 80 | {file = "coverage-7.6.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:9054a0754de38d9dbd01a46621636689124d666bad1936d76c0341f7d71bf569"}, 81 | {file = "coverage-7.6.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:0dbde0f4aa9a16fa4d754356a8f2e36296ff4d83994b2c9d8398aa32f222f989"}, 82 | {file = "coverage-7.6.1-cp38-cp38-win32.whl", hash = "sha256:da511e6ad4f7323ee5702e6633085fb76c2f893aaf8ce4c51a0ba4fc07580ea7"}, 83 | {file = "coverage-7.6.1-cp38-cp38-win_amd64.whl", hash = "sha256:3f1156e3e8f2872197af3840d8ad307a9dd18e615dc64d9ee41696f287c57ad8"}, 84 | {file = "coverage-7.6.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:abd5fd0db5f4dc9289408aaf34908072f805ff7792632250dcb36dc591d24255"}, 85 | {file = "coverage-7.6.1-cp39-cp39-macosx_11_0_arm64.whl", hash = 
"sha256:547f45fa1a93154bd82050a7f3cddbc1a7a4dd2a9bf5cb7d06f4ae29fe94eaf8"}, 86 | {file = "coverage-7.6.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:645786266c8f18a931b65bfcefdbf6952dd0dea98feee39bd188607a9d307ed2"}, 87 | {file = "coverage-7.6.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9e0b2df163b8ed01d515807af24f63de04bebcecbd6c3bfeff88385789fdf75a"}, 88 | {file = "coverage-7.6.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:609b06f178fe8e9f89ef676532760ec0b4deea15e9969bf754b37f7c40326dbc"}, 89 | {file = "coverage-7.6.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:702855feff378050ae4f741045e19a32d57d19f3e0676d589df0575008ea5004"}, 90 | {file = "coverage-7.6.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:2bdb062ea438f22d99cba0d7829c2ef0af1d768d1e4a4f528087224c90b132cb"}, 91 | {file = "coverage-7.6.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:9c56863d44bd1c4fe2abb8a4d6f5371d197f1ac0ebdee542f07f35895fc07f36"}, 92 | {file = "coverage-7.6.1-cp39-cp39-win32.whl", hash = "sha256:6e2cd258d7d927d09493c8df1ce9174ad01b381d4729a9d8d4e38670ca24774c"}, 93 | {file = "coverage-7.6.1-cp39-cp39-win_amd64.whl", hash = "sha256:06a737c882bd26d0d6ee7269b20b12f14a8704807a01056c80bb881a4b2ce6ca"}, 94 | {file = "coverage-7.6.1-pp38.pp39.pp310-none-any.whl", hash = "sha256:e9a6e0eb86070e8ccaedfbd9d38fec54864f3125ab95419970575b42af7541df"}, 95 | {file = "coverage-7.6.1.tar.gz", hash = "sha256:953510dfb7b12ab69d20135a0662397f077c59b1e6379a768e97c59d852ee51d"}, 96 | ] 97 | 98 | [package.dependencies] 99 | tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.11.0a6\" and extra == \"toml\""} 100 | 101 | [package.extras] 102 | toml = ["tomli ; python_full_version <= \"3.11.0a6\""] 103 | 104 | [[package]] 105 | name = "exceptiongroup" 106 | version = "1.3.0" 107 | description = "Backport of PEP 654 (exception groups)" 108 | optional = false 109 | python-versions = ">=3.7" 110 | groups = ["dev"] 111 | markers = "python_version < \"3.11\"" 112 | files = [ 113 | {file = "exceptiongroup-1.3.0-py3-none-any.whl", hash = "sha256:4d111e6e0c13d0644cad6ddaa7ed0261a0b36971f6d23e7ec9b4b9097da78a10"}, 114 | {file = "exceptiongroup-1.3.0.tar.gz", hash = "sha256:b241f5885f560bc56a59ee63ca4c6a8bfa46ae4ad651af316d4e81817bb9fd88"}, 115 | ] 116 | 117 | [package.dependencies] 118 | typing-extensions = {version = ">=4.6.0", markers = "python_version < \"3.13\""} 119 | 120 | [package.extras] 121 | test = ["pytest (>=6)"] 122 | 123 | [[package]] 124 | name = "iniconfig" 125 | version = "2.1.0" 126 | description = "brain-dead simple config-ini parsing" 127 | optional = false 128 | python-versions = ">=3.8" 129 | groups = ["dev"] 130 | files = [ 131 | {file = "iniconfig-2.1.0-py3-none-any.whl", hash = "sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760"}, 132 | {file = "iniconfig-2.1.0.tar.gz", hash = "sha256:3abbd2e30b36733fee78f9c7f7308f2d0050e88f0087fd25c2645f63c773e1c7"}, 133 | ] 134 | 135 | [[package]] 136 | name = "packaging" 137 | version = "25.0" 138 | description = "Core utilities for Python packages" 139 | optional = false 140 | python-versions = ">=3.8" 141 | groups = ["dev"] 142 | files = [ 143 | {file = "packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484"}, 144 | {file = "packaging-25.0.tar.gz", hash = 
"sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f"}, 145 | ] 146 | 147 | [[package]] 148 | name = "pluggy" 149 | version = "1.5.0" 150 | description = "plugin and hook calling mechanisms for python" 151 | optional = false 152 | python-versions = ">=3.8" 153 | groups = ["dev"] 154 | files = [ 155 | {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, 156 | {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, 157 | ] 158 | 159 | [package.extras] 160 | dev = ["pre-commit", "tox"] 161 | testing = ["pytest", "pytest-benchmark"] 162 | 163 | [[package]] 164 | name = "pytest" 165 | version = "8.3.5" 166 | description = "pytest: simple powerful testing with Python" 167 | optional = false 168 | python-versions = ">=3.8" 169 | groups = ["dev"] 170 | files = [ 171 | {file = "pytest-8.3.5-py3-none-any.whl", hash = "sha256:c69214aa47deac29fad6c2a4f590b9c4a9fdb16a403176fe154b79c0b4d4d820"}, 172 | {file = "pytest-8.3.5.tar.gz", hash = "sha256:f4efe70cc14e511565ac476b57c279e12a855b11f48f212af1080ef2263d3845"}, 173 | ] 174 | 175 | [package.dependencies] 176 | colorama = {version = "*", markers = "sys_platform == \"win32\""} 177 | exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} 178 | iniconfig = "*" 179 | packaging = "*" 180 | pluggy = ">=1.5,<2" 181 | tomli = {version = ">=1", markers = "python_version < \"3.11\""} 182 | 183 | [package.extras] 184 | dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] 185 | 186 | [[package]] 187 | name = "pytest-cov" 188 | version = "5.0.0" 189 | description = "Pytest plugin for measuring coverage." 
190 | optional = false 191 | python-versions = ">=3.8" 192 | groups = ["dev"] 193 | files = [ 194 | {file = "pytest-cov-5.0.0.tar.gz", hash = "sha256:5837b58e9f6ebd335b0f8060eecce69b662415b16dc503883a02f45dfeb14857"}, 195 | {file = "pytest_cov-5.0.0-py3-none-any.whl", hash = "sha256:4f0764a1219df53214206bf1feea4633c3b558a2925c8b59f144f682861ce652"}, 196 | ] 197 | 198 | [package.dependencies] 199 | coverage = {version = ">=5.2.1", extras = ["toml"]} 200 | pytest = ">=4.6" 201 | 202 | [package.extras] 203 | testing = ["fields", "hunter", "process-tests", "pytest-xdist", "virtualenv"] 204 | 205 | [[package]] 206 | name = "pytest-mock" 207 | version = "3.14.1" 208 | description = "Thin-wrapper around the mock package for easier use with pytest" 209 | optional = false 210 | python-versions = ">=3.8" 211 | groups = ["dev"] 212 | files = [ 213 | {file = "pytest_mock-3.14.1-py3-none-any.whl", hash = "sha256:178aefcd11307d874b4cd3100344e7e2d888d9791a6a1d9bfe90fbc1b74fd1d0"}, 214 | {file = "pytest_mock-3.14.1.tar.gz", hash = "sha256:159e9edac4c451ce77a5cdb9fc5d1100708d2dd4ba3c3df572f14097351af80e"}, 215 | ] 216 | 217 | [package.dependencies] 218 | pytest = ">=6.2.5" 219 | 220 | [package.extras] 221 | dev = ["pre-commit", "pytest-asyncio", "tox"] 222 | 223 | [[package]] 224 | name = "tomli" 225 | version = "2.2.1" 226 | description = "A lil' TOML parser" 227 | optional = false 228 | python-versions = ">=3.8" 229 | groups = ["dev"] 230 | markers = "python_full_version <= \"3.11.0a6\"" 231 | files = [ 232 | {file = "tomli-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249"}, 233 | {file = "tomli-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6"}, 234 | {file = "tomli-2.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a"}, 235 | {file = "tomli-2.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6972ca9c9cc9f0acaa56a8ca1ff51e7af152a9f87fb64623e31d5c83700080ee"}, 236 | {file = "tomli-2.2.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c954d2250168d28797dd4e3ac5cf812a406cd5a92674ee4c8f123c889786aa8e"}, 237 | {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8dd28b3e155b80f4d54beb40a441d366adcfe740969820caf156c019fb5c7ec4"}, 238 | {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e59e304978767a54663af13c07b3d1af22ddee3bb2fb0618ca1593e4f593a106"}, 239 | {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:33580bccab0338d00994d7f16f4c4ec25b776af3ffaac1ed74e0b3fc95e885a8"}, 240 | {file = "tomli-2.2.1-cp311-cp311-win32.whl", hash = "sha256:465af0e0875402f1d226519c9904f37254b3045fc5084697cefb9bdde1ff99ff"}, 241 | {file = "tomli-2.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:2d0f2fdd22b02c6d81637a3c95f8cd77f995846af7414c5c4b8d0545afa1bc4b"}, 242 | {file = "tomli-2.2.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4a8f6e44de52d5e6c657c9fe83b562f5f4256d8ebbfe4ff922c495620a7f6cea"}, 243 | {file = "tomli-2.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8d57ca8095a641b8237d5b079147646153d22552f1c637fd3ba7f4b0b29167a8"}, 244 | {file = "tomli-2.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e340144ad7ae1533cb897d406382b4b6fede8890a03738ff1683af800d54192"}, 245 | {file = 
"tomli-2.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db2b95f9de79181805df90bedc5a5ab4c165e6ec3fe99f970d0e302f384ad222"}, 246 | {file = "tomli-2.2.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40741994320b232529c802f8bc86da4e1aa9f413db394617b9a256ae0f9a7f77"}, 247 | {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:400e720fe168c0f8521520190686ef8ef033fb19fc493da09779e592861b78c6"}, 248 | {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd"}, 249 | {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b82ebccc8c8a36f2094e969560a1b836758481f3dc360ce9a3277c65f374285e"}, 250 | {file = "tomli-2.2.1-cp312-cp312-win32.whl", hash = "sha256:889f80ef92701b9dbb224e49ec87c645ce5df3fa2cc548664eb8a25e03127a98"}, 251 | {file = "tomli-2.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:7fc04e92e1d624a4a63c76474610238576942d6b8950a2d7f908a340494e67e4"}, 252 | {file = "tomli-2.2.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7"}, 253 | {file = "tomli-2.2.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:286f0ca2ffeeb5b9bd4fcc8d6c330534323ec51b2f52da063b11c502da16f30c"}, 254 | {file = "tomli-2.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a92ef1a44547e894e2a17d24e7557a5e85a9e1d0048b0b5e7541f76c5032cb13"}, 255 | {file = "tomli-2.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9316dc65bed1684c9a98ee68759ceaed29d229e985297003e494aa825ebb0281"}, 256 | {file = "tomli-2.2.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e85e99945e688e32d5a35c1ff38ed0b3f41f43fad8df0bdf79f72b2ba7bc5272"}, 257 | {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ac065718db92ca818f8d6141b5f66369833d4a80a9d74435a268c52bdfa73140"}, 258 | {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:d920f33822747519673ee656a4b6ac33e382eca9d331c87770faa3eef562aeb2"}, 259 | {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a198f10c4d1b1375d7687bc25294306e551bf1abfa4eace6650070a5c1ae2744"}, 260 | {file = "tomli-2.2.1-cp313-cp313-win32.whl", hash = "sha256:d3f5614314d758649ab2ab3a62d4f2004c825922f9e370b29416484086b264ec"}, 261 | {file = "tomli-2.2.1-cp313-cp313-win_amd64.whl", hash = "sha256:a38aa0308e754b0e3c67e344754dff64999ff9b513e691d0e786265c93583c69"}, 262 | {file = "tomli-2.2.1-py3-none-any.whl", hash = "sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc"}, 263 | {file = "tomli-2.2.1.tar.gz", hash = "sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff"}, 264 | ] 265 | 266 | [[package]] 267 | name = "typing-extensions" 268 | version = "4.13.2" 269 | description = "Backported and Experimental Type Hints for Python 3.8+" 270 | optional = false 271 | python-versions = ">=3.8" 272 | groups = ["dev"] 273 | markers = "python_version < \"3.11\"" 274 | files = [ 275 | {file = "typing_extensions-4.13.2-py3-none-any.whl", hash = "sha256:a439e7c04b49fec3e5d3e2beaa21755cadbbdc391694e28ccdd36ca4a1408f8c"}, 276 | {file = "typing_extensions-4.13.2.tar.gz", hash = "sha256:e6c81219bd689f51865d9e372991c540bda33a0379d5573cddb9a3a23f7caaef"}, 277 | ] 278 | 279 | [metadata] 280 | lock-version = "2.1" 281 | python-versions = "^3.8" 282 
| content-hash = "96176c9214534068f3ea7c00be51c23e9d99a8b4862eed2791d0a0cb449ad1e0" 283 | -------------------------------------------------------------------------------- /sample_data/base/first.txt: -------------------------------------------------------------------------------- 1 | AARON 2 | ABBIE 3 | ABBY 4 | ABEL 5 | ABIGAIL 6 | ABRAHAM 7 | ADA 8 | ADAM 9 | ADAN 10 | ADDIE 11 | ADELA 12 | ADELAIDE 13 | ADELE 14 | ADELINE 15 | ADOLFO 16 | ADOLPH 17 | ADRIAN 18 | ADRIANA 19 | ADRIENNE 20 | AGNES 21 | AGUSTIN 22 | AIDA 23 | AILEEN 24 | AIMEE 25 | AISHA 26 | AL 27 | ALAN 28 | ALANA 29 | ALBA 30 | ALBERT 31 | ALBERTA 32 | ALBERTO 33 | ALEJANDRA 34 | ALEJANDRO 35 | ALEX 36 | ALEXANDER 37 | ALEXANDRA 38 | ALEXANDRIA 39 | ALEXIS 40 | ALFONSO 41 | ALFRED 42 | ALFREDA 43 | ALFREDO 44 | ALI 45 | ALICE 46 | ALICIA 47 | ALINE 48 | ALISA 49 | ALISHA 50 | ALISON 51 | ALISSA 52 | ALLAN 53 | ALLEN 54 | ALLENE 55 | ALLIE 56 | ALLISON 57 | ALLYSON 58 | ALMA 59 | ALONZO 60 | ALPHONSO 61 | ALTA 62 | ALTHEA 63 | ALTON 64 | ALVARO 65 | ALVIN 66 | ALYCE 67 | ALYSON 68 | ALYSSA 69 | AMALIA 70 | AMANDA 71 | AMBER 72 | AMELIA 73 | AMIE 74 | AMOS 75 | AMPARO 76 | AMY 77 | ANA 78 | ANASTASIA 79 | ANDRE 80 | ANDREA 81 | ANDRES 82 | ANDREW 83 | ANDY 84 | ANGEL 85 | ANGELA 86 | ANGELIA 87 | ANGELICA 88 | ANGELINA 89 | ANGELINE 90 | ANGELIQUE 91 | ANGELITA 92 | ANGELO 93 | ANGIE 94 | ANITA 95 | ANN 96 | ANNA 97 | ANNABELLE 98 | ANNE 99 | ANNETTE 100 | ANNIE 101 | ANNMARIE 102 | ANTHONY 103 | ANTIONETTE 104 | ANTOINE 105 | ANTOINETTE 106 | ANTON 107 | ANTONIA 108 | ANTONIO 109 | ANTONY 110 | APRIL 111 | ARACELI 112 | ARCHIE 113 | ARLENE 114 | ARLINE 115 | ARMAND 116 | ARMANDO 117 | ARNOLD 118 | ARRON 119 | ART 120 | ARTHUR 121 | ARTURO 122 | ASHLEE 123 | ASHLEIGH 124 | ASHLEY 125 | AUBREY 126 | AUDRA 127 | AUDREY 128 | AUGUST 129 | AUGUSTA 130 | AURELIA 131 | AURELIO 132 | AURORA 133 | AUSTIN 134 | AUTUMN 135 | AVA 136 | AVERY 137 | AVIS 138 | BARBARA 139 | BARBRA 140 | BARNEY 141 | BARRY 142 | BART 143 | BASIL 144 | BEATRICE 145 | BEATRIZ 146 | BEAU 147 | BECKY 148 | BELINDA 149 | BEN 150 | BENITA 151 | BENITO 152 | BENJAMIN 153 | BENNETT 154 | BENNIE 155 | BENNY 156 | BERNADETTE 157 | BERNADINE 158 | BERNARD 159 | BERNARDO 160 | BERNICE 161 | BERNIE 162 | BERT 163 | BERTA 164 | BERTHA 165 | BERTIE 166 | BERYL 167 | BESSIE 168 | BETH 169 | BETHANY 170 | BETSY 171 | BETTE 172 | BETTIE 173 | BETTY 174 | BETTYE 175 | BEULAH 176 | BEVERLEY 177 | BEVERLY 178 | BIANCA 179 | BILL 180 | BILLIE 181 | BILLY 182 | BLAINE 183 | BLAIR 184 | BLAKE 185 | BLANCA 186 | BLANCHE 187 | BOB 188 | BOBBI 189 | BOBBIE 190 | BOBBY 191 | BONITA 192 | BONNIE 193 | BOOKER 194 | BOYD 195 | BRAD 196 | BRADFORD 197 | BRADLEY 198 | BRADY 199 | BRAIN 200 | BRANDEN 201 | BRANDI 202 | BRANDIE 203 | BRANDON 204 | BRANDY 205 | BRENDA 206 | BRENDAN 207 | BRENT 208 | BRET 209 | BRETT 210 | BRIAN 211 | BRIANA 212 | BRIANNA 213 | BRIDGET 214 | BRIDGETT 215 | BRIDGETTE 216 | BRIGITTE 217 | BRITNEY 218 | BRITTANY 219 | BRITTNEY 220 | BROCK 221 | BROOKE 222 | BRUCE 223 | BRUNO 224 | BRYAN 225 | BRYANT 226 | BRYCE 227 | BRYON 228 | BUDDY 229 | BUFORD 230 | BURTON 231 | BYRON 232 | CAITLIN 233 | CALEB 234 | CALLIE 235 | CALVIN 236 | CAMERON 237 | CAMILLA 238 | CAMILLE 239 | CANDACE 240 | CANDICE 241 | CANDY 242 | CARA 243 | CAREY 244 | CARISSA 245 | CARL 246 | CARLA 247 | CARLENE 248 | CARLO 249 | CARLOS 250 | CARLTON 251 | CARLY 252 | CARMELA 253 | CARMELLA 254 | CARMELO 255 | CARMEN 256 | CAROL 257 | CAROLE 258 | CAROLINA 259 | CAROLINE 260 | CAROLYN 261 | 
CARRIE 262 | CARROLL 263 | CARSON 264 | CARTER 265 | CARY 266 | CARYN 267 | CASANDRA 268 | CASEY 269 | CASSANDRA 270 | CASSIE 271 | CATALINA 272 | CATHERINE 273 | CATHLEEN 274 | CATHRYN 275 | CATHY 276 | CECELIA 277 | CECIL 278 | CECILE 279 | CECILIA 280 | CEDRIC 281 | CELESTE 282 | CELIA 283 | CELINA 284 | CESAR 285 | CHAD 286 | CHANDRA 287 | CHARITY 288 | CHARLENE 289 | CHARLES 290 | CHARLEY 291 | CHARLIE 292 | CHARLOTTE 293 | CHARMAINE 294 | CHASE 295 | CHASITY 296 | CHELSEA 297 | CHELSEY 298 | CHERI 299 | CHERIE 300 | CHERRY 301 | CHERYL 302 | CHESTER 303 | CHRIS 304 | CHRISTA 305 | CHRISTI 306 | CHRISTIAN 307 | CHRISTIE 308 | CHRISTINA 309 | CHRISTINE 310 | CHRISTOPHER 311 | CHRISTY 312 | CHRYSTAL 313 | CHUCK 314 | CINDY 315 | CLAIR 316 | CLAIRE 317 | CLARA 318 | CLARE 319 | CLARENCE 320 | CLARICE 321 | CLARISSA 322 | CLARK 323 | CLAUDE 324 | CLAUDETTE 325 | CLAUDIA 326 | CLAUDINE 327 | CLAY 328 | CLAYTON 329 | CLEMENT 330 | CLEO 331 | CLEVELAND 332 | CLIFF 333 | CLIFFORD 334 | CLIFTON 335 | CLINT 336 | CLINTON 337 | CLYDE 338 | CODY 339 | COLBY 340 | COLE 341 | COLEEN 342 | COLETTE 343 | COLIN 344 | COLLEEN 345 | COLLIN 346 | CONCEPCION 347 | CONCETTA 348 | CONNIE 349 | CONRAD 350 | CONSTANCE 351 | CONSUELO 352 | CORA 353 | COREY 354 | CORINA 355 | CORINE 356 | CORINNE 357 | CORNELIA 358 | CORNELIUS 359 | CORNELL 360 | CORRINE 361 | CORTNEY 362 | CORY 363 | COURTNEY 364 | COY 365 | CRAIG 366 | CRISTINA 367 | CRUZ 368 | CRYSTAL 369 | CURT 370 | CURTIS 371 | CYNTHIA 372 | DAISY 373 | DALE 374 | DALLAS 375 | DALTON 376 | DAMIAN 377 | DAMIEN 378 | DAMON 379 | DAN 380 | DANA 381 | DANE 382 | DANIAL 383 | DANIEL 384 | DANIELLE 385 | DANNY 386 | DANTE 387 | DAPHNE 388 | DARCY 389 | DAREN 390 | DARIN 391 | DARIUS 392 | DARLA 393 | DARLENE 394 | DARNELL 395 | DARREL 396 | DARRELL 397 | DARREN 398 | DARRIN 399 | DARRYL 400 | DARWIN 401 | DARYL 402 | DAVE 403 | DAVID 404 | DAVIS 405 | DAWN 406 | DAYNA 407 | DEAN 408 | DEANA 409 | DEANN 410 | DEANNA 411 | DEANNE 412 | DEBBIE 413 | DEBORA 414 | DEBORAH 415 | DEBRA 416 | DEE 417 | DEENA 418 | DEIDRA 419 | DEIDRE 420 | DEIRDRE 421 | DELBERT 422 | DELIA 423 | DELLA 424 | DELMAR 425 | DELORES 426 | DELORIS 427 | DEMETRIUS 428 | DENA 429 | DENICE 430 | DENIS 431 | DENISE 432 | DENNIS 433 | DENNY 434 | DENVER 435 | DEREK 436 | DERICK 437 | DERRICK 438 | DESIREE 439 | DESMOND 440 | DESSIE 441 | DEVIN 442 | DEVON 443 | DEWAYNE 444 | DEWEY 445 | DEXTER 446 | DIANA 447 | DIANE 448 | DIANN 449 | DIANNA 450 | DIANNE 451 | DICK 452 | DIEGO 453 | DINA 454 | DION 455 | DIONNE 456 | DIRK 457 | DIXIE 458 | DOLLIE 459 | DOLLY 460 | DOLORES 461 | DOMINGO 462 | DOMINIC 463 | DOMINICK 464 | DOMINIQUE 465 | DON 466 | DONA 467 | DONALD 468 | DONNA 469 | DONNELL 470 | DONNIE 471 | DONNY 472 | DONOVAN 473 | DORA 474 | DOREEN 475 | DORETHA 476 | DORIS 477 | DOROTHEA 478 | DOROTHY 479 | DORTHY 480 | DOUG 481 | DOUGLAS 482 | DOYLE 483 | DREW 484 | DUANE 485 | DUDLEY 486 | DUSTIN 487 | DWAYNE 488 | DWIGHT 489 | DYLAN 490 | EARL 491 | EARLENE 492 | EARLINE 493 | EARNEST 494 | EARNESTINE 495 | EBONY 496 | ED 497 | EDDIE 498 | EDDY 499 | EDGAR 500 | EDITH 501 | EDMOND 502 | EDMUND 503 | EDNA 504 | EDUARDO 505 | EDWARD 506 | EDWARDO 507 | EDWIN 508 | EDWINA 509 | EDYTHE 510 | EFFIE 511 | EFRAIN 512 | EILEEN 513 | ELAINE 514 | ELBA 515 | ELBERT 516 | ELDA 517 | ELDON 518 | ELEANOR 519 | ELENA 520 | ELI 521 | ELIAS 522 | ELIJAH 523 | ELINOR 524 | ELISA 525 | ELISABETH 526 | ELISE 527 | ELISHA 528 | ELIZA 529 | ELIZABETH 530 | ELLA 531 | ELLEN 532 | ELLIOT 533 | ELLIOTT 534 | ELLIS 
535 | ELMA 536 | ELMER 537 | ELNORA 538 | ELOISE 539 | ELSA 540 | ELSIE 541 | ELTON 542 | ELVA 543 | ELVIA 544 | ELVIN 545 | ELVIRA 546 | ELVIS 547 | ELWOOD 548 | EMANUEL 549 | EMERSON 550 | EMERY 551 | EMIL 552 | EMILIA 553 | EMILIE 554 | EMILIO 555 | EMILY 556 | EMMA 557 | EMMANUEL 558 | EMMETT 559 | EMORY 560 | ENID 561 | ENRIQUE 562 | ERIC 563 | ERICA 564 | ERICK 565 | ERICKA 566 | ERIK 567 | ERIKA 568 | ERIN 569 | ERMA 570 | ERNA 571 | ERNEST 572 | ERNESTINE 573 | ERNESTO 574 | ERNIE 575 | ERROL 576 | ERVIN 577 | ERWIN 578 | ESMERALDA 579 | ESPERANZA 580 | ESSIE 581 | ESTEBAN 582 | ESTELA 583 | ESTELLA 584 | ESTELLE 585 | ESTER 586 | ESTHER 587 | ETHAN 588 | ETHEL 589 | ETTA 590 | EUGENE 591 | EUGENIA 592 | EULA 593 | EUNICE 594 | EVA 595 | EVAN 596 | EVANGELINA 597 | EVANGELINE 598 | EVE 599 | EVELYN 600 | EVERETT 601 | FABIAN 602 | FAITH 603 | FANNIE 604 | FANNY 605 | FAY 606 | FAYE 607 | FEDERICO 608 | FELECIA 609 | FELICIA 610 | FELIPE 611 | FELIX 612 | FERN 613 | FERNANDO 614 | FIDEL 615 | FLETCHER 616 | FLORA 617 | FLORENCE 618 | FLORINE 619 | FLOSSIE 620 | FLOYD 621 | FORREST 622 | FRAN 623 | FRANCES 624 | FRANCESCA 625 | FRANCINE 626 | FRANCIS 627 | FRANCISCA 628 | FRANCISCO 629 | FRANK 630 | FRANKIE 631 | FRANKLIN 632 | FRED 633 | FREDA 634 | FREDDIE 635 | FREDDY 636 | FREDERIC 637 | FREDERICK 638 | FREDRICK 639 | FREIDA 640 | FRIEDA 641 | GABRIEL 642 | GABRIELA 643 | GABRIELLE 644 | GAIL 645 | GALE 646 | GALEN 647 | GARLAND 648 | GARRETT 649 | GARRY 650 | GARY 651 | GAVIN 652 | GAY 653 | GAYLA 654 | GAYLE 655 | GENA 656 | GENARO 657 | GENE 658 | GENEVA 659 | GENEVIEVE 660 | GEOFFREY 661 | GEORGE 662 | GEORGETTE 663 | GEORGIA 664 | GEORGINA 665 | GERALD 666 | GERALDINE 667 | GERARD 668 | GERARDO 669 | GERI 670 | GERMAINE 671 | GERMAN 672 | GERRY 673 | GERTRUDE 674 | GILBERT 675 | GILBERTO 676 | GILDA 677 | GINA 678 | GINGER 679 | GLADYS 680 | GLEN 681 | GLENDA 682 | GLENN 683 | GLENNA 684 | GLORIA 685 | GOLDIE 686 | GONZALO 687 | GORDON 688 | GRACE 689 | GRACIE 690 | GRACIELA 691 | GRADY 692 | GRAHAM 693 | GRANT 694 | GREG 695 | GREGG 696 | GREGORIO 697 | GREGORY 698 | GRETA 699 | GRETCHEN 700 | GROVER 701 | GUADALUPE 702 | GUILLERMO 703 | GUS 704 | GUSSIE 705 | GUSTAVO 706 | GUY 707 | GWEN 708 | GWENDOLYN 709 | HAL 710 | HALEY 711 | HALLIE 712 | HANNAH 713 | HANS 714 | HARLAN 715 | HARLEY 716 | HAROLD 717 | HARRIET 718 | HARRIETT 719 | HARRIS 720 | HARRISON 721 | HARRY 722 | HARVEY 723 | HATTIE 724 | HAZEL 725 | HEATH 726 | HEATHER 727 | HECTOR 728 | HEIDI 729 | HELEN 730 | HELENA 731 | HELENE 732 | HELGA 733 | HENRIETTA 734 | HENRY 735 | HERBERT 736 | HERIBERTO 737 | HERMAN 738 | HERMINIA 739 | HESTER 740 | HILARY 741 | HILDA 742 | HILLARY 743 | HIRAM 744 | HOLLIE 745 | HOLLIS 746 | HOLLY 747 | HOMER 748 | HOPE 749 | HORACE 750 | HOUSTON 751 | HOWARD 752 | HUBERT 753 | HUGH 754 | HUGO 755 | HUMBERTO 756 | HUNG 757 | HUNTER 758 | IAN 759 | IDA 760 | IGNACIO 761 | ILA 762 | ILENE 763 | IMELDA 764 | IMOGENE 765 | INA 766 | INES 767 | INEZ 768 | INGRID 769 | IRA 770 | IRENE 771 | IRIS 772 | IRMA 773 | IRVIN 774 | IRVING 775 | IRWIN 776 | ISAAC 777 | ISABEL 778 | ISABELLA 779 | ISABELLE 780 | ISAIAH 781 | ISIDRO 782 | ISMAEL 783 | ISRAEL 784 | ISSAC 785 | IVA 786 | IVAN 787 | IVY 788 | JACK 789 | JACKIE 790 | JACKLYN 791 | JACKSON 792 | JACLYN 793 | JACOB 794 | JACQUELINE 795 | JACQUELYN 796 | JACQUES 797 | JADE 798 | JAIME 799 | JAKE 800 | JAMAL 801 | JAME 802 | JAMES 803 | JAMI 804 | JAMIE 805 | JAN 806 | JANA 807 | JANE 808 | JANELL 809 | JANELLE 810 | JANET 811 | JANETTE 812 
| JANICE 813 | JANIE 814 | JANINE 815 | JANIS 816 | JANNA 817 | JANNIE 818 | JARED 819 | JARROD 820 | JARVIS 821 | JASMIN 822 | JASMINE 823 | JASON 824 | JASPER 825 | JAVIER 826 | JAY 827 | JAYNE 828 | JAYSON 829 | JEAN 830 | JEANETTE 831 | JEANIE 832 | JEANINE 833 | JEANNE 834 | JEANNETTE 835 | JEANNIE 836 | JEANNINE 837 | JEFF 838 | JEFFERSON 839 | JEFFERY 840 | JEFFREY 841 | JEFFRY 842 | JENIFER 843 | JENNA 844 | JENNIE 845 | JENNIFER 846 | JENNY 847 | JERALD 848 | JEREMIAH 849 | JEREMY 850 | JERI 851 | JERMAINE 852 | JEROME 853 | JERRI 854 | JERRY 855 | JESS 856 | JESSE 857 | JESSICA 858 | JESSIE 859 | JESUS 860 | JEWEL 861 | JEWELL 862 | JILL 863 | JILLIAN 864 | JIM 865 | JIMMIE 866 | JIMMY 867 | JO 868 | JOAN 869 | JOANN 870 | JOANNA 871 | JOANNE 872 | JOAQUIN 873 | JOCELYN 874 | JODI 875 | JODIE 876 | JODY 877 | JOE 878 | JOEL 879 | JOESPH 880 | JOEY 881 | JOHANNA 882 | JOHN 883 | JOHNATHAN 884 | JOHNATHON 885 | JOHNNIE 886 | JOHNNY 887 | JOLENE 888 | JON 889 | JONATHAN 890 | JONATHON 891 | JONI 892 | JORDAN 893 | JORGE 894 | JOSE 895 | JOSEFA 896 | JOSEFINA 897 | JOSEPH 898 | JOSEPHINE 899 | JOSH 900 | JOSHUA 901 | JOSIE 902 | JOSUE 903 | JOY 904 | JOYCE 905 | JUAN 906 | JUANA 907 | JUANITA 908 | JUDI 909 | JUDITH 910 | JUDY 911 | JULIA 912 | JULIAN 913 | JULIANA 914 | JULIANNE 915 | JULIE 916 | JULIET 917 | JULIETTE 918 | JULIO 919 | JULIUS 920 | JUNE 921 | JUNIOR 922 | JUSTIN 923 | JUSTINA 924 | JUSTINE 925 | KAITLIN 926 | KAITLYN 927 | KARA 928 | KAREN 929 | KARI 930 | KARIN 931 | KARINA 932 | KARL 933 | KARLA 934 | KARYN 935 | KASEY 936 | KATE 937 | KATELYN 938 | KATHARINE 939 | KATHERINE 940 | KATHERYN 941 | KATHI 942 | KATHIE 943 | KATHLEEN 944 | KATHRINE 945 | KATHRYN 946 | KATHY 947 | KATIE 948 | KATINA 949 | KATRINA 950 | KATY 951 | KAY 952 | KAYE 953 | KAYLA 954 | KEISHA 955 | KEITH 956 | KELLEY 957 | KELLI 958 | KELLIE 959 | KELLY 960 | KELSEY 961 | KELVIN 962 | KEN 963 | KENDALL 964 | KENDRA 965 | KENDRICK 966 | KENNETH 967 | KENNY 968 | KENT 969 | KENYA 970 | KERI 971 | KERMIT 972 | KERRI 973 | KERRY 974 | KEVIN 975 | KIM 976 | KIMBERLEE 977 | KIMBERLEY 978 | KIMBERLY 979 | KIRBY 980 | KIRK 981 | KIRSTEN 982 | KITTY 983 | KRIS 984 | KRISTA 985 | KRISTEN 986 | KRISTI 987 | KRISTIE 988 | KRISTIN 989 | KRISTINA 990 | KRISTINE 991 | KRISTOPHER 992 | KRISTY 993 | KRYSTAL 994 | KURT 995 | KURTIS 996 | KYLE 997 | LACEY 998 | LACY 999 | LADONNA 1000 | LAKEISHA 1001 | LAKESHA 1002 | LAKISHA 1003 | LAMAR 1004 | LAMONT 1005 | LANA 1006 | LANCE 1007 | LANDON 1008 | LANE 1009 | LARA 1010 | LARRY 1011 | LASHONDA 1012 | LATANYA 1013 | LATASHA 1014 | LATISHA 1015 | LATONYA 1016 | LATOYA 1017 | LAURA 1018 | LAUREL 1019 | LAUREN 1020 | LAURENCE 1021 | LAURI 1022 | LAURIE 1023 | LAVERNE 1024 | LAVONNE 1025 | LAWANDA 1026 | LAWRENCE 1027 | LEA 1028 | LEAH 1029 | LEANN 1030 | LEANNA 1031 | LEANNE 1032 | LEE 1033 | LEEANN 1034 | LEIGH 1035 | LEILA 1036 | LELA 1037 | LELAND 1038 | LELIA 1039 | LENA 1040 | LENORA 1041 | LENORE 1042 | LEO 1043 | LEOLA 1044 | LEON 1045 | LEONA 1046 | LEONARD 1047 | LEONARDO 1048 | LEONEL 1049 | LEONOR 1050 | LEROY 1051 | LESA 1052 | LESLEY 1053 | LESLIE 1054 | LESSIE 1055 | LESTER 1056 | LETA 1057 | LETHA 1058 | LETICIA 1059 | LETITIA 1060 | LEVI 1061 | LEWIS 1062 | LIBBY 1063 | LIDIA 1064 | LILA 1065 | LILIA 1066 | LILIAN 1067 | LILIANA 1068 | LILLIAN 1069 | LILLIE 1070 | LILLY 1071 | LILY 1072 | LINA 1073 | LINCOLN 1074 | LINDA 1075 | LINDSAY 1076 | LINDSEY 1077 | LINWOOD 1078 | LIONEL 1079 | LISA 1080 | LIZ 1081 | LIZA 1082 | LIZZIE 1083 | LLOYD 1084 | LOGAN 
1085 | LOIS 1086 | LOLA 1087 | LOLITA 1088 | LONNIE 1089 | LORA 1090 | LORAINE 1091 | LOREN 1092 | LORENA 1093 | LORENE 1094 | LORENZO 1095 | LORETTA 1096 | LORI 1097 | LORIE 1098 | LORNA 1099 | LORRAINE 1100 | LORRIE 1101 | LOTTIE 1102 | LOU 1103 | LOUELLA 1104 | LOUIE 1105 | LOUIS 1106 | LOUISA 1107 | LOUISE 1108 | LOURDES 1109 | LOWELL 1110 | LOYD 1111 | LUANN 1112 | LUCAS 1113 | LUCIA 1114 | LUCILE 1115 | LUCILLE 1116 | LUCINDA 1117 | LUCY 1118 | LUELLA 1119 | LUIS 1120 | LUISA 1121 | LUKE 1122 | LULA 1123 | LUPE 1124 | LUTHER 1125 | LUZ 1126 | LYDIA 1127 | LYLE 1128 | LYNDA 1129 | LYNETTE 1130 | LYNN 1131 | LYNNE 1132 | LYNNETTE 1133 | MA 1134 | MABEL 1135 | MABLE 1136 | MACK 1137 | MADELEINE 1138 | MADELINE 1139 | MADELYN 1140 | MADGE 1141 | MAE 1142 | MAGDALENA 1143 | MAGGIE 1144 | MAI 1145 | MALCOLM 1146 | MALINDA 1147 | MALLORY 1148 | MAMIE 1149 | MANDY 1150 | MANUEL 1151 | MANUELA 1152 | MARA 1153 | MARC 1154 | MARCEL 1155 | MARCELINO 1156 | MARCELLA 1157 | MARCI 1158 | MARCIA 1159 | MARCIE 1160 | MARCO 1161 | MARCOS 1162 | MARCUS 1163 | MARCY 1164 | MARGARET 1165 | MARGARITA 1166 | MARGERY 1167 | MARGIE 1168 | MARGO 1169 | MARGOT 1170 | MARGRET 1171 | MARGUERITE 1172 | MARI 1173 | MARIA 1174 | MARIAN 1175 | MARIANA 1176 | MARIANNE 1177 | MARIANO 1178 | MARIBEL 1179 | MARICELA 1180 | MARIE 1181 | MARIETTA 1182 | MARILYN 1183 | MARINA 1184 | MARIO 1185 | MARION 1186 | MARISA 1187 | MARISOL 1188 | MARISSA 1189 | MARITZA 1190 | MARJORIE 1191 | MARK 1192 | MARLA 1193 | MARLENE 1194 | MARLIN 1195 | MARLON 1196 | MARQUITA 1197 | MARSHA 1198 | MARSHALL 1199 | MARTA 1200 | MARTHA 1201 | MARTIN 1202 | MARTINA 1203 | MARTY 1204 | MARVA 1205 | MARVIN 1206 | MARY 1207 | MARYANN 1208 | MARYANNE 1209 | MARYELLEN 1210 | MARYLOU 1211 | MASON 1212 | MATHEW 1213 | MATILDA 1214 | MATT 1215 | MATTHEW 1216 | MATTIE 1217 | MAUDE 1218 | MAURA 1219 | MAUREEN 1220 | MAURICE 1221 | MAURICIO 1222 | MAVIS 1223 | MAX 1224 | MAXINE 1225 | MAXWELL 1226 | MAY 1227 | MAYNARD 1228 | MAYRA 1229 | MEAGAN 1230 | MEGAN 1231 | MEGHAN 1232 | MELANIE 1233 | MELBA 1234 | MELINDA 1235 | MELISA 1236 | MELISSA 1237 | MELLISA 1238 | MELODY 1239 | MELVA 1240 | MELVIN 1241 | MERCEDES 1242 | MEREDITH 1243 | MERLE 1244 | MERLIN 1245 | MERRILL 1246 | MIA 1247 | MICAH 1248 | MICHAEL 1249 | MICHAELA 1250 | MICHEAL 1251 | MICHEL 1252 | MICHELE 1253 | MICHELL 1254 | MICHELLE 1255 | MICKEY 1256 | MIGUEL 1257 | MIKE 1258 | MILAGROS 1259 | MILDRED 1260 | MILES 1261 | MILLARD 1262 | MILLICENT 1263 | MILLIE 1264 | MILTON 1265 | MINA 1266 | MINDY 1267 | MINERVA 1268 | MINNIE 1269 | MIRANDA 1270 | MIRIAM 1271 | MISTY 1272 | MITCHELL 1273 | MITZI 1274 | MOHAMMAD 1275 | MOISES 1276 | MOLLIE 1277 | MOLLY 1278 | MONA 1279 | MONICA 1280 | MONIKA 1281 | MONIQUE 1282 | MONROE 1283 | MONTE 1284 | MONTY 1285 | MORGAN 1286 | MORRIS 1287 | MOSES 1288 | MURIEL 1289 | MURRAY 1290 | MYRA 1291 | MYRNA 1292 | MYRON 1293 | MYRTLE 1294 | NADIA 1295 | NADINE 1296 | NAN 1297 | NANCY 1298 | NANETTE 1299 | NANNIE 1300 | NAOMI 1301 | NATALIA 1302 | NATALIE 1303 | NATASHA 1304 | NATHAN 1305 | NATHANIEL 1306 | NEAL 1307 | NED 1308 | NEIL 1309 | NELDA 1310 | NELL 1311 | NELLIE 1312 | NELLY 1313 | NELSON 1314 | NESTOR 1315 | NETTIE 1316 | NEVA 1317 | NICHOLAS 1318 | NICHOLE 1319 | NICK 1320 | NICKOLAS 1321 | NICOLAS 1322 | NICOLE 1323 | NIKKI 1324 | NINA 1325 | NITA 1326 | NOAH 1327 | NOE 1328 | NOEL 1329 | NOELLE 1330 | NOEMI 1331 | NOLA 1332 | NOLAN 1333 | NONA 1334 | NORA 1335 | NORBERT 1336 | NOREEN 1337 | NORMA 1338 | NORMAN 1339 | NORRIS 1340 | NUMBERS 1341 | 
OCTAVIA 1342 | OCTAVIO 1343 | ODELL 1344 | ODESSA 1345 | OFELIA 1346 | OLA 1347 | OLGA 1348 | OLIVE 1349 | OLIVER 1350 | OLIVIA 1351 | OLLIE 1352 | OMAR 1353 | OPAL 1354 | OPHELIA 1355 | ORA 1356 | ORLANDO 1357 | ORVILLE 1358 | OSCAR 1359 | OTIS 1360 | OTTO 1361 | OWEN 1362 | PABLO 1363 | PAIGE 1364 | PAM 1365 | PAMALA 1366 | PAMELA 1367 | PANSY 1368 | PASQUALE 1369 | PAT 1370 | PATRICA 1371 | PATRICE 1372 | PATRICIA 1373 | PATRICK 1374 | PATSY 1375 | PATTI 1376 | PATTY 1377 | PAUL 1378 | PAULA 1379 | PAULETTE 1380 | PAULINE 1381 | PEARL 1382 | PEARLIE 1383 | PEDRO 1384 | PEGGY 1385 | PENELOPE 1386 | PENNY 1387 | PERCY 1388 | PERRY 1389 | PETE 1390 | PETER 1391 | PETRA 1392 | PHIL 1393 | PHILIP 1394 | PHILLIP 1395 | PHOEBE 1396 | PHYLLIS 1397 | PIERRE 1398 | POLLY 1399 | PRESTON 1400 | PRISCILLA 1401 | QUEEN 1402 | QUENTIN 1403 | QUINCY 1404 | QUINTON 1405 | RACHAEL 1406 | RACHEL 1407 | RACHELLE 1408 | RAE 1409 | RAFAEL 1410 | RALPH 1411 | RAMIRO 1412 | RAMON 1413 | RAMONA 1414 | RANDAL 1415 | RANDALL 1416 | RANDI 1417 | RANDOLPH 1418 | RANDY 1419 | RAPHAEL 1420 | RAQUEL 1421 | RAUL 1422 | RAY 1423 | RAYMOND 1424 | RAYMUNDO 1425 | REBA 1426 | REBECCA 1427 | REBEKAH 1428 | REED 1429 | REGGIE 1430 | REGINA 1431 | REGINALD 1432 | RENA 1433 | RENAE 1434 | RENE 1435 | RENEE 1436 | REUBEN 1437 | REVA 1438 | REX 1439 | REYNA 1440 | REYNALDO 1441 | RHEA 1442 | RHODA 1443 | RHONDA 1444 | RICARDO 1445 | RICHARD 1446 | RICK 1447 | RICKEY 1448 | RICKIE 1449 | RICKY 1450 | RIGOBERTO 1451 | RILEY 1452 | RITA 1453 | ROB 1454 | ROBBIE 1455 | ROBBY 1456 | ROBERT 1457 | ROBERTA 1458 | ROBERTO 1459 | ROBIN 1460 | ROBYN 1461 | ROCCO 1462 | ROCHELLE 1463 | ROCIO 1464 | ROCKY 1465 | ROD 1466 | RODERICK 1467 | RODGER 1468 | RODNEY 1469 | RODOLFO 1470 | RODRIGO 1471 | ROGELIO 1472 | ROGER 1473 | ROLAND 1474 | ROLANDO 1475 | ROMAN 1476 | ROMEO 1477 | RON 1478 | RONALD 1479 | RONDA 1480 | RONNIE 1481 | ROOSEVELT 1482 | RORY 1483 | ROSA 1484 | ROSALIA 1485 | ROSALIE 1486 | ROSALIND 1487 | ROSALINDA 1488 | ROSALYN 1489 | ROSANNA 1490 | ROSANNE 1491 | ROSARIO 1492 | ROSCOE 1493 | ROSE 1494 | ROSEANN 1495 | ROSELLA 1496 | ROSEMARIE 1497 | ROSEMARY 1498 | ROSETTA 1499 | ROSIE 1500 | ROSLYN 1501 | ROSS 1502 | ROWENA 1503 | ROXANNE 1504 | ROXIE 1505 | ROY 1506 | ROYCE 1507 | RUBEN 1508 | RUBY 1509 | RUDOLPH 1510 | RUDY 1511 | RUFUS 1512 | RUSSEL 1513 | RUSSELL 1514 | RUSTY 1515 | RUTH 1516 | RUTHIE 1517 | RYAN 1518 | SABRINA 1519 | SADIE 1520 | SALLIE 1521 | SALLY 1522 | SALVADOR 1523 | SALVATORE 1524 | SAM 1525 | SAMANTHA 1526 | SAMMIE 1527 | SAMMY 1528 | SAMUEL 1529 | SANDRA 1530 | SANDY 1531 | SANFORD 1532 | SANTIAGO 1533 | SANTOS 1534 | SARA 1535 | SARAH 1536 | SASHA 1537 | SAUL 1538 | SAUNDRA 1539 | SAVANNAH 1540 | SCOT 1541 | SCOTT 1542 | SCOTTY 1543 | SEAN 1544 | SEBASTIAN 1545 | SELENA 1546 | SELINA 1547 | SELMA 1548 | SERENA 1549 | SERGIO 1550 | SETH 1551 | SHANA 1552 | SHANE 1553 | SHANNA 1554 | SHANNON 1555 | SHARI 1556 | SHARLENE 1557 | SHARON 1558 | SHARRON 1559 | SHAUN 1560 | SHAUNA 1561 | SHAWN 1562 | SHAWNA 1563 | SHEENA 1564 | SHEILA 1565 | SHELBY 1566 | SHELDON 1567 | SHELIA 1568 | SHELLEY 1569 | SHELLY 1570 | SHELTON 1571 | SHEREE 1572 | SHERI 1573 | SHERMAN 1574 | SHERRI 1575 | SHERRIE 1576 | SHERRY 1577 | SHERYL 1578 | SHIRLEY 1579 | SIDNEY 1580 | SIERRA 1581 | SILAS 1582 | SILVIA 1583 | SIMON 1584 | SIMONE 1585 | SOCORRO 1586 | SOFIA 1587 | SOLOMON 1588 | SON 1589 | SONDRA 1590 | SONIA 1591 | SONJA 1592 | SONNY 1593 | SONYA 1594 | SOPHIA 1595 | SOPHIE 1596 | SPENCER 1597 | STACEY 1598 | STACI 1599 | 
STACIE 1600 | STACY 1601 | STAN 1602 | STANLEY 1603 | STEFAN 1604 | STEFANIE 1605 | STELLA 1606 | STEPHAN 1607 | STEPHANIE 1608 | STEPHEN 1609 | STERLING 1610 | STEVE 1611 | STEVEN 1612 | STEWART 1613 | STUART 1614 | SUE 1615 | SUMMER 1616 | SUSAN 1617 | SUSANA 1618 | SUSANNA 1619 | SUSANNE 1620 | SUSIE 1621 | SUZANNE 1622 | SUZETTE 1623 | SYBIL 1624 | SYDNEY 1625 | SYLVESTER 1626 | SYLVIA 1627 | TABATHA 1628 | TABITHA 1629 | TAMARA 1630 | TAMEKA 1631 | TAMERA 1632 | TAMI 1633 | TAMIKA 1634 | TAMMI 1635 | TAMMIE 1636 | TAMMY 1637 | TAMRA 1638 | TANIA 1639 | TANISHA 1640 | TANYA 1641 | TARA 1642 | TASHA 1643 | TAYLOR 1644 | TED 1645 | TEDDY 1646 | TERENCE 1647 | TERESA 1648 | TERI 1649 | TERRA 1650 | TERRANCE 1651 | TERRELL 1652 | TERRENCE 1653 | TERRI 1654 | TERRIE 1655 | TERRY 1656 | TESSA 1657 | THADDEUS 1658 | THELMA 1659 | THEODORE 1660 | THERESA 1661 | THERESE 1662 | THERON 1663 | THOMAS 1664 | THURMAN 1665 | TIA 1666 | TIFFANY 1667 | TIM 1668 | TIMMY 1669 | TIMOTHY 1670 | TINA 1671 | TISHA 1672 | TOBY 1673 | TODD 1674 | TOM 1675 | TOMAS 1676 | TOMMIE 1677 | TOMMY 1678 | TONI 1679 | TONIA 1680 | TONY 1681 | TONYA 1682 | TORI 1683 | TRACEY 1684 | TRACI 1685 | TRACIE 1686 | TRACY 1687 | TRAVIS 1688 | TRENT 1689 | TRENTON 1690 | TREVOR 1691 | TRICIA 1692 | TRINA 1693 | TRISHA 1694 | TRISTAN 1695 | TROY 1696 | TRUDY 1697 | TRUMAN 1698 | TWILA 1699 | TY 1700 | TYLER 1701 | TYRONE 1702 | TYSON 1703 | ULYSSES 1704 | URSULA 1705 | VALARIE 1706 | VALERIA 1707 | VALERIE 1708 | VAN 1709 | VANCE 1710 | VANESSA 1711 | VAUGHN 1712 | VELMA 1713 | VERA 1714 | VERN 1715 | VERNA 1716 | VERNON 1717 | VERONICA 1718 | VICENTE 1719 | VICKI 1720 | VICKIE 1721 | VICKY 1722 | VICTOR 1723 | VICTORIA 1724 | VILMA 1725 | VINCE 1726 | VINCENT 1727 | VIOLA 1728 | VIOLET 1729 | VIRGIE 1730 | VIRGIL 1731 | VIRGINIA 1732 | VITO 1733 | VIVIAN 1734 | VONDA 1735 | WADE 1736 | WALLACE 1737 | WALTER 1738 | WANDA 1739 | WARD 1740 | WARREN 1741 | WAYNE 1742 | WELDON 1743 | WENDELL 1744 | WENDI 1745 | WENDY 1746 | WESLEY 1747 | WHITNEY 1748 | WILBERT 1749 | WILBUR 1750 | WILDA 1751 | WILEY 1752 | WILFORD 1753 | WILFRED 1754 | WILFREDO 1755 | WILL 1756 | WILLA 1757 | WILLARD 1758 | WILLIAM 1759 | WILLIAMS 1760 | WILLIE 1761 | WILLIS 1762 | WILMA 1763 | WILMER 1764 | WILSON 1765 | WINFRED 1766 | WINIFRED 1767 | WINNIE 1768 | WINSTON 1769 | WM 1770 | WOODROW 1771 | XAVIER 1772 | YESENIA 1773 | YOLANDA 1774 | YOUNG 1775 | YVETTE 1776 | YVONNE 1777 | ZACHARY 1778 | ZACHERY 1779 | ZELDA 1780 | ZELMA 1781 | -------------------------------------------------------------------------------- /tests/integration/test_sample_data.py: -------------------------------------------------------------------------------- 1 | """ 2 | Integration tests using real sample data files. 3 | 4 | This module contains integration tests that use the actual sample data files 5 | from the sample_data directory to test DPAT functionality with real-world data. 
6 | """ 7 | 8 | import unittest 9 | import tempfile 10 | from pathlib import Path 11 | from unittest.mock import patch, MagicMock 12 | import sqlite3 13 | import os 14 | 15 | # Import the classes we're testing 16 | import sys 17 | sys.path.insert(0, str(Path(__file__).parent.parent.parent)) 18 | 19 | from dpat import ( 20 | Config, NTDSProcessor, HashProcessor, DataSanitizer, 21 | HTMLReportBuilder, DatabaseManager, GroupManager, CrackedPasswordProcessor, 22 | parse_arguments, main 23 | ) 24 | from tests import ( 25 | TestConfig, TestDataGenerator, DatabaseTestHelper, DPATTestCase, 26 | SAMPLE_NTDS_DATA, SAMPLE_CRACKED_DATA, SAMPLE_GROUP_DATA 27 | ) 28 | 29 | 30 | class TestSampleDataIntegration(DPATTestCase): 31 | """Integration tests using real sample data files.""" 32 | 33 | def setUp(self): 34 | """Set up test environment with sample data paths.""" 35 | super().setUp() 36 | 37 | # Get the project root directory (DPAT directory) 38 | self.project_root = Path(__file__).parent.parent.parent 39 | self.sample_data_dir = self.project_root / "sample_data" 40 | 41 | # Verify sample data files exist 42 | self.ntds_file = self.sample_data_dir / "customer.ntds" 43 | self.cracked_file = self.sample_data_dir / "oclHashcat.pot" 44 | self.domain_admins_file = self.sample_data_dir / "Domain Admins.txt" 45 | self.enterprise_admins_file = self.sample_data_dir / "Enterprise Admins.txt" 46 | self.powerview_file = self.sample_data_dir / "Enterprise Admins PowerView Output.txt" 47 | 48 | # Verify files exist 49 | self.assertTrue(self.ntds_file.exists(), f"NTDS file not found: {self.ntds_file}") 50 | self.assertTrue(self.cracked_file.exists(), f"Cracked file not found: {self.cracked_file}") 51 | self.assertTrue(self.domain_admins_file.exists(), f"Domain Admins file not found: {self.domain_admins_file}") 52 | self.assertTrue(self.enterprise_admins_file.exists(), f"Enterprise Admins file not found: {self.enterprise_admins_file}") 53 | self.assertTrue(self.powerview_file.exists(), f"PowerView file not found: {self.powerview_file}") 54 | 55 | def test_sample_data_ntds_processing(self): 56 | """Test processing the real customer.ntds file.""" 57 | config = Config( 58 | ntds_file=str(self.ntds_file), 59 | cracked_file=str(self.cracked_file), 60 | min_password_length=8, 61 | report_directory=str(self.temp_dir) 62 | ) 63 | 64 | # Process data 65 | db_manager = DatabaseManager(config) 66 | ntds_processor = NTDSProcessor(config, db_manager) 67 | cracked_processor = CrackedPasswordProcessor(config, db_manager) 68 | 69 | db_manager.create_schema([]) 70 | ntds_processor.process_ntds_file() 71 | cracked_processor.process_cracked_file() 72 | 73 | # Verify results 74 | cursor = db_manager.cursor 75 | 76 | # Check that accounts were processed 77 | cursor.execute("SELECT COUNT(*) FROM hash_infos WHERE history_index = -1") 78 | account_count = cursor.fetchone()[0] 79 | self.assertGreater(account_count, 0, "Should have processed accounts from customer.ntds") 80 | 81 | # Check that some passwords were cracked 82 | cursor.execute("SELECT COUNT(*) FROM hash_infos WHERE password IS NOT NULL AND history_index = -1") 83 | cracked_count = cursor.fetchone()[0] 84 | self.assertGreater(cracked_count, 0, "Should have cracked passwords from oclHashcat.pot") 85 | 86 | # Check for specific known cracked passwords 87 | cursor.execute("SELECT username_full, password FROM hash_infos WHERE password = 'password' AND history_index = -1") 88 | password_results = cursor.fetchall() 89 | self.assertGreater(len(password_results), 0, "Should have found 
'password' in cracked data") 90 | 91 | db_manager.close() 92 | 93 | def test_sample_data_with_groups(self): 94 | """Test processing with Domain Admins and Enterprise Admins group files.""" 95 | # Create a groups directory with the sample group files 96 | groups_dir = self.temp_dir / "groups" 97 | groups_dir.mkdir() 98 | 99 | # Copy group files to test directory 100 | import shutil 101 | shutil.copy2(self.domain_admins_file, groups_dir / "Domain Admins.txt") 102 | shutil.copy2(self.enterprise_admins_file, groups_dir / "Enterprise Admins.txt") 103 | 104 | config = Config( 105 | ntds_file=str(self.ntds_file), 106 | cracked_file=str(self.cracked_file), 107 | min_password_length=8, 108 | groups_directory=str(groups_dir), 109 | report_directory=str(self.temp_dir) 110 | ) 111 | 112 | # Process data 113 | db_manager = DatabaseManager(config) 114 | group_manager = GroupManager(config) 115 | ntds_processor = NTDSProcessor(config, db_manager) 116 | cracked_processor = CrackedPasswordProcessor(config, db_manager) 117 | 118 | # Load groups 119 | group_manager.load_groups() 120 | group_manager.load_group_members() 121 | 122 | # Create schema with group columns 123 | group_names = [group[0] for group in group_manager.groups] 124 | db_manager.create_schema(group_names) 125 | 126 | # Process data 127 | ntds_processor.process_ntds_file() 128 | ntds_processor.update_group_membership(group_manager) 129 | cracked_processor.process_cracked_file() 130 | 131 | # Verify group processing 132 | cursor = db_manager.cursor 133 | 134 | # Check that groups were loaded 135 | self.assertGreater(len(group_manager.groups), 0, "Should have loaded group files") 136 | self.assertIn("Domain Admins", [group[0] for group in group_manager.groups]) 137 | self.assertIn("Enterprise Admins", [group[0] for group in group_manager.groups]) 138 | 139 | # Check that group members were loaded 140 | self.assertIn("Domain Admins", group_manager.group_users) 141 | self.assertIn("Enterprise Admins", group_manager.group_users) 142 | 143 | # Check that group membership was updated in database 144 | cursor.execute('SELECT COUNT(*) FROM hash_infos WHERE "Domain Admins" = 1') 145 | domain_admins_count = cursor.fetchone()[0] 146 | self.assertGreater(domain_admins_count, 0, "Should have Domain Admins members") 147 | 148 | cursor.execute('SELECT COUNT(*) FROM hash_infos WHERE "Enterprise Admins" = 1') 149 | enterprise_admins_count = cursor.fetchone()[0] 150 | self.assertGreater(enterprise_admins_count, 0, "Should have Enterprise Admins members") 151 | 152 | # Verify specific admin accounts are in the database 153 | cursor.execute('SELECT username_full FROM hash_infos WHERE "Domain Admins" = 1 LIMIT 5') 154 | domain_admin_users = [row[0] for row in cursor.fetchall()] 155 | self.assertGreater(len(domain_admin_users), 0, "Should have Domain Admin users") 156 | 157 | # Check that admin users have -admin suffix 158 | admin_users_with_suffix = [user for user in domain_admin_users if user.endswith('-admin')] 159 | self.assertGreater(len(admin_users_with_suffix), 0, "Should have users with -admin suffix") 160 | 161 | db_manager.close() 162 | 163 | def test_sample_data_powerview_format(self): 164 | """Test processing PowerView formatted group file.""" 165 | # Create a groups directory with the PowerView file 166 | groups_dir = self.temp_dir / "groups" 167 | groups_dir.mkdir() 168 | 169 | # Copy PowerView file to test directory and convert to UTF-8 170 | import shutil 171 | powerview_dest = groups_dir / "Enterprise Admins PowerView.txt" 172 | 173 | # Read the 
PowerView file with UTF-16 encoding and write as UTF-8 174 | with open(self.powerview_file, 'r', encoding='utf-16') as f: 175 | content = f.read() 176 | 177 | # Remove leading empty lines to ensure first line is not empty 178 | lines = content.split('\n') 179 | while lines and not lines[0].strip(): 180 | lines.pop(0) 181 | content = '\n'.join(lines) 182 | 183 | with open(powerview_dest, 'w', encoding='utf-8') as f: 184 | f.write(content) 185 | 186 | config = Config( 187 | ntds_file=str(self.ntds_file), 188 | cracked_file=str(self.cracked_file), 189 | min_password_length=8, 190 | groups_directory=str(groups_dir), 191 | report_directory=str(self.temp_dir), 192 | kerberoast_encoding='utf-8' # Use UTF-8 for group files 193 | ) 194 | 195 | # Process data 196 | db_manager = DatabaseManager(config) 197 | group_manager = GroupManager(config) 198 | ntds_processor = NTDSProcessor(config, db_manager) 199 | cracked_processor = CrackedPasswordProcessor(config, db_manager) 200 | 201 | # Load groups 202 | group_manager.load_groups() 203 | group_manager.load_group_members() 204 | 205 | # Create schema with group columns 206 | group_names = [group[0] for group in group_manager.groups] 207 | db_manager.create_schema(group_names) 208 | 209 | # Process data 210 | ntds_processor.process_ntds_file() 211 | ntds_processor.update_group_membership(group_manager) 212 | cracked_processor.process_cracked_file() 213 | 214 | # Verify PowerView processing 215 | cursor = db_manager.cursor 216 | 217 | # Check that PowerView group was loaded 218 | powerview_groups = [group[0] for group in group_manager.groups if "PowerView" in group[0]] 219 | self.assertGreater(len(powerview_groups), 0, "Should have loaded PowerView group") 220 | 221 | # Check that PowerView group has members 222 | powerview_group_name = powerview_groups[0] 223 | self.assertIn(powerview_group_name, group_manager.group_users) 224 | 225 | # Check that PowerView group membership was updated in database 226 | cursor.execute(f'SELECT COUNT(*) FROM hash_infos WHERE "{powerview_group_name}" = 1') 227 | powerview_count = cursor.fetchone()[0] 228 | self.assertGreater(powerview_count, 0, "Should have PowerView group members") 229 | 230 | # Verify specific PowerView members are in the database 231 | cursor.execute(f'SELECT username_full FROM hash_infos WHERE "{powerview_group_name}" = 1 LIMIT 5') 232 | powerview_users = [row[0] for row in cursor.fetchall()] 233 | self.assertGreater(len(powerview_users), 0, "Should have PowerView users") 234 | 235 | db_manager.close() 236 | 237 | def test_sample_data_report_generation(self): 238 | """Test HTML report generation with sample data.""" 239 | config = Config( 240 | ntds_file=str(self.ntds_file), 241 | cracked_file=str(self.cracked_file), 242 | min_password_length=8, 243 | report_directory=str(self.temp_dir) 244 | ) 245 | 246 | # Process data 247 | db_manager = DatabaseManager(config) 248 | ntds_processor = NTDSProcessor(config, db_manager) 249 | cracked_processor = CrackedPasswordProcessor(config, db_manager) 250 | 251 | db_manager.create_schema([]) 252 | ntds_processor.process_ntds_file() 253 | cracked_processor.process_cracked_file() 254 | 255 | # Generate reports 256 | sanitizer = DataSanitizer() 257 | 258 | # Generate all hashes report 259 | cursor = db_manager.cursor 260 | cursor.execute(''' 261 | SELECT username_full, password, LENGTH(password) as plen, nt_hash, only_lm_cracked 262 | FROM hash_infos 263 | WHERE history_index = -1 264 | ORDER BY plen DESC, password 265 | ''') 266 | 267 | rows = cursor.fetchall() 268 | 
sanitized_rows = [sanitizer.sanitize_table_row(row, [1], [3], config.sanitize_output) 269 | for row in rows] 270 | 271 | report_builder = HTMLReportBuilder(config.report_directory) 272 | report_builder.add_table(sanitized_rows, 273 | ["Username", "Password", "Password Length", "NT Hash", "Only LM Cracked"]) 274 | report_builder.write_report("sample_data_report.html") 275 | 276 | # Verify report was created 277 | report_path = Path(config.report_directory) / "sample_data_report.html" 278 | self.assert_file_exists(report_path) 279 | self.assert_file_contains(report_path, "= 0") 581 | history_entries = cursor.fetchone()[0] 582 | self.assertGreater(history_entries, 0, "No password history entries found") 583 | 584 | # Generate reports 585 | sanitizer = DataSanitizer() 586 | 587 | # Generate all hashes report 588 | cursor.execute('''SELECT username_full, password, LENGTH(password) as plen, nt_hash, 589 | CASE WHEN lm_hash != "aad3b435b51404eeaad3b435b51404ee" THEN "Yes" ELSE "No" END as lm_cracked 590 | FROM hash_infos WHERE history_index = -1 ORDER BY username_full''') 591 | rows = cursor.fetchall() 592 | 593 | sanitized_rows = [sanitizer.sanitize_table_row(row, [1], [3], config.sanitize_output) 594 | for row in rows] 595 | 596 | report_builder = HTMLReportBuilder(config.report_directory) 597 | report_builder.add_table(sanitized_rows, 598 | ["Username", "Password", "Password Length", "NT Hash", "Only LM Cracked"]) 599 | report_filename = report_builder.write_report("history_test_all_hashes.html") 600 | 601 | # Verify report was created 602 | report_path = Path(config.report_directory) / report_filename 603 | self.assert_file_exists(report_path) 604 | self.assert_file_contains(report_path, "Username") 605 | 606 | # Generate password history report 607 | cursor.execute('''SELECT username_full, password, LENGTH(password) as plen, nt_hash, history_index 608 | FROM hash_infos WHERE history_index >= 0 ORDER BY username_full, history_index''') 609 | history_rows = cursor.fetchall() 610 | 611 | if history_rows: 612 | sanitized_history_rows = [sanitizer.sanitize_table_row(row, [1], [3], config.sanitize_output) 613 | for row in history_rows] 614 | 615 | history_builder = HTMLReportBuilder(config.report_directory) 616 | history_builder.add_table(sanitized_history_rows, 617 | ["Username", "Password", "Password Length", "NT Hash", "History Index"]) 618 | history_filename = history_builder.write_report("history_test_password_history.html") 619 | 620 | # Verify history report was created 621 | history_report_path = Path(config.report_directory) / history_filename 622 | self.assert_file_exists(history_report_path) 623 | self.assert_file_contains(history_report_path, "History Index") 624 | 625 | db_manager.close() 626 | 627 | # Log test results 628 | print(f"\nHistory Data Test Results:") 629 | print(f" Total accounts processed: {total_accounts}") 630 | print(f" Cracked accounts: {cracked_accounts}") 631 | print(f" Password history entries: {history_entries}") 632 | if total_accounts > 0: 633 | print(f" Crack rate: {(cracked_accounts/total_accounts)*100:.1f}%") 634 | else: 635 | print(f" Crack rate: N/A (no accounts processed)") 636 | 637 | 638 | if __name__ == '__main__': 639 | unittest.main() 640 | --------------------------------------------------------------------------------
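The integration module above ends with the standard unittest entry point, so it can be exercised without any extra tooling. The sketch below is a hypothetical convenience runner (the file name run_integration_sketch.py and the assumption that it is executed from the project root are mine, not part of the repository); it simply discovers tests/integration/test_sample_data.py with the standard library's loader and runs it with verbose output:

# run_integration_sketch.py : hypothetical helper, not shipped with the project.
# Discovers the integration tests above and runs them with the stdlib unittest runner.
import unittest

if __name__ == "__main__":
    suite = unittest.defaultTestLoader.discover(
        start_dir="tests/integration",
        pattern="test_sample_data.py",
    )
    unittest.TextTestRunner(verbosity=2).run(suite)

Running python -m unittest tests.integration.test_sample_data -v from the project root should be equivalent, provided the sample data files checked in setUp (customer.ntds, oclHashcat.pot, and the group listing files) are present under sample_data.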