├── .dockerignore ├── .gitignore ├── README.md ├── legacy_whodat ├── README.md ├── email.txt ├── index.php ├── screenshots │ ├── ss1.png │ ├── ss2.png │ ├── ss3.png │ └── ss4.png └── update.py └── pydat ├── .dockerignore ├── .gitignore ├── Dockerfile ├── README.md ├── backend ├── .gitignore ├── MANIFEST.in ├── config_example.py ├── es_populate_config.yml.example ├── pydat │ ├── api │ │ ├── __init__.py │ │ ├── controller │ │ │ ├── __init__.py │ │ │ ├── exceptions.py │ │ │ ├── v1 │ │ │ │ ├── __init__.py │ │ │ │ └── whois.py │ │ │ ├── v2 │ │ │ │ ├── __init__.py │ │ │ │ ├── settings.py │ │ │ │ └── whois.py │ │ │ └── whois_shared.py │ │ └── static │ │ │ └── index.html │ ├── core │ │ ├── __init__.py │ │ ├── config_parser.py │ │ ├── elastic │ │ │ ├── __init__.py │ │ │ ├── exceptions.py │ │ │ ├── ingest │ │ │ │ ├── __init__.py │ │ │ │ ├── data_processors.py │ │ │ │ ├── debug_levels.py │ │ │ │ ├── event_tracker.py │ │ │ │ ├── file_reader.py │ │ │ │ ├── ingest_handler.py │ │ │ │ ├── process_wrapper.py │ │ │ │ └── stat_tracker.py │ │ │ ├── search │ │ │ │ ├── __init__.py │ │ │ │ ├── flask_handler.py │ │ │ │ └── search_handler.py │ │ │ └── templates │ │ │ │ └── es7.data.template │ │ ├── logger.py │ │ ├── parsetab.py │ │ ├── plugins.py │ │ └── query_parser.py │ ├── plugins │ │ ├── .gitkeep │ │ └── dnsdb │ │ │ └── __init__.py │ └── scripts │ │ ├── api.py │ │ └── elasticsearch_populate.py ├── setup.cfg ├── setup.py ├── tests │ ├── conftest.py │ ├── test_config.py │ ├── test_elastic_handler.py │ ├── test_elastic_ingest_data_processors.py │ ├── test_elastic_ingest_event_tracker.py │ ├── test_elastic_ingest_file_reader.py │ ├── test_elastic_ingest_handler.py │ ├── test_elastic_ingest_stat_tracker.py │ ├── test_elastic_populator.py │ ├── test_factory.py │ ├── test_passive_plugin.py │ ├── test_plugin.py │ ├── test_settings.py │ ├── test_whois_v1.py │ └── test_whois_v2.py └── tox.ini ├── entry.sh └── frontend ├── .gitignore ├── README.md ├── package-lock.json ├── package.json ├── public ├── favicon.ico ├── index.html ├── logo192.png ├── logo512.png ├── manifest.json └── robots.txt └── src ├── active_resolution.jsx ├── components ├── helpers │ ├── data_exporters.jsx │ ├── dropdown_cell.jsx │ ├── fetchers.jsx │ ├── loaders.jsx │ ├── preferences.jsx │ └── search_tools.jsx ├── layout │ ├── dashboard.jsx │ ├── dialogs.jsx │ ├── index.jsx │ ├── notfound.jsx │ └── themes.jsx ├── plugins │ └── index.jsx └── whois │ ├── expandable.jsx │ ├── help.jsx │ ├── index.jsx │ ├── stats.jsx │ ├── status.jsx │ ├── table_cells.jsx │ ├── table_pagination.jsx │ └── whois_table.jsx ├── index.css ├── index.js ├── plugins ├── dnsdb │ ├── dnsdb.jsx │ ├── index.jsx │ ├── table_cells.jsx │ ├── table_pagination.jsx │ └── web_handler.jsx └── index.jsx ├── pydat.jsx ├── serviceWorker.js ├── settings.js └── setupTests.js /.dockerignore: -------------------------------------------------------------------------------- 1 | .git/ 2 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | pydat/pydat/custom_settings.py 2 | *.pyc 3 | *.swp 4 | *.vscode 5 | 6 | .DS_Store 7 | .env.local 8 | .env.development.local 9 | .env.test.local 10 | .env.production.local 11 | -------------------------------------------------------------------------------- /legacy_whodat/README.md: -------------------------------------------------------------------------------- 1 | WHODAT (originally by Chris Clark) is a front end for whoisxmlapi data (or any 
whois data living in MongoDB inserted in that CSV format). It integrates WHOIS data, current IP resolutions, and PDNS. In addition to providing an interactive, pivotable web front end for analysts to perform research, it also has an API that can output JSON, CSV, or a list of suspicious domains. Finally, it pulls updates daily, checks them against a list of known malicious registrants, and emails an alert to a specified address containing the registrant, domain, and current IP. 2 | 3 | The hardware required to power this with 140,000,000 domains is non-trivial: even with only four indexed fields, it takes 400GB of DB space for all of the primary TLDs. 4 | 5 | Notification emails appear as follows (optionally PGP encrypted): 6 | 7 | ``` 8 | WhoDat Known Bad Registrant Alert 9 | ------------2013-08-03----------- 10 | 11 | Registrant Email Current IP Domain Name 12 | blahblah@gmail.com 27.98.198.192 777eddlhdb.com 13 | blahblah@gmail.com 127.0.0.1 txbdddbw.net 14 | ``` 15 | 16 | Installation steps: 17 | 18 | - Install 19 | -- MongoDB 20 | -- PHP 21 | -- Mongo PHP Drivers 22 | -- pymongo 23 | 24 | - Download the latest trimmed (smallest possible) whoisxmlapi quarterly DB dump 25 | 26 | - Extract the CSV files (they will be about 100GB) and do something like this: 27 | ``` 28 | for file in */*.csv; do echo $file && mongoimport --db whois --collection whois --file $file --type csv --headerline --upsert --upsertFields domainName; done 29 | ``` 30 | 31 | - Fill in your ISC DNSDB Key in keys.php 32 | - Fill in your PassiveTotal Key in keys.php 33 | 34 | - Index on domainName, contactEmail, registrant_name, and registrant_telephone 35 | ``` 36 | db.whois.ensureIndex( {domainName: 1}) 37 | db.whois.ensureIndex( {contactEmail: 1}) 38 | db.whois.ensureIndex( {registrant_name: 1}) 39 | db.whois.ensureIndex( {registrant_telephone: 1}) 40 | ``` 41 | 42 | - Fill in the relevant environment and alerting data in the update.py script, as well as your user/pass to download daily updates 43 | 44 | - Enter the known bad registrants you wish to track in a file and specify its location in the update.py variable registrantpath 45 | 46 | - Create a cronjob to run the update script at 0430 or so EST: "30 4 * * * /usr/bin/python /YOURUPDATEWORKINGDIR/update.py >/dev/null 2>&1" 47 | 48 | - Place index.php in the webroot of your choice 49 | 50 | Usage from the API... again, not a complete guide ;) 51 | ``` 52 | You can query by any indexed field, returning either a domain list, CSV, or JSON (these values are apparent from using the web front end; just play with it and you can easily construct the query, as they are all GET values) 53 | 54 | Hidden values are "&nodl=yes" (print CSV to standard out instead of downloading it) and "&limit=XXXXX" (defines the number of results to return; the default is set in the PHP page at 2000). 55 | ``` 56 | Screenshots: 57 | 58 | ![Image1](https://raw.github.com/MITRECND/WhoDat/master/legacy_whodat/screenshots/ss1.png) 59 | ![Image2](https://raw.github.com/MITRECND/WhoDat/master/legacy_whodat/screenshots/ss2.png) 60 | ![Image3](https://raw.github.com/MITRECND/WhoDat/master/legacy_whodat/screenshots/ss3.png) 61 | ![Image4](https://raw.github.com/MITRECND/WhoDat/master/legacy_whodat/screenshots/ss4.png) 62 | 63 | License: 64 | 65 | WhoDat is copyrighted by Chris Clark 2013. Contact him at Chris@xenosys.org. 66 | 67 | WhoDat is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version.
68 | 69 | WhoDat is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. 70 | 71 | You should have received a copy of the GNU General Public License along with WhoDat. If not, see http://www.gnu.org/licenses/. 72 | -------------------------------------------------------------------------------- /legacy_whodat/email.txt: -------------------------------------------------------------------------------- 1 | test@test.com 2 | anotherbadguy@test.com -------------------------------------------------------------------------------- /legacy_whodat/screenshots/ss1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MITRECND/WhoDat/9c2a3265c6437f2f82f23abc1deaaaa86b947d38/legacy_whodat/screenshots/ss1.png -------------------------------------------------------------------------------- /legacy_whodat/screenshots/ss2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MITRECND/WhoDat/9c2a3265c6437f2f82f23abc1deaaaa86b947d38/legacy_whodat/screenshots/ss2.png -------------------------------------------------------------------------------- /legacy_whodat/screenshots/ss3.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MITRECND/WhoDat/9c2a3265c6437f2f82f23abc1deaaaa86b947d38/legacy_whodat/screenshots/ss3.png -------------------------------------------------------------------------------- /legacy_whodat/screenshots/ss4.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MITRECND/WhoDat/9c2a3265c6437f2f82f23abc1deaaaa86b947d38/legacy_whodat/screenshots/ss4.png -------------------------------------------------------------------------------- /legacy_whodat/update.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python 2 | #Daily Update Script for WhoDat 3 | #Downloads, unzips, cuts, and upserts daily CSVs and alerts on known bad registrants (or any other data you want to check for inside the new registrations) 4 | #By Chris@xenosec.org 5 | 6 | import sys, urllib, urllib2, os, socket 7 | from datetime import datetime, timedelta 8 | from pymongo import MongoClient 9 | 10 | 11 | tlds = ["com","org","net","mobi","us","coop","pro","info","biz"] 12 | updatepath = "/path/to/useforupdateprocessing/" 13 | registrantpath = "/path/to/badregistrants.txt" 14 | username = 'whoisxmlapiusername' 15 | password = 'whoisxmlapipassword' 16 | notifyemail = 'example@dot.com' 17 | sendingemail = 'alerts@whodatisawesome.fake' 18 | pgp = False 19 | 20 | def updatesearch(date): 21 | client = MongoClient() 22 | whoisColl = client['test']['whois'] 23 | emails = [line.strip() for line in open(registrantpath, 'r')] 24 | outfile = open( updatepath + "newdomains.txt", 'w') 25 | outfile.write(" WhoDat Known Bad Registrant Alert\n") 26 | outfile.write(" ------------"+date+"-----------\n\n") 27 | outfile.write("Registrant Email Current IP Domain Name \n") 28 | count = 0 29 | for email in emails: 30 | for domain in whoisColl.find({u"contactEmail":email, u"standardRegCreatedDate":{'$regex': date+".*"}}): 31 | try: 32 | data = socket.gethostbyname(domain[u'domainName']) 33 | ip = str(data) 34 | except Exception: 35 | ip = "No DNS Record" 36 |
outfile.write(domain[u'contactEmail'] + " "+ ip + " " + domain[u'domainName'] + "\n") 37 | count += 1 38 | outfile.close() 39 | if count > 0: 40 | if pgp == False: 41 | os.system('mail -s "WhoDat Registration Alert ( '+ date+' )" '+ notifyemail +' -- -f '+ sendingemail+ ' < ' + updatepath + 'newdomains.txt') 42 | else: 43 | os.system('gpg --trust-model always -ea -r '+ notifyemail +' -o - ' + updatepath + 'newdomains.txt | mail -s "WhoDat Registration Alert ( '+ date+' )" '+ notifyemail +' -- -f '+ sendingemail) 44 | os.remove('' + updatepath + 'newdomains.txt') 45 | 46 | def downloads(date): 47 | for tld in tlds: 48 | try: 49 | passman = urllib2.HTTPPasswordMgrWithDefaultRealm() 50 | passman.add_password(None, "http://bestwhois.org", username, password) 51 | authhandler = urllib2.HTTPBasicAuthHandler(passman) 52 | opener = urllib2.build_opener(authhandler) 53 | url = "http://bestwhois.org/domain_name_data/domain_names_whois/" + date + tld +".csv.gz" 54 | print url 55 | result = opener.open(urllib2.Request(url)) 56 | downloadfile = result.read() 57 | if len(downloadfile) > 0: 58 | fo = open(updatepath + date + tld + ".csv.gz", "w") 59 | fo.write(downloadfile) 60 | fo.close() 61 | except Exception: 62 | continue 63 | if not os.path.isfile(updatepath + date + "com" + ".csv.gz"): 64 | os.system('echo "Error downloading updates for WhoDat!" | mail -s "[!] WhoDat Update Error " '+ notifyemail +' -- -f ' + sendingemail ) 65 | 66 | 67 | def unzip(date): 68 | for tld in tlds: 69 | os.system('gunzip ' + updatepath +date+tld+'.csv.gz') 70 | 71 | def cropfile(date): 72 | for tld in tlds: 73 | os.system('cut -d "," -f 1-43 '+ updatepath +date + tld + '.csv > ' + updatepath + date + tld +'.done.csv') 74 | 75 | def insertfile(date): 76 | for tld in tlds: 77 | os.system('mongoimport --collection whois --file ' + updatepath+ date + tld +'.done.csv --type csv --headerline --upsert --upsertFields domainName') 78 | 79 | def deletefiles(date): 80 | for file in os.listdir(updatepath): 81 | if file.startswith(date): 82 | os.remove(updatepath+ file) 83 | def main(): 84 | yesterday = datetime.now()-timedelta(days=1) 85 | date = yesterday.strftime("%Y_%m_%d_") 86 | dbyesterday = datetime.now()-timedelta(days=2) 87 | searchdate = dbyesterday.strftime("%Y-%m-%d") 88 | #use if the automagic missed a day 89 | #date = "2013_07_08_" 90 | #searchdate = "2013-07-07" 91 | downloads(date) 92 | unzip(date) 93 | cropfile(date) 94 | insertfile(date) 95 | deletefiles(date) 96 | updatesearch(searchdate) 97 | if __name__ == '__main__': 98 | main() -------------------------------------------------------------------------------- /pydat/.dockerignore: -------------------------------------------------------------------------------- 1 | frontend/node_modules 2 | frontend/build 3 | backend/.tox 4 | -------------------------------------------------------------------------------- /pydat/.gitignore: -------------------------------------------------------------------------------- 1 | venv/ 2 | 3 | *.pyc 4 | __pycache__ 5 | .tox 6 | 7 | instance/ 8 | 9 | .pytest_cache/ 10 | .coverage 11 | htmlcov/ 12 | 13 | build/ 14 | *.egg-info 15 | .flaskenv 16 | .vscode 17 | 18 | # dependencies 19 | node_modules/ 20 | /.pnp 21 | .pnp.js 22 | 23 | # testing 24 | coverage/ 25 | 26 | # production 27 | /build 28 | 29 | # misc 30 | .DS_Store 31 | .env.local 32 | .env.development.local 33 | .env.test.local 34 | .env.production.local 35 | 36 | npm-debug.log* 37 | yarn-debug.log* 38 | yarn-error.log* 39 |
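As a side note on the legacy ingestion shown in update.py above: it shells out to mongoimport with --upsert --upsertFields domainName. For readers who prefer to stay in Python, a minimal pymongo sketch of that same upsert-by-domainName step might look like the following; the CSV file name is illustrative (update.py builds them as updatepath + date + tld + ".done.csv"), and the database/collection handle matches the client['test']['whois'] used by updatesearch():

```
import csv
from pymongo import MongoClient

# Same database/collection handle that update.py uses: client['test']['whois']
whois = MongoClient()["test"]["whois"]

# Illustrative file name following update.py's naming scheme
with open("2013_07_08_com.done.csv", newline="") as fh:
    for row in csv.DictReader(fh):
        # Equivalent in spirit to:
        #   mongoimport --upsert --upsertFields domainName
        whois.update_one(
            {"domainName": row["domainName"]},  # match on the upsert field
            {"$set": row},                      # replace/refresh all columns
            upsert=True,                        # insert if domain not yet seen
        )
```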
-------------------------------------------------------------------------------- /pydat/Dockerfile: -------------------------------------------------------------------------------- 1 | # Stage 1 - Build Frontend 2 | FROM node:lts AS FRONTEND 3 | WORKDIR /opt/pydat 4 | COPY frontend /opt/pydat/frontend 5 | ENV GENERATE_SOURCEMAP=false 6 | RUN \ 7 | cd frontend && \ 8 | npm install && \ 9 | npm run build:isolated 10 | 11 | 12 | # Stage 2 - Python Backend plus compiled frontend assets 13 | FROM python:3.8-alpine 14 | COPY backend /tmp/pydat/backend 15 | COPY entry.sh / 16 | 17 | RUN \ 18 | mkdir /opt/pydat && \ 19 | cd /opt/pydat && \ 20 | python3 -m venv pydat-env && \ 21 | /opt/pydat/pydat-env/bin/pip install gunicorn && \ 22 | cd /tmp/pydat && \ 23 | /opt/pydat/pydat-env/bin/pip install ./backend && \ 24 | touch /opt/pydat/config.py 25 | 26 | COPY --from=FRONTEND /opt/pydat/frontend/build /opt/pydat/ui 27 | 28 | WORKDIR /opt/pydat 29 | 30 | CMD /entry.sh -------------------------------------------------------------------------------- /pydat/backend/.gitignore: -------------------------------------------------------------------------------- 1 | ## https://github.com/github/gitignore/blob/master/Python.gitignore 2 | 3 | # Byte-compiled / optimized / DLL files 4 | __pycache__/ 5 | *.py[cod] 6 | *$py.class 7 | 8 | # C extensions 9 | *.so 10 | 11 | # Distribution / packaging 12 | .Python 13 | build/ 14 | develop-eggs/ 15 | dist/ 16 | downloads/ 17 | eggs/ 18 | .eggs/ 19 | lib/ 20 | lib64/ 21 | parts/ 22 | sdist/ 23 | var/ 24 | wheels/ 25 | share/python-wheels/ 26 | *.egg-info/ 27 | .installed.cfg 28 | *.egg 29 | MANIFEST 30 | 31 | # PyInstaller 32 | # Usually these files are written by a python script from a template 33 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 34 | *.manifest 35 | *.spec 36 | 37 | # Installer logs 38 | pip-log.txt 39 | pip-delete-this-directory.txt 40 | 41 | # Unit test / coverage reports 42 | htmlcov/ 43 | .tox/ 44 | .nox/ 45 | .coverage 46 | .coverage.* 47 | .cache 48 | nosetests.xml 49 | coverage.xml 50 | *.cover 51 | *.py,cover 52 | .hypothesis/ 53 | .pytest_cache/ 54 | cover/ 55 | 56 | # Translations 57 | *.mo 58 | *.pot 59 | 60 | # Django stuff: 61 | *.log 62 | local_settings.py 63 | db.sqlite3 64 | db.sqlite3-journal 65 | 66 | # Flask stuff: 67 | instance/ 68 | .webassets-cache 69 | 70 | # Scrapy stuff: 71 | .scrapy 72 | 73 | # Sphinx documentation 74 | docs/_build/ 75 | 76 | # PyBuilder 77 | .pybuilder/ 78 | target/ 79 | 80 | # Jupyter Notebook 81 | .ipynb_checkpoints 82 | 83 | # IPython 84 | profile_default/ 85 | ipython_config.py 86 | 87 | # pyenv 88 | # For a library or package, you might want to ignore these files since the code is 89 | # intended to run in multiple environments; otherwise, check them in: 90 | .python-version 91 | 92 | # pipenv 93 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 94 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 95 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 96 | # install all needed dependencies. 97 | #Pipfile.lock 98 | 99 | # PEP 582; used by e.g. 
github.com/David-OConnor/pyflow 100 | __pypackages__/ 101 | 102 | # Celery stuff 103 | celerybeat-schedule 104 | celerybeat.pid 105 | 106 | # SageMath parsed files 107 | *.sage.py 108 | 109 | # Environments 110 | .env 111 | .venv 112 | env/ 113 | venv/ 114 | ENV/ 115 | env.bak/ 116 | venv.bak/ 117 | 118 | # Spyder project settings 119 | .spyderproject 120 | .spyproject 121 | 122 | # Rope project settings 123 | .ropeproject 124 | 125 | # mkdocs documentation 126 | /site 127 | 128 | # mypy 129 | .mypy_cache/ 130 | .dmypy.json 131 | dmypy.json 132 | 133 | # Pyre type checker 134 | .pyre/ 135 | 136 | # pytype static type analyzer 137 | .pytype/ 138 | 139 | # Cython debug symbols 140 | cython_debug/ -------------------------------------------------------------------------------- /pydat/backend/MANIFEST.in: -------------------------------------------------------------------------------- 1 | global-exclude *.py[cod] 2 | graft pydat/plugins 3 | graft pydat/api/static 4 | graft pydat/api/templates 5 | graft pydat/core/elastic/templates 6 | -------------------------------------------------------------------------------- /pydat/backend/config_example.py: -------------------------------------------------------------------------------- 1 | DEBUG = True 2 | SSLVERIFY = True 3 | ELASTICSEARCH = { 4 | 'uri': 'localhost:9200', 5 | 'indexPrefix': 'pydat', 6 | 'user': None, 7 | 'pass': None, 8 | 'cacert': None, 9 | } 10 | 11 | PDNSSOURCES = { 12 | "dnsdb": { 13 | "APIKEY": '1234' 14 | } 15 | } 16 | 17 | PROXIES = { 18 | 'http': 'http://127.0.0.1', 19 | 'https': 'https://127.0.0.1', 20 | } 21 | 22 | SEARCHKEYS = [ 23 | ('domainName', 'Domain'), 24 | ('registrant_name', 'Registrant Name'), 25 | ('contactEmail', 'Contact Email'), 26 | ('registrant_telephone', 'Telephone') 27 | ] 28 | -------------------------------------------------------------------------------- /pydat/backend/es_populate_config.yml.example: -------------------------------------------------------------------------------- 1 | # Elastic Configuration Options 2 | es: 3 | uri: 4 | - localhost:9200 5 | user: test 6 | password: test_pass 7 | index_prefix: test_pydat 8 | disable_sniffing: true 9 | rollover_docs: 500000 10 | # ca_cert: 11 | 12 | # General ingest and processing options 13 | # extension: 'csv' 14 | # include: [] 15 | # exclude: [] 16 | ignore_field_prefixes: 17 | - zoneContact 18 | - billingContact 19 | - technicalContact 20 | 21 | # Performance Tuning Options 22 | pipelines: 4 23 | shipper_threads: 2 24 | fetcher_threads: 2 25 | bulk_fetch_size: 50 26 | bulk_ship_size: 10 27 | 28 | # Output Options 29 | # verbose: false 30 | # debug: false 31 | # debug_level: 1 32 | # stats: false 33 | -------------------------------------------------------------------------------- /pydat/backend/pydat/api/__init__.py: -------------------------------------------------------------------------------- 1 | import os 2 | import sys 3 | import logging 4 | from flask import Flask, send_from_directory 5 | from flask_caching import Cache 6 | from pydat.core.config_parser import ConfigParser, DEFAULT_CONFIG 7 | from pydat.core.elastic.search.flask_handler import FlaskElasticHandler 8 | 9 | 10 | CACHE_TIMEOUT = 300 # Flask cache timeout 11 | 12 | 13 | elasticsearch_handler = FlaskElasticHandler() 14 | flask_cache = Cache() 15 | 16 | 17 | def create_app(config=None): 18 | # Application Factory 19 | app = Flask(__name__) 20 | app.config.from_mapping(SECRET_KEY=os.urandom(16),) 21 | 22 | app.config.from_object(DEFAULT_CONFIG) 23 | 24 | if config is not None: 25 | 
app.config.from_mapping(config) 26 | 27 | config_parser = ConfigParser(app) 28 | config_parser.parse() 29 | 30 | # Setup cache configuration 31 | app.config["CACHE_TYPE"] = "SimpleCache" 32 | app.config["CACHE_DEFAULT_TIMEOUT"] = CACHE_TIMEOUT 33 | 34 | flask_cache.init_app(app) 35 | 36 | if app.config['DEBUG']: 37 | app.logger.setLevel(logging.DEBUG) 38 | else: 39 | app.logger.setLevel(logging.INFO) 40 | 41 | static_folder = app.config.get('STATICFOLDER', '') 42 | if static_folder != '': 43 | app.static_folder = static_folder 44 | 45 | # Initialize Plugins 46 | elasticsearch_handler.init_app(app) 47 | 48 | # Register Error Handler 49 | from pydat.api.controller import exceptions 50 | exceptions.register_errors(app) 51 | 52 | # Register Framework Blueprints 53 | from pydat.api.controller.v1.whois import whoisv1_bp 54 | from pydat.api.controller.v2.whois import whoisv2_bp 55 | from pydat.api.controller.v2.settings import settings_bp 56 | app.register_blueprint(settings_bp, url_prefix="/api/v2") 57 | app.register_blueprint(whoisv2_bp, url_prefix="/api/v2") 58 | 59 | # version 1 backwards compatibility 60 | app.register_blueprint(whoisv1_bp, url_prefix="/api/v1") 61 | 62 | from pydat.core.plugins import PluginManager 63 | plugin_manager = PluginManager() 64 | 65 | # Register Plugin Blueprints and JSfiles 66 | # add error handling 67 | installed_plugins = [] 68 | with app.app_context(): 69 | try: 70 | plugin_manager.gather_plugins() 71 | except ValueError as e: 72 | print(f"Unable to instantiate plugins: {str(e)}") 73 | sys.exit(1) 74 | 75 | for plugin in plugin_manager.plugins: 76 | installed_plugins.append(plugin.name) 77 | url_prefix = os.path.join(plugin.prefix, plugin.name) 78 | app.register_blueprint(plugin.blueprint, url_prefix=url_prefix) 79 | 80 | app.config['PYDAT_PLUGINS'] = installed_plugins 81 | 82 | # Remove default 'static' endpoint and mapping 83 | # which interferes with routing frontend components 84 | for rule in app.url_map.iter_rules('static'): 85 | app.url_map._rules.remove(rule) 86 | 87 | # Catch invalid backend calls 88 | @app.route("/api", defaults={"path": ""}) 89 | @app.route("/api/<path:path>") 90 | def invalid(path): 91 | raise exceptions.ClientError("Nonexistent view {}".format(path), 404) 92 | 93 | # Serve React App 94 | 95 | @app.route('/', defaults={'path': ''}) 96 | @app.route('/<path:path>') 97 | def serve(path): 98 | if path != "" and os.path.exists(app.static_folder + '/' + path): 99 | return send_from_directory(app.static_folder, path) 100 | else: 101 | return send_from_directory(app.static_folder, 'index.html') 102 | 103 | return app 104 | -------------------------------------------------------------------------------- /pydat/backend/pydat/api/controller/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MITRECND/WhoDat/9c2a3265c6437f2f82f23abc1deaaaa86b947d38/pydat/backend/pydat/api/controller/__init__.py -------------------------------------------------------------------------------- /pydat/backend/pydat/api/controller/exceptions.py: -------------------------------------------------------------------------------- 1 | from flask import jsonify, current_app 2 | 3 | 4 | class BaseError(Exception): 5 | """Exception class for signaling response errors 6 | 7 | Attributes: 8 | message: Optional customizable error message 9 | status_code: Error code 10 | payload: Optional dict payload 11 | """ 12 | 13 | def __init__(self, message, status_code=None, payload=None, nolog=False): 14 | Exception.__init__(self)
15 | self.message = message 16 | if status_code is not None: 17 | self.status_code = status_code 18 | self.payload = payload 19 | self.nolog = nolog 20 | 21 | def to_dict(self): 22 | rv = dict(self.payload or ()) 23 | rv["status"] = self.status_code 24 | rv["error"] = self.message 25 | return rv 26 | 27 | 28 | class ClientError(BaseError): 29 | """Exception class for signaling client errors. Child of BaseError 30 | 31 | Attributes: 32 | status_code: Default error code 400. 33 | """ 34 | status_code = 400 35 | 36 | 37 | class ServerError(BaseError): 38 | """Exception class for signaling server errors. Child of BaseError 39 | 40 | Attributes: 41 | status_code: Default error code 500. 42 | """ 43 | status_code = 500 44 | 45 | 46 | def handle_error(error): 47 | if isinstance(error, ServerError) and not error.nolog: 48 | current_app.logger.exception(error.message) 49 | response = jsonify(error.to_dict()) 50 | response.status_code = error.status_code 51 | return response 52 | 53 | 54 | def register_errors(app): 55 | app.register_error_handler(BaseError, handle_error) 56 | -------------------------------------------------------------------------------- /pydat/backend/pydat/api/controller/v1/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MITRECND/WhoDat/9c2a3265c6437f2f82f23abc1deaaaa86b947d38/pydat/backend/pydat/api/controller/v1/__init__.py -------------------------------------------------------------------------------- /pydat/backend/pydat/api/controller/v2/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MITRECND/WhoDat/9c2a3265c6437f2f82f23abc1deaaaa86b947d38/pydat/backend/pydat/api/controller/v2/__init__.py -------------------------------------------------------------------------------- /pydat/backend/pydat/api/controller/v2/settings.py: -------------------------------------------------------------------------------- 1 | from flask import ( 2 | current_app, 3 | Blueprint, 4 | ) 5 | 6 | 7 | settings_bp = Blueprint("settings", __name__) 8 | 9 | 10 | @settings_bp.route("/settings", methods=["GET"]) 11 | def get_settings(): 12 | 13 | settings = { 14 | 'enable_active_resolution': not current_app.config.get( 15 | "DISABLERESOLVE", False) 16 | } 17 | 18 | for plugin in current_app.config.get('PYDAT_PLUGINS', []): 19 | settings[f"enable_plugin_{plugin}"] = True 20 | 21 | return settings 22 | -------------------------------------------------------------------------------- /pydat/backend/pydat/api/controller/whois_shared.py: -------------------------------------------------------------------------------- 1 | from pydat.api.controller.exceptions import ClientError, ServerError 2 | from pydat.api import elasticsearch_handler as es_handler 3 | from pydat.core.elastic.exceptions import ( 4 | ESConnectionError, 5 | ESQueryError 6 | ) 7 | 8 | 9 | def metadata(version=None): 10 | """Shared metadata functionality between v1 and v2 11 | 12 | Args: 13 | version (int, optional): Specific metadata version. Defaults to None. 
14 | 15 | Raises: 16 | ClientError: Version is not a valid integer 17 | ClientError: Call caused an invalid query 18 | ServerError: Search failed to connect 19 | ServerError: Unexpected failure 20 | ClientError: Specific version or metadata does not exist 21 | 22 | Returns: 23 | dict: Found metadata for all or specific version 24 | """ 25 | try: 26 | if version: 27 | version = int(version) 28 | if version < 0: 29 | raise ValueError 30 | except ValueError: 31 | raise ClientError(f"Version {version} must be a valid int") 32 | 33 | try: 34 | results = es_handler.metadata(version) 35 | except ESConnectionError: 36 | raise ServerError("Unable to connect to search engine") 37 | except ESQueryError: 38 | raise ServerError("Unexpected issue when requesting search") 39 | 40 | if len(results) == 0: 41 | raise ClientError("Could not find metadata", 404) 42 | 43 | return results 44 | 45 | 46 | def diff(domainName, v1, v2): 47 | """Shared diff functionality between whoisv1 and whoisv2 48 | 49 | Args: 50 | domainName (str): Name of the domain to diff versions between 51 | v1 (int): First version of the domainName 52 | v2 (int): Second version to compare the first to 53 | 54 | Raises: 55 | ClientError: Versions are not ints 56 | ServerError: Search failed to connect 57 | ClientError: Parameters created an invalid query 58 | ServerError: Unexpected exception 59 | ClientError: v1 and/or v2 does not exist 60 | 61 | Returns: 62 | dict: Contains data fields of v1 and v2 with the value specifying if 63 | the data is the same, different, or nonexistent between versions 64 | """ 65 | try: 66 | v1 = int(v1) 67 | v2 = int(v2) 68 | except ValueError: 69 | raise ClientError("Input parameters are of the wrong type") 70 | 71 | try: 72 | v1_result = es_handler.search( 73 | "domainName", domainName, filt=None, low=v1) 74 | v2_result = es_handler.search( 75 | "domainName", domainName, filt=None, low=v2) 76 | except ValueError: 77 | raise ClientError(f"Invalid search of {domainName} and {v1} or {v2}") 78 | except ESConnectionError: 79 | raise ServerError("Unable to connect to search engine") 80 | except ESQueryError: 81 | raise ServerError("Unexpected issue when requesting search") 82 | except RuntimeError: 83 | raise ServerError("Failed to process results") 84 | 85 | if not v1_result["data"] or not v2_result["data"]: 86 | raise ClientError( 87 | f"Provided domain {domainName} and version has no data", 404 88 | ) 89 | v1_result = v1_result["data"][0] 90 | v2_result = v2_result["data"][0] 91 | 92 | blacklist = {"Version", "UpdateVersion", "domainName", "dataFirstSeen"} 93 | v1_keys = set(v1_result.keys()) 94 | v2_keys = set(v2_result.keys()) 95 | keys = (v1_keys | v2_keys) - blacklist 96 | results = {} 97 | 98 | for key in keys: 99 | if key in v1_keys and key in v2_keys: 100 | if v1_result[key] == v2_result[key]: 101 | results[key] = v1_result[key] 102 | else: 103 | results[key] = [v1_result[key], v2_result[key]] 104 | elif key in v1_keys: 105 | results[key] = [v1_result[key], ""] 106 | else: 107 | results[key] = ["", v2_result[key]] 108 | 109 | return results 110 | -------------------------------------------------------------------------------- /pydat/backend/pydat/api/static/index.html: -------------------------------------------------------------------------------- 1 | This is a Placeholder file as the PyDat backend does not have any web-presentable assets.
Please ensure the frontend component has been built if you intend to use PyDat from a browser 2 | -------------------------------------------------------------------------------- /pydat/backend/pydat/core/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MITRECND/WhoDat/9c2a3265c6437f2f82f23abc1deaaaa86b947d38/pydat/backend/pydat/core/__init__.py -------------------------------------------------------------------------------- /pydat/backend/pydat/core/config_parser.py: -------------------------------------------------------------------------------- 1 | import os 2 | import cerberus 3 | 4 | 5 | DEFAULT_CONFIG = type('config', (), { 6 | 'STATICFOLDER': '', 7 | 'DISABLERESOLVE': False, 8 | 'ELASTICSEARCH': { 9 | 'uri': 'localhost:9200', 10 | 'indexPrefix': 'pydat', 11 | 'disable_sniffing': False, 12 | }, 13 | 'DEBUG': False, 14 | 'SSLVERIFY': True, 15 | 'SEARCHKEYS': [ 16 | 'domainName', 17 | 'registrant_name', 18 | 'contactEmail', 19 | 'registrant_telephone', 20 | ], 21 | 'PROXIES': { 22 | }, 23 | 'PDNSSOURCES': { 24 | }, 25 | 'PLUGINS': { 26 | } 27 | }) 28 | 29 | BASE_SCHEMA = { 30 | 'STATICFOLDER': {'type': 'string'}, 31 | 'DISABLERESOLVE': {'type': 'boolean'}, 32 | 'DEBUG': {'type': 'boolean'}, 33 | 'SSLVERIFY': {'type': 'boolean'}, 34 | 'PROXIES': { 35 | 'type': 'dict', 36 | 'allow_unknown': False, 37 | 'schema': { 38 | 'http': { 39 | 'type': 'string', 40 | 'nullable': True 41 | }, 42 | 'https': { 43 | 'type': 'string', 44 | 'nullable': True 45 | } 46 | } 47 | }, 48 | 'ELASTICSEARCH': { 49 | 'type': 'dict', 50 | 'schema': { 51 | 'uri': {'type': 'string'}, 52 | 'indexPrefix': {'type': 'string'}, 53 | 'user': { 54 | 'type': 'string', 55 | 'nullable': True, 56 | }, 57 | 'pass': { 58 | 'type': 'string', 59 | 'nullable': True 60 | }, 61 | 'cacert': { 62 | 'type': 'string', 63 | 'nullable': True 64 | }, 65 | 'disable_sniffing': { 66 | 'type': 'boolean' 67 | } 68 | } 69 | }, 70 | 'SEARCHKEYS': { 71 | 'type': 'list', 72 | 'schema': { 73 | 'type': 'string', 74 | } 75 | }, 76 | 'PDNSSOURCES': { 77 | 'type': 'dict', 78 | 'allow_unknown': True 79 | }, 80 | 'PLUGINS': { 81 | 'type': 'dict', 82 | 'allow_unknown': True 83 | } 84 | 85 | } 86 | 87 | 88 | class ConfigParser: 89 | ENV_CONFIG_FILE = "PYDATCONFIG" 90 | 91 | def __init__(self, app): 92 | self.app = app 93 | self._config_ = dict() 94 | self.schema = BASE_SCHEMA 95 | 96 | if self.ENV_CONFIG_FILE in os.environ.keys(): 97 | self.app.config.from_envvar(self.ENV_CONFIG_FILE) 98 | 99 | self.fromEnv() 100 | 101 | def fromEnv(self): 102 | for (key, value) in os.environ.items(): 103 | if key.startswith('PYDAT_'): 104 | hierarchy = key.split('_') 105 | if len(hierarchy) <= 1 or hierarchy[-1] == '': 106 | raise ValueError(f"Incomplete env variable {key}") 107 | fields = hierarchy[1:] 108 | tlkey = fields[0] 109 | if tlkey in ['SSLVERIFY', 'DEBUG', 'DISABLERESOLVE']: 110 | if value.lower() == 'true': 111 | value = True 112 | elif value.lower() == 'false': 113 | value = False 114 | else: 115 | raise ValueError(f"Unexpected value for {tlkey}") 116 | 117 | self.app.config.from_mapping(**{tlkey: value}) 118 | elif tlkey == 'SEARCHKEYS': 119 | elements = value.split(',') 120 | self.app.config.from_mapping({tlkey: elements}) 121 | else: 122 | if len(fields[1:]) == 0: 123 | self.app.config.from_mapping({tlkey: value}) 124 | else: 125 | self.updateDictField(key, tlkey, fields[1:], value) 126 | 127 | def updateDictField(self, orig_key, tlkey, fields, value): 128 | if len(fields) == 1: 129
| if tlkey not in self.app.config.keys(): 130 | self.app.config.from_mapping({tlkey: {fields[0]: value}}) 131 | else: 132 | self.app.config[tlkey].update({fields[0]: value}) 133 | else: 134 | local_key = fields[0] 135 | local_fields = fields[1:] 136 | if tlkey not in self.app.config.keys(): 137 | self.app.config[tlkey] = dict() 138 | local_config = self.app.config[tlkey] 139 | while True: 140 | if len(local_fields) == 0: 141 | break 142 | 143 | if local_key not in local_config.keys(): 144 | local_config[local_key] = dict() 145 | 146 | local_config = local_config[local_key] 147 | local_key = local_fields.pop(0) 148 | 149 | local_config[local_key] = value 150 | 151 | def parse(self): 152 | tmp_config = dict() 153 | 154 | for (name, value) in self.app.config.items(): 155 | if name in self.schema.keys(): 156 | tmp_config[name] = value 157 | 158 | v = cerberus.Validator(self.schema) 159 | valid = v.validate(tmp_config) 160 | if not valid: 161 | raise ValueError(v._errors) 162 | 163 | nconfig = v.normalized(tmp_config) 164 | 165 | for key in ['PDNSSOURCES', 'PLUGINS']: 166 | if key not in nconfig: 167 | nconfig[key] = dict() 168 | 169 | self.app.config.from_mapping(**nconfig) 170 | -------------------------------------------------------------------------------- /pydat/backend/pydat/core/elastic/__init__.py: -------------------------------------------------------------------------------- 1 | from types import SimpleNamespace 2 | import elasticsearch 3 | 4 | 5 | class ElasticHandler: 6 | """Base Elasticsearch handler class 7 | 8 | This class abstracts interaction with Elasticsearch and enables 9 | common usage of elasticsearch without needing direct knowledge of 10 | things such as which metadata keys exist or how to check 11 | the version of the cluster. It is meant to be used as a base 12 | class for further interactions with elastic 13 | """ 14 | 15 | def __init__( 16 | self, 17 | hosts, 18 | username=None, 19 | password=None, 20 | cacert=None, 21 | disable_sniffing=False, 22 | max_retries=10, 23 | retry_on_timeout=True, 24 | timeout=30, 25 | # Add other options not currently handled for es config 26 | otherOptions=None, 27 | indexPrefix="pydat", 28 | logger=None 29 | ): 30 | if logger is None: 31 | import logging 32 | self.logger = logging.getLogger(__name__) 33 | else: 34 | self.logger = logger 35 | 36 | self.indexNames = SimpleNamespace() 37 | self._indexFormatter(indexPrefix) 38 | self.top_level_keys = [ 39 | 'domainName', 40 | 'tld', 41 | '_score' 42 | ] 43 | self.metadata_key_map = SimpleNamespace(**{ 44 | 'VERSION_KEY': 'dataVersion', 45 | 'FIRST_SEEN': 'dataFirstSeen', 46 | 'DATE_FIRST_SEEN': 'dateFirstSeen', 47 | 'DATE_LAST_SEEN': 'dateLastSeen', 48 | 'DATE_CREATED': 'dateCreated', 49 | 'DATE_UPDATED': 'dateUpdated', 50 | 'HISTORICAL': 'historical' 51 | }) 52 | self.metadata_keys = list(vars(self.metadata_key_map).values()) 53 | self.top_level_keys.extend(self.metadata_keys) 54 | 55 | self._es = None 56 | self._es_version = None 57 | self.elastic_args = { 58 | 'hosts': hosts, 59 | 'sniff_on_start': (not disable_sniffing), 60 | 'sniff_on_connection_fail': (not disable_sniffing), 61 | 'sniff_timeout': (None if disable_sniffing else 100), 62 | 'max_retries': max_retries, 63 | 'retry_on_timeout': retry_on_timeout, 64 | 'timeout': timeout 65 | } 66 | 67 | security_args = dict() 68 | 69 | if username is not None and password is None: 70 | raise ValueError("password must be supplied with username") 71 | 72 | if (username is not None and password is not None): 73 | security_args["http_auth"] = ( 74 | username, 75 |
password 76 | ) 77 | 78 | if cacert is not None: 79 | security_args["use_ssl"] = True 80 | security_args["ca_certs"] = cacert 81 | 82 | if len(security_args) > 0: 83 | self.elastic_args.update(security_args) 84 | 85 | if isinstance(otherOptions, dict): 86 | self.elastic_args.update(otherOptions) 87 | 88 | def _indexFormatter(self, prefix): 89 | self.indexNames.prefix = prefix 90 | self.indexNames.orig_write = "%s-data-write" % prefix 91 | self.indexNames.delta_write = "%s-data-delta-write" % prefix 92 | self.indexNames.orig_search = "%s-data-orig" % prefix 93 | self.indexNames.delta_search = "%s-data-delta" % prefix 94 | self.indexNames.search = "%s-data-search" % prefix 95 | self.indexNames.meta = "%s-meta" % prefix 96 | self.indexNames.template_pattern = "%s-data-*" % prefix 97 | self.indexNames.template_name = "%s-template" % prefix 98 | 99 | def connect(self): 100 | """Return an instance of Elasticsearch connection object 101 | 102 | This method will check if an existing instance exists and if not 103 | will create one before returning said instance 104 | 105 | Raises: 106 | RuntimeError: If elasticsearch indicates it is 107 | improperly configured 108 | RuntimeError: If a generic elasticsearch exception occurs 109 | 110 | Returns: 111 | elasticsearch.Elasticsearch(): An instance of the 112 | Elasticsearch object 113 | """ 114 | 115 | if self._es is None: 116 | try: 117 | self._es = elasticsearch.Elasticsearch(**self.elastic_args) 118 | except elasticsearch.ImproperlyConfigured as e: 119 | raise RuntimeError(e) 120 | except elasticsearch.ElasticsearchException as e: 121 | raise RuntimeError(e) 122 | except Exception: 123 | self.logger.exception( 124 | "Unexpected exception making elastic connection") 125 | raise 126 | return self._es 127 | 128 | def getVersion(self): 129 | """Get the version of the cluster 130 | 131 | Raises: 132 | ValueError: If the major version is less than 7 133 | 134 | Returns: 135 | int: The highest version of Elastic in the cluster 136 | """ 137 | if self._es_version is None: 138 | es = self.connect() 139 | 140 | try: 141 | node_versions = [] 142 | for version in es.cat.nodes(h='version').strip().split('\n'): 143 | node_versions.append([int(i) for i in version.split('.')]) 144 | except Exception: 145 | self.logger.exception("Unexpected exception checking versions") 146 | raise 147 | 148 | highest_version = 0 149 | for version in node_versions: 150 | if version[0] > highest_version: 151 | highest_version = version[0] 152 | if version[0] < 7: 153 | raise ValueError( 154 | "Elasticsearch 7.0 is the minimum supported version") 155 | 156 | self._es_version = highest_version 157 | 158 | return self._es_version 159 | 160 | def checkVersion(self): 161 | """Check cluster version against library version 162 | 163 | Raises: 164 | RuntimeError: If installed python library version does 165 | not match version of cluster 166 | """ 167 | library_version = elasticsearch.VERSION[0] 168 | 169 | if self.getVersion() != library_version: 170 | raise RuntimeError( 171 | "Python library installed does not " 172 | "match with greatest (major) version in cluster") 173 | -------------------------------------------------------------------------------- /pydat/backend/pydat/core/elastic/exceptions.py: -------------------------------------------------------------------------------- 1 | class ESConnectionError(Exception): 2 | """Custom error exception that denotes a failure to establish 3 | a python ElasticSearch client handle, thus implying a connectivity 4 | problem to the ElasticSearch
instance. 5 | """ 6 | pass 7 | 8 | 9 | class ESQueryError(Exception): 10 | """Custom error exception that denotes a failure when making a query call 11 | to ElasticSearch instance 12 | """ 13 | pass 14 | 15 | 16 | class ESNotFoundError(Exception): 17 | """Custom error that denotes no records could be found that match a search 18 | """ 19 | pass 20 | -------------------------------------------------------------------------------- /pydat/backend/pydat/core/elastic/ingest/debug_levels.py: -------------------------------------------------------------------------------- 1 | from enum import IntEnum 2 | 3 | 4 | class DebugLevel(IntEnum): 5 | DISABLED = 0 6 | DEFAULT = 1 7 | VERBOSE = 2 8 | NOISY = 3 9 | -------------------------------------------------------------------------------- /pydat/backend/pydat/core/elastic/ingest/event_tracker.py: -------------------------------------------------------------------------------- 1 | import multiprocessing 2 | 3 | 4 | class EventTracker: 5 | def __init__(self): 6 | self._pauseEvent = multiprocessing.Event() 7 | self._shutdownEvent = multiprocessing.Event() 8 | self._bulkShipEvent = multiprocessing.Event() 9 | self._bulkFetchEvent = multiprocessing.Event() 10 | self._fileReaderDoneEvent = multiprocessing.Event() 11 | 12 | @property 13 | def paused(self): 14 | return self._pauseEvent.is_set() 15 | 16 | def pause(self): 17 | self._pauseEvent.set() 18 | 19 | def unpause(self): 20 | self._pauseEvent.clear() 21 | 22 | @property 23 | def shutdown(self): 24 | return self._shutdownEvent.is_set() 25 | 26 | def setShutdown(self): 27 | self._shutdownEvent.set() 28 | 29 | @property 30 | def shipError(self): 31 | return self._bulkShipEvent.is_set() 32 | 33 | def setShipError(self): 34 | self._bulkShipEvent.set() 35 | 36 | @property 37 | def fetchError(self): 38 | return self._bulkFetchEvent.is_set() 39 | 40 | def setFetchError(self): 41 | self._bulkFetchEvent.set() 42 | 43 | @property 44 | def bulkError(self): 45 | return self.shipError or self.fetchError 46 | 47 | @property 48 | def fileReaderDone(self): 49 | return self._fileReaderDoneEvent.is_set() 50 | 51 | def setFileReaderDone(self): 52 | self._fileReaderDoneEvent.set() 53 | -------------------------------------------------------------------------------- /pydat/backend/pydat/core/elastic/ingest/file_reader.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | import logging 4 | from threading import Thread 5 | 6 | 7 | class FileReader(Thread): 8 | """Simple data file organizer 9 | 10 | This class focuses on iterating through directories and putting 11 | found files into a queue for processing by pipelines 12 | """ 13 | 14 | def __init__( 15 | self, 16 | file_queue, 17 | eventTracker, 18 | directory, 19 | _file, 20 | extension, 21 | logger=None, 22 | ): 23 | super().__init__() 24 | self.daemon = True 25 | 26 | if logger is not None: 27 | self.logger = logger 28 | else: 29 | self.logger = logging.getLogger('fileReader') 30 | 31 | self.file_queue = file_queue 32 | self.eventTracker = eventTracker 33 | self.directory = directory 34 | self.file = _file 35 | self.extension = extension 36 | self._shutdown = False 37 | 38 | def shutdown(self): 39 | self._shutdown = True 40 | 41 | def run(self): 42 | try: 43 | if self.directory: 44 | self.scan_directory(self.directory) 45 | elif self.file: 46 | self.file_queue.put(self.file) 47 | else: 48 | self.logger.error("File or Directory required") 49 | except Exception: 50 | self.logger.exception("Unknown exception in File 
Reader") 51 | finally: 52 | self.file_queue.join() 53 | self.logger.debug("Setting FileReaderDone event") 54 | self.eventTracker.setFileReaderDone() 55 | 56 | def scan_directory(self, directory): 57 | for path in sorted(os.listdir(directory)): 58 | fp = os.path.join(directory, path) 59 | 60 | if os.path.isdir(fp): 61 | self.scan_directory(fp) 62 | elif os.path.isfile(fp): 63 | if self._shutdown: 64 | return 65 | if self.extension != '': 66 | fn, ext = os.path.splitext(path) 67 | if ext == '' or not ext.endswith(self.extension): 68 | continue 69 | self.file_queue.put(fp) 70 | else: 71 | self.logger.warning("%s is neither a file nor directory" % fp) 72 | -------------------------------------------------------------------------------- /pydat/backend/pydat/core/elastic/ingest/stat_tracker.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | from threading import Thread 4 | from multiprocessing import Queue as mpQueue 5 | import queue 6 | 7 | 8 | class _StatTracker: 9 | MAX_CHUNK_SIZE = 100 10 | 11 | def __init__(self, queue): 12 | self._queue = queue 13 | self._chunk = [] 14 | 15 | def __del__(self): 16 | try: 17 | self.flush() 18 | except Exception: 19 | pass 20 | 21 | def flush(self): 22 | self._queue.put(self._chunk) 23 | self._chunk = [] 24 | 25 | def addChanged(self, field): 26 | self._chunk.append(('chn', field)) 27 | if len(self._chunk) >= self.MAX_CHUNK_SIZE: 28 | self.flush() 29 | 30 | def incr(self, field): 31 | self._chunk.append(('stat', field)) 32 | if len(self._chunk) >= self.MAX_CHUNK_SIZE: 33 | self.flush() 34 | 35 | 36 | class StatTracker(Thread): 37 | """Multi-processing safe stat tracking class 38 | 39 | This class can be provided to all pipelines to keep track of different 40 | stats about the domains being ingested 41 | """ 42 | 43 | def __init__(self, logger=None, **kwargs): 44 | super().__init__(**kwargs) 45 | self.daemon = True 46 | self._stats = {'total': 0, 47 | 'new': 0, 48 | 'updated': 0, 49 | 'unchanged': 0, 50 | 'duplicates': 0} 51 | self._stat_queue = mpQueue() 52 | self._shutdown = False 53 | self._changed = dict() 54 | if logger is None: 55 | import logging 56 | self.logger = logging.getLogger(__name__) 57 | else: 58 | self.logger = logger 59 | 60 | def get_tracker(self): 61 | return _StatTracker(self._stat_queue) 62 | 63 | @property 64 | def total(self): 65 | return self._stats['total'] 66 | 67 | @property 68 | def new(self): 69 | return self._stats['new'] 70 | 71 | @property 72 | def updated(self): 73 | return self._stats['updated'] 74 | 75 | @property 76 | def unchanged(self): 77 | return self._stats['unchanged'] 78 | 79 | @property 80 | def duplicates(self): 81 | return self._stats['duplicates'] 82 | 83 | @property 84 | def stats(self): 85 | return self._stats 86 | 87 | @property 88 | def changed_stats(self): 89 | return self._changed 90 | 91 | def seed(self, stats): 92 | self._stats.update(stats) 93 | 94 | def seedChanged(self, changed): 95 | for (name, value) in changed.items(): 96 | self._changed[name] = int(value) 97 | 98 | def shutdown(self): 99 | self._shutdown = True 100 | 101 | def run(self): 102 | while 1: 103 | try: 104 | chunk = self._stat_queue.get(True, 0.2) 105 | except queue.Empty: 106 | if self._shutdown: 107 | break 108 | continue 109 | 110 | for (typ, field) in chunk: 111 | if typ == 'stat': 112 | if field not in self._stats: 113 | self.logger.error("Unknown field %s" % field) 114 | else: 115 | self._stats[field] += 1 116 | elif typ == 'chn': 117 | if field not in 
self._changed: 118 | self._changed[field] = 0 119 | self._changed[field] += 1 120 | else: 121 | self.logger.error("Unknown stat type") 122 | 123 | self._stat_queue.close() 124 | 125 | def addChanged(self, field): 126 | self._stat_queue.put([('chn', field)]) 127 | 128 | def incr(self, field): 129 | self._stat_queue.put([('stat', field)]) 130 | -------------------------------------------------------------------------------- /pydat/backend/pydat/core/elastic/search/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MITRECND/WhoDat/9c2a3265c6437f2f82f23abc1deaaaa86b947d38/pydat/backend/pydat/core/elastic/search/__init__.py -------------------------------------------------------------------------------- /pydat/backend/pydat/core/elastic/search/flask_handler.py: -------------------------------------------------------------------------------- 1 | from pydat.core.elastic.search.search_handler import SearchHandler 2 | 3 | 4 | class FlaskElasticHandler(SearchHandler): 5 | """Wrapper class around SearchHandler that adds support for flask 6 | 7 | This class wraps the SearchHandler class to enable usage by flask, 8 | specifically deferring initialization of elastic capabilities with 9 | an init_app function that can be called by code using the application 10 | factory pattern 11 | """ 12 | def __init__(self): 13 | pass 14 | 15 | def _generate_config(self, app): 16 | # Collate elastic arguments 17 | elastic_config = app.config['ELASTICSEARCH'] 18 | self.elastic_arguments = { 19 | 'hosts': elastic_config['uri'], 20 | 'username': elastic_config.get('user', None), 21 | 'password': elastic_config.get('pass', None), 22 | 'cacert': elastic_config.get('cacert', None), 23 | 'disable_sniffing': elastic_config.get('disable_sniffing', False), 24 | 'indexPrefix': elastic_config['indexPrefix'], 25 | 'max_retries': 100, 26 | 'retry_on_timeout': True, 27 | } 28 | 29 | def init_app(self, app): 30 | """Support flask deferred initialization 31 | 32 | Args: 33 | app (flask.Flask): An instance of a Flask object 34 | """ 35 | self._generate_config(app) 36 | self._search_keys = app.config['SEARCHKEYS'] 37 | 38 | try: 39 | super().__init__( 40 | search_keys=self._search_keys, 41 | **self.elastic_arguments) 42 | except RuntimeError: 43 | raise 44 | except Exception: 45 | raise 46 | -------------------------------------------------------------------------------- /pydat/backend/pydat/core/logger.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | import sys 4 | from threading import Thread 5 | from multiprocessing import ( 6 | JoinableQueue as jmpQueue 7 | ) 8 | import logging 9 | import logging.handlers 10 | import queue 11 | 12 | DEBUG_LEVEL = logging.DEBUG 13 | DEFAULT_LEVEL = logging.INFO 14 | 15 | 16 | def getLogger(name=None, debug=False, mpSafe=True, **kwargs): 17 | """Convenience function to get a logger with configured level 18 | 19 | Args: 20 | name (str, optional): Name to use for logger. Defaults to None. 21 | debug (bool, optional): Enable debug level. Defaults to False.
22 | 23 | Returns: 24 | Logger: Logger instance returned by logging 25 | """ 26 | if mpSafe: 27 | # Remove existing handlers and use QueueHandler instead 28 | queue_handler = logging.handlers.QueueHandler(mpLogger.logQueue) 29 | root_logger = logging.getLogger() 30 | root_logger.handlers = [] 31 | root_logger.addHandler(queue_handler) 32 | 33 | logger = logging.getLogger(name, **kwargs) 34 | logger.setLevel( 35 | DEBUG_LEVEL if debug else DEFAULT_LEVEL 36 | ) 37 | 38 | return logger 39 | 40 | 41 | class mpLogger(Thread): 42 | """Multiprocessing 'safe' logger implementation/wrapper 43 | 44 | This class enables a main thread to support a QueueHandler-based logger 45 | created by the 'getLogger' function in this file. It should be started 46 | before starting child processes and then joined after child processes 47 | are finished 48 | """ 49 | 50 | logQueue = jmpQueue() 51 | 52 | def __init__(self, name=__name__, debug=False, **kwargs): 53 | Thread.__init__(self, **kwargs) 54 | self._debug = debug 55 | self.daemon = True 56 | self.name = name 57 | self._stop_processing = False 58 | 59 | def stop(self): 60 | self._stop_processing = True 61 | 62 | def join(self, **kwargs): 63 | self._stop_processing = True 64 | self.logQueue.join() 65 | 66 | def run(self): 67 | while 1: 68 | try: 69 | record = self.logQueue.get(True, 0.2) 70 | try: 71 | logger = logging.getLogger(record.name) 72 | logger.handle(record) 73 | except EOFError: 74 | break 75 | except BrokenPipeError: 76 | print( 77 | "Broken Pipe -- unable to output further logs", 78 | file=sys.stderr 79 | ) 80 | break 81 | finally: 82 | self.logQueue.task_done() 83 | except queue.Empty: 84 | if self._stop_processing: 85 | break 86 | -------------------------------------------------------------------------------- /pydat/backend/pydat/core/parsetab.py: -------------------------------------------------------------------------------- 1 | 2 | # parsetab.py 3 | # This file is automatically generated. Do not edit.
4 | # pylint: disable=W,C,R 5 | _tabversion = '3.10' 6 | 7 | _lr_method = 'LALR' 8 | 9 | _lr_signature = 'leftANDORrightNOTleftCOLONAND COLON DATE FUZZY LPAREN NOT NULL OR QUOTED REGEX RPAREN WILDCARD WORDquery : LPAREN query RPARENquery : NOT queryquery : query query %prec ANDquery : query AND queryquery : query OR queryquery : specific\n | daterange\n | termqueryspecific : FUZZY WORD COLON WORDspecific : WORD COLON WORDspecific : FUZZY WORD COLON QUOTEDspecific : WORD COLON QUOTEDspecific : WORD COLON NULLspecific : WORD COLON WILDCARDspecific : WORD COLON REGEXdaterange : WORD COLON DATEdaterange : WORD COLON DATE COLON DATEtermquery : QUOTEDtermquery : WORD' 10 | 11 | _lr_action_items = {'LPAREN':([0,1,2,3,4,5,6,8,9,10,11,12,13,14,17,18,19,21,22,23,24,25,26,27,28,30,],[2,2,2,2,-6,-7,-8,-19,-18,-3,2,2,2,-2,-4,-5,-1,-10,-12,-13,-14,-15,-16,-9,-11,-17,]),'NOT':([0,1,2,3,4,5,6,8,9,10,11,12,13,14,17,18,19,21,22,23,24,25,26,27,28,30,],[3,3,3,3,-6,-7,-8,-19,-18,3,3,3,3,3,3,3,-1,-10,-12,-13,-14,-15,-16,-9,-11,-17,]),'FUZZY':([0,1,2,3,4,5,6,8,9,10,11,12,13,14,17,18,19,21,22,23,24,25,26,27,28,30,],[7,7,7,7,-6,-7,-8,-19,-18,-3,7,7,7,-2,-4,-5,-1,-10,-12,-13,-14,-15,-16,-9,-11,-17,]),'WORD':([0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,16,17,18,19,20,21,22,23,24,25,26,27,28,30,],[8,8,8,8,-6,-7,-8,15,-19,-18,-3,8,8,8,-2,21,-4,-5,-1,27,-10,-12,-13,-14,-15,-16,-9,-11,-17,]),'QUOTED':([0,1,2,3,4,5,6,8,9,10,11,12,13,14,16,17,18,19,20,21,22,23,24,25,26,27,28,30,],[9,9,9,9,-6,-7,-8,-19,-18,-3,9,9,9,-2,22,-4,-5,-1,28,-10,-12,-13,-14,-15,-16,-9,-11,-17,]),'$end':([1,4,5,6,8,9,10,14,17,18,19,21,22,23,24,25,26,27,28,30,],[0,-6,-7,-8,-19,-18,-3,-2,-4,-5,-1,-10,-12,-13,-14,-15,-16,-9,-11,-17,]),'AND':([1,4,5,6,8,9,10,13,14,17,18,19,21,22,23,24,25,26,27,28,30,],[11,-6,-7,-8,-19,-18,-3,11,-2,-4,-5,-1,-10,-12,-13,-14,-15,-16,-9,-11,-17,]),'OR':([1,4,5,6,8,9,10,13,14,17,18,19,21,22,23,24,25,26,27,28,30,],[12,-6,-7,-8,-19,-18,-3,12,-2,-4,-5,-1,-10,-12,-13,-14,-15,-16,-9,-11,-17,]),'RPAREN':([4,5,6,8,9,10,13,14,17,18,19,21,22,23,24,25,26,27,28,30,],[-6,-7,-8,-19,-18,-3,19,-2,-4,-5,-1,-10,-12,-13,-14,-15,-16,-9,-11,-17,]),'COLON':([8,15,26,],[16,20,29,]),'NULL':([16,],[23,]),'WILDCARD':([16,],[24,]),'REGEX':([16,],[25,]),'DATE':([16,29,],[26,30,]),} 12 | 13 | _lr_action = {} 14 | for _k, _v in _lr_action_items.items(): 15 | for _x,_y in zip(_v[0],_v[1]): 16 | if not _x in _lr_action: _lr_action[_x] = {} 17 | _lr_action[_x][_k] = _y 18 | del _lr_action_items 19 | 20 | _lr_goto_items = {'query':([0,1,2,3,10,11,12,13,14,17,18,],[1,10,13,14,10,17,18,10,10,10,10,]),'specific':([0,1,2,3,10,11,12,13,14,17,18,],[4,4,4,4,4,4,4,4,4,4,4,]),'daterange':([0,1,2,3,10,11,12,13,14,17,18,],[5,5,5,5,5,5,5,5,5,5,5,]),'termquery':([0,1,2,3,10,11,12,13,14,17,18,],[6,6,6,6,6,6,6,6,6,6,6,]),} 21 | 22 | _lr_goto = {} 23 | for _k, _v in _lr_goto_items.items(): 24 | for _x, _y in zip(_v[0], _v[1]): 25 | if not _x in _lr_goto: _lr_goto[_x] = {} 26 | _lr_goto[_x][_k] = _y 27 | del _lr_goto_items 28 | _lr_productions = [ 29 | ("S' -> query","S'",1,None,None,None), 30 | ('query -> LPAREN query RPAREN','query',3,'p_query_group','advanced_es.py',285), 31 | ('query -> NOT query','query',2,'p_query_not','advanced_es.py',289), 32 | ('query -> query query','query',2,'p_query_query','advanced_es.py',310), 33 | ('query -> query AND query','query',3,'p_query_and_query','advanced_es.py',314), 34 | ('query -> query OR query','query',3,'p_query_or_query','advanced_es.py',318), 35 | ('query -> specific','query',1,'p_query_terminals','advanced_es.py',331), 36 | ('query 
-> daterange','query',1,'p_query_terminals','advanced_es.py',332), 37 | ('query -> termquery','query',1,'p_query_terminals','advanced_es.py',333), 38 | ('specific -> FUZZY WORD COLON WORD','specific',4,'p_specific_fuzzy_word','advanced_es.py',367), 39 | ('specific -> WORD COLON WORD','specific',3,'p_specific_word','advanced_es.py',383), 40 | ('specific -> FUZZY WORD COLON QUOTED','specific',4,'p_specific_fuzzy_quoted','advanced_es.py',394), 41 | ('specific -> WORD COLON QUOTED','specific',3,'p_specific_quoted','advanced_es.py',437), 42 | ('specific -> WORD COLON NULL','specific',3,'p_field_missing','advanced_es.py',495), 43 | ('specific -> WORD COLON WILDCARD','specific',3,'p_specific_wildcard','advanced_es.py',561), 44 | ('specific -> WORD COLON REGEX','specific',3,'p_specific_regex','advanced_es.py',569), 45 | ('daterange -> WORD COLON DATE','daterange',3,'p_daterange_single','advanced_es.py',592), 46 | ('daterange -> WORD COLON DATE COLON DATE','daterange',5,'p_daterange_range','advanced_es.py',604), 47 | ('termquery -> QUOTED','termquery',1,'p_termquery_quoted','advanced_es.py',619), 48 | ('termquery -> WORD','termquery',1,'p_termquery_word','advanced_es.py',671), 49 | ] 50 | -------------------------------------------------------------------------------- /pydat/backend/pydat/core/plugins.py: -------------------------------------------------------------------------------- 1 | import pkgutil 2 | import importlib 3 | import pydat.plugins 4 | from flask import Blueprint, current_app 5 | 6 | # list of valid Plugin objects 7 | PLUGINS = set() 8 | 9 | 10 | class PluginBase: 11 | """Plugin base class that all plugins should extend. 12 | 13 | Attributes: 14 | name: A string that stores the plugin's identifying name. 15 | blueprint: A Blueprint that defines the plugin. 16 | """ 17 | 18 | def __init__(self, name, blueprint): 19 | self.name = name 20 | self.blueprint = blueprint 21 | self._prefix = '/api/plugin/' 22 | 23 | @property 24 | def prefix(self): 25 | return self._prefix 26 | 27 | def setConfig(self, **kwargs): 28 | """Function to allow plugin to handle config 29 | 30 | Raises: 31 | NotImplementedError: Must check for proper configuration. 32 | Raise ValueError if needed config isn't there""" 33 | raise NotImplementedError("Plugin must handle configuration") 34 | 35 | @property 36 | def blueprint(self): 37 | """Returns blueprint""" 38 | return self._blueprint 39 | 40 | @blueprint.setter 41 | def blueprint(self, new_blueprint): 42 | if not isinstance(new_blueprint, Blueprint): 43 | raise TypeError("blueprint must be of type Blueprint") 44 | self._blueprint = new_blueprint 45 | 46 | 47 | class PassivePluginBase(PluginBase): 48 | """Plugin base class that all passive plugins should extend. 49 | 50 | Attributes: 51 | name: A string that stores the plugin's identifying name. 52 | blueprint: A Blueprint that defines the plugin.
53 | """ 54 | 55 | def __init__(self, name, blueprint): 56 | super().__init__(name, blueprint) 57 | self._prefix += 'passive/' 58 | 59 | @property 60 | def blueprint(self): 61 | """Returns blueprint""" 62 | return self._blueprint 63 | 64 | @blueprint.setter 65 | def blueprint(self, passive_bp): 66 | if not isinstance(passive_bp, Blueprint): 67 | raise TypeError("blueprint must of of type Blueprint") 68 | 69 | passive_bp.route("/forward", methods=["POST"])(self.forward) 70 | passive_bp.route("/reverse", methods=["POST"])(self.reverse) 71 | 72 | self._blueprint = passive_bp 73 | 74 | def forward(self): 75 | """Required forward pdns functionality for passive plugin 76 | 77 | Raises: 78 | NotImplementedError: subclasses must implement""" 79 | raise NotImplementedError("Passive Plugin must have forward pdns") 80 | 81 | def reverse(self): 82 | """Required reverse pdns functionality for passive plugin 83 | 84 | Raises: 85 | NotImplementedError: subclasses must implement""" 86 | raise NotImplementedError("Passive Plugin must have reverse pdns") 87 | 88 | 89 | class PluginManager: 90 | def __init__(self, namespace=pydat.plugins): 91 | """Initializes PluginManager 92 | 93 | Args: 94 | namespace (module, optional): Namespace package to search 95 | for plugins. Defaults to pydat.plugins. 96 | """ 97 | self.namespace = namespace 98 | self._plugins = [] 99 | 100 | def gather_plugins(self): 101 | """Iterates through namespace to execute global module code 102 | """ 103 | 104 | plugins = pkgutil.iter_modules( 105 | self.namespace.__path__, self.namespace.__name__ + ".") 106 | 107 | for (finder, name, ispkg) in plugins: 108 | importlib.import_module(name) 109 | 110 | for plugin_class in PLUGINS: 111 | plugin = plugin_class() 112 | 113 | current_app.logger.info(f"Setting up plugin {plugin.name}") 114 | 115 | if isinstance(plugin, PassivePluginBase): 116 | config = current_app.config['PDNSSOURCES'].get( 117 | plugin.name, None) 118 | else: 119 | config = current_app.config['PLUGINS'].get(plugin.name, None) 120 | 121 | if config is None: 122 | current_app.logger.warning( 123 | f"No config for plugin '{plugin.name}', disabling") 124 | continue 125 | 126 | try: 127 | plugin.setConfig(**config) 128 | except (KeyError, ValueError): 129 | raise ValueError( 130 | f"Plugin '{plugin.name}' missing proper configuration") 131 | 132 | try: 133 | blueprint = plugin.blueprint 134 | except Exception: 135 | raise ValueError( 136 | f"Plugin '{plugin.name}' unable to get blueprint") 137 | 138 | if not isinstance(blueprint, Blueprint): 139 | raise ValueError( 140 | f"Plugin '{plugin.name}' providing invalid blueprint") 141 | 142 | self._plugins.append(plugin) 143 | 144 | @property 145 | def plugins(self): 146 | return self._plugins 147 | 148 | 149 | def register_plugin(plugin): 150 | """Decorator for registering plugins. 151 | 152 | If the plugin is a valid plugin, the plugin object will be added to 153 | the global PLUGINS. 154 | 155 | Args: 156 | plugin: Expects a subclass of PluginBase 157 | 158 | Raises: 159 | TypeError: plugin is not a subclass of PluginBase 160 | 161 | Returns: 162 | Original class after registering it 163 | """ 164 | 165 | if not issubclass(plugin, PluginBase): 166 | raise TypeError("Plugin must be subclass of PluginBase") 167 | 168 | PLUGINS.add(plugin) 169 | 170 | return plugin 171 | 172 | 173 | def register_passive_plugin(plugin): 174 | """Decorator for registering passive plugins. 175 | 176 | If the plugin is a valid passive plugin, the plugin object will be added to 177 | the global PLUGINS. 
178 | 179 | Args: 180 | plugin: Expects a subclass of PassivePluginBase 181 | 182 | Raises: 183 | TypeError: plugin is not a subclass of PassivePluginBase 184 | 185 | 186 | Returns: 187 | Original class after registering it 188 | """ 189 | 190 | if not issubclass(plugin, PassivePluginBase): 191 | raise TypeError("Plugin must be subclass of PassivePluginBase") 192 | 193 | return register_plugin(plugin) 194 | -------------------------------------------------------------------------------- /pydat/backend/pydat/plugins/.gitkeep: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MITRECND/WhoDat/9c2a3265c6437f2f82f23abc1deaaaa86b947d38/pydat/backend/pydat/plugins/.gitkeep -------------------------------------------------------------------------------- /pydat/backend/pydat/scripts/api.py: -------------------------------------------------------------------------------- 1 | from pydat.api import create_app 2 | import argparse 3 | 4 | 5 | def _get_argparser(): 6 | parser = argparse.ArgumentParser() 7 | 8 | parser.add_argument( 9 | "--debug", 10 | action="store_true", 11 | dest="debug", 12 | default=False, 13 | help="Enable debug mode for flask" 14 | ) 15 | 16 | parser.add_argument( 17 | "--host", 18 | action="store", 19 | dest="host", 20 | default="127.0.0.1", 21 | help="Host for running app" 22 | ) 23 | 24 | parser.add_argument( 25 | "--port", 26 | action="store", 27 | dest="port", 28 | default=5000, type=int, 29 | help="Port for running app" 30 | ) 31 | 32 | return parser 33 | 34 | 35 | def main(): 36 | parser = _get_argparser() 37 | options = vars(parser.parse_args()) 38 | 39 | app = create_app() 40 | app.run(debug=options["debug"], host=options["host"], port=options["port"]) 41 | -------------------------------------------------------------------------------- /pydat/backend/setup.cfg: -------------------------------------------------------------------------------- 1 | [tool:pytest] 2 | testpaths = tests 3 | 4 | [coverage:run] 5 | branch = True 6 | source = 7 | pydat 8 | -------------------------------------------------------------------------------- /pydat/backend/setup.py: -------------------------------------------------------------------------------- 1 | from setuptools import find_namespace_packages, setup 2 | 3 | setup( 4 | name="pydat", 5 | version="5.0.0", 6 | packages=find_namespace_packages(include=['pydat.*']), 7 | include_package_data=True, 8 | zip_safe=False, 9 | install_requires=[ 10 | "flask", 11 | "cerberus", 12 | "elasticsearch>=7.0.0,<8.0.0", 13 | "ply", 14 | "flask-caching", 15 | "requests", 16 | "pyyaml" 17 | ], 18 | tests_require=[ 19 | "pytest", 20 | "pytest-cov", 21 | "flake8", 22 | "blinker" 23 | ], 24 | classifiers=["Programming Language :: Python :: 3", ], 25 | python_requires=">=3.6", 26 | entry_points={ 27 | "console_scripts": [ 28 | "pydat-dev-server = pydat.scripts.api:main", 29 | "pydat-populator = pydat.scripts.elasticsearch_populate:main" 30 | ] 31 | }, 32 | ) 33 | -------------------------------------------------------------------------------- /pydat/backend/tests/conftest.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from pydat.api import create_app, elasticsearch_handler 3 | from pydat.core.plugins import ( 4 | PluginBase, 5 | PassivePluginBase, 6 | ) 7 | from flask import Blueprint 8 | 9 | 10 | @pytest.fixture 11 | def
fake_create_app(monkeypatch): 12 | return create_app 13 | 14 | 15 | @pytest.fixture 16 | def config_app(fake_create_app): 17 | app = fake_create_app( 18 | { 19 | "TESTING": True, 20 | "SEARCHKEYS": [ 21 | 'domainName', 22 | 'registrant_name', 23 | 'contactEmail', 24 | 'registrant_telephone', 25 | ], 26 | "PDNSSOURCES": { 27 | "TestPassive": {} 28 | } 29 | } 30 | ) 31 | return app 32 | 33 | 34 | @pytest.fixture 35 | def fake_app(fake_create_app): 36 | app = fake_create_app() 37 | 38 | return app 39 | 40 | 41 | @pytest.fixture 42 | def client(fake_create_app): 43 | app = fake_create_app({"TESTING": True, }) 44 | return app.test_client() 45 | 46 | 47 | # simple test plugin, returns created valid plugin 48 | @pytest.fixture 49 | def sample_plugin(): 50 | 51 | class TestPlugin(PluginBase): 52 | bp = Blueprint('test_plugin', __name__) 53 | 54 | def __init__(self): 55 | super().__init__('test_plugin', self.bp) 56 | self.bp.route('/hello')(self.hello) 57 | 58 | @property 59 | def jsfiles(self): 60 | return ['testfile.js', 'testfile2.js'] 61 | 62 | def hello(self): 63 | return "Success!" 64 | 65 | def setConfig(self, **kwargs): 66 | pass 67 | 68 | return TestPlugin 69 | 70 | 71 | # simple test passive plugin, returns created valid plugin 72 | @pytest.fixture 73 | def sample_passive_plugin(): 74 | 75 | class TestPassivePlugin(PassivePluginBase): 76 | bp = Blueprint('passive_plugin', __name__) 77 | 78 | def __init__(self): 79 | super().__init__('passive_plugin', self.bp) 80 | self.bp.route("/hello")(self.hello) 81 | 82 | @property 83 | def jsfiles(self): 84 | return ['testfile1.js', 'testfile2.js'] 85 | 86 | def forward(self): 87 | return {} 88 | 89 | def reverse(self): 90 | return {} 91 | 92 | def setConfig(self, **kwargs): 93 | self.config = kwargs 94 | 95 | def hello(self): 96 | return "Success!" 
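# NB: as with sample_plugin above, this fixture returns the plugin class itself; PluginManager.gather_plugins() is what instantiates registered plugins when the app is created.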
97 | 98 | return TestPassivePlugin 99 | 100 | 101 | @pytest.fixture 102 | def es_handler(): 103 | """ElasticSearch Handler""" 104 | return elasticsearch_handler 105 | -------------------------------------------------------------------------------- /pydat/backend/tests/test_config.py: -------------------------------------------------------------------------------- 1 | from pydat.core.config_parser import ConfigParser 2 | import pytest 3 | from unittest import mock 4 | 5 | 6 | def test_config_parser(fake_app): 7 | parser = ConfigParser(fake_app) 8 | parser.parse() 9 | 10 | 11 | def test_config_parser_env_invalid(monkeypatch, fake_app): 12 | app = fake_app 13 | fake_environ = mock.MagicMock(return_value=[ 14 | ('PYDAT_', 'test') 15 | ]) 16 | 17 | with monkeypatch.context() as monkey: 18 | monkey.setattr('os.environ.items', fake_environ) 19 | with pytest.raises(ValueError): 20 | ConfigParser(app) 21 | 22 | 23 | def test_config_parser_env_searchkeys(monkeypatch, fake_app): 24 | app = fake_app 25 | search_keys = ['domainName', 'registrant_name'] 26 | fake_environ = mock.MagicMock(return_value=[ 27 | ('PYDAT_SEARCHKEYS', ','.join(search_keys)) 28 | ]) 29 | 30 | with monkeypatch.context() as monkey: 31 | monkey.setattr('os.environ.items', fake_environ) 32 | ConfigParser(app) 33 | assert app.config['SEARCHKEYS'] 34 | assert app.config['SEARCHKEYS'] == search_keys 35 | 36 | 37 | def test_config_parser_envvar(monkeypatch, fake_app): 38 | app = fake_app 39 | fake_environ_keys = mock.MagicMock(return_value=[ 40 | 'PYDATCONFIG' 41 | ]) 42 | fake_app_config = mock.MagicMock() 43 | 44 | with monkeypatch.context() as monkey: 45 | monkey.setattr('os.environ.keys', fake_environ_keys) 46 | monkey.setattr(app, 'config', fake_app_config) 47 | ConfigParser(app) 48 | fake_app_config.from_envvar.assert_called_with( 49 | ConfigParser.ENV_CONFIG_FILE) 50 | 51 | 52 | def test_config_parser_env_boolean(monkeypatch, fake_app): 53 | app = fake_app 54 | fake_environ = mock.MagicMock(return_value=[ 55 | ('PYDAT_SSLVERIFY', 'test') 56 | ]) 57 | 58 | with monkeypatch.context() as monkey: 59 | monkey.setattr('os.environ.items', fake_environ) 60 | with pytest.raises(ValueError): 61 | ConfigParser(app) 62 | 63 | app = fake_app 64 | fake_environ = mock.MagicMock(return_value=[ 65 | ('PYDAT_DEBUG', 'test') 66 | ]) 67 | with monkeypatch.context() as monkey: 68 | monkey.setattr('os.environ.items', fake_environ) 69 | with pytest.raises(ValueError): 70 | ConfigParser(app) 71 | 72 | app = fake_app 73 | fake_environ = mock.MagicMock(return_value=[ 74 | ('PYDAT_DEBUG', 'false') 75 | ]) 76 | with monkeypatch.context() as monkey: 77 | monkey.setattr('os.environ.items', fake_environ) 78 | ConfigParser(app) 79 | assert(not app.config['DEBUG']) 80 | 81 | app = fake_app 82 | fake_environ = mock.MagicMock(return_value=[ 83 | ('PYDAT_DEBUG', 'true') 84 | ]) 85 | with monkeypatch.context() as monkey: 86 | monkey.setattr('os.environ.items', fake_environ) 87 | ConfigParser(app) 88 | assert(app.config['DEBUG']) 89 | 90 | 91 | def test_config_env_fields(monkeypatch, fake_app): 92 | app = fake_app 93 | fake_environ = mock.MagicMock(return_value=[ 94 | ('PYDAT_TEST', 'value') 95 | ]) 96 | with monkeypatch.context() as monkey: 97 | monkey.setattr('os.environ.items', fake_environ) 98 | ConfigParser(app) 99 | assert(app.config['TEST'] == 'value') 100 | 101 | 102 | @pytest.mark.parametrize( 103 | "env_items, expected_name, expected_value", [ 104 | ( 105 | [('PYDAT_TEST_FIELD', 'value')], 106 | ['TEST', 'FIELD'], 107 | 'value' 108 | ), 109 | ( 110 | 
[('PYDAT_TEST_FIELD_NESTED', 'value')], 111 | ['TEST', 'FIELD', 'NESTED'], 112 | 'value' 113 | ), 114 | ( 115 | [ 116 | ('PYDAT_TEST_FIELD_NESTED', 'value'), 117 | ('PYDAT_TEST_FIELD_NESTED2', 'value') 118 | ], 119 | ['TEST', 'FIELD', 'NESTED2'], 120 | 'value' 121 | ), 122 | ( 123 | [('PYDAT_ELASTICSEARCH_uri', 'localhost:9001')], 124 | ['ELASTICSEARCH', 'uri'], 125 | 'localhost:9001' 126 | ) 127 | ] 128 | ) 129 | def test_config_env_dicts( 130 | monkeypatch, 131 | fake_app, 132 | env_items, 133 | expected_name, 134 | expected_value 135 | ): 136 | fake_environ = mock.MagicMock(return_value=env_items) 137 | with monkeypatch.context() as monkey: 138 | monkey.setattr('os.environ.items', fake_environ) 139 | ConfigParser(fake_app) 140 | local_config = fake_app.config 141 | for name in expected_name: 142 | local_config = local_config[name] 143 | assert(local_config == expected_value) 144 | 145 | 146 | def test_config_invalidated(monkeypatch, fake_app, capsys): 147 | app = fake_app 148 | fake_app_config = mock.MagicMock() 149 | fake_app_config.items = mock.MagicMock(return_value=[('DEBUG', 'test')]) 150 | 151 | with monkeypatch.context() as monkey: 152 | monkey.setattr(app, 'config', fake_app_config) 153 | parser = ConfigParser(app) 154 | with pytest.raises(ValueError): 155 | parser.parse() 156 | -------------------------------------------------------------------------------- /pydat/backend/tests/test_elastic_handler.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from unittest import mock 3 | from pydat.core.elastic import ElasticHandler 4 | import elasticsearch 5 | 6 | 7 | def test_handler(): 8 | elastic_handler = ElasticHandler( 9 | "localhost:9200", 10 | indexPrefix="testing" 11 | ) 12 | assert elastic_handler 13 | assert elastic_handler.indexNames.prefix == "testing" 14 | 15 | 16 | def test_handler_security_args(): 17 | with pytest.raises(ValueError): 18 | ElasticHandler("localhost:9200", username="foo") 19 | 20 | elastic_handler = ElasticHandler("localhost:9200", cacert="test") 21 | assert elastic_handler.elastic_args["use_ssl"] 22 | assert elastic_handler.elastic_args["ca_certs"] == "test" 23 | 24 | 25 | def test_get_version(monkeypatch): 26 | fake_connect = mock.Mock() 27 | fake_connect.return_value.cat.nodes.return_value = "7.0\n7.10\n" 28 | 29 | elastic_handler = ElasticHandler("localhost:9200") 30 | monkeypatch.setattr(elastic_handler, "connect", fake_connect) 31 | assert elastic_handler.getVersion() == 7 32 | 33 | 34 | def test_get_version_old(monkeypatch): 35 | fake_connect = mock.Mock() 36 | fake_connect.return_value.cat.nodes.return_value = "6.7\n6.8\n" 37 | 38 | elastic_handler = ElasticHandler("localhost:9200") 39 | monkeypatch.setattr(elastic_handler, "connect", fake_connect) 40 | with pytest.raises(ValueError): 41 | elastic_handler.getVersion() 42 | 43 | 44 | def test_check_version(monkeypatch): 45 | fake_get_version = mock.MagicMock(return_value="7") 46 | elastic_handler = ElasticHandler("localhost:9200") 47 | monkeypatch.setattr(elastic_handler, "getVersion", fake_get_version) 48 | 49 | with monkeypatch.context() as monkey: 50 | monkey.setattr(elasticsearch, 'VERSION', ["7", "0"]) 51 | elastic_handler.checkVersion() 52 | 53 | with monkeypatch.context() as monkey: 54 | monkey.setattr(elasticsearch, 'VERSION', ["6", "0"]) 55 | with pytest.raises(RuntimeError): 56 | elastic_handler.checkVersion() 57 | -------------------------------------------------------------------------------- 
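The handler tests above never open a real connection: each one monkeypatches `ElasticHandler.connect` and scripts the mock client it returns. A minimal sketch of that pattern, assuming only the constructor and `getVersion()` behavior exercised above (the `7.9`/`7.17` node payload is illustrative, not taken from the suite):

from unittest import mock
from pydat.core.elastic import ElasticHandler

def test_get_version_uniform_cluster(monkeypatch):
    # connect() normally returns an elasticsearch client; replace it with
    # a Mock whose cat.nodes reports one node version per line
    fake_connect = mock.Mock()
    fake_connect.return_value.cat.nodes.return_value = "7.9\n7.17\n"
    elastic_handler = ElasticHandler("localhost:9200")
    monkeypatch.setattr(elastic_handler, "connect", fake_connect)
    # every node reports major version 7, so getVersion() reduces to 7
    assert elastic_handler.getVersion() == 7

The same scripted-client idea carries through the ingest and file-reader tests that follow.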
/pydat/backend/tests/test_elastic_ingest_data_processors.py: -------------------------------------------------------------------------------- 1 | import os 2 | from types import SimpleNamespace 3 | import pytest 4 | from unittest import mock 5 | 6 | from pydat.core.elastic.ingest.process_wrapper import PopulatorOptions 7 | from pydat.core.elastic.ingest.data_processors import ( 8 | _generateDocId, 9 | DataReader 10 | ) 11 | 12 | 13 | def test_generate_doc_id(): 14 | assert _generateDocId("mitre.org") == "org.mitre" 15 | 16 | with pytest.raises(RuntimeError): 17 | assert _generateDocId(set()) 18 | 19 | longdomain = "mitre" * 100 + ".org" 20 | assert _generateDocId(longdomain) == \ 21 | "org.h.2844603a197e9cbe3dde56e467422d01d1c5ab40" 22 | 23 | 24 | @pytest.fixture 25 | def fake_data_reader(): 26 | fake_file_queue = mock.Mock() 27 | fake_data_queue = mock.Mock() 28 | fake_eventTracker = mock.Mock() 29 | process_options = PopulatorOptions( 30 | verbose=True, 31 | debug=True, 32 | ) 33 | 34 | return DataReader( 35 | 0, 36 | fake_file_queue, 37 | fake_data_queue, 38 | fake_eventTracker, 39 | process_options 40 | ) 41 | 42 | 43 | def test_data_reader(): 44 | fake_file_queue = mock.Mock() 45 | fake_data_queue = mock.Mock() 46 | fake_eventTracker = mock.Mock() 47 | process_options = PopulatorOptions( 48 | verbose=True, 49 | debug=True, 50 | ) 51 | 52 | assert DataReader( 53 | 0, 54 | fake_file_queue, 55 | fake_data_queue, 56 | fake_eventTracker, 57 | process_options 58 | ) 59 | 60 | 61 | def test_data_reader_check_header(fake_data_reader): 62 | assert fake_data_reader.check_header( 63 | ['test', 'test', 'test', 'domainName']) 64 | assert not fake_data_reader.check_header( 65 | ['test', 'test', 'test', 'test']) 66 | 67 | 68 | def test_data_reader_parse_csv(monkeypatch, fake_data_reader): 69 | fake_open = mock.mock_open() 70 | fake_parse_csv_fn = mock.MagicMock() 71 | with mock.patch('builtins.open', fake_open): 72 | fake_stat = mock.Mock(return_value=SimpleNamespace(st_size=100)) 73 | with monkeypatch.context() as monkey: 74 | monkey.setattr(os, 'stat', fake_stat) 75 | monkey.setattr(DataReader, "_parse_csv", fake_parse_csv_fn) 76 | fake_data_reader.parse_csv('testfile.csv') 77 | 78 | fake_stat = mock.Mock(side_effect=Exception()) 79 | with monkeypatch.context() as monkey: 80 | monkey.setattr(os, 'stat', fake_stat) 81 | monkey.setattr(DataReader, "_parse_csv", fake_parse_csv_fn) 82 | fake_data_reader.parse_csv('testfile.csv') 83 | 84 | fake_open = mock.mock_open() 85 | with mock.patch('builtins.open', fake_open) as mock_file: 86 | mock_file.side_effect = FileNotFoundError() 87 | 88 | fake_stat = mock.Mock(return_value=SimpleNamespace(st_size=100)) 89 | with monkeypatch.context() as monkey: 90 | monkey.setattr(os, 'stat', fake_stat) 91 | monkey.setattr(DataReader, "_parse_csv", fake_parse_csv_fn) 92 | fake_data_reader.parse_csv('testfile.csv') 93 | 94 | 95 | def test_data_reader_real_parse_csv(monkeypatch, fake_data_reader, caplog): 96 | csvdata = [ 97 | "domainName,registrantName", 98 | "mitre.org,Some Person" 99 | ] 100 | fake_data_reader._parse_csv('fakefile', csvdata) 101 | 102 | csvdata = [ 103 | "mitre.org,Some Person" 104 | ] 105 | fake_data_reader._parse_csv('fakefile', csvdata) 106 | -------------------------------------------------------------------------------- /pydat/backend/tests/test_elastic_ingest_event_tracker.py: -------------------------------------------------------------------------------- 1 | from pydat.core.elastic.ingest.event_tracker import EventTracker 2 | 3 | 4 | def 
test_event_tracker(): 5 | event_tracker = EventTracker() 6 | 7 | assert not event_tracker.shutdown 8 | assert not event_tracker.fileReaderDone 9 | 10 | event_tracker.setShutdown() 11 | assert event_tracker.shutdown 12 | 13 | event_tracker.setFileReaderDone() 14 | assert event_tracker.fileReaderDone 15 | 16 | 17 | def test_event_tracker_bulk(): 18 | event_tracker = EventTracker() 19 | 20 | assert not event_tracker.shipError 21 | assert not event_tracker.fetchError 22 | assert not event_tracker.bulkError 23 | 24 | event_tracker.setShipError() 25 | assert event_tracker.shipError 26 | assert event_tracker.bulkError 27 | assert not event_tracker.fetchError 28 | 29 | event_tracker._bulkShipEvent.clear() 30 | 31 | event_tracker.setFetchError() 32 | assert event_tracker.fetchError 33 | assert event_tracker.bulkError 34 | assert not event_tracker.shipError 35 | -------------------------------------------------------------------------------- /pydat/backend/tests/test_elastic_ingest_file_reader.py: -------------------------------------------------------------------------------- 1 | import os 2 | import pytest 3 | from unittest import mock 4 | from pydat.core.elastic.ingest.file_reader import FileReader 5 | 6 | 7 | def fake_isdir(path): 8 | if path in [ 9 | '/tmp/fake/subdir', 10 | ]: 11 | return True 12 | else: 13 | return False 14 | 15 | 16 | def fake_isfile(path): 17 | if path in [ 18 | '/tmp/fake/file1.csv', 19 | '/tmp/fake/file2.csv', 20 | '/tmp/fake/file1.txt', 21 | '/tmp/fake/file3.txt' 22 | ]: 23 | return True 24 | else: 25 | return False 26 | 27 | 28 | def test_file_reader_file(monkeypatch): 29 | fake_eventTracker = mock.MagicMock() 30 | fake_eventTracker.setFileReaderDone = mock.MagicMock() 31 | fake_queue = mock.MagicMock() 32 | 33 | file_reader = FileReader( 34 | fake_queue, 35 | fake_eventTracker, 36 | None, 37 | "test.csv", 38 | "csv" 39 | ) 40 | 41 | file_reader.run() 42 | assert fake_queue.put.called 43 | assert fake_queue.join.called 44 | assert fake_eventTracker.setFileReaderDone.called 45 | 46 | 47 | @pytest.mark.parametrize( 48 | "pathlist,call_count", [ 49 | ( 50 | ['file1.csv', 'file2.csv'], 51 | 2 52 | ), 53 | ( 54 | ['subdir'], 55 | 0 56 | ), 57 | ( 58 | ['file3.txt'], 59 | 0 60 | ) 61 | ] 62 | ) 63 | def test_file_reader_directory(monkeypatch, pathlist, call_count): 64 | fake_eventTracker = mock.MagicMock() 65 | fake_eventTracker.setFileReaderDone = mock.MagicMock() 66 | fake_queue = mock.MagicMock() 67 | 68 | file_reader = FileReader( 69 | fake_queue, 70 | fake_eventTracker, 71 | "/tmp/fake", 72 | None, 73 | "csv" 74 | ) 75 | 76 | monkeypatch.setattr(os.path, "isdir", fake_isdir) 77 | monkeypatch.setattr(os.path, "isfile", fake_isfile) 78 | 79 | fake_listdir = mock.MagicMock(return_value=pathlist) 80 | with monkeypatch.context() as monkey: 81 | monkey.setattr(os, "listdir", fake_listdir) 82 | file_reader.run() 83 | assert fake_queue.put.call_count == call_count 84 | 85 | 86 | def test_file_reader_shutdown(monkeypatch): 87 | fake_eventTracker = mock.MagicMock() 88 | fake_eventTracker.setFileReaderDone = mock.MagicMock() 89 | fake_queue = mock.MagicMock() 90 | 91 | file_reader = FileReader( 92 | fake_queue, 93 | fake_eventTracker, 94 | "/tmp/fake", 95 | None, 96 | "" 97 | ) 98 | 99 | file_reader.shutdown() 100 | assert file_reader._shutdown is True 101 | 102 | fake_listdir = mock.MagicMock(return_value=['file1.csv']) 103 | with monkeypatch.context() as monkey: 104 | monkey.setattr(os, "listdir", fake_listdir) 105 | file_reader.run() 106 | assert fake_queue.put.call_count == 0
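# once shutdown() has been set, run() exits without queueing any work, which is why put() was never called even though listdir reported a matching file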
107 | 108 | 109 | def test_file_reader_noextension_check(monkeypatch): 110 | fake_eventTracker = mock.MagicMock() 111 | fake_eventTracker.setFileReaderDone = mock.MagicMock() 112 | fake_queue = mock.MagicMock() 113 | 114 | file_reader = FileReader( 115 | fake_queue, 116 | fake_eventTracker, 117 | "/tmp/fake", 118 | None, 119 | "" 120 | ) 121 | 122 | monkeypatch.setattr(os.path, "isdir", fake_isdir) 123 | monkeypatch.setattr(os.path, "isfile", fake_isfile) 124 | fake_listdir = mock.MagicMock(return_value=['file1.txt']) 125 | with monkeypatch.context() as monkey: 126 | monkey.setattr(os, "listdir", fake_listdir) 127 | file_reader.run() 128 | assert fake_queue.put.call_count == 1 129 | -------------------------------------------------------------------------------- /pydat/backend/tests/test_elastic_ingest_handler.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from unittest import mock 3 | from pydat.core.elastic.ingest.ingest_handler import IngestHandler 4 | import elasticsearch 5 | 6 | 7 | @pytest.fixture 8 | def mock_handler(monkeypatch): 9 | fake_connect = mock.MagicMock() 10 | ingest_handler = IngestHandler(hosts="localhost:9200") 11 | monkeypatch.setattr(ingest_handler, "connect", fake_connect) 12 | 13 | return ingest_handler 14 | 15 | 16 | def test_template_fn(mock_handler): 17 | mock_handler.connect.return_value.indices.get_template.return_value = \ 18 | {} 19 | 20 | assert mock_handler.templateExists 21 | 22 | mock_handler.connect.return_value.indices.get_template.side_effect = \ 23 | elasticsearch.exceptions.NotFoundError 24 | 25 | assert not mock_handler.templateExists 26 | 27 | 28 | def test_metaexists_fn(mock_handler): 29 | mock_handler.connect.return_value.indices.exists.return_value = \ 30 | True 31 | 32 | assert mock_handler.metaExists 33 | 34 | mock_handler.connect.return_value.indices.exists.return_value = \ 35 | False 36 | 37 | assert not mock_handler.metaExists 38 | 39 | 40 | def test_metarecord_fn(mock_handler): 41 | mock_handler.connect.return_value.get.return_value = { 42 | 'found': True, 43 | '_source': {} 44 | } 45 | 46 | assert mock_handler.metaRecord == {} 47 | 48 | mock_handler.connect.return_value.get.return_value = { 49 | 'found': False, 50 | '_source': {} 51 | } 52 | 53 | assert mock_handler.metaRecord is None 54 | 55 | mock_handler.connect.return_value.get.side_effect = Exception() 56 | 57 | with pytest.raises(RuntimeError): 58 | mock_handler.metaRecord 59 | 60 | 61 | def test_getmetadata_fn(mock_handler): 62 | mock_handler.connect.return_value.get.return_value = { 63 | 'test': 'record', 64 | '_source': {} 65 | } 66 | assert mock_handler.getMetadata(1) == {} 67 | 68 | mock_handler.connect.return_value.get.side_effect = Exception() 69 | with pytest.raises(RuntimeError): 70 | mock_handler.getMetadata(1) 71 | 72 | 73 | def test_fetchdocuments_fn(mock_handler): 74 | mock_handler.connect.return_value.mget.return_value = { 75 | 'docs': ['test1', 'test2', 'test3'] 76 | } 77 | 78 | assert mock_handler.fetchDocuments([]) 79 | -------------------------------------------------------------------------------- /pydat/backend/tests/test_elastic_ingest_stat_tracker.py: -------------------------------------------------------------------------------- 1 | from pydat.core.elastic.ingest.stat_tracker import StatTracker 2 | 3 | 4 | def test_stat_tracker(): 5 | stat_tracker = StatTracker() 6 | assert stat_tracker 7 | 8 | assert stat_tracker.total == 0 9 | assert stat_tracker.new == 0 10 | assert stat_tracker.updated == 0 11 |
assert stat_tracker.unchanged == 0 12 | assert stat_tracker.duplicates == 0 13 | 14 | assert stat_tracker.stats == { 15 | 'total': 0, 16 | 'new': 0, 17 | 'updated': 0, 18 | 'unchanged': 0, 19 | 'duplicates': 0 20 | } 21 | 22 | stat_tracker.shutdown() 23 | assert stat_tracker._shutdown 24 | 25 | 26 | def test_stat_tracker_seed(): 27 | stat_tracker = StatTracker() 28 | 29 | stats = [ 30 | 'total', 31 | 'new', 32 | 'updated', 33 | 'unchanged', 34 | 'duplicates' 35 | ] 36 | 37 | stat_tracker.seed({ 38 | 'total': 10 39 | }) 40 | 41 | for key in stats: 42 | assert key in stat_tracker.stats 43 | 44 | 45 | def test_stat_tracker_changed(): 46 | stat_tracker = StatTracker() 47 | 48 | assert stat_tracker.changed_stats == {} 49 | 50 | stat_tracker.seedChanged({ 51 | 'registrant_name': 10 52 | }) 53 | 54 | assert 'registrant_name' in stat_tracker.changed_stats.keys() 55 | 56 | 57 | def test_stat_tracker_run(): 58 | stat_tracker = StatTracker() 59 | 60 | stat_tracker.incr('total') 61 | stat_tracker.addChanged('registrant_name') 62 | 63 | assert stat_tracker._stat_queue.qsize() == 2 64 | 65 | stat_tracker.start() 66 | stat_tracker.shutdown() 67 | 68 | 69 | def test_stat_tracker_run_failure(caplog): 70 | stat_tracker = StatTracker() 71 | stat_tracker.incr('badkey') 72 | stat_tracker.shutdown() 73 | stat_tracker.run() 74 | 75 | assert "Unknown field" in caplog.record_tuples[0][2] 76 | stat_tracker._stat_queue.close() 77 | stat_tracker._stat_queue.join_thread() 78 | 79 | 80 | def test_stat_tracker_run_failure2(): 81 | stat_tracker = StatTracker() 82 | stat_tracker.addChanged('registrant_name') 83 | stat_tracker.addChanged('registrant_name') 84 | stat_tracker.shutdown() 85 | stat_tracker.run() 86 | 87 | assert 'registrant_name' in stat_tracker.changed_stats 88 | assert stat_tracker.changed_stats['registrant_name'] == 2 89 | stat_tracker._stat_queue.close() 90 | stat_tracker._stat_queue.join_thread() 91 | 92 | 93 | def test_stat_tracker_client(): 94 | stat_tracker = StatTracker() 95 | client = stat_tracker.get_tracker() 96 | 97 | client.addChanged('total') 98 | client.incr('registrant_name') 99 | 100 | assert len(client._chunk) == 2 101 | client.flush() 102 | assert stat_tracker._stat_queue.qsize() == 1 103 | stat_tracker.shutdown() 104 | stat_tracker.run() 105 | stat_tracker._stat_queue.close() 106 | stat_tracker._stat_queue.join_thread() 107 | -------------------------------------------------------------------------------- /pydat/backend/tests/test_elastic_populator.py: -------------------------------------------------------------------------------- 1 | from types import SimpleNamespace 2 | import pytest 3 | from unittest import mock 4 | import logging 5 | from pydat.scripts.elasticsearch_populate import ( 6 | generate_parser, 7 | process_elastic_args, 8 | process_config_file, 9 | process_additional_configuration, 10 | ) 11 | 12 | 13 | @pytest.fixture 14 | def populator_config(): 15 | return [ 16 | "--es-uri", "localhost:9200", 17 | "--es-disable-sniffing", 18 | "--rollover-size", "100000", 19 | # "--es-user", None, 20 | # "--es-password", None, 21 | # "--es-ca-cert", None, 22 | # "--es-index-prefix", "pydat", 23 | "--include", ','.join([ 24 | "registrarName", 25 | "contactEmail", 26 | "whoisServer", 27 | "nameServers", 28 | "registrant-email", 29 | "registrant-name", 30 | "registrant-organization" 31 | ]), 32 | # "--ignore-field-prefixes", ','.join([ 33 | # "zoneContact", 34 | # "billingContact", 35 | # "technicalContact" 36 | # ]), 37 | "--pipelines", "2", 38 | "--shipper-threads", "1", 39 | 
"--fetcher-threads", "2", 40 | "--bulk-fetch-size", "50", 41 | "--bulk-ship-size", "10", 42 | "--verbose", 43 | "--debug", 44 | "--debug-level", "1", 45 | "--stats", 46 | "--extension", "csv", 47 | # "--exclude", None 48 | ] 49 | 50 | 51 | def test_parser(populator_config): 52 | parser = generate_parser() 53 | assert(parser) 54 | assert(parser.parse_args(populator_config)) 55 | 56 | 57 | def test_parser_elastic_handling(populator_config): 58 | parser = generate_parser() 59 | args = parser.parse_args(populator_config) 60 | (args, elastic) = process_elastic_args(vars(args)) 61 | assert(elastic == { 62 | "uri": ["localhost:9200"], 63 | "disable_sniffing": True, 64 | "rollover_docs": 100000, 65 | }) 66 | 67 | 68 | def test_config_ingest(monkeypatch): 69 | mock_open = mock.mock_open(read_data="""--- 70 | # Elastic Configuration Options 71 | es: 72 | uri: 73 | - localhost:9200 74 | disable_sniffing: true 75 | rollover_docs: 500000 76 | 77 | # General ingest and processing options 78 | ignore_field_prefixes: 79 | - zoneContact 80 | - billingContact 81 | - technicalContact 82 | 83 | # Performance Tuning Options 84 | pipelines: 4 85 | shipper_threads: 2 86 | fetcher_threads: 2 87 | bulk_fetch_size: 50 88 | bulk_ship_size: 10 89 | 90 | """) 91 | 92 | with monkeypatch.context() as m: 93 | m.setattr('builtins.open', mock_open) 94 | config = process_config_file("foobarfoo") 95 | 96 | assert(config) 97 | 98 | 99 | @pytest.fixture 100 | def ingest_test_data(): 101 | return SimpleNamespace( 102 | ingest_day=None, 103 | redo=False, 104 | ask_password=False, 105 | config_template_only=False, 106 | clear_interrupted=False, 107 | ingest_file="file", 108 | ingest_directory=None 109 | ) 110 | 111 | 112 | def test_additional_config_ingest_day(ingest_test_data, caplog): 113 | process_additional_configuration( 114 | ingest_test_data, 115 | SimpleNamespace(), 116 | logging.getLogger("test") 117 | ) 118 | 119 | assert("assuming today" in caplog.text) 120 | 121 | 122 | def test_additional_config_ingest_day_parse_failure_1( 123 | ingest_test_data, 124 | caplog 125 | ): 126 | ingest_test_data.ingest_day = "WRONG" 127 | 128 | with pytest.raises(SystemExit): 129 | process_additional_configuration( 130 | ingest_test_data, 131 | SimpleNamespace(), 132 | logging.getLogger("test") 133 | ) 134 | 135 | assert("ingest_day format is" in caplog.text) 136 | 137 | 138 | def test_additional_config_ingest_day_parse_failure_2( 139 | ingest_test_data, 140 | caplog 141 | ): 142 | ingest_test_data.ingest_day = "2021-00-01" 143 | 144 | with pytest.raises(SystemExit): 145 | process_additional_configuration( 146 | ingest_test_data, 147 | SimpleNamespace(), 148 | logging.getLogger("test") 149 | ) 150 | 151 | assert("Unable to verify date" in caplog.text) 152 | 153 | 154 | def test_additional_config_ingest_day_parse_failure_3( 155 | ingest_test_data, 156 | caplog 157 | ): 158 | ingest_test_data.ingest_day = "2021-01-32" 159 | 160 | with pytest.raises(SystemExit): 161 | process_additional_configuration( 162 | ingest_test_data, 163 | SimpleNamespace(), 164 | logging.getLogger("test") 165 | ) 166 | 167 | assert("Unable to verify date" in caplog.text) 168 | 169 | 170 | def test_additional_config_inges_day_parse_1(ingest_test_data): 171 | ingest_test_data.ingest_day = "2021-01-00" 172 | 173 | process_additional_configuration( 174 | ingest_test_data, 175 | SimpleNamespace(), 176 | logging.getLogger("test") 177 | ) 178 | 179 | 180 | def test_additional_config_inges_day_parse_2(ingest_test_data): 181 | ingest_test_data.ingest_day = "2021-01-01" 182 | 
183 | process_additional_configuration( 184 | ingest_test_data, 185 | SimpleNamespace(), 186 | logging.getLogger("test") 187 | ) 188 | -------------------------------------------------------------------------------- /pydat/backend/tests/test_factory.py: -------------------------------------------------------------------------------- 1 | from flask import template_rendered 2 | from contextlib import contextmanager 3 | from unittest.mock import MagicMock 4 | import pytest 5 | 6 | 7 | def test_config(monkeypatch, fake_create_app): 8 | # check that if a config is passed, default is overridden 9 | assert not fake_create_app().testing 10 | assert fake_create_app({"TESTING": True}).testing 11 | 12 | 13 | def test_error(client): 14 | # check that non-routed api namespace lead to 404 15 | response = client.get("/api/v2/session/illegal") 16 | assert response.status_code == 404 17 | 18 | response = client.get("/api/v2/illegal") 19 | assert response.status_code == 404 20 | 21 | 22 | @contextmanager 23 | def captured_templates(app): 24 | recorded = [] 25 | 26 | def record(sender, template, context, **extra): 27 | recorded.append((template, context)) 28 | template_rendered.connect(record, app) 29 | try: 30 | yield recorded 31 | finally: 32 | template_rendered.disconnect(record, app) 33 | 34 | 35 | def test_index(fake_create_app): 36 | app = fake_create_app({"TESTING": True, }) 37 | # with captured_templates(app) as templates: 38 | response = app.test_client().get("/") 39 | assert response.status_code == 200 40 | # assert len(templates) == 1 41 | # template, context = templates[0] 42 | # assert template.name == 'index.html' 43 | 44 | 45 | def test_debug(fake_create_app): 46 | fake_create_app({"DEBUG": True}) 47 | 48 | 49 | def test_plugin_failure(monkeypatch, fake_create_app): 50 | with monkeypatch.context() as m: 51 | mockPluginManager = MagicMock(side_effect=ValueError) 52 | m.setattr( 53 | 'pydat.core.plugins.PluginManager.gather_plugins', 54 | mockPluginManager) 55 | with pytest.raises(SystemExit): 56 | fake_create_app() 57 | -------------------------------------------------------------------------------- /pydat/backend/tests/test_passive_plugin.py: -------------------------------------------------------------------------------- 1 | from pydat.core import plugins 2 | from pydat.core.plugins import ( 3 | PluginManager, 4 | PassivePluginBase, 5 | register_passive_plugin, 6 | ) 7 | import pytest 8 | from flask import Blueprint, Flask 9 | 10 | 11 | # def test_no_config_registration(config_app, sample_passive_plugin): 12 | # # Reset the PLUGINS Set 13 | # plugins.PLUGINS = set() 14 | 15 | # register_passive_plugin(sample_passive_plugin) 16 | 17 | # app = create_app({"TESTING": True, }) 18 | # with app.app_context(): 19 | # with pytest.raises(ValueError): 20 | # assert start_plugin() 21 | 22 | # test_plugin = create_passive_plugin("fake_plugin") 23 | # with config_app.app_context(): 24 | # with pytest.raises(ValueError): 25 | # start_plugin() 26 | 27 | 28 | def test_registration(sample_passive_plugin): 29 | # Reset the PLUGINS Set 30 | plugins.PLUGINS = set() 31 | 32 | register_passive_plugin(sample_passive_plugin) 33 | 34 | assert sample_passive_plugin in plugins.PLUGINS 35 | 36 | app = Flask(__name__) 37 | app.config['PDNSSOURCES'] = {'passive_plugin': {}} 38 | app.config['PLUGINS'] = dict() 39 | 40 | with app.app_context(): 41 | plugin_manager = PluginManager() 42 | plugin_manager.gather_plugins() 43 | loaded = plugin_manager.plugins 44 | assert len(loaded) == 1 45 | assert isinstance(loaded[0], 
sample_passive_plugin) 46 | 47 | 48 | def test_registration_bp(sample_passive_plugin, fake_create_app): 49 | # Reset the PLUGINS Set 50 | plugins.PLUGINS = set() 51 | 52 | # check bp properly registered 53 | register_passive_plugin(sample_passive_plugin) 54 | plugin_name = 'passive_plugin' 55 | 56 | app = fake_create_app( 57 | {"TESTING": True, "PDNSSOURCES": {"passive_plugin": {}}} 58 | ) 59 | routes = [str(p) for p in app.url_map.iter_rules()] 60 | assert f'/api/plugin/passive/{plugin_name}/hello' in routes 61 | 62 | client = app.test_client() 63 | response = client.get(f'/api/plugin/passive/{plugin_name}/hello') 64 | assert response.status_code == 200 65 | response = client.post(f'/api/plugin/passive/{plugin_name}/forward') 66 | assert response.status_code == 200 67 | response = client.post(f'/api/plugin/passive/{plugin_name}/reverse') 68 | assert response.status_code == 200 69 | 70 | 71 | def test_invalid_plugin(config_app): 72 | # Reset the PLUGINS Set 73 | plugins.PLUGINS = set() 74 | 75 | bp = Blueprint("fake_plugin", __name__) 76 | 77 | # Doesn't have forward 78 | class MissingForward(PassivePluginBase): 79 | def reverse(self): 80 | return "Reverse success!" 81 | 82 | def setConfig(self, **kwargs): 83 | self.config = kwargs 84 | 85 | # Doesn't have reverse 86 | class MissingReverse(PassivePluginBase): 87 | def forward(self): 88 | return "Forward success!" 89 | 90 | def setConfig(self, **kwargs): 91 | self.config = kwargs 92 | 93 | # Doesn't have setConfig 94 | class MissingConfig(PassivePluginBase): 95 | def reverse(self): 96 | return "Reverse success!" 97 | 98 | def forward(self): 99 | return "Forward success!" 100 | 101 | test_plugin = MissingForward("MissingForward", bp) 102 | 103 | with pytest.raises(NotImplementedError): 104 | test_plugin.forward() 105 | 106 | test_plugin = MissingReverse("MissingReverse", bp) 107 | with pytest.raises(NotImplementedError): 108 | test_plugin.reverse() 109 | 110 | test_plugin = MissingConfig("MissingConfig", bp) 111 | with pytest.raises(NotImplementedError): 112 | test_plugin.setConfig() 113 | -------------------------------------------------------------------------------- /pydat/backend/tests/test_plugin.py: -------------------------------------------------------------------------------- 1 | from pydat.core import plugins 2 | from pydat.core.plugins import PluginManager 3 | from flask import Flask, Blueprint 4 | from pydat.core.plugins import PluginBase, register_plugin 5 | import pytest 6 | # from pydat.core import preferences 7 | 8 | 9 | def test_registration(sample_plugin): 10 | # Reset the PLUGINS Set 11 | plugins.PLUGINS = set() 12 | 13 | register_plugin(sample_plugin) 14 | assert sample_plugin in plugins.PLUGINS 15 | 16 | app = Flask(__name__) 17 | app.config['PDNSSOURCES'] = dict() 18 | app.config['PLUGINS'] = {'test_plugin': {}} 19 | 20 | with app.app_context(): 21 | plugin_manager = PluginManager() 22 | plugin_manager.gather_plugins() 23 | loaded = plugin_manager.plugins 24 | assert len(loaded) == 1 25 | assert isinstance(loaded[0], sample_plugin) 26 | 27 | 28 | def test_registration_bp(sample_plugin, fake_create_app): 29 | # Reset the PLUGINS Set 30 | plugins.PLUGINS = set() 31 | 32 | # check bp properly registered 33 | register_plugin(sample_plugin) 34 | 35 | app = fake_create_app( 36 | {"TESTING": True, "PLUGINS": {"test_plugin": {}}} 37 | ) 38 | routes = [str(p) for p in app.url_map.iter_rules()] 39 | assert '/api/plugin/test_plugin/hello' in routes 40 | 41 | client = app.test_client() 42 | response = 
client.get('/api/plugin/test_plugin/hello') 43 | assert response.status_code == 200 44 | 45 | # with client: 46 | # response = client.get('/api/v2/session/test_plugin') 47 | # assert response.is_json 48 | # json_data = response.get_json() 49 | # assert test_pref.keys() == json_data.keys() 50 | # assert "test_plugin" in session.keys() 51 | # assert test_pref.keys() == session["test_plugin"].keys() 52 | 53 | 54 | # test invalid plugins 55 | def test_invalid_plugin(): 56 | # Reset the PLUGINS Set 57 | plugins.PLUGINS = set() 58 | 59 | # Not child of PluginBase 60 | class FakePlugin(): 61 | def set_name(self): 62 | return "fake" 63 | 64 | with pytest.raises(TypeError): 65 | register_plugin(FakePlugin) 66 | 67 | 68 | def test_invalid_plugin_bp(): 69 | # Reset the PLUGINS Set 70 | plugins.PLUGINS = set() 71 | 72 | # does not return proper blueprint 73 | class BadPlugin(PluginBase): 74 | def __init__(self): 75 | super().__init__('bad_plugin', Blueprint('test', 'test')) 76 | 77 | @property 78 | def blueprint(self): 79 | return ["fake"] 80 | 81 | @blueprint.setter 82 | def blueprint(self, newbp): 83 | self._blueprint = newbp 84 | 85 | def setConfig(self, **kwargs): 86 | pass 87 | 88 | register_plugin(BadPlugin) 89 | 90 | app = Flask(__name__) 91 | app.config['PDNSSOURCES'] = dict() 92 | app.config['PLUGINS'] = {'bad_plugin': {}} 93 | 94 | with app.app_context(): 95 | plugin_manager = PluginManager() 96 | with pytest.raises(ValueError): 97 | plugin_manager.gather_plugins() 98 | -------------------------------------------------------------------------------- /pydat/backend/tests/test_settings.py: -------------------------------------------------------------------------------- 1 | from pydat.core import plugins 2 | from pydat.core.plugins import register_plugin 3 | 4 | 5 | def test_settings(client): 6 | response = client.get("/api/v2/settings") 7 | assert response.status_code == 200 8 | data = response.json 9 | assert 'enable_plugin_test_plugin' not in data.keys() 10 | 11 | 12 | def test_plugin_settings(sample_plugin, fake_create_app): 13 | # Reset the PLUGINS Set 14 | plugins.PLUGINS = set() 15 | 16 | register_plugin(sample_plugin) 17 | assert sample_plugin in plugins.PLUGINS 18 | 19 | app = fake_create_app({ 20 | 'PLUGINS': {'test_plugin': {}} 21 | }) 22 | 23 | client = app.test_client() 24 | response = client.get("/api/v2/settings") 25 | assert response.status_code == 200 26 | data = response.json 27 | assert data['enable_plugin_test_plugin'] 28 | -------------------------------------------------------------------------------- /pydat/backend/tests/test_whois_v1.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from unittest.mock import MagicMock 3 | from pydat.core.elastic.exceptions import ( 4 | ESConnectionError, 5 | ESQueryError, 6 | ESNotFoundError 7 | ) 8 | 9 | 10 | @pytest.mark.parametrize("low", ("low", -1, 3, 100, 1, -21)) 11 | @pytest.mark.parametrize("high", ("high", -1, 4, 2, 200)) 12 | def test_domains(monkeypatch, config_app, low, high, es_handler): 13 | client = config_app.test_client() 14 | # search is always valid 15 | mock_search = MagicMock(return_value={"data": [{ 16 | "test": "output", 17 | "dataVersion": 99 18 | }]}) 19 | monkeypatch.setattr(es_handler, 'search', mock_search) 20 | 21 | # test checking valid search keys 22 | for key in config_app.config['SEARCHKEYS']: 23 | response = client.get(f"/api/v1/domains/{key}/fake") 24 | assert response.status_code == 200 25 | assert 
client.get("/api/v1/domains/fake_key/fake").status_code == 400 26 | assert client.get("/api/v1/domains/fake_key").status_code == 404 27 | 28 | # test valid low/high parameters 29 | response = client.get(f"/api/v1/domains/domainName/fake/{low}") 30 | if (isinstance(low, float) or isinstance(low, int)) and low > 0: 31 | assert response.status_code == 200 32 | else: 33 | assert response.status_code == 400 34 | response = client.get(f"/api/v1/domains/domainName/fake/{low}/{high}") 35 | if (isinstance(low, float) or isinstance(low, int)) \ 36 | and (isinstance(high, float) or isinstance(high, int)) \ 37 | and low < high and low > 0: 38 | assert response.status_code == 200 39 | else: 40 | assert response.status_code == 400 41 | 42 | 43 | def test_latest(monkeypatch, config_app, es_handler): 44 | # search and lastVersion are always valid 45 | mock_search = MagicMock(return_value={"data": [{ 46 | "test": "output", 47 | "dataVersion": 99 48 | }]}) 49 | mock_last = MagicMock(return_value=1) 50 | monkeypatch.setattr(es_handler, 'search', mock_search) 51 | monkeypatch.setattr(es_handler, 'last_version', mock_last) 52 | 53 | client = config_app.test_client() 54 | for key in config_app.config['SEARCHKEYS']: 55 | response = client.get(f"/api/v1/domains/{key}/fake/latest") 56 | assert response.status_code == 200 57 | response = client.get("/api/v1/domain/fake/latest") 58 | assert response.status_code == 200 59 | 60 | # error: unable to find last version 61 | mock_last.side_effect = ESQueryError 62 | response = client.get("/api/v1/domain/fake/latest") 63 | assert response.status_code == 500 64 | response = client.get(f"/api/v1/domains/{key}/fake/latest") 65 | assert response.status_code == 500 66 | 67 | 68 | def test_domain_diff(monkeypatch, client, es_handler): 69 | # type checks independent of search 70 | assert client.get('/api/v1/domain/test/diff/false/true').status_code == 400 71 | assert client.get('/api/v1/domain/test/diff/1/true').status_code == 400 72 | 73 | # error: no data for domainName/version 74 | mock_diff = MagicMock(return_value={'data': []}) 75 | monkeypatch.setattr(es_handler, 'search', mock_diff) 76 | response = client.get('/api/v1/domain/greetings/diff/3/4') 77 | assert response.status_code == 404 78 | assert 'version' in response.get_json()['error'] 79 | 80 | # test diff functunality 81 | v1_res = {"data": [{"hey": True, "hi": 1, "bye": -1.1, "Version": 1}]} 82 | v2_res = {"data": [{"hey": 1, "hi": 1, "bye": False, "si": "yes"}]} 83 | mock_diff.side_effect = [v1_res, v2_res, v2_res, v1_res] 84 | response = client.get('/api/v1/domain/greetings/diff/1/2') 85 | # ensure call search(low=1) and search(low=2) 86 | assert mock_diff.call_count == 4 87 | assert response.status_code == 200 88 | v1_data = response.get_json()['data'] 89 | 90 | response = client.get('/api/v1/domain/greetings/diff/2/1') 91 | assert response.status_code == 200 92 | v2_data = response.get_json()['data'] 93 | assert v1_data != v2_data 94 | assert v1_data.keys() == v2_data.keys() 95 | assert v1_data == { 96 | "hey": True, 97 | "hi": 1, 98 | "bye": [-1.1, False], 99 | "si": ["", "yes"] 100 | } 101 | assert v2_data['si'] == ["yes", ""] 102 | 103 | 104 | @pytest.mark.parametrize("version", ("version", -1, 1, 2)) 105 | def test_metadata(monkeypatch, client, version, es_handler): 106 | # metadata is always valid 107 | mock_meta = MagicMock(return_value=[{"test": "record"}]) 108 | monkeypatch.setattr(es_handler, 'metadata', mock_meta) 109 | 110 | # type checking 111 | response = client.get(f'/api/v1/metadata/{version}') 112 
| if isinstance(version, int) and version > 0: 113 | assert response.status_code == 200 114 | else: 115 | assert response.status_code == 400 116 | 117 | 118 | def test_metadata_notfound(monkeypatch, client, es_handler): 119 | # error: version doesn't exist 120 | mock_meta = MagicMock(side_effect=ESNotFoundError) 121 | monkeypatch.setattr(es_handler, 'metadata', mock_meta) 122 | 123 | assert client.get('/api/v1/metadata/1').status_code == 404 124 | 125 | 126 | def test_query(monkeypatch, client, es_handler): 127 | # must have query 128 | response = client.get("/api/v1/query") 129 | assert response.status_code == 400 130 | 131 | # type checking 132 | response = client.get("/api/v1/query", 133 | query_string={"query": "query", "size": 20.1}) 134 | assert response.status_code == 400 135 | response = client.get("/api/v1/query", 136 | query_string={"query": "query", "page": 1.1}) 137 | assert response.status_code == 400 138 | response = client.get("/api/v1/query", 139 | query_string={"query": "query", "size": -20}) 140 | assert response.status_code == 400 141 | response = client.get("/api/v1/query", 142 | query_string={"query": "query", "page": -1}) 143 | assert response.status_code == 400 144 | 145 | # test page specification 146 | mock_query = MagicMock(return_value={'total': 1000, 'data': []}) 147 | monkeypatch.setattr(es_handler, 'advanced_search', mock_query) 148 | response = client.get("/api/v1/query", 149 | query_string={"query": "query", "size": 1000}) 150 | assert response.status_code == 200 151 | response = client.get("/api/v1/query", 152 | query_string={"query": "query", "page": 501}) 153 | assert response.status_code == 400 154 | assert '501' in response.get_json()['error'] 155 | 156 | 157 | def test_connection_error(monkeypatch, config_app, es_handler): 158 | mock_connection = MagicMock(side_effect=ESConnectionError) 159 | client = config_app.test_client() 160 | 161 | # search connection error 162 | monkeypatch.setattr(es_handler, 'search', mock_connection) 163 | with pytest.raises(ESConnectionError): 164 | assert es_handler.search() 165 | # Domains 166 | response = client.get('/api/v1/domains/domainName/value') 167 | assert response.status_code == 500 168 | response = client.get('/api/v1/domains/domainName/value/1/2') 169 | assert response.status_code == 500 170 | response = client.get('/api/v1/domains/domainName/value/latest') 171 | assert response.status_code == 500 172 | # Domain 173 | response = client.get('/api/v1/domain/value') 174 | assert response.status_code == 500 175 | response = client.get('/api/v1/domain/value/diff/1/2') 176 | assert response.status_code == 500 177 | 178 | monkeypatch.setattr(es_handler, 'metadata', mock_connection) 179 | with pytest.raises(ESConnectionError): 180 | assert es_handler.metadata() 181 | # Metadata 182 | response = client.get('/api/v1/metadata') 183 | assert response.status_code == 500 184 | response = client.get('/api/v1/metadata/1') 185 | assert response.status_code == 500 186 | 187 | monkeypatch.setattr(es_handler, 'advanced_search', mock_connection) 188 | with pytest.raises(ESConnectionError): 189 | assert es_handler.advanced_search() 190 | # Query 191 | response = client.get('/api/v1/query', query_string={"query": "query"}) 192 | assert response.status_code == 500 193 | -------------------------------------------------------------------------------- /pydat/backend/tests/test_whois_v2.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from unittest.mock import MagicMock 3 | from 
pydat.core.elastic.exceptions import ( 4 | ESConnectionError, 5 | ESQueryError, 6 | ESNotFoundError, 7 | ) 8 | import socket 9 | 10 | 11 | # identical code to testing metadata v1 (shared) 12 | @pytest.mark.parametrize("version", ("version", -1, 1)) 13 | def test_metadata(monkeypatch, client, version, es_handler): 14 | # metadata is always valid 15 | mock_meta = MagicMock(return_value={"data": "success"}) 16 | monkeypatch.setattr(es_handler, "metadata", mock_meta) 17 | 18 | # type checking 19 | response = client.get(f"/api/v2/metadata/{version}") 20 | if ( 21 | isinstance(version, int) or isinstance(version, float) 22 | ) and version > 0: 23 | assert response.status_code == 200 24 | else: 25 | assert response.status_code == 400 26 | 27 | 28 | def test_metadata_errors(monkeypatch, client, es_handler): 29 | mock_meta = MagicMock(side_effect=ESNotFoundError) 30 | monkeypatch.setattr(es_handler, "metadata", mock_meta) 31 | # error: version doesn't exist 32 | assert client.get("/api/v2/metadata/1").status_code == 404 33 | 34 | # error: underlying query failed 35 | mock_meta.side_effect = ESQueryError 36 | assert client.get("/api/v2/metadata/1").status_code == 500 37 | 38 | 39 | def test_resolve(monkeypatch, client, es_handler): 40 | response = client.get("/api/v2/resolve") 41 | assert response.status_code == 404 42 | 43 | mock_socket = MagicMock(return_value=("valid", ["v2", "v2"], ["127"])) 44 | monkeypatch.setattr(socket, "gethostbyname_ex", mock_socket) 45 | response = client.get("/api/v2/resolve/test") 46 | assert response.status_code == 200 47 | assert mock_socket.called 48 | 49 | mock_socket.side_effect = socket.gaierror 50 | response = client.get("/api/v2/resolve/test") 51 | assert response.status_code == 400 52 | 53 | mock_socket.side_effect = socket.timeout 54 | response = client.get("/api/v2/resolve/test") 55 | assert response.status_code == 504 56 | 57 | 58 | def test_domains_diff(monkeypatch, config_app, es_handler): 59 | client = config_app.test_client() 60 | 61 | mock_diff = MagicMock(return_value={"data": [{"success": 1}]}) 62 | monkeypatch.setattr(es_handler, "search", mock_diff) 63 | # required parameters 64 | response = client.post("/api/v2/domains/diff", json={}) 65 | assert response.status_code == 400 66 | response = client.post("/api/v2/domains/diff", json={"domain": "value"}) 67 | assert response.status_code == 400 68 | response = client.post( 69 | "/api/v2/domains/diff", json={"domain": "value", "version1": 0} 70 | ) 71 | assert response.status_code == 400 72 | response = client.post( 73 | "/api/v2/domains/diff", 74 | json={"domain": "value", "version1": 0, "version2": 1}, 75 | ) 76 | assert response.status_code == 200 77 | 78 | # diff functionality is equivalent to v1 79 | 80 | 81 | def test_query(monkeypatch, config_app, es_handler): 82 | client = config_app.test_client() 83 | 84 | mock_adv = MagicMock(return_value={"total": 100, "data": []}) 85 | monkeypatch.setattr(es_handler, "advanced_search", mock_adv) 86 | 87 | response = client.post("/api/v2/query") 88 | assert response.status_code == 400 89 | response = client.post("/api/v2/query", json={"query": "query"}) 90 | assert response.status_code == 200 91 | 92 | # test valid sort keys 93 | # Invalid syntax 94 | response = client.post( 95 | "/api/v2/query", 96 | json={"query": "query", "sort_keys": {"domainName": "swirl"}}, 97 | ) 98 | assert response.status_code == 400 99 | # Missing 'dir' 100 | response = client.post( 101 | "/api/v2/query", 102 | json={"query": "query", "sort_keys": [{"name": "domainName"}]}, 103 | ) 104
| assert response.status_code == 400 105 | # Invalid key name 106 | response = client.post( 107 | "/api/v2/query", 108 | json={"query": "query", 109 | "sort_keys": [{"name": "fake_key", "dir": "desc"}]}, 110 | ) 111 | assert response.status_code == 400 112 | # Invalid direction 113 | response = client.post( 114 | "/api/v2/query", 115 | json={"query": "query", 116 | "sort_keys": [{"name": "domainName", "dir": "down"}]}, 117 | ) 118 | assert response.status_code == 400 119 | # Valid sort key 120 | response = client.post( 121 | "/api/v2/query", 122 | json={"query": "query", 123 | "sort_keys": [{"name": "domainName", "dir": "asc"}]}, 124 | ) 125 | assert response.status_code == 200 126 | # Multiple keys 127 | response = client.post( 128 | "/api/v2/query", 129 | json={"query": "query", 130 | "sort_keys": [ 131 | {"name": "domainName", "dir": "asc"}, 132 | {"name": "registrant_name", "dir": "asc"} 133 | ]}, 134 | ) 135 | assert response.status_code == 200 136 | response = client.post( 137 | "/api/v2/query", 138 | json={ 139 | "query": "query", 140 | "chunk_size": mock_adv.return_value["total"] / 5, 141 | "offset": 6, 142 | }, 143 | ) 144 | assert response.status_code == 400 145 | response = client.post( 146 | "/api/v2/query", 147 | json={ 148 | "query": "query", 149 | "chunk_size": mock_adv.return_value["total"] / 5.0, 150 | "offset": 4, 151 | }, 152 | ) 153 | assert response.status_code == 400 154 | 155 | mock_adv.side_effect = ESQueryError 156 | response = client.post("/api/v2/query", json={"query": "query"}) 157 | assert response.status_code == 500 158 | 159 | 160 | def test_connection_error(monkeypatch, config_app, es_handler): 161 | mock_connection = MagicMock(side_effect=ESConnectionError) 162 | client = config_app.test_client() 163 | 164 | # search connection error 165 | monkeypatch.setattr(es_handler, "search", mock_connection) 166 | with pytest.raises(ESConnectionError): 167 | assert es_handler.search() 168 | response = client.post( 169 | "/api/v2/domains/diff", 170 | json={"domain": "value", "version1": 0, "version2": 1}, 171 | ) 172 | assert response.status_code == 500 173 | 174 | monkeypatch.setattr(es_handler, "metadata", mock_connection) 175 | with pytest.raises(ESConnectionError): 176 | assert es_handler.metadata() 177 | # Metadata 178 | response = client.get("/api/v2/metadata") 179 | assert response.status_code == 500 180 | response = client.get("/api/v2/metadata/1") 181 | assert response.status_code == 500 182 | 183 | monkeypatch.setattr(es_handler, "advanced_search", mock_connection) 184 | with pytest.raises(ESConnectionError): 185 | assert es_handler.advanced_search() 186 | # Query 187 | response = client.post("/api/v2/query", json={"query": "query"}) 188 | assert response.status_code == 500 189 | -------------------------------------------------------------------------------- /pydat/backend/tox.ini: -------------------------------------------------------------------------------- 1 | [tox] 2 | envlist = 3.6, 3.7, 3.8, 3.9, coverage, lint 3 | 4 | [testenv] 5 | deps = 6 | setuptools >= 40.0.0 7 | pytest 8 | blinker 9 | 10 | commands = 11 | pytest -v tests/ 12 | 13 | [testenv:coverage] 14 | deps = 15 | setuptools >= 40.0.0 16 | coverage 17 | pytest 18 | blinker 19 | 20 | commands = 21 | coverage run -m pytest 22 | coverage report -m --skip-empty 23 | 24 | [testenv:lint] 25 | deps = flake8 26 | commands = 27 | flake8 tests pydat --exclude pydat/core/parsetab.py 28 | -------------------------------------------------------------------------------- /pydat/entry.sh: 
-------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | . /opt/pydat/pydat-env/bin/activate 4 | 5 | export PYDAT_STATICFOLDER="/opt/pydat/ui" 6 | export PYDATCONFIG="/opt/pydat/config.py" 7 | 8 | if [ -z "${WORKERS}" ]; then 9 | WORKERS=4 10 | fi 11 | 12 | if [ -z "${HOST}" ]; then 13 | HOST="0.0.0.0:8888" 14 | fi 15 | 16 | gunicorn -w "${WORKERS}" -b "${HOST}" "pydat.api:create_app()" -------------------------------------------------------------------------------- /pydat/frontend/.gitignore: -------------------------------------------------------------------------------- 1 | ## https://github.com/github/gitignore/blob/master/Node.gitignore 2 | 3 | # Logs 4 | logs 5 | *.log 6 | npm-debug.log* 7 | yarn-debug.log* 8 | yarn-error.log* 9 | lerna-debug.log* 10 | 11 | # Diagnostic reports (https://nodejs.org/api/report.html) 12 | report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json 13 | 14 | # Runtime data 15 | pids 16 | *.pid 17 | *.seed 18 | *.pid.lock 19 | 20 | # Directory for instrumented libs generated by jscoverage/JSCover 21 | lib-cov 22 | 23 | # Coverage directory used by tools like istanbul 24 | coverage 25 | *.lcov 26 | 27 | # nyc test coverage 28 | .nyc_output 29 | 30 | # Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files) 31 | .grunt 32 | 33 | # Bower dependency directory (https://bower.io/) 34 | bower_components 35 | 36 | # node-waf configuration 37 | .lock-wscript 38 | 39 | # Compiled binary addons (https://nodejs.org/api/addons.html) 40 | build/Release 41 | 42 | # Dependency directories 43 | node_modules/ 44 | jspm_packages/ 45 | 46 | # Snowpack dependency directory (https://snowpack.dev/) 47 | web_modules/ 48 | 49 | # TypeScript cache 50 | *.tsbuildinfo 51 | 52 | # Optional npm cache directory 53 | .npm 54 | 55 | # Optional eslint cache 56 | .eslintcache 57 | 58 | # Microbundle cache 59 | .rpt2_cache/ 60 | .rts2_cache_cjs/ 61 | .rts2_cache_es/ 62 | .rts2_cache_umd/ 63 | 64 | # Optional REPL history 65 | .node_repl_history 66 | 67 | # Output of 'npm pack' 68 | *.tgz 69 | 70 | # Yarn Integrity file 71 | .yarn-integrity 72 | 73 | # dotenv environment variables file 74 | .env 75 | .env.test 76 | 77 | # parcel-bundler cache (https://parceljs.org/) 78 | .cache 79 | .parcel-cache 80 | 81 | # Next.js build output 82 | .next 83 | out 84 | 85 | # Nuxt.js build / generate output 86 | .nuxt 87 | dist 88 | 89 | # Gatsby files 90 | .cache/ 91 | # Comment in the public line in if your project uses Gatsby and not Next.js 92 | # https://nextjs.org/blog/next-9-1#public-directory-support 93 | # public 94 | 95 | # vuepress build output 96 | .vuepress/dist 97 | 98 | # Serverless directories 99 | .serverless/ 100 | 101 | # FuseBox cache 102 | .fusebox/ 103 | 104 | # DynamoDB Local files 105 | .dynamodb/ 106 | 107 | # TernJS port file 108 | .tern-port 109 | 110 | # Stores VSCode versions used for testing VSCode extensions 111 | .vscode-test 112 | 113 | # yarn v2 114 | .yarn/cache 115 | .yarn/unplugged 116 | .yarn/build-state.yml 117 | .yarn/install-state.gz 118 | .pnp.* -------------------------------------------------------------------------------- /pydat/frontend/README.md: -------------------------------------------------------------------------------- 1 | This project was bootstrapped with [Create React App](https://github.com/facebook/create-react-app). 2 | 3 | ## Available Scripts 4 | 5 | In the project directory, you can run: 6 | 7 | ### `npm start` 8 | 9 | Runs the app in development mode.
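For example, a minimal sketch of the local dev loop (assuming dependencies are not installed yet, and that the pydat backend is already listening on port 5000, which the `proxy` entry in `package.json` points API calls at):

```sh
# from pydat/frontend/
npm install   # one-time dependency install
npm start     # dev server with live reload; API calls are proxied to :5000
```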
10 | Open [http://localhost:3000](http://localhost:3000) to view it in the browser. 11 | 12 | The page will reload if you make edits.
13 | You will also see any lint errors in the console. 14 | 15 | ### `npm test` 16 | 17 | Launches the test runner in interactive watch mode.
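To run the suite once instead of in watch mode (Create React App treats a truthy `CI` environment variable as a request for a single, non-interactive pass):

```sh
# from pydat/frontend/
CI=true npm test
```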
18 | See the section about [running tests](https://facebook.github.io/create-react-app/docs/running-tests) for more information. 19 | 20 | ### `npm run build` 21 | 22 | Builds the app for production to the `build` folder.
23 | It correctly bundles React in production mode and optimizes the build for the best performance. 24 | 25 | The build is minified and the filenames include the hashes.
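In this repo the production build comes in two flavors, as wired up in the `scripts` block of `package.json` (shown later in this dump): `build:isolated` is the plain CRA build, while `build` additionally moves the bundled static assets into the Flask backend so it can serve the UI:

```sh
# from pydat/frontend/
npm run build:isolated   # plain CRA build into build/
npm run build            # same build, then mv build/static into ../backend/pydat/api/static
```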
26 | Your app is ready to be deployed! 27 | 28 | See the section about [deployment](https://facebook.github.io/create-react-app/docs/deployment) for more information. 29 | 30 | ### `npm run eject` 31 | 32 | **Note: this is a one-way operation. Once you `eject`, you can’t go back!** 33 | 34 | If you aren’t satisfied with the build tool and configuration choices, you can `eject` at any time. This command will remove the single build dependency from your project. 35 | 36 | Instead, it will copy all the configuration files and the transitive dependencies (webpack, Babel, ESLint, etc) right into your project so you have full control over them. All of the commands except `eject` will still work, but they will point to the copied scripts so you can tweak them. At this point you’re on your own. 37 | 38 | You don’t have to ever use `eject`. The curated feature set is suitable for small and middle deployments, and you shouldn’t feel obligated to use this feature. However we understand that this tool wouldn’t be useful if you couldn’t customize it when you are ready for it. 39 | 40 | ## Learn More 41 | 42 | You can learn more in the [Create React App documentation](https://facebook.github.io/create-react-app/docs/getting-started). 43 | 44 | To learn React, check out the [React documentation](https://reactjs.org/). 45 | 46 | ### Code Splitting 47 | 48 | This section has moved here: https://facebook.github.io/create-react-app/docs/code-splitting 49 | 50 | ### Analyzing the Bundle Size 51 | 52 | This section has moved here: https://facebook.github.io/create-react-app/docs/analyzing-the-bundle-size 53 | 54 | ### Making a Progressive Web App 55 | 56 | This section has moved here: https://facebook.github.io/create-react-app/docs/making-a-progressive-web-app 57 | 58 | ### Advanced Configuration 59 | 60 | This section has moved here: https://facebook.github.io/create-react-app/docs/advanced-configuration 61 | 62 | ### Deployment 63 | 64 | This section has moved here: https://facebook.github.io/create-react-app/docs/deployment 65 | 66 | ### `npm run build` fails to minify 67 | 68 | This section has moved here: https://facebook.github.io/create-react-app/docs/troubleshooting#npm-run-build-fails-to-minify 69 | -------------------------------------------------------------------------------- /pydat/frontend/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "pydat-ui", 3 | "version": "0.1.0", 4 | "private": true, 5 | "proxy": "http://localhost:5000", 6 | "dependencies": { 7 | "@material-ui/core": "^4.12.3", 8 | "@material-ui/icons": "^4.11.2", 9 | "@testing-library/jest-dom": "^4.2.4", 10 | "@testing-library/react": "^9.5.0", 11 | "@testing-library/user-event": "^7.2.1", 12 | "clsx": "^1.1.1", 13 | "fontsource-roboto": "^3.0.3", 14 | "immutability-helper": "^3.1.1", 15 | "json2csv": "^5.0.3", 16 | "notistack": "^1.0.5", 17 | "qs": "^6.9.4", 18 | "react": "^16.13.1", 19 | "react-dom": "^16.13.1", 20 | "react-hook-form": "^6.7.0", 21 | "react-markdown": "^5.0.3", 22 | "react-router-dom": "^5.2.0", 23 | "react-scripts": "4.0.3", 24 | "react-table": "^7.6.3", 25 | "styled-components": "^5.2.0" 26 | }, 27 | "scripts": { 28 | "start": "react-scripts start", 29 | "build": "npm run build:isolated && mv -f build/static ../backend/pydat/api/static", 30 | "build:isolated": "react-scripts build", 31 | "test": "react-scripts test", 32 | "eject": "react-scripts eject" 33 | }, 34 | "eslintConfig": { 35 | "extends": "react-app" 36 | }, 37 | "browserslist": { 38 | 
"production": [ 39 | ">0.2%", 40 | "not dead", 41 | "not op_mini all" 42 | ], 43 | "development": [ 44 | "last 1 chrome version", 45 | "last 1 firefox version", 46 | "last 1 safari version" 47 | ] 48 | } 49 | } 50 | -------------------------------------------------------------------------------- /pydat/frontend/public/favicon.ico: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MITRECND/WhoDat/9c2a3265c6437f2f82f23abc1deaaaa86b947d38/pydat/frontend/public/favicon.ico -------------------------------------------------------------------------------- /pydat/frontend/public/index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 12 | 13 | 17 | 18 | 27 | MITRE Pydat 28 | 29 | 30 | 31 |
32 | 42 | 45 | 46 | 47 | -------------------------------------------------------------------------------- /pydat/frontend/public/logo192.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MITRECND/WhoDat/9c2a3265c6437f2f82f23abc1deaaaa86b947d38/pydat/frontend/public/logo192.png -------------------------------------------------------------------------------- /pydat/frontend/public/logo512.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MITRECND/WhoDat/9c2a3265c6437f2f82f23abc1deaaaa86b947d38/pydat/frontend/public/logo512.png -------------------------------------------------------------------------------- /pydat/frontend/public/manifest.json: -------------------------------------------------------------------------------- 1 | { 2 | "short_name": "React App", 3 | "name": "Create React App Sample", 4 | "icons": [ 5 | { 6 | "src": "favicon.ico", 7 | "sizes": "64x64 32x32 24x24 16x16", 8 | "type": "image/x-icon" 9 | }, 10 | { 11 | "src": "logo192.png", 12 | "type": "image/png", 13 | "sizes": "192x192" 14 | }, 15 | { 16 | "src": "logo512.png", 17 | "type": "image/png", 18 | "sizes": "512x512" 19 | } 20 | ], 21 | "start_url": ".", 22 | "display": "standalone", 23 | "theme_color": "#000000", 24 | "background_color": "#ffffff" 25 | } 26 | -------------------------------------------------------------------------------- /pydat/frontend/public/robots.txt: -------------------------------------------------------------------------------- 1 | # https://www.robotstxt.org/robotstxt.html 2 | User-agent: * 3 | Disallow: 4 | -------------------------------------------------------------------------------- /pydat/frontend/src/active_resolution.jsx: -------------------------------------------------------------------------------- 1 | import React, {useState} from 'react' 2 | 3 | import Button from '@material-ui/core/Button' 4 | import Checkbox from '@material-ui/core/Checkbox' 5 | import FormControlLabel from '@material-ui/core/FormControlLabel'; 6 | import DialogActions from '@material-ui/core/DialogActions'; 7 | import DialogContent from '@material-ui/core/DialogContent'; 8 | import DialogContentText from '@material-ui/core/DialogContentText'; 9 | import List from '@material-ui/core/List' 10 | import ListSubheader from '@material-ui/core/ListSubheader' 11 | import ListItem from '@material-ui/core/ListItem' 12 | import ListItemText from '@material-ui/core/ListItemText' 13 | // import EqualizerIcon from '@material-ui/icons/Equalizer'; 14 | 15 | import { useSnackbar } from 'notistack' 16 | 17 | import { 18 | useUserPreferences, 19 | userPreferencesManager, 20 | UserPreferenceNamespace, 21 | UserPreference, 22 | } from './components/helpers/preferences' 23 | import {RegularDialog} from './components/layout/dialogs' 24 | import {MenuElement} from './components/layout' 25 | import {PluginManagers} from './components/plugins' 26 | import {activeResolutionFetcher} from './components/helpers/fetchers' 27 | import {appSettings} from './settings' 28 | 29 | 30 | 31 | const ActiveResolutionConfirmation = ({ 32 | data, 33 | setConfirm, 34 | onClose, 35 | }) => { 36 | const preferences = useUserPreferences('general') 37 | const [repromptOption, setRepromptOption] = useState(false) 38 | 39 | const handleConfirm = () => { 40 | if (repromptOption) { 41 | preferences.setPref('ar_confirm', true) 42 | } 43 | setConfirm(true) 44 | } 45 | 46 | const handleDeny = () => { 47 | onClose() 48 | } 49 | 
50 | const handleRepromptChange = () => { 51 | setRepromptOption(!repromptOption) 52 | } 53 | 54 | return ( 55 | 56 | 57 | 58 | Warning: Making Active DNS Resolutions has the potential to leak information to outside parties without your knowledge. Are you sure you would like to actively resolve "{data}"? 59 | 60 | } 62 | label="Never Prompt Again" 63 | /> 64 | 65 | 66 | 69 | 72 | 73 | 74 | ) 75 | } 76 | 77 | const ActiveResolutionDialog = ({open, onClose, data}) => { 78 | const preferences = useUserPreferences('general') 79 | const [confirm, setConfirm] = useState(preferences.getPref('ar_confirm')? false : true) 80 | const [fetching, setFetching] = useState(false) 81 | const [domainData, setDomainData] = useState(null) 82 | const {enqueueSnackbar} = useSnackbar() 83 | 84 | const fetchData = () => { 85 | const asyncfetch = async () => { 86 | try { 87 | let results = await activeResolutionFetcher({domainName: data}) 88 | setDomainData(results) 89 | } catch (err) { 90 | enqueueSnackbar("Unable to contact API to resolve domain name", {variant: "error"}) 91 | } 92 | } 93 | asyncfetch() 94 | } 95 | 96 | let body 97 | if (!confirm) { 98 | body = 104 | } else { 105 | if (!fetching){ 106 | setFetching(true) 107 | fetchData() 108 | } 109 | 110 | if (domainData === null) { 111 | body = Querying ... 112 | } else { 113 | body = ( 114 | 115 | Hostnames}> 116 | {domainData.hostnames.map((domainName, index) => ( 117 | 118 | 119 | {domainName} 120 | 121 | 122 | ))} 123 | 124 | IPs}> 125 | {domainData.ips.map((ip, index) => ( 126 | 127 | 128 | {ip} 129 | 130 | 131 | ))} 132 | 133 | 134 | 135 | ) 136 | } 137 | } 138 | 139 | return ( 140 | 145 | {open && 146 | 147 | {body} 148 | 149 | } 150 | 151 | ) 152 | } 153 | 154 | const ARENABLED = appSettings.hasOwnProperty("enable_active_resolution") && appSettings.enable_active_resolution ? true : false 155 | 156 | const generalPreferencesNamespace = new UserPreferenceNamespace({ 157 | name: "general", 158 | title: "General PyDat Preferences", 159 | description: "General Preferences across the PyDat Search" 160 | }) 161 | 162 | const ActiveResolutionMenu = new MenuElement({ 163 | text: "Actively Resolve", 164 | RenderComponent: 165 | }) 166 | 167 | if (ARENABLED) { 168 | userPreferencesManager.registerNamespace(generalPreferencesNamespace) 169 | userPreferencesManager.registerPrefs( 170 | generalPreferencesNamespace, [ 171 | new UserPreference({ 172 | name: "ar_confirm", 173 | type: "boolean", 174 | title: "Prompt/Confirm before making Active queries", 175 | description: "To prevent accidental dns queries, pydat will confirm before making requests.
Toggle this to disable that confirmation", 176 | default_value: true 177 | }) 178 | 179 | ]) 180 | 181 | PluginManagers.menu.addPlugin("active_resolution", "domain", ActiveResolutionMenu) 182 | PluginManagers.menu.addPlugin("active_resolution", "tld", ActiveResolutionMenu) 183 | } 184 | -------------------------------------------------------------------------------- /pydat/frontend/src/components/helpers/dropdown_cell.jsx: -------------------------------------------------------------------------------- 1 | import React, {useState} from 'react' 2 | import clsx from 'clsx' 3 | 4 | import ArrowDropDownIcon from '@material-ui/icons/ArrowDropDown'; 5 | import IconButton from '@material-ui/core/IconButton' 6 | import Menu from '@material-ui/core/Menu'; 7 | import { makeStyles } from '@material-ui/core'; 8 | 9 | 10 | const useStyles = makeStyles((theme) => ({ 11 | linkCell: { 12 | cursor: 'pointer' 13 | }, 14 | buttonRoot: { 15 | display: 'inline-flex', 16 | alignItems: 'flex-start', 17 | flexWrap: 'wrap', 18 | }, 19 | buttonLabel: { 20 | width: '100%', 21 | display: 'inline-flex', 22 | } 23 | })) 24 | 25 | export const DropDownCell = (props) => { 26 | const classes = useStyles() 27 | const [anchorEl, setAnchorEl] = useState(null) 28 | // TODO cleanup implemention and pick one 29 | const useDropDownIcon = false 30 | 31 | const handleClick = (e) => { 32 | setAnchorEl(e.currentTarget) 33 | } 34 | 35 | const handleClose = () => { 36 | setAnchorEl(null) 37 | } 38 | 39 | const copyFriendly = props.copyFriendly || false 40 | 41 | if (useDropDownIcon) { 42 | return ( 43 | 44 | {copyFriendly === false && 45 | 46 | 55 | 56 | 57 | {anchorEl !== null && 58 | 64 | {props.children} 65 | 66 | } 67 | 68 | } 69 | {props.value} 70 | 71 | 72 | ) 73 | } else { 74 | return ( 75 | 76 | 80 | {copyFriendly === false && 81 | 82 | } 83 | {props.value} 84 | 85 | {anchorEl !== null && 86 | 92 | {props.children} 93 | 94 | } 95 | 96 | ) 97 | } 98 | } 99 | 100 | export default DropDownCell -------------------------------------------------------------------------------- /pydat/frontend/src/components/helpers/fetchers.jsx: -------------------------------------------------------------------------------- 1 | export const queryFetcher = async ({ 2 | query, 3 | chunk_size, 4 | offset, 5 | sort_keys={}} 6 | ) => { 7 | 8 | let data = { 9 | 'query': query, 10 | 'chunk_size': chunk_size, 11 | 'offset': offset 12 | } 13 | 14 | if (Object.keys(sort_keys).length > 0) { 15 | data['sort_keys'] = sort_keys 16 | } 17 | 18 | let response = await fetch ( 19 | '/api/v2/query', { 20 | method: 'POST', 21 | headers: { 22 | 'Content-Type': 'application/json' 23 | 24 | }, 25 | body: JSON.stringify(data) 26 | }) 27 | 28 | // console.log(response) 29 | 30 | if (response.status === 200) { 31 | let jresp = await response.json() 32 | return jresp 33 | } else { 34 | throw response 35 | } 36 | } 37 | 38 | export const domainFetcher = async ({ 39 | domainName 40 | }) => { 41 | 42 | let data = { 43 | value: domainName 44 | } 45 | 46 | let response = await fetch ( 47 | '/api/v2/domain', { 48 | method: 'POST', 49 | headers: { 50 | 'Content-Type': 'application/json' 51 | 52 | }, 53 | body: JSON.stringify(data) 54 | }) 55 | 56 | // console.log(response) 57 | 58 | if (response.status === 200) { 59 | let jresp = await response.json() 60 | return jresp 61 | } else { 62 | throw response 63 | } 64 | } 65 | 66 | 67 | export const statusFetcher = async () => { 68 | let response = await fetch ( 69 | '/api/v2/info', { 70 | method: 'GET', 71 | headers: { 72 | 
'Content-Type': 'application/json' 73 | 74 | }, 75 | }) 76 | 77 | if (response.status === 200) { 78 | let jresp = await response.json() 79 | return jresp 80 | } else { 81 | throw response 82 | } 83 | } 84 | 85 | export const activeResolutionFetcher = async ({ 86 | domainName 87 | }) => { 88 | let response = await fetch ( 89 | `/api/v2/resolve/${encodeURIComponent(domainName)}`, { 90 | method: 'GET', 91 | headers: { 92 | 'Content-Type': 'application/json' 93 | 94 | }, 95 | }) 96 | 97 | if (response.status === 200) { 98 | let jresp = await response.json() 99 | return jresp 100 | } else { 101 | throw response 102 | } 103 | } -------------------------------------------------------------------------------- /pydat/frontend/src/components/helpers/loaders.jsx: -------------------------------------------------------------------------------- 1 | import React from 'react' 2 | import Backdrop from '@material-ui/core/Backdrop' 3 | import CircularProgress from '@material-ui/core/CircularProgress' 4 | 5 | 6 | export const BackdropLoader = ({}) => { 7 | return ( 8 | 9 | 10 | 11 | ) 12 | } -------------------------------------------------------------------------------- /pydat/frontend/src/components/helpers/search_tools.jsx: -------------------------------------------------------------------------------- 1 | import React, {useState} from 'react' 2 | 3 | import IconButton from '@material-ui/core/IconButton' 4 | import Menu from '@material-ui/core/Menu'; 5 | import MenuItem from '@material-ui/core/MenuItem' 6 | import BuildIcon from '@material-ui/icons/Build'; 7 | 8 | import { 9 | JSONExporter, 10 | CSVExporter, 11 | ListExporter 12 | } from './data_exporters' 13 | 14 | export const SearchTools = ({ 15 | data, 16 | children, 17 | defaultListField, 18 | dataControl = null, 19 | jsonPreprocessor = null, 20 | csvPreprocessor = null, 21 | listPreprocessor = null 22 | }) => { 23 | const [anchorEl, setAnchorEl] = useState(null) 24 | const [openJSONDialog, setOpenJSONDialog] = useState(false) 25 | const [openCSVDialog, setOpenCSVDialog] = useState(false) 26 | const [openListDialog, setOpenListDialog] = useState(false) 27 | 28 | const handleClick = (e) => { 29 | setAnchorEl(e.currentTarget) 30 | } 31 | 32 | const handleClose = () => { 33 | setAnchorEl(null) 34 | } 35 | 36 | return ( 37 | 38 | 42 | 43 | 44 | 50 | {setOpenJSONDialog(true); handleClose()}} 52 | > 53 | Export JSON 54 | 55 | 56 | {openJSONDialog && 57 | {setOpenJSONDialog(false)}} 63 | /> 64 | } 65 | {setOpenCSVDialog(true); handleClose()}} 67 | > 68 | Export CSV 69 | 70 | {openCSVDialog && 71 | {setOpenCSVDialog(false)}} 77 | /> 78 | } 79 | {setOpenListDialog(true); handleClose()}} 81 | > 82 | Export List 83 | 84 | {openListDialog && 85 | {setOpenListDialog(false)}} 92 | /> 93 | } 94 | {React.Children.map(children, (child) => { 95 | const props = { 96 | data: data, 97 | handleClose: handleClose, 98 | } 99 | if (React.isValidElement(child)) { 100 | return React.cloneElement(child, props) 101 | } else { 102 | return child 103 | } 104 | })} 105 | 106 | 107 | ) 108 | } 109 | 110 | export default SearchTools -------------------------------------------------------------------------------- /pydat/frontend/src/components/layout/dashboard.jsx: -------------------------------------------------------------------------------- 1 | import React, { useContext, useEffect, useMemo, useState, useRef } from 'react'; 2 | import clsx from 'clsx'; 3 | 4 | import { makeStyles} from '@material-ui/core/styles'; 5 | import CssBaseline from '@material-ui/core/CssBaseline'; 6 | 
import AppBar from '@material-ui/core/AppBar'; 7 | import Toolbar from '@material-ui/core/Toolbar'; 8 | import List from '@material-ui/core/List'; 9 | import IconButton from '@material-ui/core/IconButton'; 10 | import MenuIcon from '@material-ui/icons/Menu'; 11 | import MenuItem from '@material-ui/core/MenuItem'; 12 | import Menu from '@material-ui/core/Menu'; 13 | import MoreIcon from '@material-ui/icons/MoreVert'; 14 | import ListItem from '@material-ui/core/ListItem' 15 | import {Link as RouterLink, useLocation} from 'react-router-dom' 16 | import ListItemText from '@material-ui/core/ListItemText'; 17 | 18 | import {PluginManagers} from '../plugins' 19 | import {whoisRoute, whoisNavigation} from '../whois' 20 | import {OptionsContext} from '../layout' 21 | import { userPreferencesOption } from '../helpers/preferences'; 22 | 23 | 24 | // https://material-ui.com/components/app-bar/#app-bar-with-a-primary-search-field 25 | // https://ansonlowzf.com/how-to-build-a-material-ui-navbar/ 26 | 27 | const useStyles = makeStyles((theme) => ({ 28 | grow: { 29 | flexGrow: 1, 30 | }, 31 | menuButton: { 32 | marginRight: theme.spacing(2), 33 | }, 34 | layoutDesktop: { 35 | display: 'none', 36 | [theme.breakpoints.up('md')]: { 37 | display: 'flex', 38 | }, 39 | }, 40 | layoutMobile: { 41 | display: 'flex', 42 | [theme.breakpoints.up('md')]: { 43 | display: 'none', 44 | }, 45 | }, 46 | 47 | linkText: { 48 | textDecoration: 'none', 49 | textTransform: 'uppercase', 50 | color: `${theme.palette.primary.contrastText}` 51 | }, 52 | menuLinkText: { 53 | textDecoration: 'none', 54 | textTransform: 'uppercase', 55 | }, 56 | desktopNav: { 57 | justifyContent: 'space-between', 58 | display: 'flex', 59 | }, 60 | content: { 61 | flexGrow: 1, 62 | padding: theme.spacing(3), 63 | transition: theme.transitions.create('margin', { 64 | easing: theme.transitions.easing.sharp, 65 | duration: theme.transitions.duration.leavingScreen, 66 | }), 67 | }, 68 | })); 69 | 70 | 71 | const Navigation = () => { 72 | const classes = useStyles() 73 | const [anchorEl, setAnchorEl] = useState(null); 74 | const isMenuOpen = Boolean(anchorEl) 75 | 76 | const handleMenuOpen = (event) => { 77 | setAnchorEl(event.currentTarget) 78 | } 79 | 80 | const handleMenuClose = () => { 81 | setAnchorEl(null) 82 | } 83 | 84 | 85 | const all_paths = useMemo(() => ( 86 | [ 87 | whoisNavigation, 88 | ...Object.values(PluginManagers.nav.plugins) 89 | ] 90 | ), []) 91 | 92 | const menuId = 'mobile-navigation-menu' 93 | const mobileMenu = ( 94 | 103 | {all_paths.map(({path, title}, index) => ( 104 | 105 | 110 | {title} 111 | 112 | 113 | ))} 114 | 115 | ) 116 | 117 | return ( 118 | 119 |
120 | 127 | 128 | 129 | {mobileMenu} 130 |
131 | 132 |
133 | 137 | {all_paths.map(({path, title}, index) => ( 138 | 143 | 144 | 145 | 146 | 147 | ))} 148 | 149 |
150 |
151 | ) 152 | } 153 | 154 | const Options = () => { 155 | const classes = useStyles() 156 | 157 | const [anchorEl, setAnchorEl] = useState(null); 158 | const isMenuOpen = Boolean(anchorEl) 159 | const optionsContext = useContext(OptionsContext) 160 | const location = useLocation() 161 | 162 | const handleMenuOpen = (event) => { 163 | setAnchorEl(event.currentTarget) 164 | } 165 | 166 | const handleMenuClose = () => { 167 | setAnchorEl(null) 168 | } 169 | 170 | const routes = { 171 | ...PluginManagers.routes.plugins, 172 | whois: whoisRoute 173 | } 174 | 175 | let match = [] 176 | for (const name in routes) { 177 | if (routes[name].matchRoute(location.pathname)) { 178 | match = [...routes[name].options] 179 | break 180 | } 181 | } 182 | match.push(userPreferencesOption) 183 | 184 | const menuId = 'mobile-options-menu' 185 | const mobileMenu = ( 186 | 195 | {match.map((option_element, index) => ( 196 | 197 | {option_element.getMobileElement({ 198 | optionsContext: optionsContext 199 | })} 200 | 201 | 202 | ))} 203 | 204 | 205 | ) 206 | 207 | return ( 208 | 209 |
210 | {match.map((option_element, index) => ( 211 | option_element.getDesktopElement({ 212 | optionsContext: optionsContext, 213 | index: index 214 | }) 215 | ))} 216 |
217 |
218 | {match.length > 0 && 219 | 226 | 227 | } 228 | {mobileMenu} 229 |
230 |
231 | ) 232 | } 233 | 234 | const Dashboard = (props) => { 235 | const classes = useStyles(); 236 | const [optionsState, setOptionsState] = useState({}) 237 | 238 | return ( 239 |
240 | 241 | 246 |
247 | 248 | 249 | 250 |
251 | 252 | 253 | 254 | 255 |
256 | {props.children} 257 |
258 |
259 | 260 |
261 | 262 | ); 263 | } 264 | 265 | export default Dashboard -------------------------------------------------------------------------------- /pydat/frontend/src/components/layout/dialogs.jsx: -------------------------------------------------------------------------------- 1 | import React, {useState} from 'react'; 2 | import { makeStyles } from '@material-ui/core/styles'; 3 | import Dialog from '@material-ui/core/Dialog'; 4 | import AppBar from '@material-ui/core/AppBar'; 5 | import Toolbar from '@material-ui/core/Toolbar'; 6 | import IconButton from '@material-ui/core/IconButton'; 7 | import Typography from '@material-ui/core/Typography'; 8 | import CloseIcon from '@material-ui/icons/Close'; 9 | import Slide from '@material-ui/core/Slide'; 10 | import InputAdornment from '@material-ui/core/InputAdornment' 11 | import SettingsIcon from '@material-ui/icons/Settings'; 12 | import Container from '@material-ui/core/Container' 13 | 14 | // https://material-ui.com/components/dialogs/#full-screen-dialogs 15 | 16 | const useStyles = makeStyles((theme) => ({ 17 | appBar: { 18 | position: 'relative', 19 | }, 20 | title: { 21 | marginLeft: theme.spacing(2), 22 | flex: 1, 23 | }, 24 | regularDialog: { 25 | marginTop: '1rem', 26 | minHeight: '5vh' 27 | } 28 | })); 29 | 30 | // const Transition = React.forwardRef(function Transition(props, ref) { 31 | // return ; 32 | // }); 33 | 34 | export const FullScreenDialog = ({open, onClose, title, children}) => { 35 | const classes = useStyles(); 36 | 37 | const handleClose = () => { 38 | onClose() 39 | }; 40 | 41 | return ( 42 | 43 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | {title} 57 | 58 | 59 | 60 | 61 | {children} 62 | 63 | 64 | 65 | ); 66 | } 67 | 68 | export const RegularDialog = ({open, onClose, title, children}) => { 69 | const classes = useStyles(); 70 | 71 | const handleClose = () => { 72 | onClose() 73 | }; 74 | 75 | return ( 76 | 77 | 83 | 84 | 85 | 86 | 87 | 88 | 89 | {title} 90 | 91 | 92 | 93 | 94 | {children} 95 | 96 | 97 | 98 | ); 99 | } 100 | 101 | 102 | export const SearchSettings = (props) => { 103 | const [open, setOpen] = useState(false) 104 | 105 | const handleClick = (e) => { 106 | e.preventDefault() 107 | setOpen(true) 108 | } 109 | 110 | const handleMouseDown = (e) => { 111 | e.preventDefault() 112 | } 113 | 114 | const handleClose = () => { 115 | setOpen(false) 116 | } 117 | 118 | return ( 119 | 120 | 122 | 127 | 128 | 129 | 130 | 137 | 138 | 139 | 140 | 141 | 142 | 143 | {props.title} 144 | 145 | 146 | 147 | 148 | {props.children} 149 | 150 | 151 | 152 | 153 | ) 154 | } -------------------------------------------------------------------------------- /pydat/frontend/src/components/layout/index.jsx: -------------------------------------------------------------------------------- 1 | import React, {createContext, useState} from 'react' 2 | import {Link as RouterLink, matchPath} from 'react-router-dom' 3 | 4 | import MenuItem from '@material-ui/core/MenuItem' 5 | import Link from '@material-ui/core/Link' 6 | import IconButton from '@material-ui/core/IconButton' 7 | import Tooltip from '@material-ui/core/Tooltip' 8 | 9 | 10 | const DesktopOption = ({ 11 | icon, 12 | optionsContext, 13 | handleClick = null, 14 | childComponent = null, 15 | tooltip = null, 16 | }) => { 17 | const [anchorEl, setAnchorEl] = useState(null) 18 | const [open, setOpen] = useState(false) 19 | const onClose = () => { 20 | setAnchorEl(null) 21 | setOpen(false) 22 | } 23 | 24 | let iconProps = {} 25 | if (handleClick !== null) { 26 | iconProps.onClick = (e) => { 27 | 
setAnchorEl(e.currentTarget) 28 | handleClick({ 29 | optionsContext: optionsContext 30 | }) 31 | } 32 | } 33 | 34 | if (childComponent !== null) { 35 | iconProps.onClick = () => { 36 | setOpen(true) 37 | } 38 | } 39 | 40 | const button = ( 41 | 42 | 46 | {icon} 47 | 48 | {childComponent && ( 49 | React.cloneElement(childComponent, { 50 | open: open, 51 | onClose: onClose, 52 | anchorEl: anchorEl 53 | })) 54 | } 55 | 56 | ) 57 | 58 | if (tooltip !== null) { 59 | return ( 60 | 61 | {button} 62 | 63 | ) 64 | } else { 65 | return button 66 | } 67 | } 68 | 69 | const MobileOption = ({ 70 | icon, 71 | text, 72 | optionsContext, 73 | handleClick = null, 74 | childComponent = null, 75 | }) => { 76 | return ( 77 | { 79 | handleClick({ 80 | optionsContext 81 | }) 82 | }} 83 | > 84 | 85 | {icon} 86 | 87 | {text} 88 | 89 | ) 90 | } 91 | export class OptionElement { 92 | constructor({ 93 | icon, 94 | text, 95 | handleClick = null, 96 | tooltip=null, 97 | childComponent=null 98 | }) { 99 | this.icon = icon 100 | this.text = text 101 | this.handleClick = handleClick 102 | this.tooltip = tooltip 103 | this.childComponent = childComponent 104 | 105 | if (childComponent !== null) { 106 | if (!(React.isValidElement(childComponent))) { 107 | throw TypeError("childComponent must be a valid React Component") 108 | } 109 | } 110 | } 111 | 112 | getDesktopElement({optionsContext, index = null}) { 113 | return ( 114 | 121 | ) 122 | } 123 | 124 | getMobileElement({optionsContext, index = null}) { 125 | return ( 126 | 134 | ) 135 | 136 | } 137 | 138 | } 139 | export class RouteElement { 140 | constructor({path, title, component, extra = {}, options = []}) { 141 | this.path = path 142 | this.title = title 143 | this.component = component 144 | this.extra = extra 145 | this.options = options 146 | } 147 | 148 | matchRoute(current_path) { 149 | return matchPath(current_path, 150 | { 151 | path: this.path, 152 | exact: true 153 | }) 154 | } 155 | } 156 | 157 | export class NavigationElement { 158 | constructor ({title, path, text = null}) { 159 | this.title = title 160 | this.path = path 161 | this.text = text 162 | } 163 | } 164 | 165 | const MenuDialogWrapper = (props) => { 166 | const [open, setOpen] = useState(false) 167 | 168 | const onClose = () => { 169 | setOpen(false) 170 | } 171 | 172 | const handleOnClick = (e) => { 173 | e.preventDefault() 174 | setOpen(true) 175 | } 176 | 177 | let menuProps = {...props} 178 | delete menuProps.dialogProps 179 | 180 | return ( 181 | 182 | 183 | {props.dialogProps.text} 184 | 185 | {open && 186 | React.cloneElement(props.dialogProps.RenderComponent, { 187 | data: props.dialogProps.data, 188 | open: open, 189 | onClose: onClose, 190 | })} 191 | 192 | ) 193 | } 194 | 195 | export class MenuElement { 196 | constructor ({ 197 | text, 198 | path = null, 199 | RenderComponent = null, 200 | external = false 201 | }) { 202 | this.text = text 203 | this.RenderComponent = RenderComponent 204 | this.path = path 205 | this.external = external 206 | this._getLinkComponent = this._getLinkComponent.bind(this) 207 | this._getDialogComponent = this._getDialogComponent.bind(this) 208 | this.getComponent = this.getComponent.bind(this) 209 | 210 | if (RenderComponent === null && path === null){ 211 | throw TypeError("Either 'path' or 'RenderComponent' must be defined") 212 | } 213 | 214 | if (RenderComponent !== null && !React.isValidElement(RenderComponent)) { 215 | throw TypeError("RenderComponent must be a valid React element") 216 | } 217 | } 218 | 219 | _getLinkComponent(data, key = null) { 220 | let link_props 221 | 222 | let cpath = this.path 223 | if
(this.path && typeof this.path === 'function'){ 224 | cpath = this.path(data) 225 | } 226 | 227 | if (this.external) { 228 | link_props = { 229 | component: Link, 230 | href: cpath, 231 | target: "_blank", 232 | rel: "noreferrer" 233 | } 234 | } else { 235 | link_props = { 236 | component: RouterLink, 237 | to: cpath, 238 | } 239 | } 240 | 241 | return ( 242 | 243 | {this.text} 244 | 245 | ) 246 | } 247 | 248 | _getDialogComponent(data, key = null) { 249 | return ( 250 | 258 | ) 259 | } 260 | 261 | getComponent(data, key = null) { 262 | if (this.path !== null) { 263 | return this._getLinkComponent(data, key) 264 | } else { 265 | return this._getDialogComponent(data, key) 266 | } 267 | 268 | } 269 | } 270 | 271 | export const OptionsContext = createContext() -------------------------------------------------------------------------------- /pydat/frontend/src/components/layout/notfound.jsx: -------------------------------------------------------------------------------- 1 | import { Typography } from '@material-ui/core' 2 | import React from 'react' 3 | 4 | 5 | const NotFound = () => { 6 | console.log("not found") 7 | return ( 8 | 9 | Page Could Not Be Found 10 | 11 | ) 12 | } 13 | 14 | export default NotFound -------------------------------------------------------------------------------- /pydat/frontend/src/components/layout/themes.jsx: -------------------------------------------------------------------------------- 1 | import {createTheme} from '@material-ui/core/styles' 2 | 3 | const _color_palette = { 4 | primary: { 5 | main: '#212121' 6 | }, 7 | secondary: { 8 | main: '#0288d1' 9 | }, 10 | } 11 | 12 | const _defaultTheme = { 13 | palette: { 14 | ..._color_palette 15 | } 16 | } 17 | 18 | const _darkTheme = { 19 | palette: { 20 | type: 'dark', 21 | ..._color_palette 22 | } 23 | } 24 | 25 | 26 | export const defaultTheme = createTheme(_defaultTheme) 27 | export const darkTheme = createTheme(_darkTheme) -------------------------------------------------------------------------------- /pydat/frontend/src/components/plugins/index.jsx: -------------------------------------------------------------------------------- 1 | import { 2 | RouteElement, 3 | NavigationElement, 4 | MenuElement 5 | } from '../layout' 6 | class PluginContainer { 7 | constructor() { 8 | this._plugins = {} 9 | } 10 | 11 | get plugins() { 12 | return this._plugins 13 | } 14 | } 15 | class MenuPluginContainer extends PluginContainer { 16 | constructor() { 17 | super() 18 | this._plugins = { 19 | tld: {}, 20 | domain: {}, 21 | ip: {}, 22 | email: {}, 23 | telephone: {}, 24 | registrant: {} 25 | } 26 | } 27 | 28 | addPlugin(name, type, plugin) { 29 | if (!(plugin instanceof MenuElement)){ 30 | throw new TypeError("Must provide object of type 'MenuElement'") 31 | } 32 | 33 | const validKeys = [ 34 | 'tld', 35 | 'domain', 36 | 'ip', 37 | 'email', 38 | 'telephone', 39 | 'registrant' 40 | ] 41 | 42 | if (!validKeys.includes(type)) { 43 | throw new TypeError(`type must be one of ${validKeys.join()}`) 44 | } 45 | 46 | if (!Object.keys(this._plugins).includes(type)) { 47 | this._plugins[type] = {} 48 | } 49 | 50 | this._plugins[type][name] = plugin 51 | } 52 | } 53 | 54 | class RoutePluginContainer extends PluginContainer { 55 | addPlugin(name, plugin) { 56 | if (!(plugin instanceof RouteElement)){ 57 | throw new TypeError("Must provide object of type 'RouteElement'") 58 | } 59 | 60 | this._plugins[name] = plugin 61 | } 62 | } 63 | 64 | class NavigationPluginContainer extends PluginContainer { 65 | addPlugin(name, plugin) { 66 | if 
(!(plugin instanceof NavigationElement)){ 67 | throw new TypeError("Must provide object of type 'NavigationElement'") 68 | } 69 | this._plugins[name] = plugin 70 | } 71 | } 72 | 73 | export const PluginManagers = { 74 | routes: new RoutePluginContainer(), 75 | menu: new MenuPluginContainer(), 76 | nav: new NavigationPluginContainer(), 77 | } 78 | -------------------------------------------------------------------------------- /pydat/frontend/src/components/whois/index.jsx: -------------------------------------------------------------------------------- 1 | import React, {useState, useEffect, useContext, useMemo} from 'react' 2 | import {useHistory, useLocation} from 'react-router-dom' 3 | import update from 'immutability-helper' 4 | import qs from 'qs' 5 | 6 | import Grid from '@material-ui/core/Grid' 7 | import TextField from '@material-ui/core/TextField' 8 | import Button from '@material-ui/core/Button' 9 | import CheckBox from '@material-ui/core/Checkbox' 10 | import FormControlLabel from '@material-ui/core/FormControlLabel'; 11 | import FormControl from '@material-ui/core/FormControl'; 12 | import Container from '@material-ui/core/Container' 13 | import Paper from '@material-ui/core/Paper' 14 | import HelpIcon from '@material-ui/icons/Help'; 15 | // import EqualizerIcon from '@material-ui/icons/Equalizer'; 16 | 17 | import WhoisTable from './whois_table' 18 | import { 19 | useUserPreferences, 20 | userPreferencesManager, 21 | UserPreferenceNamespace, 22 | UserPreference, 23 | } from '../helpers/preferences' 24 | import {SearchSettings} from '../layout/dialogs' 25 | import { 26 | OptionElement, 27 | RouteElement, 28 | NavigationElement 29 | } from '../layout' 30 | import {OptionsContext} from '../layout' 31 | import HelpPage from './help' 32 | // import StatsPage from './stats' 33 | import ClusterStatus from './status' 34 | import { useSnackbar } from 'notistack' 35 | 36 | const whoisPreferencesNamespace = new UserPreferenceNamespace({ 37 | name: "whois", 38 | title: "Whois Search Preferences", 39 | description: "Preferences for Whois Search" 40 | }) 41 | userPreferencesManager.registerNamespace(whoisPreferencesNamespace) 42 | userPreferencesManager.registerPrefs( 43 | whoisPreferencesNamespace, [ 44 | new UserPreference({ 45 | name: 'fang', 46 | type: "boolean", 47 | title: "De-fang Queries", 48 | description: "Automatically replace [.] with . 
in search queries", 49 | default_value: true 50 | }), 51 | new UserPreference({ 52 | name: 'page_size', 53 | type: "number", 54 | title: "Results Page Size", 55 | description: "Default Page Size to use for result pagination", 56 | default_value: 50, 57 | }), 58 | new UserPreference({ 59 | name: "remember_page_size", 60 | type: "boolean", 61 | title: "Remember Results Page Size", 62 | description: "Remember last used page size when displaying results", 63 | default_value: true, 64 | }), 65 | new UserPreference({ 66 | name: 'details_colon', 67 | type: "boolean", 68 | title: "Full Details Colon Suffix", 69 | description: "Append a colon (:) to the names in the Full Details dialog", 70 | default_value: false 71 | }) 72 | ] 73 | ) 74 | 75 | export const whoisRoute = new RouteElement({ 76 | path: "/whois", 77 | title: "Whois", 78 | component: null, 79 | options: [ 80 | // new OptionElement({ 81 | // icon: , 82 | // text: "Stats", 83 | // childComponent: 84 | // }), 85 | new OptionElement({ 86 | icon: , 87 | text: "Help", 88 | childComponent: 89 | }), 90 | ] 91 | }) 92 | 93 | export const whoisNavigation = new NavigationElement({ 94 | title: 'WhoIs', 95 | path: '/whois', 96 | text: "Whois Search" 97 | }) 98 | 99 | const GeneralOptions = ({}) => { 100 | const preferences = useUserPreferences('whois') 101 | const [fangStatus, setFangStatus] = useState(preferences.getPref('fang')) 102 | 103 | const toggleFangOption = () => { 104 | preferences.setPref('fang', !fangStatus) 105 | setFangStatus(!fangStatus) 106 | } 107 | 108 | return ( 109 | 110 | 111 | } 119 | label="ReFang" 120 | labelPlacement="end" 121 | /> 122 | 123 | 124 | ) 125 | } 126 | 127 | const WhoisHandler = ({}) => { 128 | const preferences = useUserPreferences('whois') 129 | 130 | const [formData, setFormData] = useState({ 131 | query: "", 132 | }) 133 | 134 | const [queryData, setQueryData] = useState({ 135 | ...formData 136 | }) 137 | 138 | const {enqueueSnackbar} = useSnackbar() 139 | const location = useLocation() 140 | let history = useHistory() 141 | 142 | useEffect(() => { 143 | console.log(location) 144 | let query_string 145 | try { 146 | query_string = qs.parse(location.search, { 147 | ignoreQueryPrefix: true 148 | }).query 149 | } catch (err) { 150 | enqueueSnackbar("Unable to parse query from params", {variant: "error"}) 151 | } 152 | 153 | let updated 154 | if (!!query_string) { 155 | updated = update(formData, { 156 | query: {$set: query_string} 157 | }) 158 | } else { 159 | updated = update(formData, { 160 | query: {$set: ""} 161 | }) 162 | } 163 | 164 | setFormData(updated) 165 | setQueryData(updated) 166 | }, [location]) 167 | 168 | const handleOnSubmit = (e) => { 169 | e.preventDefault() 170 | 171 | let updated = {...formData} 172 | 173 | console.log(formData) 174 | 175 | if (preferences.getPref('fang')) { 176 | let refanged = formData.query.replace('[.]', '.') 177 | if (refanged !== formData.query) { 178 | updated = update(formData, { 179 | query: {$set: refanged} 180 | }) 181 | setFormData(updated) 182 | } 183 | } 184 | 185 | history.push({ 186 | pathname: '/whois', 187 | search: `?query=${encodeURIComponent(updated.query)}` 188 | }) 189 | 190 | } 191 | 192 | const handleOnChangeQuery = (e) => { 193 | setFormData(update(formData, { 194 | query: {$set: e.target.value} 195 | })) 196 | } 197 | 198 | const wtable = useMemo(() => { 199 | if (!!queryData.query && queryData != "") { 200 | return ( 201 | 202 | 203 | 206 | 207 | 208 | ) 209 | } 210 | }, [queryData]) 211 | 212 | return ( 213 | 214 | 215 | 216 |
217 | 218 | 219 | 229 | 233 | 234 | ) 235 | }} 236 | /> 237 | 238 | 239 | 242 | 243 | 244 |
245 |
246 | {wtable} 247 |
248 | ) 249 | } 250 | 251 | export default WhoisHandler -------------------------------------------------------------------------------- /pydat/frontend/src/components/whois/stats.jsx: -------------------------------------------------------------------------------- 1 | import React, {useContext, useEffect, useState} from 'react' 2 | import update from 'immutability-helper' 3 | import Typography from '@material-ui/core/Typography' 4 | 5 | import {OptionsContext} from '../layout' 6 | import {FullScreenDialog} from '../layout/dialogs' 7 | 8 | 9 | const StatsPage = ({open, onClose}) => { 10 | const optionsContext = useContext(OptionsContext) 11 | 12 | return ( 13 | 14 | 19 | {open && 20 | 21 | 22 | } 23 | 24 | 25 | 26 | ) 27 | } 28 | 29 | export default StatsPage -------------------------------------------------------------------------------- /pydat/frontend/src/components/whois/status.jsx: -------------------------------------------------------------------------------- 1 | import React, {useState, useEffect} from 'react' 2 | 3 | import {makeStyles} from '@material-ui/core/styles' 4 | import Grid from '@material-ui/core/Grid' 5 | import Typography from '@material-ui/core/Typography' 6 | import Tooltip from '@material-ui/core/Tooltip' 7 | import Badge from '@material-ui/core/Badge' 8 | import StorageIcon from '@material-ui/icons/Storage'; 9 | import CircularProgress from '@material-ui/core/CircularProgress' 10 | 11 | import { useSnackbar } from 'notistack' 12 | 13 | import {statusFetcher} from '../helpers/fetchers' 14 | 15 | 16 | const useStatusStyles = makeStyles(() => ({ 17 | clusterGreenStatusBadge: { 18 | backgroundColor: "green" 19 | }, 20 | clusterYellowStatusBadge: { 21 | backgroundColor: "yellow" 22 | }, 23 | clusterRedStatusBadge: { 24 | backgroundColor: "red" 25 | }, 26 | clusterUnknownStatusBadge: { 27 | backgroundColor: "gray" 28 | } 29 | })) 30 | 31 | const ClusterStatus = ({}) => { 32 | const statusClasses = useStatusStyles() 33 | const [fetching, setFetching] = useState(false) 34 | const [loaded, setLoaded] = useState(false) 35 | const [clusterStatus, setClusterStats] = useState({timestamp: 0}) 36 | const timeout = 30 * 1000 // 30 seconds, converted to ms 37 | const {enqueueSnackbar} = useSnackbar() 38 | 39 | const fetchStatus = () => { 40 | const asyncfetch = async () => { 41 | try { 42 | let results = await statusFetcher() 43 | 44 | let clusterstatus = { 45 | last: results.last, 46 | records: results.records, 47 | dateProcessed: results.last_update.dateProcessed 48 | } 49 | 50 | switch (results.health) { 51 | case "green": 52 | clusterstatus.color = statusClasses.clusterGreenStatusBadge 53 | clusterstatus.color_string = "Green" 54 | break; 55 | case "yellow": 56 | clusterstatus.color = statusClasses.clusterYellowStatusBadge 57 | clusterstatus.color_string = "Yellow" 58 | break; 59 | case "red": 60 | clusterstatus.color = statusClasses.clusterRedStatusBadge 61 | clusterstatus.color_string = "Red" 62 | break; 63 | default: 64 | clusterstatus.color = statusClasses.clusterUnknownStatusBadge 65 | clusterstatus.color_string = "Unknown" 66 | } 67 | 68 | setClusterStats({ 69 | ...clusterstatus, 70 | timestamp: Date.now() 71 | }) 72 | setLoaded(true) 73 | } catch (err) { 74 | enqueueSnackbar("Unable to query Cluster Status", {variant: "warning"}) 75 | console.log(err) 76 | let clusterstatus = { 77 | last: "?", 78 | records: "?", 79 | dateProcessed: "?", 80 | color: statusClasses.clusterUnknownStatusBadge, 81 | color_string: "Unknown" 82 | } 83 | 84 | setClusterStats({ 85 | ...clusterstatus, 86 |
timestamp: Date.now() 87 | }) 88 | setLoaded(true) 89 | } finally { 90 | setFetching(false) 91 | } 92 | } 93 | 94 | asyncfetch() 95 | } 96 | 97 | useEffect(() => { 98 | if (!fetching && Date.now() - clusterStatus.timestamp > timeout){ 99 | setLoaded(false) 100 | setFetching(true) 101 | fetchStatus() 102 | } 103 | }) 104 | 105 | return ( 106 | 107 | {!loaded && 108 | 111 | } 112 | {loaded && 113 | 114 | 115 | 116 | 117 | 123 | 124 | 125 | 126 | 127 | 128 |   Last Ingest: {clusterStatus.dateProcessed} Records: {clusterStatus.records} 129 | 130 | 131 | 132 | 133 | } 134 | 135 | ) 136 | 137 | } 138 | 139 | 140 | export default ClusterStatus -------------------------------------------------------------------------------- /pydat/frontend/src/components/whois/table_cells.jsx: -------------------------------------------------------------------------------- 1 | import React from 'react' 2 | import qs from 'qs' 3 | 4 | import DropDownCell from '../helpers/dropdown_cell' 5 | import {PluginManagers} from '../plugins' 6 | 7 | import {MenuElement} from '../layout' 8 | 9 | const createSearchString = (query) => { 10 | return( 11 | '?' + qs.stringify({ 12 | query: query 13 | }) 14 | ) 15 | } 16 | 17 | const whoisDomainMenuElement = new MenuElement({ 18 | type: "tld", 19 | path: (domainName) => { 20 | const search_string = createSearchString(`dn:"${domainName}"`) 21 | return `/whois${search_string}` 22 | }, 23 | text: "Pivot Search", 24 | }) 25 | 26 | const whoisRegistrantMenuElement = new MenuElement({ 27 | type: "registrant", 28 | path: (registrant) => { 29 | const search_string = createSearchString(`registrant_name:"${registrant}"`) 30 | return `/whois${search_string}` 31 | }, 32 | text: "Pivot Search" 33 | }) 34 | 35 | const whoisEmailMenuElement = new MenuElement({ 36 | type: "email", 37 | path: (email) => { 38 | const search_string = createSearchString(`contactEmail:"${email}"`) 39 | return `/whois${search_string}` 40 | }, 41 | text: "Pivot Search" 42 | }) 43 | 44 | const whoisTelephoneMenuElement = new MenuElement({ 45 | type: "telephone", 46 | path: (telephone) => { 47 | const search_string = createSearchString(`registrant_telephone:"${telephone}"`) 48 | return `/whois${search_string}` 49 | }, 50 | text: "Pivot Search" 51 | }) 52 | 53 | export const DomainNameCell = ({value: domainName, copyFriendly}) => { 54 | const plugins = PluginManagers.menu.plugins.tld 55 | 56 | return ( 57 | 62 | {whoisDomainMenuElement.getComponent(domainName)} 63 | {Object.keys(plugins).map((name, index) => ( 64 | plugins[name].getComponent(domainName, index) 65 | ))} 66 | 67 | ) 68 | } 69 | 70 | export const RegistrantCell = ({value: registrant_name, copyFriendly}) => { 71 | const plugins = PluginManagers.menu.plugins.registrant 72 | 73 | if (registrant_name === null || registrant_name === "") { 74 | return ( 75 | 76 | ) 77 | } 78 | 79 | 80 | return ( 81 | 86 | {whoisRegistrantMenuElement.getComponent(registrant_name)} 87 | {Object.keys(plugins).map((name, index) => ( 88 | plugins[name].getComponent(registrant_name, index) 89 | ))} 90 | 91 | ) 92 | } 93 | 94 | export const EmailCell = ({value: contactEmail, copyFriendly}) => { 95 | const plugins = PluginManagers.menu.plugins.email 96 | if (contactEmail === null || contactEmail === "") { 97 | return ( 98 | 99 | ) 100 | } 101 | 102 | return ( 103 | 108 | {whoisEmailMenuElement.getComponent(contactEmail)} 109 | {Object.keys(plugins).map((name, index) => ( 110 | plugins[name].getComponent(contactEmail, index) 111 | ))} 112 | 113 | ) 114 | } 115 | 116 | export const 
TelephoneCell = ({value: registrant_telephone, copyFriendly}) => { 117 | const plugins = PluginManagers.menu.plugins.telephone 118 | if (registrant_telephone === null || registrant_telephone === "") { 119 | return ( 120 | 121 | ) 122 | } 123 | 124 | return ( 125 | 130 | {whoisTelephoneMenuElement.getComponent(registrant_telephone)} 131 | {Object.keys(plugins).map((name, index) => ( 132 | plugins[name].getComponent(registrant_telephone, index) 133 | ))} 134 | 135 | ) 136 | } -------------------------------------------------------------------------------- /pydat/frontend/src/components/whois/table_pagination.jsx: -------------------------------------------------------------------------------- 1 | import React from 'react' 2 | 3 | import { makeStyles} from '@material-ui/core/styles'; 4 | import IconButton from '@material-ui/core/IconButton' 5 | import TablePagination from '@material-ui/core/TablePagination' 6 | import FirstPageIcon from '@material-ui/icons/FirstPage'; 7 | import KeyboardArrowLeft from '@material-ui/icons/KeyboardArrowLeft'; 8 | import KeyboardArrowRight from '@material-ui/icons/KeyboardArrowRight'; 9 | import LastPageIcon from '@material-ui/icons/LastPage'; 10 | 11 | import {useUserPreferences} from '../helpers/preferences' 12 | 13 | const usePaginationStyles = makeStyles((theme) => ({ 14 | root: { 15 | flexShrink: 0, 16 | marginLeft: theme.spacing(2.5), 17 | } 18 | })) 19 | 20 | const TablePaginationActions = ({ 21 | pageCount, 22 | gotoPage, 23 | previousPage, 24 | nextPage, 25 | canNextPage, 26 | canPreviousPage, 27 | // paginationProps, 28 | }) => { 29 | 30 | const classes = usePaginationStyles(); 31 | 32 | return ( 33 |
34 | gotoPage(0)} 36 | disabled={!canPreviousPage} 37 | aria-label="first page" 38 | > 39 | 40 | 41 | previousPage()} 43 | disabled={!canPreviousPage} 44 | aria-label="previous page" 45 | > 46 | 47 | 48 | nextPage()} 50 | disabled={!canNextPage} 51 | aria-label="next page" 52 | > 53 | 54 | 55 | gotoPage(pageCount - 1)} 57 | disabled={!canNextPage} 58 | aria-label="last page" 59 | > 60 | 61 | 62 |
63 | ); 64 | } 65 | 66 | export const Paginator = ({ 67 | gotoPage, 68 | previousPage, 69 | nextPage, 70 | pageCount, 71 | totalRecords, 72 | pageOptions, 73 | setPageSize, 74 | pageIndex, 75 | pageSize, 76 | canNextPage, 77 | canPreviousPage, 78 | columnLength, 79 | validPageSizes, 80 | }) => { 81 | const preferences = useUserPreferences('whois') 82 | const handleChangePage = (event, newPage) => { 83 | gotoPage(newPage) 84 | }; 85 | 86 | const handleChangeRowsPerPage = (event) => { 87 | if (preferences.getPref("remember_page_size")) { 88 | preferences.setPref("page_size", parseInt(event.target.value)) 89 | } 90 | setPageSize(parseInt(event.target.value)) 91 | }; 92 | 93 | return ( 94 | 95 | ( 109 | 118 | )} 119 | /> 120 | 121 | ) 122 | } 123 | 124 | export default Paginator -------------------------------------------------------------------------------- /pydat/frontend/src/index.css: -------------------------------------------------------------------------------- 1 | body { 2 | margin: 0; 3 | font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', 'Roboto', 'Oxygen', 4 | 'Ubuntu', 'Cantarell', 'Fira Sans', 'Droid Sans', 'Helvetica Neue', 5 | sans-serif; 6 | -webkit-font-smoothing: antialiased; 7 | -moz-osx-font-smoothing: grayscale; 8 | } 9 | 10 | code { 11 | font-family: source-code-pro, Menlo, Monaco, Consolas, 'Courier New', 12 | monospace; 13 | } 14 | -------------------------------------------------------------------------------- /pydat/frontend/src/index.js: -------------------------------------------------------------------------------- 1 | import React from 'react'; 2 | import ReactDOM from 'react-dom'; 3 | import {appSettings} from './settings' 4 | import {BackdropLoader} from './components/helpers/loaders' 5 | import 'fontsource-roboto'; 6 | import * as serviceWorker from './serviceWorker'; 7 | import { Suspense } from 'react'; 8 | 9 | const Pydat = React.lazy(() => import('./pydat')) 10 | 11 | const fetchSettings = async () => { 12 | let response = await fetch ( 13 | '/api/v2/settings', { 14 | method: 'GET', 15 | headers: { 16 | 'Content-Type': 'application/json' 17 | 18 | }, 19 | }) 20 | 21 | if (response.status === 200) { 22 | let jresp = await response.json() 23 | appSettings['loaded'] = true 24 | for (let key in jresp) { 25 | appSettings[key] = jresp[key] 26 | } 27 | return jresp 28 | } else { 29 | console.log(response) 30 | } 31 | } 32 | 33 | const runAppAsync = async () => { 34 | 35 | await fetchSettings() 36 | 37 | ReactDOM.render( 38 | 39 | }> 40 | 41 | 42 | 43 | , 44 | // 45 | // 46 | // , 47 | document.getElementById('root') 48 | ); 49 | 50 | // If you want your app to work offline and load faster, you can change 51 | // unregister() to register() below. Note this comes with some pitfalls. 
52 |     // Learn more about service workers: https://bit.ly/CRA-PWA
53 |     serviceWorker.unregister();
54 | }
55 | 
56 | const runApp = () => {
57 |     runAppAsync()
58 | }
59 | 
60 | runApp()
61 | 
--------------------------------------------------------------------------------
/pydat/frontend/src/plugins/dnsdb/index.jsx:
--------------------------------------------------------------------------------
 1 | import React from 'react'
 2 | import qs from 'qs'
 3 | 
 4 | import { PluginManagers } from '../../components/plugins'
 5 | 
 6 | import {
 7 |     MenuElement,
 8 |     RouteElement,
 9 |     NavigationElement
10 | } from '../../components/layout'
11 | 
12 | import {
13 |     userPreferencesManager,
14 |     UserPreferenceNamespace,
15 |     UserPreference,
16 | } from '../../components/helpers/preferences'
17 | 
18 | import { appSettings } from '../../settings'
19 | 
20 | const DNSDB = React.lazy(() => import('./dnsdb'))
21 | 
22 | export const DNSDBPATH = "/passive/dnsdb"
23 | 
24 | const DNSDBDomainMenu = new MenuElement({
25 |     type: "tld",
26 |     path: (domainName) => {
27 |         const search_string = '?' + qs.stringify({
28 |             type: 'domain',
29 |             value: domainName
30 |         })
31 | 
32 |         return `${DNSDBPATH}${search_string}`
33 |     },
34 |     text: "Search DNSDB"
35 | })
36 | 
37 | const DNSDBIPMenu = new MenuElement({
38 |     type: "ip",
39 |     path: (ip) => {
40 |         const search_string = '?' + qs.stringify({
41 |             type: 'ip',
42 |             value: ip
43 |         })
44 | 
45 |         return `${DNSDBPATH}${search_string}`
46 |     },
47 |     text: "Search DNSDB"
48 | })
49 | 
50 | const dnsdbPreferencesNamespace = new UserPreferenceNamespace({
51 |     name: "dnsdb",
52 |     title: "DNSDB Search Preferences",
53 |     description: "Preferences for DNSDB Search and Presentation"
54 | })
55 | 
56 | const DNSDBENABLED = Boolean(appSettings.enable_plugin_dnsdb)
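57 | 
58 | // Register DNSDB preferences, menu items, a route, and a nav entry only when
59 | // the backend's settings response reports the plugin as enabled.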
60 | if (DNSDBENABLED) {
61 |     userPreferencesManager.registerNamespace(dnsdbPreferencesNamespace)
62 |     userPreferencesManager.registerPrefs(
63 |         dnsdbPreferencesNamespace, [
64 |             new UserPreference({
65 |                 name: "page_size",
66 |                 type: "number",
67 |                 title: "Results Page Size",
68 |                 description: "Default Page Size to use for result pagination",
69 |                 default_value: 100
70 |             }),
71 |             new UserPreference({
72 |                 name: "remember_page_size",
73 |                 type: "boolean",
74 |                 title: "Remember Results Page Size",
75 |                 description: "Remember last used page size when displaying results",
76 |                 default_value: true,
77 |             }),
78 |             new UserPreference({
79 |                 name: "domain_search_type",
80 |                 type: "string",
81 |                 title: "Domain Search Method",
82 |                 description: "Search method to use when searching forward domains (absolute, prefix-wildcard, suffix-wildcard)",
83 |                 default_value: "prefix-wildcard"
84 |             }),
85 |             new UserPreference({
86 |                 name: "remember_domain_search_type",
87 |                 type: "boolean",
88 |                 title: "Remember Domain Search Method",
89 |                 description: "Remember last used Forward Domain Search Type (e.g., Prefix Wildcard)",
90 |                 default_value: false
91 |             })
92 |         ]
93 |     )
94 | 
95 |     PluginManagers.menu.addPlugin('dnsdb_tld', 'tld', DNSDBDomainMenu)
96 |     PluginManagers.menu.addPlugin('dnsdb_domain', 'domain', DNSDBDomainMenu)
97 |     PluginManagers.menu.addPlugin('dnsdb_ip', 'ip', DNSDBIPMenu)
98 |     PluginManagers.routes.addPlugin(
99 |         'dnsdb',
100 |         new RouteElement({
101 |             path: DNSDBPATH,
102 |             title: 'DNSDB Passive DNS',
103 |             component: <DNSDB />
104 |         })
105 |     )
106 | 
107 |     PluginManagers.nav.addPlugin(
108 |         'dnsdb',
109 |         new NavigationElement({
110 |             title: 'DNSDB',
111 |             path: DNSDBPATH,
112 |             text: "Passive DNS"
113 |         })
114 |     )
115 | }
116 | 
--------------------------------------------------------------------------------
/pydat/frontend/src/plugins/dnsdb/table_cells.jsx:
--------------------------------------------------------------------------------
 1 | import React, { useMemo } from 'react'
 2 | 
 3 | import Grid from '@material-ui/core/Grid'
 4 | import DropDownCell from '../../components/helpers/dropdown_cell'
 5 | import { PluginManagers } from '../../components/plugins';
 6 | 
 7 | const cleanData = (data) => {
 8 |     // Remove trailing '.'
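 9 |     // DNSDB returns fully-qualified names with a trailing dot ("example.com.");
10 |     // strip it before handing the name to the menu plugins.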
11 |     if (data.slice(-1) === '.') {
12 |         return data.slice(0, -1)
13 |     } else {
14 |         return data
15 |     }
16 | }
17 | 
18 | // Currently unused sketch: normalize a whole DNSDB record in one pass
19 | // (assumes the record carries the rrname and rdata fields used below).
20 | const cleanEntry = (entry) => ({
21 |     ...entry,
22 |     rrname: cleanData(entry.rrname),
23 |     rdata: entry.rdata.map(cleanData),
24 | })
25 | 
26 | const DomainMenu = ({value, copyFriendly}) => {
27 |     let plugins = PluginManagers.menu.plugins.domain
28 |     const cleanValue = cleanData(value)
29 | 
30 |     return (
31 |         <DropDownCell
32 |             friendly={"domain"}
33 |             value={cleanValue}
34 |             copyFriendly={copyFriendly}
35 |         >
36 |             {Object.keys(plugins).map((name, index) => (
37 |                 plugins[name].getComponent(cleanValue, index)
38 |             ))}
39 |         </DropDownCell>
40 |     )
41 | }
42 | 
43 | const IPMenu = ({value, copyFriendly}) => {
44 |     let plugins = PluginManagers.menu.plugins.ip
45 | 
46 |     return (
47 |         <DropDownCell
48 |             friendly={"ip"}
49 |             value={value}
50 |             copyFriendly={copyFriendly}
51 |         >
52 |             {Object.keys(plugins).map((name, index) => (
53 |                 plugins[name].getComponent(value, index)
54 |             ))}
55 |         </DropDownCell>
56 |     )
57 | }
58 | 
59 | export const RRNameCell = ({row, copyFriendly}) => {
60 |     return (
61 |         <DomainMenu
62 |             value={row.rrname}
63 |             copyFriendly={copyFriendly}
64 |         />
65 |     )
66 | }
67 | 
68 | 
69 | export const RDataCell = ({row, copyFriendly}) => {
70 |     const cleanedData = useMemo(() => {
71 |         let data = []
72 |         row.rdata.forEach((value) => {
73 |             data.push(cleanData(value))
74 |         })
75 |         return data
76 |     }, [row])
77 | 
78 |     if (!copyFriendly) {
79 |         return (
80 |             <Grid container>
81 |                 {cleanedData.map((value, index) => {
82 |                     let data = value
83 |                     if (['ns', 'cname', 'mx'].includes(row.rrtype.toLowerCase())) {
84 |                         data = (
85 |                             <DomainMenu
86 |                                 value={value}
87 |                                 copyFriendly={copyFriendly}
88 |                             />
89 |                         )
90 |                     } else if (['a', 'aaaa'].includes(row.rrtype.toLowerCase())) {
91 |                         data = (
92 |                             <IPMenu
93 |                                 value={value}
94 |                                 copyFriendly={copyFriendly}
95 |                             />
96 |                         )
97 |                     }
98 | 
99 |                     return (
100 |                         <Grid item key={index}>
101 |                             {data}
102 |                         </Grid>
103 |                     )
104 |                 })}
105 |             </Grid>
106 |         )
107 |     } else {
108 |         return (
109 |             <React.Fragment>
110 |                 {cleanedData.join()}
111 |             </React.Fragment>
112 |         )
113 |     }
114 | }
--------------------------------------------------------------------------------
/pydat/frontend/src/plugins/dnsdb/table_pagination.jsx:
--------------------------------------------------------------------------------
 1 | import React from 'react'
 2 | 
 3 | import { makeStyles } from '@material-ui/core/styles';
 4 | import IconButton from '@material-ui/core/IconButton'
 5 | import TablePagination from '@material-ui/core/TablePagination'
 6 | import FirstPageIcon from '@material-ui/icons/FirstPage';
 7 | import KeyboardArrowLeft from '@material-ui/icons/KeyboardArrowLeft';
 8 | import KeyboardArrowRight from '@material-ui/icons/KeyboardArrowRight';
 9 | import LastPageIcon from '@material-ui/icons/LastPage';
10 | 
11 | import {useUserPreferences} from '../../components/helpers/preferences'
12 | 
13 | const usePaginationStyles = makeStyles((theme) => ({
14 |     root: {
15 |         flexShrink: 0,
16 |         marginLeft: theme.spacing(2.5),
17 |     }
18 | }))
19 | 
20 | const TablePaginationActions = ({
21 |     pageCount,
22 |     gotoPage,
23 |     previousPage,
24 |     nextPage,
25 |     canNextPage,
26 |     canPreviousPage,
27 |     // paginationProps,
28 | }) => {
29 | 
30 |     const classes = usePaginationStyles();
31 | 
32 |     return (
33 |         <div className={classes.root}>
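34 |             {/* Same first/prev/next/last controls as the WHOIS paginator above */}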
35 |             <IconButton
36 |                 onClick={() => gotoPage(0)}
37 |                 disabled={!canPreviousPage}
38 |                 aria-label="first page"
39 |             >
40 |                 <FirstPageIcon />
41 |             </IconButton>
42 |             <IconButton
43 |                 onClick={() => previousPage()}
44 |                 disabled={!canPreviousPage}
45 |                 aria-label="previous page"
46 |             >
47 |                 <KeyboardArrowLeft />
48 |             </IconButton>
49 |             <IconButton
50 |                 onClick={() => nextPage()}
51 |                 disabled={!canNextPage}
52 |                 aria-label="next page"
53 |             >
54 |                 <KeyboardArrowRight />
55 |             </IconButton>
56 |             <IconButton
57 |                 onClick={() => gotoPage(pageCount - 1)}
58 |                 disabled={!canNextPage}
59 |                 aria-label="last page"
60 |             >
61 |                 <LastPageIcon />
62 |             </IconButton>
63 |         </div>
64 |     );
65 | }
66 | 
67 | export const Paginator = ({
68 |     gotoPage,
69 |     previousPage,
70 |     nextPage,
71 |     pageCount,
72 |     rowCount,
73 |     setPageSize,
74 |     pageIndex,
75 |     pageSize,
76 |     canNextPage,
77 |     canPreviousPage,
78 |     columnLength,
79 |     validPageSizes = [50, 100, 1000, 2500]
80 | }) => {
81 |     const preferences = useUserPreferences('dnsdb')
82 |     const handleChangePage = (event, newPage) => {
83 |         gotoPage(newPage)
84 |     };
85 | 
86 |     const handleChangeRowsPerPage = (event) => {
87 |         if (preferences.getPref("remember_page_size")) {
88 |             preferences.setPref("page_size", parseInt(event.target.value))
89 |         }
90 |         setPageSize(parseInt(event.target.value))
91 |     };
92 | 
93 |     return (
94 |         <React.Fragment>
95 |             <TablePagination
96 |                 rowsPerPageOptions={validPageSizes}
97 |                 colSpan={columnLength}
98 |                 count={rowCount}
99 |                 rowsPerPage={pageSize}
100 |                 page={pageIndex}
101 |                 SelectProps={{
102 |                     inputProps: {'aria-label': 'rows per page'},
103 |                     native: true,
104 |                 }}
105 |                 onChangePage={handleChangePage}
106 |                 onChangeRowsPerPage={handleChangeRowsPerPage}
107 |                 ActionsComponent={(paginationProps) => (
108 |                     <TablePaginationActions
109 |                         pageCount={pageCount}
110 |                         gotoPage={gotoPage}
111 |                         previousPage={previousPage}
112 |                         nextPage={nextPage}
113 |                         canNextPage={canNextPage}
114 |                         canPreviousPage={canPreviousPage}
115 |                     />
116 |                 )}
117 |             />
118 |         </React.Fragment>
119 |     )
120 | }
121 | 
122 | export default Paginator
--------------------------------------------------------------------------------
/pydat/frontend/src/plugins/index.jsx:
--------------------------------------------------------------------------------
1 | // Importing Plugins
2 | // To import a plugin, place the directory of the plugin into this directory
3 | // and then import the directory, e.g.,
4 | // import './my_plugin_directory'
5 | 
6 | import './dnsdb'
--------------------------------------------------------------------------------
/pydat/frontend/src/pydat.jsx:
--------------------------------------------------------------------------------
 1 | import React from 'react'
 2 | import {
 3 |     BrowserRouter as Router,
 4 |     Switch,
 5 |     Route,
 6 |     Redirect
 7 | } from 'react-router-dom'
 8 | 
 9 | import ThemeProvider from '@material-ui/styles/ThemeProvider'
10 | import useMediaQuery from '@material-ui/core/useMediaQuery'
11 | import Slide from '@material-ui/core/Slide'
12 | 
13 | import {SnackbarProvider} from 'notistack'
14 | 
15 | import {PluginManagers} from './components/plugins'
16 | import {userPreferencesContainer} from './components/helpers/preferences'
17 | import Dashboard from './components/layout/dashboard'
18 | import NotFound from './components/layout/notfound'
19 | import { appSettings } from './settings'
20 | import {defaultTheme, darkTheme } from './components/layout/themes'
21 | 
22 | import './plugins'
23 | import './active_resolution'
24 | import { Typography } from '@material-ui/core'
25 | 
26 | const WhoisHandler = React.lazy(() => import('./components/whois'))
27 | 
28 | userPreferencesContainer._initializePreferences()
29 | 
30 | const Pydat = () => {
31 |     const routes = PluginManagers.routes
32 |     const enableDarkMode = useMediaQuery('@media (prefers-color-scheme: dark)')
33 | 
34 |     const theme = enableDarkMode ? darkTheme : defaultTheme
35 | 
36 |     if (!appSettings['loaded']) {
37 |         return (
38 |             <ThemeProvider theme={theme}>
39 |                 <Typography variant="h2">
40 |                     Unable to talk to backend API!!
41 |                 </Typography>
42 |             </ThemeProvider>
43 |         )
44 |     }
45 | 
46 |     return (
47 |         <ThemeProvider theme={theme}>
48 |             <SnackbarProvider
49 |                 anchorOrigin={{vertical: 'top', horizontal: 'right'}}
50 |                 TransitionComponent={Slide}
51 |                 preventDuplicate
52 |             >
53 |                 <Router>
54 |                     <Dashboard>
55 |                         <Switch>
56 |                             <Route exact path="/">
57 |                                 <WhoisHandler/>
58 |                             </Route>
59 |                             {Object.keys(routes.plugins).map((name, index) => {
60 |                                 return (
61 |                                     <Route
62 |                                         exact
63 |                                         path={routes.plugins[name].path}
64 |                                         key={index}
65 |                                     >
66 |                                         {React.cloneElement(routes.plugins[name].component)}
67 |                                     </Route>
68 |                                 )
69 |                             })}
70 |                             <Route path="*">
71 |                                 <NotFound/>
72 |                             </Route>
73 |                         </Switch>
74 |                     </Dashboard>
75 |                 </Router>
76 |             </SnackbarProvider>
77 |         </ThemeProvider>
78 |     )
79 | }
80 | 
81 | export default Pydat
--------------------------------------------------------------------------------
/pydat/frontend/src/serviceWorker.js:
--------------------------------------------------------------------------------
 1 | // This optional code is used to register a service worker.
 2 | // register() is not called by default.
 3 | 
 4 | // This lets the app load faster on subsequent visits in production, and gives
 5 | // it offline capabilities. However, it also means that developers (and users)
 6 | // will only see deployed updates on subsequent visits to a page, after all the
 7 | // existing tabs open on the page have been closed, since previously cached
 8 | // resources are updated in the background.
 9 | 
10 | // To learn more about the benefits of this model and instructions on how to
11 | // opt-in, read https://bit.ly/CRA-PWA
12 | 
13 | const isLocalhost = Boolean(
14 |   window.location.hostname === 'localhost' ||
15 |     // [::1] is the IPv6 localhost address.
16 |     window.location.hostname === '[::1]' ||
17 |     // 127.0.0.0/8 are considered localhost for IPv4.
18 |     window.location.hostname.match(
19 |       /^127(?:\.(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)){3}$/
20 |     )
21 | );
22 | 
23 | export function register(config) {
24 |   if (process.env.NODE_ENV === 'production' && 'serviceWorker' in navigator) {
25 |     // The URL constructor is available in all browsers that support SW.
26 |     const publicUrl = new URL(process.env.PUBLIC_URL, window.location.href);
27 |     if (publicUrl.origin !== window.location.origin) {
28 |       // Our service worker won't work if PUBLIC_URL is on a different origin
29 |       // from what our page is served on. This might happen if a CDN is used to
30 |       // serve assets; see https://github.com/facebook/create-react-app/issues/2374
31 |       return;
32 |     }
33 | 
34 |     window.addEventListener('load', () => {
35 |       const swUrl = `${process.env.PUBLIC_URL}/service-worker.js`;
36 | 
37 |       if (isLocalhost) {
38 |         // This is running on localhost. Let's check if a service worker still exists or not.
39 |         checkValidServiceWorker(swUrl, config);
40 | 
41 |         // Add some additional logging to localhost, pointing developers to the
42 |         // service worker/PWA documentation.
43 |         navigator.serviceWorker.ready.then(() => {
44 |           console.log(
45 |             'This web app is being served cache-first by a service ' +
46 |               'worker. To learn more, visit https://bit.ly/CRA-PWA'
47 |           );
48 |         });
49 |       } else {
50 |         // Is not localhost. Just register service worker
51 |         registerValidSW(swUrl, config);
52 |       }
53 |     });
54 |   }
55 | }
56 | 
57 | function registerValidSW(swUrl, config) {
58 |   navigator.serviceWorker
59 |     .register(swUrl)
60 |     .then(registration => {
61 |       registration.onupdatefound = () => {
62 |         const installingWorker = registration.installing;
63 |         if (installingWorker == null) {
64 |           return;
65 |         }
66 |         installingWorker.onstatechange = () => {
67 |           if (installingWorker.state === 'installed') {
68 |             if (navigator.serviceWorker.controller) {
69 |               // At this point, the updated precached content has been fetched,
70 |               // but the previous service worker will still serve the older
71 |               // content until all client tabs are closed.
72 |               console.log(
73 |                 'New content is available and will be used when all ' +
74 |                   'tabs for this page are closed. See https://bit.ly/CRA-PWA.'
75 |               );
76 | 
77 |               // Execute callback
78 |               if (config && config.onUpdate) {
79 |                 config.onUpdate(registration);
80 |               }
81 |             } else {
82 |               // At this point, everything has been precached.
83 |               // It's the perfect time to display a
84 |               // "Content is cached for offline use." message.
 85 |               console.log('Content is cached for offline use.');
 86 | 
 87 |               // Execute callback
 88 |               if (config && config.onSuccess) {
 89 |                 config.onSuccess(registration);
 90 |               }
 91 |             }
 92 |           }
 93 |         };
 94 |       };
 95 |     })
 96 |     .catch(error => {
 97 |       console.error('Error during service worker registration:', error);
 98 |     });
 99 | }
100 | 
101 | function checkValidServiceWorker(swUrl, config) {
102 |   // Check if the service worker can be found. If it can't, reload the page.
103 |   fetch(swUrl, {
104 |     headers: { 'Service-Worker': 'script' },
105 |   })
106 |     .then(response => {
107 |       // Ensure service worker exists, and that we really are getting a JS file.
108 |       const contentType = response.headers.get('content-type');
109 |       if (
110 |         response.status === 404 ||
111 |         (contentType != null && contentType.indexOf('javascript') === -1)
112 |       ) {
113 |         // No service worker found. Probably a different app. Reload the page.
114 |         navigator.serviceWorker.ready.then(registration => {
115 |           registration.unregister().then(() => {
116 |             window.location.reload();
117 |           });
118 |         });
119 |       } else {
120 |         // Service worker found. Proceed as normal.
121 |         registerValidSW(swUrl, config);
122 |       }
123 |     })
124 |     .catch(() => {
125 |       console.log(
126 |         'No internet connection found. App is running in offline mode.'
127 |       );
128 |     });
129 | }
130 | 
131 | export function unregister() {
132 |   if ('serviceWorker' in navigator) {
133 |     navigator.serviceWorker.ready
134 |       .then(registration => {
135 |         registration.unregister();
136 |       })
137 |       .catch(error => {
138 |         console.error(error.message);
139 |       });
140 |   }
141 | }
142 | 
--------------------------------------------------------------------------------
/pydat/frontend/src/settings.js:
--------------------------------------------------------------------------------
1 | export const appSettings = {}
2 | 
--------------------------------------------------------------------------------
/pydat/frontend/src/setupTests.js:
--------------------------------------------------------------------------------
1 | // jest-dom adds custom jest matchers for asserting on DOM nodes.
2 | // allows you to do things like:
3 | // expect(element).toHaveTextContent(/react/i)
4 | // learn more: https://github.com/testing-library/jest-dom
5 | import '@testing-library/jest-dom/extend-expect';
6 | 
--------------------------------------------------------------------------------