├── .coveragerc ├── .dockerignore ├── .gitchangelog.rc ├── .gitignore ├── CHANGELOG.rst ├── Dockerfile ├── LICENSE ├── MANIFEST.in ├── README.rst ├── docs ├── Makefile └── source │ ├── alerts.rst │ ├── changelog.rst │ ├── conf.py │ ├── data_packets.rst │ ├── data_sources.rst │ ├── devices.rst │ ├── extras_kismet_log_devices_to_filebeat_json.rst │ ├── extras_kismet_log_devices_to_json.rst │ ├── extras_kismet_log_to_csv.rst │ ├── extras_kismet_log_to_kml.rst │ ├── extras_kismet_log_to_pcap.rst │ ├── included_scripts.rst │ ├── index.rst │ ├── kismet.rst │ ├── packets.rst │ ├── snapshots.rst │ ├── tables.rst │ ├── testing.rst │ └── updating.rst ├── kismetdb ├── __init__.py ├── alerts.py ├── base_interface.py ├── data_packets.py ├── data_sources.py ├── devices.py ├── kismet.py ├── messages.py ├── packets.py ├── scripts │ ├── __init__.py │ ├── log_devices_to_filebeat_json.py │ ├── log_devices_to_json.py │ ├── log_to_csv.py │ ├── log_to_kml.py │ └── log_to_pcap.py ├── snapshots.py └── utility.py ├── setup.py └── tests ├── __init__.py ├── assets └── .placeholder ├── integration ├── test_integration_alerts_4.py ├── test_integration_alerts_5.py ├── test_integration_base_interface_4.py ├── test_integration_base_interface_5.py ├── test_integration_data_sources_4.py ├── test_integration_data_sources_5.py ├── test_integration_devices_4.py ├── test_integration_devices_5.py └── test_integration_packets_4.py └── unit ├── __init__.py └── test_unit_utility.py /.coveragerc: -------------------------------------------------------------------------------- 1 | [run] 2 | omit = */scripts/* 3 | -------------------------------------------------------------------------------- /.dockerignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | 
downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | wheels/ 23 | *.egg-info/ 24 | .installed.cfg 25 | *.egg 26 | MANIFEST 27 | 28 | # PyInstaller 29 | # Usually these files are written by a python script from a template 30 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 31 | *.manifest 32 | *.spec 33 | 34 | # Installer logs 35 | pip-log.txt 36 | pip-delete-this-directory.txt 37 | 38 | # Unit test / coverage reports 39 | htmlcov/ 40 | .tox/ 41 | .coverage 42 | .coverage.* 43 | .cache 44 | nosetests.xml 45 | coverage.xml 46 | *.cover 47 | .hypothesis/ 48 | .pytest_cache/ 49 | 50 | # Translations 51 | *.mo 52 | *.pot 53 | 54 | # Django stuff: 55 | *.log 56 | local_settings.py 57 | db.sqlite3 58 | 59 | # Flask stuff: 60 | instance/ 61 | .webassets-cache 62 | 63 | # Scrapy stuff: 64 | .scrapy 65 | 66 | # Sphinx documentation 67 | docs/_build/ 68 | 69 | # PyBuilder 70 | target/ 71 | 72 | # Jupyter Notebook 73 | .ipynb_checkpoints 74 | 75 | # pyenv 76 | .python-version 77 | 78 | # celery beat schedule file 79 | celerybeat-schedule 80 | 81 | # SageMath parsed files 82 | *.sage.py 83 | 84 | # Environments 85 | .env 86 | .venv 87 | env/ 88 | venv/ 89 | ENV/ 90 | env.bak/ 91 | venv.bak/ 92 | 93 | # Spyder project settings 94 | .spyderproject 95 | .spyproject 96 | 97 | # Rope project settings 98 | .ropeproject 99 | 100 | # mkdocs documentation 101 | /site 102 | 103 | # mypy 104 | .mypy_cache/ 105 | -------------------------------------------------------------------------------- /.gitchangelog.rc: -------------------------------------------------------------------------------- 1 | import os 2 | import re 3 | 4 | ## 5 | ## Format 6 | ## 7 | ## ACTION: [AUDIENCE:] COMMIT_MSG [!TAG ...] 8 | ## 9 | ## Description 10 | ## 11 | ## ACTION is one of 'chg', 'fix', 'new' 12 | ## 13 | ## Is WHAT the change is about. 14 | ## 15 | ## 'chg' is for refactor, small improvement, cosmetic changes... 
16 | ## 'fix' is for bug fixes 17 | ## 'new' is for new features, big improvement 18 | ## 19 | ## AUDIENCE is optional and one of 'dev', 'usr', 'pkg', 'test', 'doc' 20 | ## 21 | ## Is WHO is concerned by the change. 22 | ## 23 | ## 'dev' is for developpers (API changes, refactors...) 24 | ## 'usr' is for final users (UI changes) 25 | ## 'pkg' is for packagers (packaging changes) 26 | ## 'test' is for testers (test only related changes) 27 | ## 'doc' is for doc guys (doc only changes) 28 | ## 29 | ## COMMIT_MSG is ... well ... the commit message itself. 30 | ## 31 | ## TAGs are additionnal adjective as 'refactor' 'minor' 'cosmetic' 32 | ## 33 | ## They are preceded with a '!' or a '@' (prefer the former, as the 34 | ## latter is wrongly interpreted in github.) Commonly used tags are: 35 | ## 36 | ## 'refactor' is obviously for refactoring code only 37 | ## 'minor' is for a very meaningless change (a typo, adding a comment) 38 | ## 'cosmetic' is for cosmetic driven change (re-indentation, 80-col...) 39 | ## 'wip' is for partial functionality but complete subfunctionality. 40 | ## 41 | ## Example: 42 | ## 43 | ## new: usr: support of bazaar implemented 44 | ## chg: re-indentend some lines !cosmetic 45 | ## new: dev: updated code to be compatible with last version of killer lib. 46 | ## fix: pkg: updated year of licence coverage. 47 | ## new: test: added a bunch of test around user usability of feature X. 48 | ## fix: typo in spelling my name in comment. !minor 49 | ## 50 | ## Please note that multi-line commit message are supported, and only the 51 | ## first line will be considered as the "summary" of the commit message. So 52 | ## tags, and other rules only applies to the summary. The body of the commit 53 | ## message will be displayed in the changelog without reformatting. 
54 | 55 | 56 | ## This is where we determine the current version, used for labeling the 'unreleased version' 57 | 58 | def read(fname): 59 | return open(os.path.join(os.path.dirname('__file__'), fname)).read() 60 | 61 | def get_version(): 62 | raw_init_file = read("kismetdb/__init__.py") 63 | rx_compiled = re.compile(r"\s*__version__\s*=\s*\"(\S+)\"") 64 | ver = rx_compiled.search(raw_init_file).group(1) 65 | return ver 66 | 67 | ## 68 | ## ``ignore_regexps`` is a line of regexps 69 | ## 70 | ## Any commit having its full commit message matching any regexp listed here 71 | ## will be ignored and won't be reported in the changelog. 72 | ## 73 | ignore_regexps = [ 74 | r'@minor', r'!minor', 75 | r'@cosmetic', r'!cosmetic', 76 | r'@refactor', r'!refactor', 77 | r'@wip', r'!wip', 78 | r'^([cC]hg|[fF]ix|[nN]ew)\s*:\s*[p|P]kg:', 79 | r'^([cC]hg|[fF]ix|[nN]ew)\s*:\s*[d|D]ev:', 80 | r'^(.{3,3}\s*:)?\s*[fF]irst commit.?\s*$', 81 | ] 82 | 83 | 84 | ## ``section_regexps`` is a list of 2-tuples associating a string label and a 85 | ## list of regexp 86 | ## 87 | ## Commit messages will be classified in sections thanks to this. Section 88 | ## titles are the label, and a commit is classified under this section if any 89 | ## of the regexps associated is matching. 90 | ## 91 | section_regexps = [ 92 | ('New', [ 93 | r'^[nN]ew\s*:\s*((dev|use?r|pkg|test|doc)\s*:\s*)?([^\n]*)$', 94 | ]), 95 | ('Changes', [ 96 | r'^[cC]hg\s*:\s*((dev|use?r|pkg|test|doc)\s*:\s*)?([^\n]*)$', 97 | ]), 98 | ('Fix', [ 99 | r'^[fF]ix\s*:\s*((dev|use?r|pkg|test|doc)\s*:\s*)?([^\n]*)$', 100 | ]), 101 | 102 | ('Other', None ## Match all lines 103 | ), 104 | 105 | ] 106 | 107 | 108 | ## ``body_process`` is a callable 109 | ## 110 | ## This callable will be given the original body and result will 111 | ## be used in the changelog. 112 | ## 113 | ## Available constructs are: 114 | ## 115 | ## - any python callable that take one txt argument and return txt argument. 
116 | ## 117 | ## - ReSub(pattern, replacement): will apply regexp substitution. 118 | ## 119 | ## - Indent(chars=" "): will indent the text with the prefix 120 | ## Please remember that template engines gets also to modify the text and 121 | ## will usually indent themselves the text if needed. 122 | ## 123 | ## - Wrap(regexp=r"\n\n"): re-wrap text in separate paragraph to fill 80-Columns 124 | ## 125 | ## - noop: do nothing 126 | ## 127 | ## - ucfirst: ensure the first letter is uppercase. 128 | ## (usually used in the ``subject_process`` pipeline) 129 | ## 130 | ## - final_dot: ensure text finishes with a dot 131 | ## (usually used in the ``subject_process`` pipeline) 132 | ## 133 | ## - strip: remove any spaces before or after the content of the string 134 | ## 135 | ## Additionally, you can `pipe` the provided filters, for instance: 136 | #body_process = Wrap(regexp=r'\n(?=\w+\s*:)') | Indent(chars=" ") 137 | #body_process = Wrap(regexp=r'\n(?=\w+\s*:)') 138 | #body_process = noop 139 | body_process = ReSub(r'((^|\n)[A-Z]\w+(-\w+)*: .*(\n\s+.*)*)+$', r'') | strip 140 | 141 | 142 | ## ``subject_process`` is a callable 143 | ## 144 | ## This callable will be given the original subject and result will 145 | ## be used in the changelog. 146 | ## 147 | ## Available constructs are those listed in ``body_process`` doc. 148 | subject_process = (strip | 149 | ReSub(r'^([cC]hg|[fF]ix|[nN]ew)\s*:\s*((dev|use?r|pkg|test|doc)\s*:\s*)?([^\n@]*)(@[a-z]+\s+)*$', r'\4') | 150 | ucfirst | final_dot) 151 | 152 | 153 | ## ``tag_filter_regexp`` is a regexp 154 | ## 155 | ## Tags that will be used for the changelog must match this regexp. 156 | ## 157 | tag_filter_regexp = r'^v[0-9]+\.[0-9]+(\.[0-9]+)?$' 158 | 159 | 160 | ## ``unreleased_version_label`` is a string 161 | ## 162 | ## This label will be used as the changelog Title of the last set of changes 163 | ## between last valid tag and HEAD if any. 
164 | # unreleased_version_label = "%%version%% (unreleased)" 165 | unreleased_version_label = "v%s"% get_version() 166 | 167 | ## ``output_engine`` is a callable 168 | ## 169 | ## This will change the output format of the generated changelog file 170 | ## 171 | ## Available choices are: 172 | ## 173 | ## - rest_py 174 | ## 175 | ## Legacy pure python engine, outputs ReSTructured text. 176 | ## This is the default. 177 | ## 178 | ## - mustache() 179 | ## 180 | ## Template name could be any of the available templates in 181 | ## ``templates/mustache/*.tpl``. 182 | ## Requires python package ``pystache``. 183 | ## Examples: 184 | ## - mustache("markdown") 185 | ## - mustache("restructuredtext") 186 | ## 187 | ## - makotemplate() 188 | ## 189 | ## Template name could be any of the available templates in 190 | ## ``templates/mako/*.tpl``. 191 | ## Requires python package ``mako``. 192 | ## Examples: 193 | ## - makotemplate("restructuredtext") 194 | ## 195 | #output_engine = rest_py 196 | #output_engine = mustache("restructuredtext") 197 | #output_engine = mustache("markdown") 198 | #output_engine = makotemplate("restructuredtext") 199 | 200 | 201 | ## ``include_merge`` is a boolean 202 | ## 203 | ## This option tells git-log whether to include merge commits in the log. 204 | ## The default is to include them. 
205 | include_merge = False 206 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | wheels/ 23 | *.egg-info/ 24 | .installed.cfg 25 | *.egg 26 | MANIFEST 27 | 28 | # PyInstaller 29 | # Usually these files are written by a python script from a template 30 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 31 | *.manifest 32 | *.spec 33 | 34 | # Installer logs 35 | pip-log.txt 36 | pip-delete-this-directory.txt 37 | 38 | # Unit test / coverage reports 39 | htmlcov/ 40 | .tox/ 41 | .coverage 42 | .coverage.* 43 | .cache 44 | nosetests.xml 45 | coverage.xml 46 | *.cover 47 | .hypothesis/ 48 | .pytest_cache/ 49 | 50 | # Translations 51 | *.mo 52 | *.pot 53 | 54 | # Django stuff: 55 | *.log 56 | local_settings.py 57 | db.sqlite3 58 | 59 | # Flask stuff: 60 | instance/ 61 | .webassets-cache 62 | 63 | # Scrapy stuff: 64 | .scrapy 65 | 66 | # Sphinx documentation 67 | docs/_build/ 68 | 69 | # PyBuilder 70 | target/ 71 | 72 | # Jupyter Notebook 73 | .ipynb_checkpoints 74 | 75 | # pyenv 76 | .python-version 77 | 78 | # celery beat schedule file 79 | celerybeat-schedule 80 | 81 | # SageMath parsed files 82 | *.sage.py 83 | 84 | # Environments 85 | .env 86 | .venv 87 | env/ 88 | venv/ 89 | ENV/ 90 | env.bak/ 91 | venv.bak/ 92 | 93 | # Spyder project settings 94 | .spyderproject 95 | .spyproject 96 | 97 | # Rope project settings 98 | .ropeproject 99 | 100 | # mkdocs documentation 101 | /site 102 | 103 | # mypy 104 | .mypy_cache/ 105 | 106 | # Vim stuff 107 | *.swp 108 | 
-------------------------------------------------------------------------------- /CHANGELOG.rst: -------------------------------------------------------------------------------- 1 | Changelog 2 | ========= 3 | 4 | v2020.06.01 5 | ----------- 6 | - Assume the last version of the db-specific fields and converters if no other version found [Mike Kershaw / Dragorn] 7 | 8 | v2019.05.05 9 | ----------- 10 | - Handle missing SYSTEM snapshots during Kismet processing [Mike Kershaw / Dragorn] 11 | 12 | v2019.05.04 13 | ----------- 14 | - Add DataPackets handler [Mike Kershaw / Dragorn] 15 | 16 | v2019.05.03 17 | ----------- 18 | - Fix JSON blob type extractor for DataSources [Ash Wilson] 19 | 20 | Closes #3 21 | - Add JSON blob type extractor for Snapshots [Mike Kershaw / Dragorn] 22 | 23 | v2019.05.02 24 | ----------- 25 | - Make RST doc levels happy. [Mike Kershaw / Dragorn] 26 | - Hopefully make docs happy. [Mike Kershaw / Dragorn] 27 | - Add self to docs. [Mike Kershaw / Dragorn] 28 | - Fix changelog. [Mike Kershaw / Dragorn] 29 | - Fix RST? [Mike Kershaw / Dragorn] 30 | - Docs. [Mike Kershaw / Dragorn] 31 | - Ignore vim. [Mike Kershaw / Dragorn] 32 | - Enable classes Bump version Add integer version. [Mike Kershaw / 33 | Dragorn] 34 | - Add snapshots class Add kismet class for server info derived from 35 | snapshots. [Mike Kershaw / Dragorn] 36 | - Add float comparators Add string LIKE comparators. [Mike Kershaw / 37 | Dragorn] 38 | - Add defaults for db6. [Mike Kershaw / Dragorn] 39 | - Add support for database version 6. [Mike Kershaw / Dragorn] 40 | - Add license file now that it's a submodule. [Mike Kershaw / Dragorn] 41 | - Minor commit to trigger mirror. [Mike Kershaw / Dragorn] 42 | 43 | 44 | v5.1.0 (2019-02-16) 45 | ------------------- 46 | 47 | New 48 | ~~~ 49 | - Include version-specific converters. 
[Ash Wilson] 50 | 51 | This allows us to, for instance, ensure that all 52 | GPS coordinates are returned as float-type values, 53 | across all database versions, no matter how they 54 | were originally stored in the database. 55 | 56 | Closes #22 57 | - Support v4 as well as v5 Kismet databases. [Ash Wilson] 58 | 59 | Closes #19 60 | - Add ``kismet_log_devices_to_filebeat_json``. [Ash Wilson] 61 | 62 | Closes #17 63 | 64 | 65 | v5.0.0 (2019-02-12) 66 | ------------------- 67 | 68 | New 69 | ~~~ 70 | - Support v5 schema. [Ash Wilson] 71 | 72 | 73 | v4.0.3 (2019-02-05) 74 | ------------------- 75 | 76 | Changes 77 | ~~~~~~~ 78 | - Updated docs, added simplekml requirement. [Ash Wilson] 79 | 80 | Closes #8 81 | Closes #7 82 | - Adding docs to be built by Sphinx. [Ash Wilson] 83 | - Scripts automatically install with Python package. [Ash Wilson] 84 | 85 | Added generator function yield_rows() to all abstractions. 86 | - Initial working commit. [Ash Wilson] 87 | 88 | In order to run integration tests, you need a 89 | Kismet db at tests/assets/testdata.kismet. 90 | 91 | 92 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | ARG PY_VER=2.7 2 | FROM python:${PY_VER} 3 | 4 | RUN apt-get update && apt-get install -y tcpdump xmlstarlet 5 | 6 | RUN pip install pytest pytest-coverage 7 | 8 | COPY . /src/ 9 | 10 | WORKDIR /src/ 11 | 12 | RUN pip install -e . 
13 | 14 | RUN py.test --cov=kismetdb --cov-report=term-missing 15 | 16 | ### 17 | # Test v4 18 | 19 | # Test KML script 20 | RUN kismet_log_to_kml --in=./tests/assets/testdata.kismet_4 --out=./out.kml > /dev/null 21 | RUN cat ./out.kml | wc -l 22 | RUN xmlstarlet val ./out.kml 23 | 24 | # Test CSV script 25 | RUN kismet_log_to_csv --in=./tests/assets/testdata.kismet_4 --table=devices --out=./devices.csv > /dev/null 26 | RUN cat devices.csv | wc -l 27 | 28 | RUN kismet_log_to_csv --in=./tests/assets/testdata.kismet_4 --table=packets --out=packets.csv > /dev/null 29 | RUN cat packets.csv | wc -l 30 | 31 | RUN kismet_log_to_csv --in=./tests/assets/testdata.kismet_4 --table=datasources --out=./datasources.csv > /dev/null 32 | RUN cat datasources.csv | wc -l 33 | 34 | RUN kismet_log_to_csv --in=./tests/assets/testdata.kismet_4 --table=alerts --out=./alerts.csv > /dev/null 35 | RUN cat alerts.csv | wc -l 36 | 37 | # Test pcap script 38 | RUN kismet_log_to_pcap --in=./tests/assets/testdata.kismet_4 --out=./out.pcap > /dev/null 39 | RUN tcpdump -r ./out.pcap > /dev/null 40 | 41 | RUN kismet_log_devices_to_json --in=./tests/assets/testdata.kismet_4 --out=./out.json 42 | RUN cat ./out.json | wc -l 43 | 44 | RUN kismet_log_devices_to_filebeat_json --in=./tests/assets/testdata.kismet_4 | wc -l 45 | 46 | 47 | ### 48 | # Test v5 49 | 50 | # Test KML script 51 | RUN kismet_log_to_kml --in=./tests/assets/testdata.kismet_5 --out=./out.kml > /dev/null 52 | RUN cat ./out.kml | wc -l 53 | RUN xmlstarlet val ./out.kml 54 | 55 | # Test CSV script 56 | RUN kismet_log_to_csv --in=./tests/assets/testdata.kismet_5 --table=devices --out=./devices.csv > /dev/null 57 | RUN cat devices.csv | wc -l 58 | 59 | RUN kismet_log_to_csv --in=./tests/assets/testdata.kismet_5 --table=packets --out=packets.csv > /dev/null 60 | RUN cat packets.csv | wc -l 61 | 62 | RUN kismet_log_to_csv --in=./tests/assets/testdata.kismet_5 --table=datasources --out=./datasources.csv > /dev/null 63 | RUN cat 
datasources.csv | wc -l 64 | 65 | RUN kismet_log_to_csv --in=./tests/assets/testdata.kismet_5 --table=alerts --out=./alerts.csv > /dev/null 66 | RUN cat alerts.csv | wc -l 67 | 68 | # Test pcap script 69 | RUN kismet_log_to_pcap --in=./tests/assets/testdata.kismet_5 --out=./out.pcap > /dev/null 70 | RUN tcpdump -r ./out.pcap > /dev/null 71 | 72 | RUN kismet_log_devices_to_json --in=./tests/assets/testdata.kismet_5 --out=./out.json 73 | RUN cat ./out.json | wc -l 74 | 75 | RUN kismet_log_devices_to_filebeat_json --in=./tests/assets/testdata.kismet_5 | wc -l 76 | 77 | 78 | ###### Flush it! 79 | FROM ubuntu:18.04 80 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | GNU GENERAL PUBLIC LICENSE 2 | Version 2, June 1991 3 | 4 | Copyright (C) 1989, 1991 Free Software Foundation, Inc., 5 | 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA 6 | Everyone is permitted to copy and distribute verbatim copies 7 | of this license document, but changing it is not allowed. 8 | 9 | Preamble 10 | 11 | The licenses for most software are designed to take away your 12 | freedom to share and change it. By contrast, the GNU General Public 13 | License is intended to guarantee your freedom to share and change free 14 | software--to make sure the software is free for all its users. This 15 | General Public License applies to most of the Free Software 16 | Foundation's software and to any other program whose authors commit to 17 | using it. (Some other Free Software Foundation software is covered by 18 | the GNU Lesser General Public License instead.) You can apply it to 19 | your programs, too. 20 | 21 | When we speak of free software, we are referring to freedom, not 22 | price. 
Our General Public Licenses are designed to make sure that you 23 | have the freedom to distribute copies of free software (and charge for 24 | this service if you wish), that you receive source code or can get it 25 | if you want it, that you can change the software or use pieces of it 26 | in new free programs; and that you know you can do these things. 27 | 28 | To protect your rights, we need to make restrictions that forbid 29 | anyone to deny you these rights or to ask you to surrender the rights. 30 | These restrictions translate to certain responsibilities for you if you 31 | distribute copies of the software, or if you modify it. 32 | 33 | For example, if you distribute copies of such a program, whether 34 | gratis or for a fee, you must give the recipients all the rights that 35 | you have. You must make sure that they, too, receive or can get the 36 | source code. And you must show them these terms so they know their 37 | rights. 38 | 39 | We protect your rights with two steps: (1) copyright the software, and 40 | (2) offer you this license which gives you legal permission to copy, 41 | distribute and/or modify the software. 42 | 43 | Also, for each author's protection and ours, we want to make certain 44 | that everyone understands that there is no warranty for this free 45 | software. If the software is modified by someone else and passed on, we 46 | want its recipients to know that what they have is not the original, so 47 | that any problems introduced by others will not reflect on the original 48 | authors' reputations. 49 | 50 | Finally, any free program is threatened constantly by software 51 | patents. We wish to avoid the danger that redistributors of a free 52 | program will individually obtain patent licenses, in effect making the 53 | program proprietary. To prevent this, we have made it clear that any 54 | patent must be licensed for everyone's free use or not licensed at all. 
55 | 56 | The precise terms and conditions for copying, distribution and 57 | modification follow. 58 | 59 | GNU GENERAL PUBLIC LICENSE 60 | TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION 61 | 62 | 0. This License applies to any program or other work which contains 63 | a notice placed by the copyright holder saying it may be distributed 64 | under the terms of this General Public License. The "Program", below, 65 | refers to any such program or work, and a "work based on the Program" 66 | means either the Program or any derivative work under copyright law: 67 | that is to say, a work containing the Program or a portion of it, 68 | either verbatim or with modifications and/or translated into another 69 | language. (Hereinafter, translation is included without limitation in 70 | the term "modification".) Each licensee is addressed as "you". 71 | 72 | Activities other than copying, distribution and modification are not 73 | covered by this License; they are outside its scope. The act of 74 | running the Program is not restricted, and the output from the Program 75 | is covered only if its contents constitute a work based on the 76 | Program (independent of having been made by running the Program). 77 | Whether that is true depends on what the Program does. 78 | 79 | 1. You may copy and distribute verbatim copies of the Program's 80 | source code as you receive it, in any medium, provided that you 81 | conspicuously and appropriately publish on each copy an appropriate 82 | copyright notice and disclaimer of warranty; keep intact all the 83 | notices that refer to this License and to the absence of any warranty; 84 | and give any other recipients of the Program a copy of this License 85 | along with the Program. 86 | 87 | You may charge a fee for the physical act of transferring a copy, and 88 | you may at your option offer warranty protection in exchange for a fee. 89 | 90 | 2. 
You may modify your copy or copies of the Program or any portion 91 | of it, thus forming a work based on the Program, and copy and 92 | distribute such modifications or work under the terms of Section 1 93 | above, provided that you also meet all of these conditions: 94 | 95 | a) You must cause the modified files to carry prominent notices 96 | stating that you changed the files and the date of any change. 97 | 98 | b) You must cause any work that you distribute or publish, that in 99 | whole or in part contains or is derived from the Program or any 100 | part thereof, to be licensed as a whole at no charge to all third 101 | parties under the terms of this License. 102 | 103 | c) If the modified program normally reads commands interactively 104 | when run, you must cause it, when started running for such 105 | interactive use in the most ordinary way, to print or display an 106 | announcement including an appropriate copyright notice and a 107 | notice that there is no warranty (or else, saying that you provide 108 | a warranty) and that users may redistribute the program under 109 | these conditions, and telling the user how to view a copy of this 110 | License. (Exception: if the Program itself is interactive but 111 | does not normally print such an announcement, your work based on 112 | the Program is not required to print an announcement.) 113 | 114 | These requirements apply to the modified work as a whole. If 115 | identifiable sections of that work are not derived from the Program, 116 | and can be reasonably considered independent and separate works in 117 | themselves, then this License, and its terms, do not apply to those 118 | sections when you distribute them as separate works. 
But when you 119 | distribute the same sections as part of a whole which is a work based 120 | on the Program, the distribution of the whole must be on the terms of 121 | this License, whose permissions for other licensees extend to the 122 | entire whole, and thus to each and every part regardless of who wrote it. 123 | 124 | Thus, it is not the intent of this section to claim rights or contest 125 | your rights to work written entirely by you; rather, the intent is to 126 | exercise the right to control the distribution of derivative or 127 | collective works based on the Program. 128 | 129 | In addition, mere aggregation of another work not based on the Program 130 | with the Program (or with a work based on the Program) on a volume of 131 | a storage or distribution medium does not bring the other work under 132 | the scope of this License. 133 | 134 | 3. You may copy and distribute the Program (or a work based on it, 135 | under Section 2) in object code or executable form under the terms of 136 | Sections 1 and 2 above provided that you also do one of the following: 137 | 138 | a) Accompany it with the complete corresponding machine-readable 139 | source code, which must be distributed under the terms of Sections 140 | 1 and 2 above on a medium customarily used for software interchange; or, 141 | 142 | b) Accompany it with a written offer, valid for at least three 143 | years, to give any third party, for a charge no more than your 144 | cost of physically performing source distribution, a complete 145 | machine-readable copy of the corresponding source code, to be 146 | distributed under the terms of Sections 1 and 2 above on a medium 147 | customarily used for software interchange; or, 148 | 149 | c) Accompany it with the information you received as to the offer 150 | to distribute corresponding source code. 
(This alternative is 151 | allowed only for noncommercial distribution and only if you 152 | received the program in object code or executable form with such 153 | an offer, in accord with Subsection b above.) 154 | 155 | The source code for a work means the preferred form of the work for 156 | making modifications to it. For an executable work, complete source 157 | code means all the source code for all modules it contains, plus any 158 | associated interface definition files, plus the scripts used to 159 | control compilation and installation of the executable. However, as a 160 | special exception, the source code distributed need not include 161 | anything that is normally distributed (in either source or binary 162 | form) with the major components (compiler, kernel, and so on) of the 163 | operating system on which the executable runs, unless that component 164 | itself accompanies the executable. 165 | 166 | If distribution of executable or object code is made by offering 167 | access to copy from a designated place, then offering equivalent 168 | access to copy the source code from the same place counts as 169 | distribution of the source code, even though third parties are not 170 | compelled to copy the source along with the object code. 171 | 172 | 4. You may not copy, modify, sublicense, or distribute the Program 173 | except as expressly provided under this License. Any attempt 174 | otherwise to copy, modify, sublicense or distribute the Program is 175 | void, and will automatically terminate your rights under this License. 176 | However, parties who have received copies, or rights, from you under 177 | this License will not have their licenses terminated so long as such 178 | parties remain in full compliance. 179 | 180 | 5. You are not required to accept this License, since you have not 181 | signed it. However, nothing else grants you permission to modify or 182 | distribute the Program or its derivative works. 
These actions are 183 | prohibited by law if you do not accept this License. Therefore, by 184 | modifying or distributing the Program (or any work based on the 185 | Program), you indicate your acceptance of this License to do so, and 186 | all its terms and conditions for copying, distributing or modifying 187 | the Program or works based on it. 188 | 189 | 6. Each time you redistribute the Program (or any work based on the 190 | Program), the recipient automatically receives a license from the 191 | original licensor to copy, distribute or modify the Program subject to 192 | these terms and conditions. You may not impose any further 193 | restrictions on the recipients' exercise of the rights granted herein. 194 | You are not responsible for enforcing compliance by third parties to 195 | this License. 196 | 197 | 7. If, as a consequence of a court judgment or allegation of patent 198 | infringement or for any other reason (not limited to patent issues), 199 | conditions are imposed on you (whether by court order, agreement or 200 | otherwise) that contradict the conditions of this License, they do not 201 | excuse you from the conditions of this License. If you cannot 202 | distribute so as to satisfy simultaneously your obligations under this 203 | License and any other pertinent obligations, then as a consequence you 204 | may not distribute the Program at all. For example, if a patent 205 | license would not permit royalty-free redistribution of the Program by 206 | all those who receive copies directly or indirectly through you, then 207 | the only way you could satisfy both it and this License would be to 208 | refrain entirely from distribution of the Program. 209 | 210 | If any portion of this section is held invalid or unenforceable under 211 | any particular circumstance, the balance of the section is intended to 212 | apply and the section as a whole is intended to apply in other 213 | circumstances. 
214 | 215 | It is not the purpose of this section to induce you to infringe any 216 | patents or other property right claims or to contest validity of any 217 | such claims; this section has the sole purpose of protecting the 218 | integrity of the free software distribution system, which is 219 | implemented by public license practices. Many people have made 220 | generous contributions to the wide range of software distributed 221 | through that system in reliance on consistent application of that 222 | system; it is up to the author/donor to decide if he or she is willing 223 | to distribute software through any other system and a licensee cannot 224 | impose that choice. 225 | 226 | This section is intended to make thoroughly clear what is believed to 227 | be a consequence of the rest of this License. 228 | 229 | 8. If the distribution and/or use of the Program is restricted in 230 | certain countries either by patents or by copyrighted interfaces, the 231 | original copyright holder who places the Program under this License 232 | may add an explicit geographical distribution limitation excluding 233 | those countries, so that distribution is permitted only in or among 234 | countries not thus excluded. In such case, this License incorporates 235 | the limitation as if written in the body of this License. 236 | 237 | 9. The Free Software Foundation may publish revised and/or new versions 238 | of the General Public License from time to time. Such new versions will 239 | be similar in spirit to the present version, but may differ in detail to 240 | address new problems or concerns. 241 | 242 | Each version is given a distinguishing version number. If the Program 243 | specifies a version number of this License which applies to it and "any 244 | later version", you have the option of following the terms and conditions 245 | either of that version or of any later version published by the Free 246 | Software Foundation. 
If the Program does not specify a version number of 247 | this License, you may choose any version ever published by the Free Software 248 | Foundation. 249 | 250 | 10. If you wish to incorporate parts of the Program into other free 251 | programs whose distribution conditions are different, write to the author 252 | to ask for permission. For software which is copyrighted by the Free 253 | Software Foundation, write to the Free Software Foundation; we sometimes 254 | make exceptions for this. Our decision will be guided by the two goals 255 | of preserving the free status of all derivatives of our free software and 256 | of promoting the sharing and reuse of software generally. 257 | 258 | NO WARRANTY 259 | 260 | 11. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY 261 | FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN 262 | OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES 263 | PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED 264 | OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF 265 | MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS 266 | TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE 267 | PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, 268 | REPAIR OR CORRECTION. 269 | 270 | 12. 
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING 271 | WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR 272 | REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, 273 | INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING 274 | OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED 275 | TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY 276 | YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER 277 | PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE 278 | POSSIBILITY OF SUCH DAMAGES. 279 | 280 | END OF TERMS AND CONDITIONS 281 | 282 | How to Apply These Terms to Your New Programs 283 | 284 | If you develop a new program, and you want it to be of the greatest 285 | possible use to the public, the best way to achieve this is to make it 286 | free software which everyone can redistribute and change under these terms. 287 | 288 | To do so, attach the following notices to the program. It is safest 289 | to attach them to the start of each source file to most effectively 290 | convey the exclusion of warranty; and each file should have at least 291 | the "copyright" line and a pointer to where the full notice is found. 292 | 293 | 294 | Copyright (C) 295 | 296 | This program is free software; you can redistribute it and/or modify 297 | it under the terms of the GNU General Public License as published by 298 | the Free Software Foundation; either version 2 of the License, or 299 | (at your option) any later version. 300 | 301 | This program is distributed in the hope that it will be useful, 302 | but WITHOUT ANY WARRANTY; without even the implied warranty of 303 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 304 | GNU General Public License for more details. 
305 | 306 | You should have received a copy of the GNU General Public License along 307 | with this program; if not, write to the Free Software Foundation, Inc., 308 | 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 309 | 310 | Also add information on how to contact you by electronic and paper mail. 311 | 312 | If the program is interactive, make it output a short notice like this 313 | when it starts in an interactive mode: 314 | 315 | Gnomovision version 69, Copyright (C) year name of author 316 | Gnomovision comes with ABSOLUTELY NO WARRANTY; for details type `show w'. 317 | This is free software, and you are welcome to redistribute it 318 | under certain conditions; type `show c' for details. 319 | 320 | The hypothetical commands `show w' and `show c' should show the appropriate 321 | parts of the General Public License. Of course, the commands you use may 322 | be called something other than `show w' and `show c'; they could even be 323 | mouse-clicks or menu items--whatever suits your program. 324 | 325 | You should also get your employer (if you work as a programmer) or your 326 | school, if any, to sign a "copyright disclaimer" for the program, if 327 | necessary. Here is a sample; alter the names: 328 | 329 | Yoyodyne, Inc., hereby disclaims all copyright interest in the program 330 | `Gnomovision' (which makes passes at compilers) written by James Hacker. 331 | 332 | , 1 April 1989 333 | Ty Coon, President of Vice 334 | 335 | This General Public License does not permit incorporating your program into 336 | proprietary programs. If your program is a subroutine library, you may 337 | consider it more useful to permit linking proprietary applications with the 338 | library. If this is what you want to do, use the GNU Lesser General 339 | Public License instead of this License. 
340 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include *.rst 2 | -------------------------------------------------------------------------------- /README.rst: -------------------------------------------------------------------------------- 1 | Kismet database wrapper 2 | ======================= 3 | 4 | .. image:: https://readthedocs.org/projects/kismetdb/badge/?version=latest 5 | :target: https://kismetdb.readthedocs.io/en/latest/?badge=latest 6 | :alt: Documentation Status 7 | 8 | 9 | Quickstart 10 | ---------- 11 | 12 | Install from PyPI with ``pip install kismetdb`` 13 | 14 | Install from source with with ``pip install .`` 15 | 16 | In the Python interpreter: 17 | 18 | :: 19 | 20 | import json 21 | import kismetdb 22 | kismet_log_file = "kismet/database.here" 23 | alerts = kismetdb.Alerts(kismet_log_file) 24 | 25 | # Get alert metadata 26 | all_alerts_meta = alerts.get_meta() 27 | for alert in all_alerts_meta: 28 | print(alert["header"]) 29 | 30 | # Get payload from all alerts 31 | all_alerts = alerts.get_all() 32 | for alert in all_alerts: 33 | print(json.loads(alert["json"])["kismet.alert.text"]) 34 | 35 | 36 | Included scripts 37 | ---------------- 38 | 39 | Alongside the Python library, several commands are installed: 40 | 41 | * ``kismet_log_devices_to_json`` 42 | * ``kismet_log_to_csv`` 43 | * ``kismet_log_to_kml`` 44 | * ``kismet_log_to_pcap`` 45 | * ``kismet_log_devices_to_filebeat_json`` 46 | 47 | Following any of the prior commands with ``--help`` will provide details on 48 | usage. 49 | 50 | 51 | Testing 52 | ------- 53 | 54 | In order to test, you must place a kismet sqlite log file at 55 | ``tests/assets/testdata.kismet_4`` and ``tests/assets/testdata.kismet_5``, 56 | which are Kismet version 4 and 5 databases, respectively. 
57 | 58 | Testing happens in a Docker build process: 59 | 60 | Testing for Python 2.7: 61 | 62 | ``docker build .`` 63 | 64 | Testing for Python 3.6: 65 | 66 | ``docker build --build-arg PY_VER=3.6 .`` 67 | 68 | Testing for Python 3.7: 69 | 70 | ``docker build --build-arg PY_VER=3.7 .`` 71 | -------------------------------------------------------------------------------- /docs/Makefile: -------------------------------------------------------------------------------- 1 | # Minimal makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line. 5 | SPHINXOPTS = 6 | SPHINXBUILD = sphinx-build 7 | SOURCEDIR = source 8 | BUILDDIR = build 9 | 10 | # Put it first so that "make" without argument is like "make help". 11 | help: 12 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 13 | 14 | .PHONY: help Makefile 15 | 16 | # Catch-all target: route all unknown targets to Sphinx using the new 17 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). 18 | %: Makefile 19 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) -------------------------------------------------------------------------------- /docs/source/alerts.rst: -------------------------------------------------------------------------------- 1 | Alerts 2 | ====== 3 | 4 | .. toctree:: 5 | 6 | 7 | .. autoclass:: kismetdb.Alerts 8 | :members: get_meta, get_all, yield_meta, yield_all 9 | -------------------------------------------------------------------------------- /docs/source/changelog.rst: -------------------------------------------------------------------------------- 1 | .. include:: ../../CHANGELOG.rst 2 | -------------------------------------------------------------------------------- /docs/source/conf.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | import os 3 | import re 4 | # 5 | # Configuration file for the Sphinx documentation builder. 
6 | # 7 | # This file does only contain a selection of the most common options. For a 8 | # full list see the documentation: 9 | # http://www.sphinx-doc.org/en/master/config 10 | 11 | # -- Path setup -------------------------------------------------------------- 12 | 13 | # If extensions (or modules to document with autodoc) are in another directory, 14 | # add these directories to sys.path here. If the directory is relative to the 15 | # documentation root, use os.path.abspath to make it absolute, like shown here. 16 | # 17 | # import os 18 | # import sys 19 | # sys.path.insert(0, os.path.abspath('.')) 20 | 21 | # -- Helper Functions -------------------------------------------------------- 22 | 23 | 24 | def read_file(fname): 25 | """Return file contents as a string.""" 26 | return open(os.path.join(os.path.dirname(__file__), fname)).read() 27 | 28 | 29 | def get_version(): 30 | """Return version from init file.""" 31 | raw_init_file = read_file("../../{}/__init__.py".format(project)) 32 | rx_compiled = re.compile(r"\s*__version__\s*=\s*\"(\S+)\"") 33 | ver = rx_compiled.search(raw_init_file).group(1) 34 | return ver 35 | 36 | 37 | def get_author(): 38 | """Get author from __init__.py.""" 39 | raw_init_file = read_file("../../{}/__init__.py".format(project)) 40 | rx_compiled = re.compile(r"\s*__author__\s*=\s*\"(.*)\"") 41 | auth = rx_compiled.search(raw_init_file).group(1) 42 | return auth 43 | 44 | 45 | def get_copyright(): 46 | """Get copyright from __init__.py.""" 47 | raw_init_file = read_file("../../{}/__init__.py".format(project)) 48 | rx_compiled = re.compile(r"\s*__copyright__\s*=\s*\"(.*)\"") 49 | cright = rx_compiled.search(raw_init_file).group(1) 50 | return cright 51 | 52 | 53 | # -- Project information ----------------------------------------------------- 54 | project = u'kismetdb' 55 | 56 | # The short X.Y version 57 | version = get_version() 58 | # The full version, including alpha/beta/rc tags 59 | release = get_version() 60 | 61 | copyright = 
get_copyright() 62 | 63 | author = get_author() 64 | 65 | 66 | # -- General configuration --------------------------------------------------- 67 | 68 | # If your documentation needs a minimal Sphinx version, state it here. 69 | # 70 | # needs_sphinx = '1.0' 71 | 72 | # Add any Sphinx extension module names here, as strings. They can be 73 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom 74 | # ones. 75 | extensions = [ 76 | 'sphinx.ext.autodoc', 77 | 'sphinx.ext.napoleon' 78 | ] 79 | 80 | # Add any paths that contain templates here, relative to this directory. 81 | templates_path = ['_templates'] 82 | 83 | # The suffix(es) of source filenames. 84 | # You can specify multiple suffix as a list of string: 85 | # 86 | # source_suffix = ['.rst', '.md'] 87 | source_suffix = '.rst' 88 | 89 | # The master toctree document. 90 | master_doc = 'index' 91 | 92 | # The language for content autogenerated by Sphinx. Refer to documentation 93 | # for a list of supported languages. 94 | # 95 | # This is also used if you do content translation via gettext catalogs. 96 | # Usually you set "language" from the command line for these cases. 97 | language = None 98 | 99 | # List of patterns, relative to source directory, that match files and 100 | # directories to ignore when looking for source files. 101 | # This pattern also affects html_static_path and html_extra_path. 102 | exclude_patterns = [] 103 | 104 | # The name of the Pygments (syntax highlighting) style to use. 105 | pygments_style = None 106 | 107 | 108 | # -- Options for HTML output ------------------------------------------------- 109 | 110 | # The theme to use for HTML and HTML Help pages. See the documentation for 111 | # a list of builtin themes. 112 | # 113 | html_theme = 'alabaster' 114 | 115 | # Theme options are theme-specific and customize the look and feel of a theme 116 | # further. For a list of options available for each theme, see the 117 | # documentation. 
118 | # 119 | # html_theme_options = {} 120 | 121 | # Add any paths that contain custom static files (such as style sheets) here, 122 | # relative to this directory. They are copied after the builtin static files, 123 | # so a file named "default.css" will overwrite the builtin "default.css". 124 | html_static_path = ['_static'] 125 | 126 | # Custom sidebar templates, must be a dictionary that maps document names 127 | # to template names. 128 | # 129 | # The default sidebars (for documents that don't match any pattern) are 130 | # defined by theme itself. Builtin themes are using these templates by 131 | # default: ``['localtoc.html', 'relations.html', 'sourcelink.html', 132 | # 'searchbox.html']``. 133 | # 134 | # html_sidebars = {} 135 | 136 | 137 | # -- Options for HTMLHelp output --------------------------------------------- 138 | 139 | # Output file base name for HTML help builder. 140 | htmlhelp_basename = 'kismetdbdoc' 141 | 142 | 143 | # -- Options for LaTeX output ------------------------------------------------ 144 | 145 | latex_elements = { 146 | # The paper size ('letterpaper' or 'a4paper'). 147 | # 148 | # 'papersize': 'letterpaper', 149 | 150 | # The font size ('10pt', '11pt' or '12pt'). 151 | # 152 | # 'pointsize': '10pt', 153 | 154 | # Additional stuff for the LaTeX preamble. 155 | # 156 | # 'preamble': '', 157 | 158 | # Latex figure (float) alignment 159 | # 160 | # 'figure_align': 'htbp', 161 | } 162 | 163 | # Grouping the document tree into LaTeX files. List of tuples 164 | # (source start file, target name, title, 165 | # author, documentclass [howto, manual, or own class]). 166 | latex_documents = [ 167 | (master_doc, 'kismetdb.tex', u'kismetdb Documentation', 168 | u'Ash Wilson', 'manual'), 169 | ] 170 | 171 | 172 | # -- Options for manual page output ------------------------------------------ 173 | 174 | # One entry per manual page. List of tuples 175 | # (source start file, name, description, authors, manual section). 
176 | man_pages = [ 177 | (master_doc, 'kismetdb', u'kismetdb Documentation', 178 | [author], 1) 179 | ] 180 | 181 | 182 | # -- Options for Texinfo output ---------------------------------------------- 183 | 184 | # Grouping the document tree into Texinfo files. List of tuples 185 | # (source start file, target name, title, author, 186 | # dir menu entry, description, category) 187 | texinfo_documents = [ 188 | (master_doc, 'kismetdb', u'kismetdb Documentation', 189 | author, 'kismetdb', 'Python wrapper for Kismet database.', 190 | 'Miscellaneous'), 191 | ] 192 | 193 | 194 | # -- Options for Epub output ------------------------------------------------- 195 | 196 | # Bibliographic Dublin Core info. 197 | epub_title = project 198 | 199 | # The unique identifier of the text. This can be a ISBN number 200 | # or the project homepage. 201 | # 202 | # epub_identifier = '' 203 | 204 | # A unique identification for the text. 205 | # 206 | # epub_uid = '' 207 | 208 | # A list of files that should not be packed into the epub file. 209 | epub_exclude_files = ['search.html'] 210 | 211 | 212 | # -- Extension configuration ------------------------------------------------- 213 | -------------------------------------------------------------------------------- /docs/source/data_packets.rst: -------------------------------------------------------------------------------- 1 | DataPackets 2 | =========== 3 | 4 | .. toctree:: 5 | 6 | .. autoclass:: kismetdb.DataPackets 7 | :members: get_meta, get_all, yield_meta, yield_all 8 | -------------------------------------------------------------------------------- /docs/source/data_sources.rst: -------------------------------------------------------------------------------- 1 | DataSources 2 | =========== 3 | 4 | .. toctree:: 5 | 6 | .. 
autoclass:: kismetdb.DataSources 7 | :members: get_meta, get_all, yield_meta, yield_all 8 | -------------------------------------------------------------------------------- /docs/source/devices.rst: -------------------------------------------------------------------------------- 1 | Devices 2 | ======= 3 | 4 | .. toctree:: 5 | 6 | .. autoclass:: kismetdb.Devices 7 | :members: get_meta, get_all, yield_meta, yield_all 8 | -------------------------------------------------------------------------------- /docs/source/extras_kismet_log_devices_to_filebeat_json.rst: -------------------------------------------------------------------------------- 1 | kismet_log_devices_to_filebeat_json 2 | =================================== 3 | 4 | .. toctree:: 5 | 6 | Export from the ``devices`` table to stdout or append a json file. 7 | 8 | :: 9 | 10 | usage: kismet_log_devices_to_filebeat_json [-h] --in INFILE [--out OUTFILE] 11 | [--start-time STARTTIME] 12 | [--min-signal MINSIGNAL] 13 | 14 | optional arguments: 15 | -h, --help show this help message and exit 16 | --in INFILE Input (.kismet) file 17 | --out OUTFILE Output filename (optional) for appending. If unspecified, 18 | each record will be printed to stdout, one record per line, 19 | ideal for piping into filebeat. 20 | --start-time STARTTIME Only list devices seen after given time 21 | --min-signal MINSIGNAL Only list devices with a best signal higher than min-signal 22 | -------------------------------------------------------------------------------- /docs/source/extras_kismet_log_devices_to_json.rst: -------------------------------------------------------------------------------- 1 | kismet_log_devices_to_json 2 | ========================== 3 | 4 | .. toctree:: 5 | 6 | Export contents of devices table in Kismet DB to json file. 
7 | 8 | :: 9 | 10 | usage: kismet_log_devices_to_json [-h] [--in INFILE] [--out OUTFILE] 11 | [--start-time STARTTIME] 12 | [--min-signal MINSIGNAL] 13 | 14 | optional arguments: 15 | -h, --help show this help message and exit 16 | --in INFILE Input (.kismet) file 17 | --out OUTFILE Output filename (optional). If omitted, logs multi- 18 | line and indented (human-readable) to stdout. 19 | --start-time STARTTIME Only list devices seen after given time 20 | --min-signal MINSIGNAL Only list devices with a best signal higher than min-signal 21 | -------------------------------------------------------------------------------- /docs/source/extras_kismet_log_to_csv.rst: -------------------------------------------------------------------------------- 1 | kismet_log_to_csv 2 | ================= 3 | 4 | .. toctree:: 5 | 6 | Export contents of various tables in Kismet DB to csv file. 7 | 8 | :: 9 | 10 | usage: kismet_log_to_csv [-h] [--in INFILE] [--out OUTFILE] [--table SRCTABLE] 11 | 12 | optional arguments: 13 | -h, --help show this help message and exit 14 | --in INFILE Input (.kismet) file 15 | --out OUTFILE Output CSV filename 16 | --table SRCTABLE Select the table to export. The ``packets``, ``datasources``, 17 | and ``alerts`` tables are supported. Defaults to ``devices`` table. 18 | -------------------------------------------------------------------------------- /docs/source/extras_kismet_log_to_kml.rst: -------------------------------------------------------------------------------- 1 | kismet_log_to_kml 2 | ================= 3 | 4 | .. toctree:: 5 | 6 | Export contents of the ``devices`` table to KML. 
7 | 8 | :: 9 | 10 | usage: kismet_log_to_kml [-h] [--in INFILE] [--out OUTFILE] 11 | [--start-time STARTTIME] [--min-signal MINSIGNAL] 12 | [--strongest-point] [--title TITLE] [--ssid SSID] 13 | 14 | 15 | optional arguments: 16 | -h, --help show this help message and exit 17 | --in INFILE Input (.kismet) file 18 | --out OUTFILE Output filename (optional) 19 | --start-time STARTTIME Only list devices seen after given time 20 | --min-signal MINSIGNAL Only list devices with a best signal higher than min-signal 21 | --strongest-point Plot points based on strongest signal 22 | --title TITLE Title embedded in KML file 23 | --ssid SSID Only plot networks which match the SSID (or SSID regex) 24 | -------------------------------------------------------------------------------- /docs/source/extras_kismet_log_to_pcap.rst: -------------------------------------------------------------------------------- 1 | kismet_log_to_pcap 2 | ================== 3 | 4 | .. toctree:: 5 | 6 | Export captures from the ``packets`` table to .pcap file. 
7 | 8 | :: 9 | 10 | usage: kismet_log_to_pcap [-h] [--in INFILE] [--out OUTFILE] 11 | [--outtitle OUTTITLE] [--limit-packets LIMITPACKETS] 12 | [--source-uuid UUID] [--start-time STARTTIME] 13 | [--end-time ENDTIME] [--silent SILENT] 14 | [--min-signal MINSIGNAL] [--device-key DEVICEKEY] 15 | 16 | optional arguments: 17 | -h, --help show this help message and exit 18 | --in INFILE Input (.kismet) file 19 | --out OUTFILE Output filename (when exporting all packets) 20 | --outtitle OUTTITLE Output title (when limiting packets per file) 21 | --limit-packets LIMITPACKETS Generate multiple pcap files, limiting the number 22 | of packets per file 23 | --source-uuid UUID Limit packets to a specific data source (multiple 24 | --source-uuid options will match multiple datasources) 25 | --start-time STARTTIME Only convert packets recorded after start-time 26 | --end-time ENDTIME Only convert packets recorded before end-time 27 | --silent SILENT Silent operation (no status output) 28 | --min-signal MINSIGNAL Only convert packets with a signal greater than min-signal 29 | --device-key DEVICEKEY Only convert packets which are linked to the specified device 30 | key (multiple --device-key options will match multiple devices) 31 | -------------------------------------------------------------------------------- /docs/source/included_scripts.rst: -------------------------------------------------------------------------------- 1 | .. kismetdb documentation master file, created by 2 | sphinx-quickstart on Mon Jan 21 22:01:16 2019. 3 | You can adapt this file completely to your liking, but it should at least 4 | contain the root `toctree` directive. 5 | 6 | Extras 7 | ====== 8 | 9 | Some pre-built scripts are included for common use cases. 10 | 11 | .. 
toctree:: 12 | :maxdepth: 2 13 | :caption: Extras: 14 | 15 | extras_kismet_log_devices_to_filebeat_json 16 | extras_kismet_log_devices_to_json 17 | extras_kismet_log_to_csv 18 | extras_kismet_log_to_kml 19 | extras_kismet_log_to_pcap 20 | -------------------------------------------------------------------------------- /docs/source/index.rst: -------------------------------------------------------------------------------- 1 | .. kismetdb documentation master file, created by 2 | sphinx-quickstart on Mon Jan 21 22:01:16 2019. 3 | You can adapt this file completely to your liking, but it should at least 4 | contain the root `toctree` directive. 5 | 6 | kismetdb 7 | ======== 8 | 9 | .. include:: ../../README.rst 10 | 11 | 12 | Table of Contents 13 | ================= 14 | 15 | .. toctree:: 16 | :maxdepth: 2 17 | 18 | tables 19 | included_scripts 20 | testing 21 | updating 22 | changelog 23 | 24 | 25 | Indices and tables 26 | ================== 27 | 28 | * :ref:`genindex` 29 | * :ref:`modindex` 30 | * :ref:`search` 31 | -------------------------------------------------------------------------------- /docs/source/kismet.rst: -------------------------------------------------------------------------------- 1 | Kismet 2 | ====== 3 | 4 | .. toctree:: 5 | 6 | .. autoclass:: kismetdb.Kismet 7 | 8 | -------------------------------------------------------------------------------- /docs/source/packets.rst: -------------------------------------------------------------------------------- 1 | Packets 2 | ======= 3 | 4 | .. toctree:: 5 | 6 | .. autoclass:: kismetdb.Packets 7 | :members: get_meta, get_all, yield_meta, yield_all 8 | -------------------------------------------------------------------------------- /docs/source/snapshots.rst: -------------------------------------------------------------------------------- 1 | Snapshots 2 | ========= 3 | 4 | .. toctree:: 5 | 6 | .. 
autoclass:: kismetdb.Snapshots 7 | :members: get_meta, get_all, yield_meta, yield_all 8 | -------------------------------------------------------------------------------- /docs/source/tables.rst: -------------------------------------------------------------------------------- 1 | .. kismetdb documentation master file, created by 2 | sphinx-quickstart on Mon Jan 21 22:01:16 2019. 3 | You can adapt this file completely to your liking, but it should at least 4 | contain the root `toctree` directive. 5 | 6 | Tables 7 | ====== 8 | 9 | This wrapper presents tables as Python objects. 10 | 11 | .. toctree:: 12 | :maxdepth: 2 13 | :caption: Tables: 14 | 15 | alerts 16 | data_packets 17 | data_sources 18 | devices 19 | kismet 20 | packets 21 | snapshots 22 | -------------------------------------------------------------------------------- /docs/source/testing.rst: -------------------------------------------------------------------------------- 1 | Testing 2 | ======= 3 | 4 | .. toctree:: 5 | 6 | In order to test, you must place a kismet sqlite log file at 7 | ``tests/assets/testdata.kismet_4`` and ``tests/assets/testdata.kismet_5``, 8 | which are version 4 and version 5 log files, respectively. 9 | 10 | Testing happens in a Docker build process: 11 | 12 | Testing for Python 2.7: 13 | 14 | ``docker build .`` 15 | 16 | Testing for Python 3.6: 17 | 18 | ``docker build --build-arg PY_VER=3.6 .`` 19 | 20 | Testing for Python 3.7: 21 | 22 | ``docker build --build-arg PY_VER=3.7 .`` 23 | -------------------------------------------------------------------------------- /docs/source/updating.rst: -------------------------------------------------------------------------------- 1 | Updating and Extending 2 | ====================== 3 | 4 | .. toctree:: 5 | 6 | Over time, we expect that the database schema will change. To make 7 | transitioning to a new schema easier, each object is defined with the expected 8 | database columns defined in a class variable named ``column_names``. 
The bulk 9 | data field (which contains json or raw packet capture) is in a class variable 10 | named ``bulk_data_field``. The ``valid_kwargs`` class variable is used in 11 | parsing keyword arguments for filtering in the SQL query. These items tie into 12 | functions that live in the Utility class, and are used for forming the SQL 13 | that's used to query the Kismet DB. 14 | 15 | This tool follows calendar versioning, and new versions support DB schemas as 16 | far back as v4. 17 | 18 | As the database schema changes, the changes required to support a new version 19 | of the db will be required on a per-object basis. The following object 20 | attributes are used to contain version-specific schema information: 21 | 22 | * ``field_defaults``: This is used to force a default value for fields that are not found in older-than-current versions of the Kismet DB. 23 | * ``converters_reference``: This allows us to specify a converter so that if the data type changes between schema versions, we can force the older DB type to match the current DB version's type. 24 | * ``column_reference``: This describes the expected columns for each supported version of the kismet DB 25 | 26 | All objects representing tables inherit from the BaseInterface class: 27 | 28 | .. 
autoclass:: kismetdb.BaseInterface 29 | :members: get_meta, get_all, yield_meta, yield_all 30 | -------------------------------------------------------------------------------- /kismetdb/__init__.py: -------------------------------------------------------------------------------- 1 | from .alerts import Alerts # NOQA 2 | from .base_interface import BaseInterface # NOQA 3 | from .data_packets import DataPackets # NOQA 4 | from .data_sources import DataSources # NOQA 5 | from .devices import Devices # NOQA 6 | from .kismet import Kismet # NOQA 7 | from .messages import Messages # NOQA 8 | from .packets import Packets # NOQA 9 | from .snapshots import Snapshots # NOQA 10 | from .utility import Utility # NOQA 11 | 12 | __version__ = "2021.06.01" 13 | __author__ = "Mike Kershaw, Ash Wilson" 14 | __copyright__ = "2021, Mike Kershaw" 15 | __license__ = "GPL2" 16 | __email__ = "dragorn@kismetwireless.net" 17 | __version_int__ = "20210601" 18 | 19 | -------------------------------------------------------------------------------- /kismetdb/alerts.py: -------------------------------------------------------------------------------- 1 | """Alerts abstraction.""" 2 | from .base_interface import BaseInterface 3 | from .utility import Utility 4 | 5 | 6 | class Alerts(BaseInterface): 7 | """This object covers alerts stored in the Kismet DB. 8 | 9 | The ``Keyword Arguments`` section below applies only to methods which 10 | support them (as noted below), not to object instantiation. 11 | 12 | Args: 13 | file_location (str): Path to Kismet log file. 14 | 15 | Keyword args: 16 | ts_sec_gt (str, datetime, or (secs, u_secs)): Timestamp for starting 17 | query. 18 | phyname (str, list): Restrict results to this PHY. 19 | devmac (str, list): Restrict results to this MAC address. 20 | header (str, list): Restrict results to alerts of this type. 
21 | 22 | """ 23 | 24 | table_name = "alerts" 25 | bulk_data_field = "json" 26 | field_defaults = {4: {}, 27 | 5: {}, 28 | 6: {}, 29 | 7: {}, 30 | 8: {}} 31 | converters_reference = {4: {"lat": Utility.format_int_as_latlon, 32 | "lon": Utility.format_int_as_latlon}, 33 | 5: {}, 34 | 6: {}, 35 | 7: {}, 36 | 8: {}} 37 | column_reference = {4: ["ts_sec", "ts_usec", "phyname", "devmac", "lat", 38 | "lon", "header", "json"], 39 | 5: ["ts_sec", "ts_usec", "phyname", "devmac", "lat", 40 | "lon", "header", "json"], 41 | 6: ["ts_sec", "ts_usec", "phyname", "devmac", "lat", 42 | "lon", "header", "json"], 43 | 7: ["ts_sec", "ts_usec", "phyname", "devmac", "lat", 44 | "lon", "header", "json"], 45 | 8: ["ts_sec", "ts_usec", "phyname", "devmac", "lat", 46 | "lon", "header", "json"]} 47 | valid_kwargs = {"ts_sec_gt": Utility.generate_single_tstamp_secs_gt, 48 | "devmac": Utility.generate_multi_string_sql_eq, 49 | "header": Utility.generate_multi_string_sql_eq, 50 | "phyname": Utility.generate_multi_string_sql_eq} 51 | -------------------------------------------------------------------------------- /kismetdb/base_interface.py: -------------------------------------------------------------------------------- 1 | import os 2 | import sqlite3 3 | 4 | 5 | class BaseInterface(object): 6 | """Initialize with a path to a valid Kismet log file. 7 | 8 | Args: 9 | file_location (str): Path to Kismet log file. 10 | 11 | Attributes: 12 | bulk_data_field (str): Field containing bulk data (typically stored 13 | as a blob in the DB). This allows the `get_meta()` method to 14 | exclude information which may have a performance impact. This 15 | is especially true for the retrieval of packet captures. 16 | column_reference (dict): Top-level keys in this dictionary are version 17 | numbers, and are used to easily extend the schema for new versions. 18 | The ``column_names`` attribute is populated from this during 19 | instantiation. 
    def __init__(self, file_location):
        """Validate the Kismet log file and cache schema metadata.

        Args:
            file_location (str): Path to Kismet log file.

        Raises:
            # NOTE(review): error behavior depends on ``check_db_exists`` /
            # ``check_column_names``, which are defined elsewhere in this
            # file -- presumably they raise on a missing or malformed log;
            # TODO confirm the exact exception types.
        """
        self.check_db_exists(file_location)
        self.db_file = file_location
        # The schema version read here drives which column set and which
        # converters are selected for every later query.
        self.db_version = self.get_db_version()
        self.column_names = self.__get_latest_version(self.column_reference)
        self.check_column_names(file_location)
        # Pre-compute the two column lists (full vs. metadata-only) once,
        # so get_all()/get_meta() can build SQL without re-deriving them.
        self.full_query_column_names = self.get_query_column_names()
        self.meta_query_column_names = self.get_meta_query_column_names()
content[self.db_version] 68 | 69 | last = max(k for k, v in content.items()) 70 | 71 | return content[last] 72 | 73 | 74 | def get_db_version(self): 75 | sql = "SELECT db_version from KISMET" 76 | db = sqlite3.connect(self.db_file, detect_types=sqlite3.PARSE_COLNAMES) 77 | db.row_factory = sqlite3.Row 78 | cur = db.cursor() 79 | cur.execute(sql) 80 | row = cur.fetchone() 81 | result = row["db_version"] 82 | db.close() 83 | return int(result) 84 | 85 | def get_query_column_names(self): 86 | """Build query columns, which incorporate converters for bulk fields. 87 | 88 | This allows us to set query columns that may be different from the 89 | actual columns used in the database. This is necessary to incorporate 90 | some of the data massaging as it comes out of the database, so 91 | higher-level logic can parse it more easily. 92 | 93 | """ 94 | result = [] 95 | # converter_reference = self.converters_reference[self.db_version] 96 | converter_reference = self.__get_latest_version(self.converters_reference) 97 | # column_reference = self.column_reference[self.db_version] 98 | column_reference = self.__get_latest_version(self.column_reference) 99 | for col in column_reference: 100 | if col in converter_reference.keys(): 101 | result.append("{} as \"{} [{}]\"".format(col, col, col)) 102 | else: 103 | result.append(col) 104 | return result 105 | 106 | def get_meta_query_column_names(self): 107 | """Build query columns, which incorporate converters for bulk fields. 108 | 109 | This allows us to set query columns that may be different from the 110 | actual columns used in the database. This is necessary to incorporate 111 | some of the data massaging as it comes out of the database, so 112 | higher-level logic can parse it more easily. 
113 | 114 | """ 115 | result = [] 116 | #converter_reference = self.converters_reference[self.db_version] 117 | converter_reference = self.__get_latest_version(self.converters_reference) 118 | #column_reference = self.column_reference[self.db_version] 119 | column_reference = self.__get_latest_version(self.column_reference) 120 | for col in column_reference: 121 | if col == self.bulk_data_field: 122 | continue 123 | elif col in converter_reference.keys(): 124 | result.append("{} as \"{} [{}]\"".format(col, col, col)) 125 | else: 126 | result.append(col) 127 | return result 128 | 129 | def generate_parts_and_replacements(self, filters): 130 | """Return tuple with sql parts and replacements.""" 131 | query_parts = [] 132 | replacements = {} 133 | for k, v in list(filters.items()): 134 | if k not in self.valid_kwargs: 135 | continue 136 | results = self.valid_kwargs[k](k, v) 137 | query_parts.append(results[0]) 138 | replacements.update(results[1]) 139 | return (query_parts, replacements) 140 | 141 | def get_all(self, **kwargs): 142 | """Get all objects represented by this class from Kismet DB. 143 | 144 | Keyword arguments are described above, near the beginning of 145 | the class documentation. 146 | 147 | Returns: 148 | list: List of each json object from all rows returned from query. 149 | """ 150 | 151 | if kwargs: 152 | query_parts, replacements = self.generate_parts_and_replacements(kwargs) # NOQA 153 | else: 154 | query_parts = [] 155 | replacements = {} 156 | 157 | sql = "SELECT {} FROM {}".format(", ".join(self.full_query_column_names), # NOQA 158 | self.table_name) 159 | if query_parts: 160 | sql = sql + " WHERE " + " AND ".join(query_parts) 161 | return self.get_rows(self.column_names, sql, replacements) 162 | 163 | def get_meta(self, **kwargs): 164 | """Get metadata columns from DB, excluding bulk data columns. 165 | 166 | Keyword arguments are described above, near the beginning of 167 | the class documentation. 
168 | 169 | Returns: 170 | list: List of each json object from all rows returned from query. 171 | """ 172 | 173 | query_parts = [] 174 | replacements = {} 175 | columns = list(self.column_names) 176 | columns.remove(self.bulk_data_field) 177 | 178 | if kwargs: 179 | query_parts, replacements = self.generate_parts_and_replacements(kwargs) # NOQA 180 | else: 181 | query_parts = [] 182 | replacements = {} 183 | 184 | sql = "SELECT {} FROM {}".format(", ".join(self.meta_query_column_names), # NOQA 185 | self.table_name) 186 | if query_parts: 187 | sql = sql + " WHERE " + " AND ".join(query_parts) 188 | return self.get_rows(columns, sql, replacements) 189 | 190 | def yield_all(self, **kwargs): 191 | """Get all objects represented by this class from Kismet DB. 192 | 193 | Yields one row at a time. Keyword arguments are described above, 194 | near the beginning of the class documentation. 195 | 196 | Yields: 197 | dict: Dict representing one row from query. 198 | """ 199 | 200 | if kwargs: 201 | query_parts, replacements = self.generate_parts_and_replacements(kwargs) # NOQA 202 | else: 203 | query_parts = [] 204 | replacements = {} 205 | 206 | sql = "SELECT {} FROM {}".format(", ".join(self.full_query_column_names), # NOQA 207 | self.table_name) 208 | if query_parts: 209 | sql = sql + " WHERE " + " AND ".join(query_parts) 210 | for row in self.yield_rows(self.column_names, sql, replacements): 211 | yield row 212 | 213 | def yield_meta(self, **kwargs): 214 | """Yield metadata from DB, excluding bulk data columns. 215 | 216 | Yields one row at a time. Keyword arguments are described above, near 217 | the beginning of the class documentation. 218 | 219 | Returns: 220 | dict: Dict representing one row from query. 
221 | """ 222 | 223 | query_parts = [] 224 | replacements = {} 225 | columns = list(self.column_names) 226 | columns.remove(self.bulk_data_field) 227 | 228 | if kwargs: 229 | query_parts, replacements = self.generate_parts_and_replacements(kwargs) # NOQA 230 | else: 231 | query_parts = [] 232 | replacements = {} 233 | 234 | sql = "SELECT {} FROM {}".format(", ".join(self.meta_query_column_names), # NOQA 235 | self.table_name) 236 | if query_parts: 237 | sql = sql + " WHERE " + " AND ".join(query_parts) 238 | for row in self.yield_rows(columns, sql, replacements): 239 | yield row 240 | 241 | @classmethod 242 | def check_db_exists(cls, log_file): 243 | """Return None if able to open DB file, otherwise raise exception. 244 | 245 | Args: 246 | db_file (str): path to Kismet log file. 247 | 248 | Returns: 249 | None 250 | 251 | Raises: 252 | ValueError: File either does not exist, is not in sqlite3 format, 253 | or file is not a valid Kismet log file. 254 | """ 255 | if not os.path.isfile(log_file): 256 | err = "Could not find input file \"{}\"".format(log_file) 257 | raise ValueError(err) 258 | try: 259 | cls.get_column_names(log_file, "KISMET") 260 | except sqlite3.DatabaseError: 261 | err = "This is not a valid database file: {}".format(log_file) 262 | raise ValueError(err) 263 | except sqlite3.OperationalError: 264 | err = ("This is a valid sqlite3 file, but it does not appear to " 265 | "be a valid Kismet log file: {}".format(log_file)) 266 | raise ValueError(err) 267 | return 268 | 269 | @classmethod 270 | def get_column_names(cls, log_file, table_name): 271 | """Return a list of column names for `table_name` in `log_file`. 272 | 273 | Args: 274 | log_file (str): Path to Kismet log file. 275 | table_name (str): Name of table. 276 | 277 | Returns: 278 | list: List of column names. 
279 | """ 280 | db = sqlite3.connect(log_file) 281 | db.row_factory = sqlite3.Row 282 | cur = db.cursor() 283 | cur.execute("SELECT * from {} LIMIT 1".format(table_name)) 284 | cols = [d[0] for d in cur.description] 285 | db.close() 286 | return cols 287 | 288 | def check_column_names(self, log_file): 289 | """Check that schema is correct. 290 | 291 | Compares column names in DB to expected columns for abstraction. 292 | 293 | Returns: 294 | None 295 | 296 | Raises: 297 | ValueError: Column names are not what we expect them to be. 298 | """ 299 | column_names = self.get_column_names(log_file, self.table_name) 300 | if column_names != self.column_names: 301 | err = ("Schema mismatch in {} table, in file " 302 | "{}. Expected {}, got {}".format(self.table_name, log_file, 303 | self.column_names, 304 | column_names)) 305 | raise ValueError(err) 306 | return 307 | 308 | def get_rows(self, column_names, sql, replacements): 309 | """Return rows from query results as a list of dictionary objects. 310 | 311 | Args: 312 | column_names (list): List of column names. Used in constructing 313 | row dictionary (these are the dictionary keys). 314 | sql (str): SQL statement. 315 | replacements (dict): Replacements for SQL query. 316 | 317 | Returns: 318 | list: List of dictionary items. 
319 | """ 320 | # static_fields = self.field_defaults[self.db_version] 321 | static_fields = self.__get_latest_version(self.field_defaults) 322 | 323 | results = [] 324 | db = sqlite3.connect(self.db_file, detect_types=sqlite3.PARSE_COLNAMES) 325 | db.row_factory = sqlite3.Row 326 | converter_reference = self.__get_latest_version(self.converters_reference) 327 | for field_name, converter in list(converter_reference.items()): # NOQA 328 | sqlite3.register_converter(field_name, converter) 329 | cur = db.cursor() 330 | cur.execute(sql, replacements) 331 | for row in cur.fetchall(): 332 | result = {x: row[x] for x in column_names} 333 | result.update(static_fields) 334 | results.append(result.copy()) # NOQA 335 | db.close() 336 | return results 337 | 338 | def yield_rows(self, column_names, sql, replacements): 339 | """Yield rows from query results as a list of dictionary objects. 340 | 341 | Args: 342 | column_names (list): List of column names. Used in constructing 343 | row dictionary (these are the dictionary keys). 344 | sql (str): SQL statement. 345 | replacements (dict): Replacements for SQL query. 346 | 347 | Yields: 348 | dict: Dictionary object representing one row in result of SQL 349 | query. 
350 | """ 351 | #static_fields = self.field_defaults[self.db_version] 352 | static_fields = self.__get_latest_version(self.field_defaults) 353 | db = sqlite3.connect(self.db_file, detect_types=sqlite3.PARSE_COLNAMES) 354 | db.row_factory = sqlite3.Row 355 | converter_reference = self.__get_latest_version(self.converters_reference) 356 | for field_name, converter in list(converter_reference.items()): # NOQA 357 | sqlite3.register_converter(field_name, converter) 358 | cur = db.cursor() 359 | cur.execute(sql, replacements) 360 | moar_rows = True 361 | while moar_rows: 362 | try: 363 | row = cur.fetchone() 364 | if row is None: 365 | moar_rows = False 366 | else: 367 | result = {x: row[x] for x in column_names} 368 | result.update(static_fields) 369 | yield result.copy() # NOQA 370 | except KeyboardInterrupt: 371 | moar_rows = False 372 | print("Caught keyboard interrupt, exiting gracefully!") 373 | db.close() 374 | return 375 | -------------------------------------------------------------------------------- /kismetdb/data_packets.py: -------------------------------------------------------------------------------- 1 | """Data abstraction.""" 2 | from .base_interface import BaseInterface 3 | from .utility import Utility 4 | 5 | 6 | class DataPackets(BaseInterface): 7 | """This object covers non-packet data stored in the Kismet DB. 8 | 9 | 10 | The actual packet is stored in the `json` field of the dictionary 11 | returned for every row. This can be a very expensive abstraction to 12 | use if you don't employ some sort of filtering on your query. Consider 13 | using the `Packets.get_meta()` method to retrieve only the metadata 14 | (not the actual packet capture), which will preserve performance. 15 | The ``Keyword Arguments`` section below applies only to methods which 16 | support them (as noted below), not to object instantiation. 17 | 18 | 19 | Args: 20 | file_location (str): Path to Kismet log file. 
"""Data abstraction."""
from .base_interface import BaseInterface
from .utility import Utility


class DataPackets(BaseInterface):
    """Non-packet data records stored in the Kismet DB.

    The record payload is returned under the ``json`` key of each row,
    which can make unfiltered queries expensive. Use ``get_meta()`` to
    retrieve only the metadata (not the payload) when performance
    matters. The keyword arguments below apply to the query methods (as
    noted there), not to object instantiation.

    Args:
        file_location (str): Path to Kismet log file.

    Keyword args:
        ts_sec_lt (str, datetime.datetime): Match rows timestamped
            before this.
        ts_sec_gt (str, datetime.datetime): Match rows timestamped
            after this.
        phyname (str or list): Exact match against phy type.
        devmac (str or list): Exact match against device mac.
        datasource (str or list): Exact match against datasource UUID.
        type (str or list): Exact match against reported data type.

    """

    table_name = "data"
    bulk_data_field = "json"

    # v4 logs have no alt/speed/heading columns, so those fields are
    # statically defaulted; later schema versions carry real columns.
    field_defaults = {version: {} for version in (5, 6, 7, 8)}
    field_defaults[4] = {"alt": 0, "speed": 0, "heading": 0}

    # Every version parses the bulk "json" blob; v4 additionally stores
    # lat/lon as scaled integers needing conversion on read.
    converters_reference = {version: {"json": Utility.device_field_parser}
                            for version in (5, 6, 7, 8)}
    converters_reference[4] = {"lat": Utility.format_int_as_latlon,
                               "lon": Utility.format_int_as_latlon,
                               "json": Utility.device_field_parser}

    column_reference = {
        version: ["ts_sec", "ts_usec", "phyname", "devmac",
                  "lat", "lon", "alt", "speed", "heading",
                  "datasource", "type", "json"]
        for version in (5, 6, 7, 8)
    }
    column_reference[4] = ["ts_sec", "ts_usec", "phyname", "devmac",
                           "lat", "lon",
                           "datasource", "type", "json"]

    valid_kwargs = {
        "ts_sec_lt": Utility.generate_single_tstamp_secs_lt,
        "ts_sec_gt": Utility.generate_single_tstamp_secs_gt,
        "phyname": Utility.generate_multi_string_sql_eq,
        "devmac": Utility.generate_multi_string_sql_eq,
        "datasource": Utility.generate_multi_string_sql_eq,
        "type": Utility.generate_multi_string_sql_eq,
    }
"""DataSources abstraction."""
from .base_interface import BaseInterface
from .utility import Utility


class DataSources(BaseInterface):
    """Capture data sources recorded in the Kismet DB.

    The keyword arguments below apply to the query methods (as noted
    there), not to object instantiation.

    Args:
        file_location (str): Path to Kismet log file.

    Keyword args:
        uuid (str, list): UUID of data source.
        typestring (str, list): Type of data source.
        definition (str, list): Data source definition.
        name (str, list): Name of data source.
        interface (str, list): Interface associated with data source.
    """

    table_name = "datasources"
    bulk_data_field = "json"

    # The datasources schema, defaults, and converters are identical
    # across every supported DB version.
    field_defaults = {version: {} for version in (4, 5, 6, 7, 8)}
    converters_reference = {version: {"json": Utility.device_field_parser}
                            for version in (4, 5, 6, 7, 8)}
    column_reference = {
        version: ["uuid", "typestring", "definition", "name",
                  "interface", "json"]
        for version in (4, 5, 6, 7, 8)
    }

    # Every supported filter is a simple multi-string equality match.
    valid_kwargs = {key: Utility.generate_multi_string_sql_eq
                    for key in ("uuid", "typestring", "definition",
                                "name", "interface")}
"""Devices abstraction."""
from .base_interface import BaseInterface
from .utility import Utility


class Devices(BaseInterface):
    """Devices tracked in the Kismet DB.

    Unlike most other abstractions, which put the record detail under a
    ``json`` key, the detail for a device row lives under the ``device``
    key. The keyword arguments below apply to the query methods (as
    noted there), not to object instantiation.

    Args:
        file_location (str): Path to Kismet log file.

    Keyword args:
        first_time_lt (str, datetime.datetime): First-observation
            timestamp is before this time.
        first_time_gt (str, datetime.datetime): First-observation
            timestamp is after this time.
        last_time_lt (str, datetime.datetime): Most recent observation
            timestamp is before this time.
        last_time_gt (str, datetime.datetime): Most recent observation
            timestamp is after this time.
        devkey (str, list): Exact match for this devkey.
        phyname (str, list): Exact match for this phyname.
        devmac (str, list): Exact match for this device MAC.
        type (str, list): Exact match for this device type.
        strongest_signal_gt (str, int): Strongest signal is greater than
            the integer representation of this value.
        strongest_signal_lt (str, int): Strongest signal is less than
            the integer representation of this value.
        bytes_data_gt (str, int): At least this many bytes of data seen.
        bytes_data_lt (str, int): At most this many bytes of data seen.

    """

    table_name = "devices"
    bulk_data_field = "device"

    field_defaults = {version: {} for version in (4, 5, 6, 7, 8)}

    # Every version parses the bulk "device" blob; v4 additionally
    # stores its lat/lon aggregates as scaled integers.
    converters_reference = {version: {"device": Utility.device_field_parser}
                            for version in (4, 5, 6, 7, 8)}
    converters_reference[4].update(
        {"min_lat": Utility.format_int_as_latlon,
         "min_lon": Utility.format_int_as_latlon,
         "max_lat": Utility.format_int_as_latlon,
         "max_lon": Utility.format_int_as_latlon,
         "avg_lat": Utility.format_int_as_latlon,
         "avg_lon": Utility.format_int_as_latlon})

    # The devices schema is identical across all supported versions.
    column_reference = {
        version: ["first_time", "last_time", "devkey", "phyname",
                  "devmac", "strongest_signal", "min_lat", "min_lon",
                  "max_lat", "max_lon", "avg_lat", "avg_lon",
                  "bytes_data", "type", "device"]
        for version in (4, 5, 6, 7, 8)
    }

    valid_kwargs = {
        "first_time_lt": Utility.generate_single_tstamp_secs_lt,
        "first_time_gt": Utility.generate_single_tstamp_secs_gt,
        "last_time_lt": Utility.generate_single_tstamp_secs_lt,
        "last_time_gt": Utility.generate_single_tstamp_secs_gt,
        "devkey": Utility.generate_multi_string_sql_eq,
        "phyname": Utility.generate_multi_string_sql_eq,
        "devmac": Utility.generate_multi_string_sql_eq,
        "type": Utility.generate_multi_string_sql_eq,
        "strongest_signal_lt": Utility.generate_single_int_sql_lt,
        "strongest_signal_gt": Utility.generate_single_int_sql_gt,
        "bytes_data_lt": Utility.generate_single_int_sql_lt,
        "bytes_data_gt": Utility.generate_single_int_sql_gt,
    }
"""Kismet server info abstraction."""
from .base_interface import BaseInterface
from .snapshots import Snapshots
import json
import sqlite3
from .utility import Utility


class KismetException(Exception):
    """Raised when the kismetdb file lacks required Kismet data."""
    pass


class Kismet(Snapshots):
    """Kismet server info, extracted from the first SYSTEM snapshot.

    All values reference the Kismet server which generated this log.

    Args:
        file_location (str): Path to Kismet log file.

    Attributes:
        kismet_version (str): Kismet version
        kismet_git (str): Kismet git commit string
        kismet_uuid (str): UUID of server
        kismet_name (str): User-supplied name of server
        kismet_location (str): User-supplied server location
        kismet_description (str): User-supplied server description
        kismet_user (str): Username server was running under

    Raises:
        KismetException: If the log contains no SYSTEM snapshot.
    """

    def __init__(self, filepath):
        super(Kismet, self).__init__(filepath)

        sql = "SELECT json FROM snapshots WHERE snaptype = 'SYSTEM' LIMIT 1"
        db = sqlite3.connect(self.db_file, detect_types=sqlite3.PARSE_COLNAMES)
        db.row_factory = sqlite3.Row
        try:
            cur = db.cursor()
            cur.execute(sql)
            row = cur.fetchone()
            # Identity check against None (PEP 8); the original compared
            # with ``==`` and leaked the connection on the raise below.
            if row is None:
                raise KismetException("No system snapshots in kismetdb log; "
                                      "malformed, or runt log likely")
            result = row["json"]
        finally:
            # Always release the sqlite handle, even when raising.
            db.close()

        system_j = json.loads(result)

        self.kismet_version = system_j['kismet.system.version']
        self.kismet_git = system_j['kismet.system.git']
        self.kismet_uuid = system_j['kismet.system.server_uuid']
        self.kismet_name = system_j['kismet.system.server_name']
        self.kismet_location = system_j['kismet.system.server_location']
        self.kismet_description = system_j['kismet.system.server_description']
        self.kismet_user = system_j['kismet.system.user']
"""Messages abstraction."""
from .base_interface import BaseInterface
from .utility import Utility


class Messages(BaseInterface):
    """Log messages stored in the Kismet DB.

    The keyword arguments below apply to the query methods (as noted
    there), not to object instantiation.

    Args:
        file_location (str): Path to Kismet log file.

    Keyword args:
        ts_sec_gt (str, datetime, or (secs, u_secs)): Timestamp for
            starting query.
        ts_sec_lt (str, datetime, or (secs, usecs)): Timestamp for
            ending query.
        lat_gt (str, float): Bounding minimum latitude.
        lat_lt (str, float): Bounding maximum latitude.
        lon_gt (str, float): Bounding minimum longitude.
        lon_lt (str, float): Bounding maximum longitude.
        msgtype (str): Message type.
    """

    table_name = "messages"
    bulk_data_field = ""

    field_defaults = {version: {} for version in (4, 5, 6, 7, 8)}

    # Only v4 stores lat/lon as scaled integers needing conversion.
    converters_reference = {version: {} for version in (5, 6, 7, 8)}
    converters_reference[4] = {"lat": Utility.format_int_as_latlon,
                               "lon": Utility.format_int_as_latlon}

    # The messages schema is identical across all supported versions.
    column_reference = {
        version: ["ts_sec", "lat", "lon", "msgtype", "message"]
        for version in (4, 5, 6, 7, 8)
    }

    valid_kwargs = {
        "ts_sec_gt": Utility.generate_single_tstamp_secs_gt,
        "ts_sec_lt": Utility.generate_single_tstamp_secs_lt,
        "lat_gt": Utility.generate_single_float_sql_gt,
        "lon_gt": Utility.generate_single_float_sql_gt,
        "lat_lt": Utility.generate_single_float_sql_lt,
        "lon_lt": Utility.generate_single_float_sql_lt,
        "msgtype": Utility.generate_single_string_sql_eq,
    }
"""Packets abstraction."""
from .base_interface import BaseInterface
from .utility import Utility


class Packets(BaseInterface):
    """Raw packet captures stored in the Kismet DB.

    The capture itself is returned under the ``packet`` key of each row,
    which makes unfiltered queries very expensive. Use ``get_meta()`` to
    pull only the metadata when the raw capture is not required. The
    keyword arguments below apply to the query methods (as noted there),
    not to object instantiation.

    Args:
        file_location (str): Path to Kismet log file.

    Keyword args:
        ts_sec_lt (str, datetime.datetime): Match packets timestamped
            before this.
        ts_sec_gt (str, datetime.datetime): Match packets timestamped
            after this.
        phyname (str or list): Exact match against PHY name.
        sourcemac (str or list): Exact match against source MAC address.
        destmac (str or list): Exact match against destination MAC address.
        transmac (str or list): Exact match against trans mac.
        devkey (str or list): Exact match against devkey.
        datasource (str or list): Exact match against datasource.
        min_signal (str or int): Minimum signal.
        dlt_gt (str or int): Minimum DLT.
        tags (str or list): Match packets using a tag or list of tags.
        datarate (real): Exact match against the datarate.
        datarate_lt (real): Match packets with a datarate below this.
        datarate_gt (real): Match packets with a datarate above this.
        hash (str): Exact match against CRC32 hash.
        packetid (int): Exact match against packetid.

    """

    table_name = "packets"
    bulk_data_field = "packet"

    # Only v4 logs need static defaults; later schema versions carry
    # alt/speed/heading as real columns.
    field_defaults = {version: {} for version in (5, 6, 7, 8)}
    field_defaults[4] = {"alt": 0, "speed": 0, "heading": 0}

    # v4 stored lat/lon as scaled integers; later versions do not need
    # any conversion.
    converters_reference = {version: {} for version in (5, 6, 7, 8)}
    converters_reference[4] = {"lat": Utility.format_int_as_latlon,
                               "lon": Utility.format_int_as_latlon}

    # Versions 6-8 each extend the previous schema, so build the newer
    # column lists from the older ones.
    column_reference = {}
    column_reference[4] = ["ts_sec", "ts_usec", "phyname", "sourcemac",
                           "destmac", "transmac", "frequency", "devkey",
                           "lat", "lon", "packet_len", "signal",
                           "datasource", "dlt", "packet", "error"]
    column_reference[5] = ["ts_sec", "ts_usec", "phyname", "sourcemac",
                           "destmac", "transmac", "frequency", "devkey",
                           "lat", "lon", "alt", "speed", "heading",
                           "packet_len", "signal", "datasource", "dlt",
                           "packet", "error"]
    column_reference[6] = column_reference[5] + ["tags"]
    column_reference[7] = column_reference[6] + ["datarate"]
    column_reference[8] = column_reference[7] + ["hash", "packetid"]

    # NOTE(review): "devmac" and "min_signal" do not match any column in
    # the packets schemas above -- presumably Utility maps them to real
    # columns; verify against kismetdb/utility.py before relying on them.
    valid_kwargs = {
        "ts_sec_lt": Utility.generate_single_tstamp_secs_lt,
        "ts_sec_gt": Utility.generate_single_tstamp_secs_gt,
        "devkey": Utility.generate_multi_string_sql_eq,
        "phyname": Utility.generate_multi_string_sql_eq,
        "sourcemac": Utility.generate_multi_string_sql_eq,
        "destmac": Utility.generate_multi_string_sql_eq,
        "transmac": Utility.generate_multi_string_sql_eq,
        "devmac": Utility.generate_multi_string_sql_eq,
        "datasource": Utility.generate_multi_string_sql_eq,
        "min_signal": Utility.generate_single_int_sql_gt,
        "dlt_gt": Utility.generate_single_int_sql_gt,
        "tags": Utility.generate_multi_string_sql_eq,
        "datarate": Utility.generate_single_float_sql_eq,
        "datarate_lt": Utility.generate_single_float_sql_lt,
        "datarate_gt": Utility.generate_single_float_sql_gt,
        "hash": Utility.generate_single_string_sql_eq,
        "packetid": Utility.generate_single_int_sql_eq,
    }
def main():
    """Dump kismetdb device records as one JSON document per line.

    Records are appended to ``--out`` when given, otherwise printed to
    stdout, one record per line, ideal for piping into filebeat.
    """
    parser = argparse.ArgumentParser(description="Kismet to Filebeat json")
    parser.add_argument("--in", action="store", dest="infile",
                        required=True, help="Input (.kismet) file")
    parser.add_argument("--out", action="store", dest="outfile",
                        help=("Output filename (optional) for appending. If "
                              "unspecified, each record will be printed to "
                              "stdout, one record per line, ideal for piping "
                              "into filebeat."))
    parser.add_argument("--start-time", action="store", dest="starttime",
                        help="Only list devices seen after given time")
    parser.add_argument("--min-signal", action="store", dest="minsignal",
                        help=("Only list devices with a best signal higher "
                              "than min-signal"))

    results = parser.parse_args()
    query_args = {}

    if not os.path.isfile(results.infile):
        print("Could not find input file \"{}\"".format(results.infile))
        sys.exit(1)

    if results.starttime:
        query_args["first_time_gt"] = results.starttime

    if results.minsignal:
        query_args["strongest_signal_gt"] = results.minsignal

    devices_abstraction = kismetdb.Devices(results.infile)

    # Open the output file once (the original re-opened it for every
    # record and never closed the handles) and ensure it is closed.
    logf = open(results.outfile, "a") if results.outfile else None
    try:
        for row in devices_abstraction.yield_all(**query_args):
            stripped_device = strip_old_empty_trees(json.loads(row["device"]))
            line = json.dumps(stripped_device, sort_keys=True)
            if logf:
                # Newline-terminate so filebeat sees one record per line.
                logf.write(line + "\n")
            else:
                print(line)
    finally:
        if logf:
            logf.close()


def strip_old_empty_trees(obj):
    """Recursively remove legacy placeholder fields that are set to ``0``.

    Borrowed from ``log_tools/elk/kismet_log_to_elk.py`` in Kismet repo.
    """
    empty_trees = [
        "kismet.device.base.location",
        "kismet.device.base.datasize.rrd",
        "kismet.device.base.location_cloud",
        "kismet.device.base.packet.bin.250",
        "kismet.device.base.packet.bin.500",
        "kismet.device.base.packet.bin.1000",
        "kismet.device.base.packet.bin.1500",
        "kismet.device.base.packet.bin.jumbo",
        "kismet.common.signal.signal_rrd",
        "kismet.common.signal.peak_loc",
        "dot11.client.location",
        "client.location",
        "dot11.client.ipdata",
        "dot11.advertisedssid.location",
        "dot11.probedssid.location",
        "kismet.common.seenby.signal"
    ]
    try:
        # Iterate over a snapshot of the keys: popping from the dict
        # while iterating its live keys() view raises RuntimeError on
        # Python 3.
        for k in list(obj.keys()):
            if k in empty_trees and obj[k] == 0:
                obj.pop(k)
            else:
                obj[k] = strip_old_empty_trees(obj[k])
        return obj
    except AttributeError:
        # Non-dict values (lists, scalars) are returned unchanged.
        return obj


if __name__ == "__main__":
    main()
If omitted, logs " 17 | "multi-line and indented (human-readable) " 18 | "to stdout.")) 19 | parser.add_argument("--start-time", action="store", dest="starttime", 20 | help="Only list devices seen after given time") 21 | parser.add_argument("--min-signal", action="store", dest="minsignal", 22 | help=("Only list devices with a best signal higher " 23 | "than min-signal")) 24 | 25 | results = parser.parse_args() 26 | query_args = {} 27 | 28 | if results.infile is None: 29 | print("Expected --in [file]") 30 | sys.exit(1) 31 | 32 | if not os.path.isfile(results.infile): 33 | print("Could not find input file \"{}\"".format(results.infile)) 34 | sys.exit(1) 35 | 36 | if results.starttime: 37 | query_args["first_time_gt"] = results.starttime 38 | 39 | if results.minsignal: 40 | query_args["strongest_signal_gt"] = results.minsignal 41 | logf = None 42 | 43 | devices_abstraction = kismetdb.Devices(results.infile) 44 | 45 | devs = [row["device"] for row in devices_abstraction.get_all(**query_args)] 46 | 47 | if results.outfile: 48 | logf = open(results.outfile, "w") 49 | logf.write(json.dumps(devs, sort_keys=True, indent=4, 50 | separators=(",", ": "))) 51 | else: 52 | print(json.dumps(devs, sort_keys=True, indent=4, 53 | separators=(",", ": "))) 54 | 55 | 56 | if __name__ == "__main__": 57 | main() 58 | -------------------------------------------------------------------------------- /kismetdb/scripts/log_to_csv.py: -------------------------------------------------------------------------------- 1 | """Export metadata from the Kismet DB to CSV.""" 2 | 3 | import argparse 4 | import os 5 | import sys 6 | import csv 7 | 8 | import kismetdb 9 | 10 | 11 | def main(): 12 | parser = argparse.ArgumentParser(description="Kismet to CSV Log Converter") 13 | parser.add_argument("--in", action="store", dest="infile", 14 | help="Input (.kismet) file") 15 | parser.add_argument("--out", action="store", dest="outfile", 16 | help="Output CSV filename") 17 | parser.add_argument("--table", 
def main():
    """CLI entry point: export one Kismet DB table to a tab-delimited CSV.

    Supported tables are ``devices`` (default), ``packets``,
    ``datasources`` and ``alerts``; only metadata columns are exported.
    """
    parser = argparse.ArgumentParser(description="Kismet to CSV Log Converter")
    parser.add_argument("--in", action="store", dest="infile",
                        help="Input (.kismet) file")
    parser.add_argument("--out", action="store", dest="outfile",
                        help="Output CSV filename")
    parser.add_argument("--table", action="store", dest="srctable",
                        help="Select the table to output")

    results = parser.parse_args()
    replacements = {}

    if results.infile is None:
        print("Expected --in [file]")
        sys.exit(1)

    if not os.path.isfile(results.infile):
        print("Could not find input file \"{}\"".format(results.infile))
        sys.exit(1)

    if results.srctable is None:
        results.srctable = "devices"
    replacements["srctable"] = results.srctable

    # Map each table name to its abstraction class and exported columns.
    table_map = {
        "devices": (kismetdb.Devices,
                    ["first_time", "last_time", "devkey", "phyname",
                     "devmac", "strongest_signal", "min_lat", "min_lon",
                     "max_lat", "max_lon", "avg_lat", "avg_lon",
                     "bytes_data", "type"]),
        "packets": (kismetdb.Packets,
                    ["ts_sec", "ts_usec", "phyname", "sourcemac", "destmac",
                     "transmac", "frequency", "devkey", "lat", "lon",
                     "packet_len", "signal", "datasource", "dlt", "error"]),
        "datasources": (kismetdb.DataSources,
                        ["uuid", "typestring", "definition", "name",
                         "interface"]),
        "alerts": (kismetdb.Alerts,
                   ["ts_sec", "ts_usec", "phyname", "devmac", "lat",
                    "lon", "header"]),
    }

    if results.srctable not in table_map:
        print("Invalid table entered, please retry with either devices, "
              "packets, datasources or alerts.")
        sys.exit(1)

    abstraction_cls, column_names = table_map[results.srctable]
    table_abstraction = abstraction_cls(results.infile)

    if results.outfile is None:
        results.outfile = "{}-{}.csv".format(results.infile,
                                             replacements["srctable"])

    # Python 2's csv module requires binary mode; Python 3 requires text.
    csv_file_mode = "wb" if sys.version_info[0] < 3 else "w"

    with open(results.outfile, csv_file_mode) as csvfile:
        writer = csv.DictWriter(csvfile, delimiter="\t",
                                extrasaction="ignore",
                                fieldnames=column_names)
        writer.writeheader()
        nrows = 0
        for row in table_abstraction.yield_meta():
            writer.writerow(row)
            nrows += 1
            if nrows % 1000 == 0:
                print("Wrote {} rows".format(nrows))
| kml = simplekml.Kml() 49 | 50 | kml.document.name = results.title 51 | 52 | num_plotted = 0 53 | 54 | devices = kismetdb.Devices(results.infile) 55 | 56 | for device in devices.yield_all(**query_args): 57 | try: 58 | dev = json.loads(device["device"]) 59 | # Check for the SSID if we"re doing that; allow it to trip 60 | # a KeyError and jump out of processing this device 61 | if results.ssid is not None: 62 | matched = False 63 | for s in dev["dot11.device"]["dot11.device.advertised_ssid_map"]: # NOQA 64 | adv_ssid = dev["dot11.device"]["dot11.device.advertised_ssid_map"][s]["dot11.advertisedssid.ssid"] # NOQA 65 | if re.match(results.ssid, adv_ssid): 66 | matched = True 67 | break 68 | 69 | if not matched: 70 | print("Not a match on SSID!") 71 | continue 72 | 73 | loc = None 74 | 75 | if results.strongest: 76 | loc = dev["kismet.device.base.signal"]["kismet.common.signal.peak_loc"] # NOQA 77 | else: 78 | loc = dev["kismet.device.base.location"]["kismet.common.location.avg_loc"] # NOQA 79 | 80 | if loc == 0: 81 | print("Null island...") 82 | continue 83 | 84 | mac = dev["kismet.device.base.macaddr"] 85 | 86 | title = "" 87 | 88 | if "kismet.device.base.name" in dev: 89 | title = dev["kismet.device.base.name"] 90 | 91 | if title == "": 92 | if "dot11.device" in dev: 93 | if "dot11.device.last_beaconed_ssid" in dev["dot11.device"]: # NOQA 94 | title = dev["dot11.device"]["dot11.device.last_beaconed_ssid"] # NOQA 95 | 96 | if title == "": 97 | title = mac 98 | 99 | kml.newpoint(name=title, 100 | coords=[(loc["kismet.common.location.lon"], 101 | loc["kismet.common.location.lat"], 102 | loc["kismet.common.location.alt"])]) 103 | 104 | num_plotted = num_plotted + 1 105 | except TypeError: 106 | continue 107 | except KeyError: 108 | continue 109 | kml.save(results.outfile) 110 | print("Exported {} devices to {}".format(num_plotted, results.outfile)) 111 | 112 | 113 | if __name__ == "__main__": 114 | main() 115 | 
def main():
    """CLI entry point: export packet captures from a Kismet DB to pcap.

    Writes either one pcap file (``--out``) or a numbered series of files
    capped at ``--limit-packets`` packets each (``--outtitle``).
    """

    # Write a raw pcap global header with the given link type (DLT).
    def write_pcap_header(f, dlt):
        hdr = struct.pack("IHHiIII",
                          0xa1b2c3d4,  # magic
                          2, 4,  # version
                          0,  # offset
                          0,  # sigfigs
                          8192,  # max packet len
                          int(dlt)  # packet type
                          )

        f.write(hdr)

    # Write a single pcap record header followed by the raw frame bytes.
    def write_pcap_packet(f, timeval_s, timeval_us, packet_bytes):
        packet_len = len(packet_bytes)
        pkt = struct.pack("IIII",
                          timeval_s,
                          timeval_us,
                          packet_len,
                          packet_len
                          )
        f.write(pkt)
        f.write(packet_bytes)

    parser = argparse.ArgumentParser(description=("Kismet to Pcap "
                                                  "Log Converter"))
    parser.add_argument("--in", action="store", dest="infile",
                        help="Input (.kismet) file")
    parser.add_argument("--out", action="store", dest="outfile",
                        help="Output filename (when exporting all packets)")
    parser.add_argument("--outtitle", action="store", dest="outtitle",
                        help="Output title (when limiting packets per file)")
    parser.add_argument("--limit-packets", action="store", dest="limitpackets",
                        help=("Generate multiple pcap files, limiting the "
                              "number of packets per file"))
    parser.add_argument("--source-uuid", action="append", dest="uuid",
                        help=("Limit packets to a specific data source "
                              "(multiple --source-uuid options will match "
                              "multiple datasources)"))
    parser.add_argument("--start-time", action="store", dest="starttime",
                        help="Only convert packets recorded after start-time")
    parser.add_argument("--end-time", action="store", dest="endtime",
                        help="Only convert packets recorded before end-time")
    parser.add_argument("--silent", action="store", dest="silent",
                        help="Silent operation (no status output)")
    parser.add_argument("--min-signal", action="store", dest="minsignal",
                        help=("Only convert packets with a signal greater "
                              "than min-signal"))
    parser.add_argument("--device-key", action="append", dest="devicekey",
                        help=("Only convert packets which are linked to the "
                              "specified device key (multiple --device-key "
                              "options will match multiple devices)"))
    results = parser.parse_args()

    log_to_single = True

    if results.infile is None:
        print("Expected --in [file]")
        sys.exit(1)

    if results.limitpackets is not None and results.outtitle is None:
        print("Expected --outtitle when using --limit-packets")
        sys.exit(1)
    elif results.limitpackets is None and results.outfile is None:
        print("Expected --out [file]")
        sys.exit(1)
    elif results.limitpackets and results.outtitle:
        # BUG FIX: argparse delivers --limit-packets as a string, so the
        # ``npackets % results.limitpackets`` below raised TypeError;
        # coerce once here.  Also actually switch to multi-file mode --
        # previously ``log_to_single`` was never cleared, making the
        # multi-file branch unreachable (and ``open(None)`` a crash).
        results.limitpackets = int(results.limitpackets)
        log_to_single = False
        print(("Limiting to {} packets per file in "
               "{}-X.pcap").format(results.limitpackets, results.outtitle))

    query_args = {"dlt_gt": 0}

    if results.uuid is not None:
        query_args["datasource"] = results.uuid

    if results.starttime:
        query_args["ts_sec_gt"] = results.starttime

    if results.endtime:
        query_args["ts_sec_lt"] = results.endtime

    if results.minsignal:
        query_args["min_signal"] = results.minsignal

    logf = None
    lognum = 0

    packet_store = kismetdb.Packets(results.infile)

    npackets = 0
    file_mode = "wb"
    for result in packet_store.yield_all(**query_args):
        if logf is None:
            if results.silent is None:
                print("DLT {} for all packets".format(query_args["dlt_gt"]))
            if log_to_single:
                if results.silent is None:
                    print("Logging to {}".format(results.outfile))
                logf = open(results.outfile, file_mode)
                write_pcap_header(logf, result["dlt"])
            else:
                if results.silent is None:
                    print("Logging to {}-{}.pcap".format(results.outtitle,
                                                         lognum))
                logf = open("{}-{}.pcap".format(results.outtitle,
                                                lognum), file_mode)
                lognum = lognum + 1
                if results.silent is None:
                    # Consistency fix: this status line previously printed
                    # even when --silent was given.
                    print("Writing PCAP header with DLT {}".format(
                        result["dlt"]))
                write_pcap_header(logf, result["dlt"])

        write_pcap_packet(logf, int(result["ts_sec"]), int(result["ts_usec"]),
                          result["packet"])
        npackets = npackets + 1

        if not log_to_single:
            # Roll over to a new file once the per-file cap is reached.
            if npackets % results.limitpackets == 0:
                logf.close()
                logf = None
        elif results.silent is None:
            if npackets % 1000 == 0:
                print("Converted {} packets...".format(npackets))

    # Close whatever file is still open (single-file mode, or a partially
    # filled final file in multi-file mode).
    if logf is not None:
        logf.close()

    if results.silent is None:
        print("Done! Converted {} packets.".format(npackets))
class Snapshots(BaseInterface):
    """This object covers snapshots stored in the Kismet DB.

    The ``Keyword Arguments`` section below applies only to methods which
    support them (as noted below), not to object instantiation.

    Args:
        file_location (str): Path to Kismet log file.

    Keyword args:
        ts_sec_gt (str, datetime, or (secs, u_secs)): Timestamp for starting
            query.
        ts_sec_lt (str, datetime, or (secs, usecs)): Timestamp for ending
            query.
        lat_gt (str, float): Bounding minimum latitude
        lat_lt (str, float): Bounding maximum latitude
        lon_gt (str, float): Bounding minimum longitude
        lon_lt (str, float): Bounding maximum longitude
        snaptype (str): Snapshot type
    """

    # Name of the table this abstraction queries in the Kismet sqlite DB.
    table_name = "snapshots"
    # Column carrying the bulk (JSON blob) payload for a snapshot row.
    bulk_data_field = "json"
    # Per-schema-version default field values; snapshots need none.
    field_defaults = {4: {},
                      5: {},
                      6: {},
                      7: {},
                      8: {}}
    # Per-schema-version converters applied to raw column values.  v4
    # carries the integer lat/lon encoding (see
    # Utility.format_int_as_latlon); all versions normalize the JSON blob.
    converters_reference = {4: {"lat": Utility.format_int_as_latlon,
                                "lon": Utility.format_int_as_latlon,
                                "json": Utility.device_field_parser},
                            5: {"json": Utility.device_field_parser},
                            6: {"json": Utility.device_field_parser},
                            7: {"json": Utility.device_field_parser},
                            8: {"json": Utility.device_field_parser}}
    # Columns present per schema version (note: v4 lacks lat/lon columns).
    column_reference = {4: ["ts_sec", "ts_usec", "snaptype", "json"],
                        5: ["ts_sec", "ts_usec", "lat", "lon", "snaptype", "json"],
                        6: ["ts_sec", "ts_usec", "lat", "lon", "snaptype", "json"],
                        7: ["ts_sec", "ts_usec", "lat", "lon", "snaptype", "json"],
                        8: ["ts_sec", "ts_usec", "lat", "lon", "snaptype", "json"]}
    # Supported filter kwargs, mapped to the Utility generators that build
    # the corresponding parameterized SQL fragments.
    valid_kwargs = {"ts_sec_gt": Utility.generate_single_tstamp_secs_gt,
                    "ts_sec_lt": Utility.generate_single_tstamp_secs_lt,
                    "lat_gt": Utility.generate_single_float_sql_gt,
                    "lon_gt": Utility.generate_single_float_sql_gt,
                    "lat_lt": Utility.generate_single_float_sql_lt,
                    "lon_lt": Utility.generate_single_float_sql_lt,
                    "snaptype": Utility.generate_single_string_sql_eq}
``timestamp``.""" 13 | if not isinstance(timestamp, int): 14 | raise ValueError("Integer required for timestamp conversion.") 15 | return datetime.datetime.fromtimestamp(timestamp).isoformat() 16 | 17 | @classmethod 18 | def timestamp_to_dbtime(cls, timestamp): 19 | """Return a tuple containing Unix epoch seconds and microseconds. 20 | 21 | Args: 22 | timestamp (datetime, str, or tuple): If this is a string, we 23 | attempt to parse as such. If this is a tuple, we 24 | expect a tuple of length 2 and both items should be type: 25 | `int`. 26 | 27 | Returns: 28 | tuple: (secs, u_secs) 29 | """ 30 | t_tup = (0, 0) 31 | d_type = type(timestamp) 32 | err = ("Wrong type for timestamp. Got {}. We expect a tuple (int, " 33 | "int), string, or datetime.datetime object.").format(d_type) 34 | if isinstance(timestamp, tuple): 35 | err = cls.timestamp_tuple_validates(timestamp) 36 | if not err: 37 | t_tup = timestamp 38 | elif cls.is_it_a_string(timestamp): 39 | t_tup, err = cls.timestamp_string_to_tuple(timestamp) 40 | elif isinstance(timestamp, datetime.datetime): 41 | t_tup = cls.datetime_to_tuple(timestamp) 42 | err = "" 43 | elif isinstance(timestamp, int): 44 | t_tup = (timestamp, 0) 45 | err = "" 46 | if err: 47 | raise ValueError(err) 48 | return t_tup 49 | 50 | @classmethod 51 | def datetime_to_tuple(cls, timestamp): 52 | """Return a timestamp tuple. 53 | 54 | Args: 55 | timestamp (datetime.datetime): Python datetime.datetime object. 56 | 57 | Returns: 58 | tup: (seconds, u_seconds) 59 | """ 60 | epoch = datetime.datetime.utcfromtimestamp(0) 61 | secs = str((timestamp - epoch).total_seconds()) 62 | seconds = int(secs.split(".")[0]) 63 | u_seconds = int(secs.split(".")[1]) if len(secs.split(".")) > 1 else 0 64 | return (seconds, u_seconds) 65 | 66 | @classmethod 67 | def timestamp_string_to_tuple(cls, timestamp): 68 | """Return a timestamp tuple if possible, and a reason for failure. 69 | Args: 70 | timestamp (str): String-formatted timestamp. 
71 | 72 | Returns: 73 | tup: (t_tuple, err). t_tuple is a pair of integers for seconds 74 | and u_seconds. err is a string describing errors in 75 | parsing, if any. Successful parsing means that err is an 76 | empty string. 77 | """ 78 | err = "" 79 | ts = datetime.datetime.utcfromtimestamp(0) 80 | epoch = datetime.datetime.utcfromtimestamp(0) 81 | try: 82 | ts = dateparser.parse(timestamp, fuzzy=True) 83 | except ValueError as e: 84 | err = ("Could not extract a date/time from start-time " 85 | "argument: {}".format(e)) 86 | secs = str((ts - epoch).total_seconds()) 87 | seconds = int(secs.split(".")[0]) 88 | u_seconds = int(secs.split(".")[1]) if len(secs.split(".")) > 1 else 0 89 | result = (seconds, u_seconds) 90 | return (result, err) 91 | 92 | @classmethod 93 | def timestamp_tuple_validates(cls, timestamp): 94 | """Return empty string if valid, reason if otherwise.""" 95 | err = "" 96 | if len(timestamp) != 2: 97 | err = ("Badly-formatted timestamp tuple. We expect a tuple of " 98 | "length 2") 99 | elif [x for x in timestamp if not isinstance(x, int)]: 100 | err = ("Badly-formatted timestamp tuple. We expect two " 101 | "integers.") 102 | return err 103 | 104 | @classmethod 105 | def format_tstamp_secs(cls, tstamp): 106 | """Return epoch seconds only from timestamp.""" 107 | t_tup = cls.timestamp_to_dbtime(tstamp) 108 | return str(t_tup[0]) 109 | 110 | @classmethod 111 | def format_string_match(cls, in_str): 112 | """This just returns a string-type for the argument. 113 | 114 | This is more or less a placeholder where we may have an opportunity 115 | to sanitize data at a later date. 116 | """ 117 | return str(in_str) 118 | 119 | @classmethod 120 | def format_int_match(cls, in_str): 121 | """This just returns an integer-type representation for the argument. 122 | 123 | This is more or less a placeholder where we may decide to do some 124 | sanitization at a later date. 
125 | """ 126 | return int(in_str) 127 | 128 | @classmethod 129 | def format_latlon_as_integer(cls, lat_or_lon): 130 | """Return integer for input lat or lon str or float. 131 | 132 | This is used to get our lat/lon into the same format used in the DB. 133 | """ 134 | in_val = float(lat_or_lon) 135 | result = in_val * 100000 136 | return int(result) 137 | 138 | @classmethod 139 | def format_int_as_latlon(cls, lat_or_lon): 140 | """Return float for input lat or lon str or int. 141 | 142 | This is used to get the lat or lon from the DB into the form other 143 | tools will more readily accept. 144 | """ 145 | result = int(lat_or_lon) / 100000.0 146 | return result 147 | 148 | @classmethod 149 | def generate_single_string_sql_eq(cls, column_name, filter_value): 150 | """Return tuple with sql and replacement. 151 | 152 | This function builds the sql partial and replacement dict for 153 | an equivalency match for one value against a single column in 154 | the database. 155 | 156 | Args: 157 | column_name (str): Name of column in DB. 158 | filter_value (str): This is what we look for in the column. 159 | 160 | Returns: 161 | tuple: Item 0 contains the SQL partial string. Item 1 contains 162 | the replacement dictionary. 163 | 164 | """ 165 | sql = "{} = :{}".format(column_name, column_name) 166 | replacement = {column_name: str(filter_value)} 167 | return (sql, replacement) 168 | 169 | @classmethod 170 | def generate_multi_string_sql_eq(cls, column_name, filter_values): 171 | """Return tuple with sql and replacement. 172 | 173 | This function builds the sql partial and replacement dict for 174 | an equivalency match for multiple values (OR) against a single 175 | column in the database. 176 | 177 | Args: 178 | column_name (str): Name of column in DB. 179 | filter_values (list or str): This is what we look for in the column. 
180 | If a string-type object is used for this argument, this function 181 | behaves as a wrapper for `Utility.generate_single_string_sql_eq()` 182 | 183 | Returns: 184 | tuple: Item 0 contains the SQL partial string. Item 1 contains 185 | the replacement dictionary. 186 | 187 | """ 188 | if not isinstance(filter_values, list): 189 | return cls.generate_single_string_sql_eq(column_name, 190 | filter_values) 191 | sql_parts = [] 192 | replacement = {} 193 | increment = 1 194 | for filter_value in filter_values: 195 | colref = "{}{}".format(column_name, str(increment)) 196 | sql_parts.append("{} = :{}".format(column_name, colref)) 197 | replacement[colref] = str(filter_value) 198 | increment += 1 199 | sql = "( {} )".format(" OR ".join(sql_parts)) 200 | return (sql, replacement) 201 | 202 | @classmethod 203 | def generate_single_string_sql_includes(cls, column_name, filter_value): 204 | """Return tuple with sql and replacement. 205 | 206 | This function builds the sql partial and replacement dict for 207 | an inclusion (LIKE %VALUE%) match for one value against a single 208 | column in the database. 209 | 210 | Args: 211 | column_name (str): Name of column in DB. 212 | filter_value (str): This is what we look for in the column. 213 | 214 | Returns: 215 | tuple: Item 0 contains the SQL partial string. Item 1 contains 216 | the replacement dictionary. 217 | 218 | """ 219 | sql = "{} LIKE :{}".format(column_name, column_name) 220 | replacement = {column_name: '%{}%'.format(str(filter_value))} 221 | return (sql, replacement) 222 | 223 | @classmethod 224 | def generate_multi_string_sql_includes(cls, column_name, filter_values): 225 | """Return tuple with sql and replacement. 226 | 227 | This function builds the sql partial and replacement dict for 228 | an inclusion (LIKE %VALUE%) match for multiple values (OR) against 229 | a single column in the database. 230 | 231 | Args: 232 | column_name (str): Name of column in DB. 
233 | filter_values (list or str): This is what we look for in the column. 234 | If a string-type object is used for this argument, this function 235 | behaves as a wrapper for 236 | `Utility.generate_single_string_sql_includes()` 237 | 238 | Returns: 239 | tuple: Item 0 contains the SQL partial string. Item 1 contains 240 | the replacement dictionary. 241 | 242 | """ 243 | if not isinstance(filter_values, list): 244 | return cls.generate_single_string_sql_eq(column_name, 245 | filter_values) 246 | sql_parts = [] 247 | replacement = {} 248 | increment = 1 249 | for filter_value in filter_values: 250 | colref = "{}{}".format(column_name, str(increment)) 251 | sql_parts.append("{} LIKE :{}".format(column_name, colref)) 252 | replacement[colref] = '%{}%'.format(str(filter_value)) 253 | increment += 1 254 | sql = "( {} )".format(" OR ".join(sql_parts)) 255 | return (sql, replacement) 256 | 257 | @classmethod 258 | def generate_single_int_sql_eq(cls, column_name, filter_value): 259 | """Return tuple with sql and replacement. 260 | 261 | This function builds the sql partial and replacement dict for 262 | an equivalency match for a single integer against a single 263 | column in the database. 264 | 265 | Args: 266 | column_name (str): Name of column in DB. 267 | filter_value (str or int): This is what we look for in the column. 268 | Coerced to integer. 269 | 270 | Returns: 271 | tuple: Item 0 contains the SQL partial string. Item 1 contains 272 | the replacement dictionary. 273 | 274 | 275 | """ 276 | sql = "{} = :{}".format(column_name, column_name) 277 | replacement = {column_name: int(filter_value)} 278 | return (sql, replacement) 279 | 280 | @classmethod 281 | def generate_single_int_sql_gt(cls, column_name, filter_value): 282 | """Return tuple with sql and replacement. 283 | 284 | This function builds the sql partial and replacement dict for 285 | a greater-than match for a single integer against a single 286 | column in the database. 
287 | 288 | Args: 289 | column_name (str): Name of column in DB. 290 | filter_value (str or int): This is what we look for in the column. 291 | Coerced to integer. 292 | 293 | Returns: 294 | tuple: Item 0 contains the SQL partial string. Item 1 contains 295 | the replacement dictionary. 296 | 297 | """ 298 | column_name_corrected = column_name.replace("_gt", "") 299 | sql = "{} > :{}".format(column_name_corrected, column_name_corrected) 300 | replacement = {column_name_corrected: int(filter_value)} 301 | return (sql, replacement) 302 | 303 | @classmethod 304 | def generate_single_int_sql_lt(cls, column_name, filter_value): 305 | """Return tuple with sql and replacement. 306 | 307 | This function builds the sql partial and replacement dict for 308 | a less-than match for a single integer against a single 309 | column in the database. 310 | 311 | Args: 312 | column_name (str): Name of column in DB. 313 | filter_value (str or int): This is what we look for in the column. 314 | Coerced to integer. 315 | 316 | Returns: 317 | tuple: Item 0 contains the SQL partial string. Item 1 contains 318 | the replacement dictionary. 319 | """ 320 | column_name_corrected = column_name.replace("_lt", "") 321 | sql = "{} < :{}".format(column_name_corrected, column_name_corrected) 322 | replacement = {column_name_corrected: int(filter_value)} 323 | return (sql, replacement) 324 | 325 | @classmethod 326 | def generate_single_float_sql_eq(cls, column_name, filter_value): 327 | """Return tuple with sql and replacement. 328 | 329 | This function builds the sql partial and replacement dict for 330 | an equivalency match for a single float against a single 331 | column in the database. 332 | 333 | Args: 334 | column_name (str): Name of column in DB. 335 | filter_value (str or float): This is what we look for in the column. 336 | Coerced to float. 337 | 338 | Returns: 339 | tuple: Item 0 contains the SQL partial string. Item 1 contains 340 | the replacement dictionary. 
341 | 342 | 343 | """ 344 | sql = "{} = :{}".format(column_name, column_name) 345 | replacement = {column_name: float(filter_value)} 346 | return (sql, replacement) 347 | 348 | @classmethod 349 | def generate_single_float_sql_gt(cls, column_name, filter_value): 350 | """Return tuple with sql and replacement. 351 | 352 | This function builds the sql partial and replacement dict for 353 | a greater-than match for a single float against a single 354 | column in the database. 355 | 356 | Args: 357 | column_name (str): Name of column in DB. 358 | filter_value (str or float): This is what we look for in the column. 359 | Coerced to float. 360 | 361 | Returns: 362 | tuple: Item 0 contains the SQL partial string. Item 1 contains 363 | the replacement dictionary. 364 | 365 | """ 366 | column_name_corrected = column_name.replace("_gt", "") 367 | sql = "{} > :{}".format(column_name_corrected, column_name_corrected) 368 | replacement = {column_name_corrected: float(filter_value)} 369 | return (sql, replacement) 370 | 371 | @classmethod 372 | def generate_single_float_sql_lt(cls, column_name, filter_value): 373 | """Return tuple with sql and replacement. 374 | 375 | This function builds the sql partial and replacement dict for 376 | a less-than match for a single integer against a single 377 | column in the database. 378 | 379 | Args: 380 | column_name (str): Name of column in DB. 381 | filter_value (str or float): This is what we look for in the column. 382 | Coerced to float. 383 | 384 | Returns: 385 | tuple: Item 0 contains the SQL partial string. Item 1 contains 386 | the replacement dictionary. 
387 | """ 388 | column_name_corrected = column_name.replace("_lt", "") 389 | sql = "{} < :{}".format(column_name_corrected, column_name_corrected) 390 | replacement = {column_name_corrected: float(filter_value)} 391 | return (sql, replacement) 392 | 393 | @classmethod 394 | def generate_single_tstamp_secs_gt(cls, column_name, filter_value): 395 | """Return tuple with sql and replacement. 396 | 397 | This function wraps other functions to build the sql partial 398 | and replacement dict for a greater-than match for a timestamp 399 | against a single column in the database. 400 | 401 | Args: 402 | column_name (str): Name of column in DB. 403 | filter_value (str or int or datetime.datetime): This is what 404 | we look for in the column. Sanitized to Unix epoch for DB 405 | compatibility. 406 | 407 | Returns: 408 | tuple: Item 0 contains the SQL partial string. Item 1 contains 409 | the replacement dictionary. 410 | 411 | """ 412 | mod_filter_value = cls.timestamp_to_dbtime(filter_value)[0] 413 | return cls.generate_single_int_sql_gt(column_name, mod_filter_value) 414 | 415 | @classmethod 416 | def generate_single_tstamp_secs_lt(cls, column_name, filter_value): 417 | """Return tuple with sql and replacement. 418 | 419 | This function wraps other functions to build the sql partial 420 | and replacement dict for a less-than match for a timestamp 421 | against a single column in the database. 422 | 423 | Args: 424 | column_name (str): Name of column in DB. 425 | filter_value (str or int or datetime.datetime): This is what 426 | we look for in the column. Sanitized to Unix epoch for DB 427 | compatibility. 428 | 429 | Returns: 430 | tuple: Item 0 contains the SQL partial string. Item 1 contains 431 | the replacement dictionary. 
432 | 433 | """ 434 | mod_filter_value = cls.timestamp_to_dbtime(filter_value)[0] 435 | return cls.generate_single_int_sql_lt(column_name, mod_filter_value) 436 | 437 | @classmethod 438 | def generate_single_tstamp_secs_eq(cls, column_name, filter_value): 439 | """Return tuple with sql and replacement. 440 | 441 | This function wraps other functions to build the sql partial 442 | and replacement dict for an equivalency match for a timestamp 443 | against a single column in the database. 444 | 445 | Args: 446 | column_name (str): Name of column in DB. 447 | filter_value (str or int or datetime.datetime): This is what 448 | we look for in the column. Sanitized to Unix epoch for DB 449 | compatibility. 450 | 451 | Returns: 452 | tuple: Item 0 contains the SQL partial string. Item 1 contains 453 | the replacement dictionary. 454 | 455 | """ 456 | mod_filter_value = cls.timestamp_to_dbtime(filter_value)[0] 457 | return cls.generate_single_int_sql_eq(column_name, mod_filter_value) 458 | 459 | @classmethod 460 | def is_it_a_string(cls, target): 461 | """Return boolean True if target is a string, else return False.""" 462 | 463 | if sys.version_info < (3, 0): 464 | result = True if isinstance(target, basestring) else False # NOQA 465 | else: 466 | result = True if isinstance(target, (str, bytes)) else False 467 | return result 468 | 469 | @classmethod 470 | def device_field_parser(cls, device): 471 | """We ensure that a json-parseable string gets passed up the stack.""" 472 | retval = device 473 | retval = json.dumps(json.loads(device)) 474 | return retval 475 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | import os 2 | import re 3 | from setuptools import setup 4 | 5 | 6 | def read(file_name): 7 | with open(os.path.join(os.path.dirname(__file__), file_name), 'r') as f: 8 | filestring = f.read() 9 | return filestring 10 | 11 | 12 | def 
def _search_init(pattern):
    """Return group 1 of *pattern* matched in kismetdb/__init__.py.

    Shared helper for the metadata getters below, which previously each
    duplicated the same read-and-search logic.
    """
    raw_init_file = read("kismetdb/__init__.py")
    return re.compile(pattern).search(raw_init_file).group(1)


def get_author():
    """Return the package ``__author__`` string."""
    return _search_init(r"\s*__author__\s*=\s*\"(.*)\"")


def get_copyright():
    """Return the package ``__copyright__`` string."""
    return _search_init(r"\s*__copyright__\s*=\s*\"(.*)\"")


def get_license():
    """Return the package ``__license__`` string."""
    return _search_init(r"\s*__license__\s*=\s*\"(.*)\"")


def get_email():
    """Return the package ``__email__`` address."""
    return _search_init(r"\s*__email__\s*=\s*\"(\S+)\"")


def build_long_desc():
    """Concatenate README and CHANGELOG into the long description."""
    return "\n".join(read(f) for f in ["README.rst", "CHANGELOG.rst"])
kismetdb.scripts.log_to_pcap:main", 69 | "kismet_log_devices_to_filebeat_json = kismetdb.scripts.log_devices_to_filebeat_json:main"]}, # NOQA 70 | classifiers=[ 71 | "Development Status :: 5 - Production/Stable", 72 | "Intended Audience :: Developers", 73 | "Operating System :: MacOS :: MacOS X", 74 | "Operating System :: POSIX :: Linux", 75 | "Programming Language :: Python :: 2.7", 76 | "Programming Language :: Python :: 3.6", 77 | "Programming Language :: Python :: 3.7", 78 | "Topic :: Security", 79 | "License :: OSI Approved :: BSD License" 80 | ],) 81 | -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/kismetwireless/python-kismet-db/be7568045c7b2cc7ed942cf350545f58e400805d/tests/__init__.py -------------------------------------------------------------------------------- /tests/assets/.placeholder: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/kismetwireless/python-kismet-db/be7568045c7b2cc7ed942cf350545f58e400805d/tests/assets/.placeholder -------------------------------------------------------------------------------- /tests/integration/test_integration_alerts_4.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | import kismetdb 4 | 5 | 6 | class TestIntegrationAlerts(object): 7 | def test_integration_alerts_instantiate(self): 8 | here_dir = os.path.dirname(os.path.abspath(__file__)) 9 | test_db = os.path.join(here_dir, "../assets/testdata.kismet_4") 10 | abstraction = kismetdb.Alerts(test_db) 11 | assert abstraction 12 | 13 | def test_integration_alerts_get_all(self): 14 | here_dir = os.path.dirname(os.path.abspath(__file__)) 15 | test_db = os.path.join(here_dir, "../assets/testdata.kismet_4") 16 | abstraction = kismetdb.Alerts(test_db) 17 | all_alerts = 
abstraction.get_all() 18 | assert all_alerts 19 | for alert in all_alerts: 20 | assert isinstance(alert["lat"], float) 21 | assert isinstance(alert["lon"], float) 22 | 23 | def test_integration_alerts_get_all_date_filter(self): 24 | here_dir = os.path.dirname(os.path.abspath(__file__)) 25 | test_db = os.path.join(here_dir, "../assets/testdata.kismet_4") 26 | abstraction = kismetdb.Alerts(test_db) 27 | all_alerts = abstraction.get_all(ts_sec_gt="2018-01-01") 28 | assert all_alerts 29 | 30 | def test_integration_alerts_get_all_date_phy_filter(self): 31 | here_dir = os.path.dirname(os.path.abspath(__file__)) 32 | test_db = os.path.join(here_dir, "../assets/testdata.kismet_4") 33 | abstraction = kismetdb.Alerts(test_db) 34 | all_alerts = abstraction.get_all(ts_sec_gt="2018-01-01", 35 | phyname=["Bluetooth", 36 | "IEEE802.11", 37 | "UNKNOWN"]) 38 | assert all_alerts 39 | 40 | def test_integration_alerts_get_meta(self): 41 | here_dir = os.path.dirname(os.path.abspath(__file__)) 42 | test_db = os.path.join(here_dir, "../assets/testdata.kismet_4") 43 | abstraction = kismetdb.Alerts(test_db) 44 | all_alerts = abstraction.get_meta() 45 | assert all_alerts 46 | assert "json" not in all_alerts[0] 47 | -------------------------------------------------------------------------------- /tests/integration/test_integration_alerts_5.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | import kismetdb 4 | 5 | 6 | class TestIntegrationAlerts(object): 7 | def test_integration_alerts_instantiate(self): 8 | here_dir = os.path.dirname(os.path.abspath(__file__)) 9 | test_db = os.path.join(here_dir, "../assets/testdata.kismet_5") 10 | abstraction = kismetdb.Alerts(test_db) 11 | assert abstraction 12 | 13 | def test_integration_alerts_get_all(self): 14 | here_dir = os.path.dirname(os.path.abspath(__file__)) 15 | test_db = os.path.join(here_dir, "../assets/testdata.kismet_5") 16 | abstraction = kismetdb.Alerts(test_db) 17 | all_alerts = 
abstraction.get_all() 18 | assert all_alerts 19 | for alert in all_alerts: 20 | assert isinstance(alert["lat"], float) 21 | assert isinstance(alert["lon"], float) 22 | 23 | def test_integration_alerts_get_all_date_filter(self): 24 | here_dir = os.path.dirname(os.path.abspath(__file__)) 25 | test_db = os.path.join(here_dir, "../assets/testdata.kismet_5") 26 | abstraction = kismetdb.Alerts(test_db) 27 | all_alerts = abstraction.get_all(ts_sec_gt="2018-01-01") 28 | assert all_alerts 29 | 30 | def test_integration_alerts_get_all_date_phy_filter(self): 31 | here_dir = os.path.dirname(os.path.abspath(__file__)) 32 | test_db = os.path.join(here_dir, "../assets/testdata.kismet_5") 33 | abstraction = kismetdb.Alerts(test_db) 34 | all_alerts = abstraction.get_all(ts_sec_gt="2018-01-01", 35 | phyname=["Bluetooth", 36 | "IEEE802.11", 37 | "UNKNOWN"]) 38 | assert all_alerts 39 | 40 | def test_integration_alerts_get_meta(self): 41 | here_dir = os.path.dirname(os.path.abspath(__file__)) 42 | test_db = os.path.join(here_dir, "../assets/testdata.kismet_5") 43 | abstraction = kismetdb.Alerts(test_db) 44 | all_alerts = abstraction.get_meta() 45 | assert all_alerts 46 | assert "json" not in all_alerts[0] 47 | -------------------------------------------------------------------------------- /tests/integration/test_integration_base_interface_4.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | import pytest 4 | 5 | import kismetdb 6 | 7 | 8 | class TestIntegrationBaseInterface(object): 9 | def test_integration_base_interface_instantiate_success(self): 10 | here_dir = os.path.dirname(os.path.abspath(__file__)) 11 | test_db = os.path.join(here_dir, "../assets/testdata.kismet_4") 12 | base_interface = kismetdb.BaseInterface(test_db) 13 | assert base_interface 14 | 15 | def test_integration_base_interface_instantiate_file_noexist(self): 16 | here_dir = os.path.dirname(os.path.abspath(__file__)) 17 | test_db = os.path.join(here_dir, 
class TestIntegrationBaseInterface(object):
    """Integration tests for BaseInterface against a v5 kismet log."""

    def test_integration_base_interface_instantiate_success(self):
        """Instantiation succeeds when the database file exists."""
        asset_dir = os.path.dirname(os.path.abspath(__file__))
        db_file = os.path.join(asset_dir, "../assets/testdata.kismet_5")
        assert kismetdb.BaseInterface(db_file)

    def test_integration_base_interface_instantiate_file_noexist(self):
        """A missing database file raises a descriptive ValueError."""
        asset_dir = os.path.dirname(os.path.abspath(__file__))
        missing_db = os.path.join(asset_dir, "./testdata.kismet_5")
        with pytest.raises(ValueError) as excinfo:
            kismetdb.BaseInterface(missing_db)
        message = str(excinfo.value)
        assert "testdata.kismet" in message
        assert "Could not find" in message
class TestIntegrationDataSources(object):
    """Integration tests for DataSources against a v5 kismet log."""

    def test_integration_datasources_instantiate(self):
        """DataSources abstraction instantiates against the test DB."""
        asset_dir = os.path.dirname(os.path.abspath(__file__))
        db_file = os.path.join(asset_dir, "../assets/testdata.kismet_5")
        assert kismetdb.DataSources(db_file)

    def test_integration_datasources_get_all(self):
        """Every source row carries its raw JSON as a native string."""
        asset_dir = os.path.dirname(os.path.abspath(__file__))
        db_file = os.path.join(asset_dir, "../assets/testdata.kismet_5")
        for source in kismetdb.DataSources(db_file).get_all():
            assert isinstance(source["json"], str)
float) 21 | assert isinstance(device["min_lon"], float) 22 | assert isinstance(device["max_lat"], float) 23 | assert isinstance(device["max_lon"], float) 24 | assert isinstance(device["avg_lat"], float) 25 | assert isinstance(device["avg_lon"], float) 26 | 27 | def test_integration_devices_get_all_date_filter(self): 28 | here_dir = os.path.dirname(os.path.abspath(__file__)) 29 | test_db = os.path.join(here_dir, "../assets/testdata.kismet_4") 30 | abstraction = kismetdb.Devices(test_db) 31 | all_alerts = abstraction.get_all(first_time_gt="2018-01-01") 32 | assert all_alerts 33 | 34 | def test_integration_devices_get_all_date_phy_filter(self): 35 | here_dir = os.path.dirname(os.path.abspath(__file__)) 36 | test_db = os.path.join(here_dir, "../assets/testdata.kismet_4") 37 | abstraction = kismetdb.Devices(test_db) 38 | all_alerts = abstraction.get_all(first_time_gt="2018-01-01", 39 | phyname=["Bluetooth", 40 | "IEEE802.11"]) 41 | assert all_alerts 42 | 43 | def test_integration_devices_get_meta(self): 44 | here_dir = os.path.dirname(os.path.abspath(__file__)) 45 | test_db = os.path.join(here_dir, "../assets/testdata.kismet_4") 46 | abstraction = kismetdb.Devices(test_db) 47 | all_alerts = abstraction.get_meta() 48 | assert all_alerts 49 | assert "json" not in all_alerts[0] 50 | 51 | def test_integration_devices_yield_all_date_phy_filter(self): 52 | here_dir = os.path.dirname(os.path.abspath(__file__)) 53 | test_db = os.path.join(here_dir, "../assets/testdata.kismet_4") 54 | abstraction = kismetdb.Devices(test_db) 55 | for device in abstraction.yield_meta(first_time_gt="2018-01-01", 56 | phyname=["Bluetooth", 57 | "IEEE802.11"]): 58 | assert device 59 | assert "device" not in device 60 | assert isinstance(device["min_lat"], float) 61 | assert isinstance(device["min_lon"], float) 62 | assert isinstance(device["max_lat"], float) 63 | assert isinstance(device["max_lon"], float) 64 | assert isinstance(device["avg_lat"], float) 65 | assert isinstance(device["avg_lon"], 
float) 66 | 67 | def test_integration_devices_yield_meta(self): 68 | here_dir = os.path.dirname(os.path.abspath(__file__)) 69 | test_db = os.path.join(here_dir, "../assets/testdata.kismet_4") 70 | abstraction = kismetdb.Devices(test_db) 71 | for alert in abstraction.yield_meta(): 72 | assert alert 73 | assert "device" not in alert 74 | -------------------------------------------------------------------------------- /tests/integration/test_integration_devices_5.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | import kismetdb 4 | 5 | 6 | class TestIntegrationDevices(object): 7 | def test_integration_devices_instantiate(self): 8 | here_dir = os.path.dirname(os.path.abspath(__file__)) 9 | test_db = os.path.join(here_dir, "../assets/testdata.kismet_5") 10 | abstraction = kismetdb.Devices(test_db) 11 | assert abstraction 12 | 13 | def test_integration_devices_get_all(self): 14 | here_dir = os.path.dirname(os.path.abspath(__file__)) 15 | test_db = os.path.join(here_dir, "../assets/testdata.kismet_5") 16 | abstraction = kismetdb.Devices(test_db) 17 | all_alerts = abstraction.get_all() 18 | assert all_alerts 19 | 20 | def test_integration_devices_get_all_date_filter(self): 21 | here_dir = os.path.dirname(os.path.abspath(__file__)) 22 | test_db = os.path.join(here_dir, "../assets/testdata.kismet_5") 23 | abstraction = kismetdb.Devices(test_db) 24 | all_alerts = abstraction.get_all(first_time_gt="2018-01-01") 25 | assert all_alerts 26 | 27 | def test_integration_devices_get_all_date_phy_filter(self): 28 | here_dir = os.path.dirname(os.path.abspath(__file__)) 29 | test_db = os.path.join(here_dir, "../assets/testdata.kismet_5") 30 | abstraction = kismetdb.Devices(test_db) 31 | all_alerts = abstraction.get_all(first_time_gt="2018-01-01", 32 | phyname=["Bluetooth", 33 | "IEEE802.11"]) 34 | assert all_alerts 35 | 36 | def test_integration_devices_get_meta(self): 37 | here_dir = os.path.dirname(os.path.abspath(__file__)) 38 
| test_db = os.path.join(here_dir, "../assets/testdata.kismet_5") 39 | abstraction = kismetdb.Devices(test_db) 40 | all_alerts = abstraction.get_meta() 41 | assert all_alerts 42 | assert "json" not in all_alerts[0] 43 | 44 | def test_integration_devices_yield_all_date_phy_filter(self): 45 | here_dir = os.path.dirname(os.path.abspath(__file__)) 46 | test_db = os.path.join(here_dir, "../assets/testdata.kismet_5") 47 | abstraction = kismetdb.Devices(test_db) 48 | for device in abstraction.yield_meta(first_time_gt="2018-01-01", 49 | phyname=["Bluetooth", 50 | "IEEE802.11"]): 51 | assert device 52 | assert "device" not in device 53 | assert isinstance(device["min_lat"], float) 54 | assert isinstance(device["min_lon"], float) 55 | assert isinstance(device["max_lat"], float) 56 | assert isinstance(device["max_lon"], float) 57 | assert isinstance(device["avg_lat"], float) 58 | assert isinstance(device["avg_lon"], float) 59 | 60 | def test_integration_devices_yield_meta(self): 61 | here_dir = os.path.dirname(os.path.abspath(__file__)) 62 | test_db = os.path.join(here_dir, "../assets/testdata.kismet_5") 63 | abstraction = kismetdb.Devices(test_db) 64 | for alert in abstraction.yield_meta(): 65 | assert alert 66 | assert "device" not in alert 67 | -------------------------------------------------------------------------------- /tests/integration/test_integration_packets_4.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | import kismetdb 4 | 5 | 6 | class TestIntegrationPackets(object): 7 | def test_integration_packets_instantiate(self): 8 | here_dir = os.path.dirname(os.path.abspath(__file__)) 9 | test_db = os.path.join(here_dir, "../assets/testdata.kismet_4") 10 | abstraction = kismetdb.Packets(test_db) 11 | assert abstraction 12 | 13 | def test_integration_packets_yield_all_date_phy_filter(self): 14 | here_dir = os.path.dirname(os.path.abspath(__file__)) 15 | test_db = os.path.join(here_dir, 
"../assets/testdata.kismet_4") 16 | abstraction = kismetdb.Packets(test_db) 17 | for packet in abstraction.yield_meta(first_time_gt="2018-01-01"): 18 | assert packet 19 | assert "packet" not in packet 20 | assert packet["alt"] == 0 21 | assert packet["speed"] == 0 22 | assert packet["heading"] == 0 23 | assert isinstance(packet["lat"], float) 24 | assert isinstance(packet["lon"], float) 25 | 26 | def test_integration_packets_yield_meta(self): 27 | here_dir = os.path.dirname(os.path.abspath(__file__)) 28 | test_db = os.path.join(here_dir, "../assets/testdata.kismet_4") 29 | abstraction = kismetdb.Packets(test_db) 30 | for packet in abstraction.yield_meta(): 31 | assert packet 32 | assert "packet" not in packet 33 | assert packet["alt"] == 0 34 | assert packet["speed"] == 0 35 | assert packet["heading"] == 0 36 | assert packet["ts_sec"] != 0 37 | assert isinstance(packet["lat"], float) 38 | assert isinstance(packet["lon"], float) 39 | -------------------------------------------------------------------------------- /tests/unit/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/kismetwireless/python-kismet-db/be7568045c7b2cc7ed942cf350545f58e400805d/tests/unit/__init__.py -------------------------------------------------------------------------------- /tests/unit/test_unit_utility.py: -------------------------------------------------------------------------------- 1 | import datetime 2 | 3 | import kismetdb 4 | 5 | 6 | class TestUnitUtility(object): 7 | def test_unit_utility_datetime_to_tuple(self): 8 | now_tup = datetime.datetime.now() 9 | result = kismetdb.Utility.datetime_to_tuple(now_tup) 10 | assert isinstance(result, tuple) 11 | assert len(result) == 2 12 | assert result != (0, 0) 13 | 14 | def test_unit_utility_timestamp_string_to_tuple(self): 15 | timestamps = ["2018-05-23T12:05:45.3001", "2018-01-01"] 16 | for timestamp in timestamps: 17 | result, err = 
kismetdb.Utility.timestamp_string_to_tuple(timestamp) 18 | assert isinstance(result, tuple) 19 | assert err == "" 20 | assert result != (0, 0) 21 | 22 | def test_unit_utility_timestamp_datatime_to_tuple(self): 23 | timestamp = datetime.datetime(year=2018, month=1, day=1) 24 | result = kismetdb.Utility.timestamp_to_dbtime(timestamp) 25 | assert isinstance(result, tuple) 26 | assert result == (1514764800, 0) 27 | 28 | def test_unit_utility_timestamp_int_to_tuple(self): 29 | timestamp = 1514764800 30 | result = kismetdb.Utility.timestamp_to_dbtime(timestamp) 31 | assert isinstance(result, tuple) 32 | assert result == (1514764800, 0) 33 | 34 | def test_unit_utility_timestamp_string_to_tuple_2(self): 35 | timestamps = ["nonsense", "nevermind", "2018-02-31"] 36 | for timestamp in timestamps: 37 | result, err = kismetdb.Utility.timestamp_string_to_tuple(timestamp) 38 | assert isinstance(result, tuple) 39 | assert err != "" 40 | assert result == (0, 0) 41 | 42 | def test_unit_utility_timestamp_tuple_validates_true(self): 43 | all_things_that_are_good = [(123, 456), (0, 0), (555, 100)] 44 | for thing in all_things_that_are_good: 45 | assert kismetdb.Utility.timestamp_tuple_validates(thing) == "" 46 | 47 | def test_unit_utility_timestamp_tuple_validates_false(self): 48 | will_fail_validation = [(123.1, "abcde"), (-10.5, 0), (None, 100)] 49 | for thing in will_fail_validation: 50 | assert kismetdb.Utility.timestamp_tuple_validates(thing) != "" 51 | 52 | def test_unit_utility_generate_single_tstamp_secs_eq_str(self): 53 | column_name = "tstamp_abc" 54 | filter_value = "2018-01-01" 55 | result = kismetdb.Utility.generate_single_tstamp_secs_eq(column_name, 56 | filter_value) 57 | assert result[0] == "tstamp_abc = :tstamp_abc" 58 | assert result[1] == {"tstamp_abc": 1514764800} 59 | 60 | def test_unit_utility_generate_single_tstamp_secs_eq_tup(self): 61 | column_name = "tstamp_abc" 62 | filter_value = (1514764800, 1000) 63 | result = 
kismetdb.Utility.generate_single_tstamp_secs_eq(column_name, 64 | filter_value) 65 | assert result[0] == "tstamp_abc = :tstamp_abc" 66 | assert result[1] == {"tstamp_abc": 1514764800} 67 | --------------------------------------------------------------------------------