├── Dockerfile ├── LICENSE ├── README.md ├── docs ├── diag1.png └── diag2.png ├── entrypoint.sh └── kibana.yml /Dockerfile: -------------------------------------------------------------------------------- 1 | FROM rockylinux:9 2 | 3 | EXPOSE 9200 4 | EXPOSE 5601 5 | 6 | ENV ES_VERSION 7.2.0 7 | ENV KIBANA_VERSION 7.2.0 8 | 9 | RUN dnf -y install epel-release && dnf clean all 10 | RUN dnf -y install unzip zip curl git java-1.8.0-openjdk python2 python2-pip && dnf clean all 11 | 12 | RUN pip2 install --upgrade pip 13 | RUN pip2 install beautifulsoup4 python-dateutil html5lib lxml tornado retrying pyelasticsearch joblib click chardet 14 | 15 | RUN mkdir /toolbox 16 | ADD kibana.yml /toolbox 17 | #Trick to adjust access rights between host and docker shared directories 18 | RUN groupadd -g 1001 elasticsearch 19 | RUN useradd -r elasticsearch --uid 1000 --gid 1001 20 | 21 | RUN cd /toolbox && \ 22 | #Elasticsearch is now distributed as a tar.gz file 23 | curl -O https://artifacts.elastic.co/downloads/elasticsearch/elasticsearch-${ES_VERSION}-linux-x86_64.tar.gz && \ 24 | tar -xvzf elasticsearch-${ES_VERSION}-linux-x86_64.tar.gz && \ 25 | rm -rf elasticsearch-${ES_VERSION}-linux-x86_64.tar.gz && \ 26 | ln -s elasticsearch-${ES_VERSION} elasticsearch && \ 27 | chown -R elasticsearch elasticsearch-${ES_VERSION} 28 | 29 | # comment out the default heap settings; our entrypoint.sh sets ES_JAVA_OPTS and can override this 30 | RUN sed -i '/-Xm[xs]/s/^/#/' /toolbox/elasticsearch/config/jvm.options 31 | 32 | RUN cd /toolbox && \ 33 | curl -O https://artifacts.elastic.co/downloads/kibana/kibana-${KIBANA_VERSION}-linux-x86_64.tar.gz && \ 34 | tar -xvf kibana-${KIBANA_VERSION}-linux-x86_64.tar.gz && \ 35 | rm -rf kibana-${KIBANA_VERSION}-linux-x86_64.tar.gz && \ 36 | ln -s kibana-${KIBANA_VERSION}-linux-x86_64 kibana && \ 37 | chown -R elasticsearch kibana-${KIBANA_VERSION}-linux-x86_64 38 | 39 | RUN cd /toolbox && git clone https://github.com/bitsofinfo/elasticsearch-gmail.git 40 | RUN cd /toolbox && git clone https://github.com/bitsofinfo/csv2es.git 41 | #get this interesting repo too 42 | RUN cd /toolbox && git clone https://github.com/cvandeplas/ELK-forensics 43 | 44 | #Trick to modify the elasticsearch-gmail repo to comply with new Elasticsearch requirements 45 | RUN sed -i 's~request = HTTPRequest(tornado.options.options.es_url + "/_bulk", method="POST", body=upload_data_txt, request_timeout=tornado.options.options.es_http_timeout_seconds)~request = HTTPRequest(tornado.options.options.es_url + "/_bulk", method="POST", body=upload_data_txt, request_timeout=tornado.options.options.es_http_timeout_seconds,headers={"content-type":"application/json"})~g' /toolbox/elasticsearch-gmail/src/index_emails.py 46 | #New Elasticsearch mandatory params 47 | RUN sed -i 's/#node.name: node-1/node.name: node-1/g' /toolbox/elasticsearch/config/elasticsearch.yml 48 | RUN sed -i 's/#cluster.initial_master_nodes: \["node-1", "node-2"\]/cluster.initial_master_nodes: \["node-1"\]/g' /toolbox/elasticsearch/config/elasticsearch.yml 49 | 50 | 51 | 52 | ADD entrypoint.sh /entrypoint.sh 53 | RUN chmod 755 /entrypoint.sh 54 | ENTRYPOINT ["/entrypoint.sh"] 55 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions.
8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. 
Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 
122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 
179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "{}" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright {yyyy} {name of copyright owner} 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # comms-analyzer-toolbox 2 | 3 | Docker image that provides a simplified OSINT toolset for the import and analysis of communications content from email [MBOX](https://en.wikipedia.org/wiki/Mbox) files, and other CSV data (such as text messages) using Elasticsearch and Kibana. This provides a single command that launches a full OSINT analytical software stack as well as imports all of your communications into it, ready for analysis w/ Kibana and ElasticSearch. 4 | 5 | * [Summary](#summary) 6 | * [Docker setup](#dockersetup) 7 | * Importing email from MBOX files 8 | * [MBOX import summary](#mboxsummary) 9 | * [Example: Export from Gmail](#gmailexample) 10 | * [Example: Import emails from MBOX export file](#runningmbox) 11 | * [MBOX import options](#mboxoptions) 12 | * [Troubleshooting](#mboxwarn) 13 | * Importing data from CSV files 14 | * [CSV import summary](#csvsummary) 15 | * [Example: Export text messages from iPhone](#iphoneexample) 16 | * [Example: Import text messages from CSV data file](#runningcsv) 17 | * [CSV import options](#csvoptions) 18 | * [Analyze previously imported data](#analyzeonly) 19 | * [Expected warnings](#warn) 20 | * [Help/Resources](#help) 21 | * [Security/Privacy](#security) 22 | 23 | ## Summary 24 | 25 | This project manages a Dockerfile to produce an image that, when run, starts both ElasticSearch and Kibana and then optionally imports communications data using the following tools bundled within the container: 26 | 27 | **IMPORTANT** *the links below are **FORKS** of the original projects due to outstanding issues w/ the original projects that were not fixed at the time of this project's development* 28 | 29 | * [elasticsearch-gmail](https://github.com/bitsofinfo/elasticsearch-gmail) python scripts which import email data from an MBOX file. (See [this link](https://github.com/oliver006/elasticsearch-gmail/pulls?q=is%3Apr+author%3Abitsofinfo+is%3Aclosed) for issues this fork addresses) 30 | * [csv2es](https://github.com/bitsofinfo/csv2es) python scripts which can import any data from a CSV file. (See [this link](https://github.com/rholder/csv2es/pulls/bitsofinfo) for issues this fork addresses) 31 | 32 | From there...
well, you can analyze and visualize practically anything about your communications. Enjoy. 33 | 34 | ![Diag1](/docs/diag1.png "Diagram1") 35 | 36 | ![Diag2](/docs/diag2.png "Diagram2") 37 | 38 | ## Docker setup 39 | 40 | Before running the example below, you need [Docker](https://www.docker.com/get-docker) installed. 41 | 42 | * [Docker for Mac](https://store.docker.com/editions/community/docker-ce-desktop-mac) 43 | * [Docker Toolbox for Windows 10+ home or earlier versions](https://www.docker.com/products/docker-toolbox) 44 | * [Docker for Windows 10+ pro, enterprise, hyper-v capable](https://www.docker.com/docker-windows) 45 | 46 | **Windows Note**: When you `git clone` this project on Windows prior to building, be sure to add the git clone flag `--config core.autocrlf=input`. Example: `git clone https://github.com/bitsofinfo/comms-analyzer-toolbox.git --config core.autocrlf=input`. [read more here](http://willi.am/blog/2016/08/11/docker-for-windows-dealing-with-windows-line-endings/) 47 | 48 | Once Docker is installed, bring up a command-line shell and type the following to build the docker image for the toolbox: 49 | 50 | ``` 51 | docker build -t comms-analyzer-toolbox . 52 | ``` 53 | 54 | **Docker toolbox for Windows notes** 55 | 56 | The `default` docker machine VM created is likely too underpowered to run this out of the box. You will need to do the following to increase the CPU and memory of the local VirtualBox machine: 57 | 58 | 1. Bring up a "Docker Quickstart Terminal" 59 | 60 | 2. Remove the default machine: `docker-machine rm default` 61 | 62 | 3. Recreate it: `docker-machine create -d virtualbox --virtualbox-cpu-count=[N cpus] --virtualbox-memory=[XXXX megabytes] --virtualbox-disk-size=[XXXXXX] default` 63 | 64 | **Troubleshooting error: "max virtual memory areas vm.max_map_count [65530] is too low, increase to at least [262144]"** 65 | 66 | If you see this error when starting the toolbox (the error is reported by Elasticsearch), you will need to do the following on the docker host the container is being launched on: 67 | 68 | `sysctl -w vm.max_map_count=262144` 69 | 70 | If you are using Docker Toolbox, you first have to shell into the boot2docker VM with `docker-machine ssh default` to run this command. Or do the following to make it permanent: https://github.com/docker/machine/issues/3859 71 | 72 | ## MBOX import summary 73 | 74 | For every email message in your MBOX file, each message becomes a separate document in ElasticSearch where all email headers are indexed as individual fields and all body content is indexed and stripped of HTML/CSS/JS. 75 | 76 | For example, each email imported into the index has the following fields available for searching and analysis in Kibana (plus many, many more): 77 | 78 | * date_ts (epoch_millis timestamp in GMT/UTC) 79 | * to 80 | * from 81 | * cc 82 | * bcc 83 | * subject 84 | * body 85 | * body_size 86 | 87 | ## Example: export Gmail email to mbox file 88 | 89 | Once Docker is available on your system, before you run `comms-analyzer-toolbox` you need to have some email to analyze in MBOX format. As an example, below is how to export email from Gmail. 90 | 91 | 1. Log in to your Gmail account with a web browser on a computer 92 | 93 | 2. Go to: https://takeout.google.com/settings/takeout 94 | 95 | 3. On the screen that says **"Download your data"**, under the section **"Select data to include"** click on the **"Select None"** button. This will grey-out all the **"Products"** listed below it 96 | 97 | 4.
Now scroll down and find the greyed out section labeled **"Mail"** and click on the **X** checkbox on the right hand side. It will now turn green, indicating this data will be prepared for you to download. 98 | 99 | 5. Scroll down and click on the blue **"Next"** button 100 | 101 | 6. Leave the **"Customize archive format"** settings as-is and hit the **"Create Archive"** button 102 | 103 | 7. This will now take you to a **"We're preparing your archive."** screen. This might take a few hours depending on the size of all the email you have. 104 | 105 | 8. You will receive an email from Google when the archive is ready to download. When you get it, download the zip file to your local computer's hard drive; it will be named something like `takeout-[YYYYMMDD..].zip` 106 | 107 | 9. Once saved to your hard drive, unzip the file. Once unzipped, all of your exported mail from Gmail will live in an **mbox** export file in the `Takeout/Mail/` folder, and the file with all your mail is: `All mail Including Spam and Trash.mbox` 108 | 109 | 10. You should rename this file to something simpler like `my-email.mbox` 110 | 111 | 11. Take note of the location of your *.mbox* file as you will use it below when running the toolbox. 112 | 113 | 114 | ## Running: import emails for analysis 115 | 116 | Before running the example below, you need [Docker](#dockersetup) installed. 117 | 118 | Bring up a terminal or command prompt on your computer and run the following. Before doing so, you need to replace `PATH/TO/YOUR/my-email.mbox` and `PATH/TO/ELASTICSEARCH_DATA_DIR` below with the proper paths on your local system as appropriate. 119 | 120 | *Note: if using Docker Toolbox for Windows*: All of the mounted volumes below should live somewhere under your home directory under `c:\Users\[your username]\...` due to permissions issues. 121 | 122 | ``` 123 | docker run --rm -ti -p 5601:5601 \ 124 | --ulimit nofile=65536:65536 \ 125 | -v PATH/TO/YOUR/my-email.mbox:/toolbox/email.mbox \ 126 | -v PATH/TO/ELASTICSEARCH_DATA_DIR:/toolbox/elasticsearch/data \ 127 | comms-analyzer-toolbox:latest \ 128 | python /toolbox/elasticsearch-gmail/src/index_emails.py \ 129 | --infile=/toolbox/email.mbox \ 130 | --init=[True | False] \ 131 | --index-bodies=True \ 132 | --index-bodies-ignore-content-types=application,image \ 133 | --index-bodies-html-parser=html5lib \ 134 | --index-name=comm_data 135 | ``` 136 | 137 | Setting `--init=True` will delete and re-create the `comm_data` index. Setting `--init=False` will retain whatever data already exists. 138 | 139 | The console will log output of what is going on. When the system is booted up, you can bring up a web browser on your desktop and go to *http://localhost:5601* to start using Kibana to analyze your data. *Note: if running docker toolbox, 'localhost' might not work; execute a `docker-machine env default` to determine your docker host's IP address, then go to http://[machine-ip]:5601* 140 | 141 | On the first screen that says `Configure an index pattern`, in the field labeled `Index name or pattern` type `comm_data`. You will then see the `date_ts` field auto-selected; hit the `Create` button. From there Kibana is ready to use! 142 | 143 | Launching does several things in the following order: 144 | 145 | 1. Starts ElasticSearch (where your indexed emails are stored) 146 | 2. Starts Kibana (the user-interface to query the index) 147 | 3.
Starts the mbox importer 148 | 149 | When the mbox importer is running, you will see entries like the following in the logs as the system does its work importing your mail from the mbox file: 150 | 151 | ``` 152 | ... 153 | [I 170825 18:46:53 index_emails:96] Upload: OK - upload took: 467ms, total messages uploaded: 1000 154 | [I 170825 18:48:23 index_emails:96] Upload: OK - upload took: 287ms, total messages uploaded: 2000 155 | ... 156 | ``` 157 | 158 | ## Toolbox MBOX import options 159 | 160 | When running the `comms-analyzer-toolbox` image, one of the arguments is to invoke the [elasticsearch-gmail](https://github.com/bitsofinfo/elasticsearch-gmail) script, which takes the following arguments. You can adjust the `docker run` command above to pass the following flags as you please: 161 | 162 | ``` 163 | Usage: /toolbox/elasticsearch-gmail/src/index_emails.py [OPTIONS] 164 | 165 | Options: 166 | 167 | --help show this help information 168 | 169 | /toolbox/elasticsearch-gmail/src/index_emails.py options: 170 | 171 | --batch-size Elasticsearch bulk index batch size (default 172 | 500) 173 | --es-url URL of your Elasticsearch node (default 174 | http://localhost:9200) 175 | --index-bodies Will index all body content, stripped of 176 | HTML/CSS/JS etc. Adds fields: 'body', 177 | 'body_size' and 'body_filenames' for any 178 | multi-part attachments (default False) 179 | --index-bodies-html-parser The BeautifulSoup parser to use for 180 | HTML/CSS/JS stripping. Valid values 181 | 'html.parser', 'lxml', 'html5lib' (default 182 | html.parser) 183 | --index-bodies-ignore-content-types 184 | If --index-bodies enabled, optional list of 185 | body 'Content-Type' header keywords to match 186 | to ignore and skip decoding/indexing. For 187 | all ignored parts, the content type will be 188 | added to the indexed field 189 | 'body_ignored_content_types' (default 190 | application,image) 191 | --index-name Name of the index to store your messages 192 | (default gmail) 193 | --infile The mbox input file 194 | 195 | --init Force deleting and re-initializing the 196 | Elasticsearch index (default False) 197 | --num-of-shards Number of shards for ES index (default 2) 198 | 199 | --skip Number of messages to skip from the mbox 200 | file (default 0) 201 | 202 | ``` 203 | 204 | ## MBOX import expected warnings 205 | 206 | When importing MBOX email data, in the log output you may see warnings/errors like the following. 207 | 208 | They are expected and OK; they are simply warnings about special characters that could not be decoded, etc. 209 | 210 | ``` 211 | ... 212 | /usr/lib/python2.7/site-packages/bs4/__init__.py:282: UserWarning: "https://someurl.com/whatever" looks like a URL. Beautiful Soup is not an HTTP client. You should probably use an HTTP client like requests to get the document behind the URL, and feed that document to Beautiful Soup. 213 | ' that document to Beautiful Soup.' % decoded_markup 214 | [W 170825 18:41:56 dammit:381] Some characters could not be decoded, and were replaced with REPLACEMENT CHARACTER. 215 | [W 170825 18:41:56 dammit:381] Some characters could not be decoded, and were replaced with REPLACEMENT CHARACTER. 216 | ... 217 | ``` 218 | 219 | 220 | ## CSV import summary 221 | 222 | The CSV import tool `csv2es` embedded in the toolbox can import ANY CSV file, not just this example format below.
223 | 224 | For every row of data in a CSV file, each row becomes a separate document in ElasticSearch where all CSV columns are indexed as individual fields. 225 | 226 | For example, each line in the CSV data file below (text messages from an iPhone) imported into the index has the following fields available for searching and analysis in Kibana: 227 | 228 | ``` 229 | "Name","Address","date_ts","Message","Attachment","iMessage" 230 | "Me","+1 555-555-5555","7/17/2016 9:21:39 AM","How are you doing?","","True" 231 | "Joe Smith","+1 555-444-4444","7/17/2016 9:38:56 AM","Pretty good you?","","True" 232 | "Me","+1 555-555-5555","7/17/2016 9:39:02 AM","Great!","","True" 233 | .... 234 | ``` 235 | 236 | * date_ts (epoch_millis timestamp in GMT/UTC) 237 | * name 238 | * address 239 | * message 240 | * attachment 241 | * imessage 242 | 243 | *The above text messages export CSV is just an example.* The `csv2es` tool that is bundled with the toolbox *can import ANY data set you want*, not just the example format above. 244 | 245 | ## Example: Export text messages from iPhone 246 | 247 | Once Docker is available on your system, before you run `comms-analyzer-toolbox` you need to have some data to analyze in CSV format. As an example, below is how to export text messages from an iPhone to a CSV file. 248 | 249 | 1. Export iPhone messages using [iExplorer for Mac or Windows](https://macroplant.com/iexplorer/tutorials/how-to-transfer-and-backup-sms-and-imessages) 250 | 251 | 2. Edit the generated CSV file and change the first row's header value of `"Time"` to `"date_ts"`, then save and exit. 252 | 253 | 3. Take note of the location of your *.csv* file as you will use it below when running the toolbox. 254 | 255 | ## Running: import CSV of text messages for analysis 256 | 257 | Before running the example below, you need [Docker](#dockersetup) installed. 258 | 259 | The example below is specifically for a CSV data file containing text message data exported using [iExplorer](https://macroplant.com/iexplorer). 260 | 261 | *Contents of data.csv* 262 | ``` 263 | "Name","Address","date_ts","Message","Attachment","iMessage" 264 | "Me","+1 555-555-5555","7/17/2016 9:21:39 AM","How are you doing?","","True" 265 | "Joe Smith","+1 555-444-4444","7/17/2016 9:38:56 AM","Pretty good you?","","True" 266 | "Me","+1 555-555-5555","7/17/2016 9:39:02 AM","Great!","","True" 267 | .... 268 | ``` 269 | 270 | *Contents of csvdata.mapping.json* 271 | ``` 272 | { 273 | "dynamic": "true", 274 | "properties": { 275 | "date_ts": {"type": "date" }, 276 | "name": {"type": "string", "index" : "not_analyzed"}, 277 | "address": {"type": "string", "index" : "not_analyzed"}, 278 | "imessage": {"type": "string", "index" : "not_analyzed"} 279 | } 280 | } 281 | ``` 282 | 283 | Bring up a terminal or command prompt on your computer and run the following. Before doing so, you need to replace `PATH/TO/YOUR/data.csv`, `PATH/TO/YOUR/csvdata.mapping.json` and `PATH/TO/ELASTICSEARCH_DATA_DIR` below with the proper paths on your local system as appropriate. 284 | 285 | *Note: if using Docker Toolbox for Windows*: All of the mounted volumes below should live somewhere under your home directory under `c:\Users\[your username]\...` due to permissions issues.
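Once the import below finishes, you can optionally sanity-check what landed in the index directly against the Elasticsearch REST API before opening Kibana (this assumes you also add `-p 9200:9200` to the `docker run` command below so port 9200 is reachable from your host; the `message:hello` search term is just an illustration):

```
# Count how many documents were imported into the comm_data index
curl "http://localhost:9200/comm_data/_count?pretty"

# Peek at a few indexed rows matching a word in the message field
curl "http://localhost:9200/comm_data/_search?q=message:hello&size=3&pretty"
```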
286 | 287 | ``` 288 | docker run --rm -ti -p 5601:5601 \ 289 | -v PATH/TO/YOUR/data.csv:/toolbox/data.csv \ 290 | -v PATH/TO/YOUR/csvdata.mapping.json:/toolbox/csvdata.mapping.json \ 291 | -v PATH/TO/ELASTICSEARCH_DATA_DIR:/toolbox/elasticsearch/data \ 292 | comms-analyzer-toolbox:latest \ 293 | python /toolbox/csv2es/csv2es.py \ 294 | [--existing-index \] 295 | [--delete-index \] 296 | --index-name comm_data \ 297 | --doc-type txtmsg \ 298 | --mapping-file /toolbox/csvdata.mapping.json \ 299 | --import-file /toolbox/data.csv \ 300 | --delimiter ',' \ 301 | --csv-clean-fieldnames \ 302 | --csv-date-field date_ts \ 303 | --csv-date-field-gmt-offset -1 304 | ``` 305 | 306 | If running against a pre-existing `comm_data` index, make sure to include the `--existing-index` flag only. If you want to re-create the `comm_data` index prior to import, include the `--delete-index` flag only. 307 | 308 | The console will log output of what is going on. When the system is booted up, you can bring up a web browser on your desktop and go to *http://localhost:5601* to start using Kibana to analyze your data. *Note: if running docker toolbox, 'localhost' might not work; execute a `docker-machine env default` to determine your docker host's IP address, then go to http://[machine-ip]:5601* 309 | 310 | On the first screen that says `Configure an index pattern`, in the field labeled `Index name or pattern` type `comm_data`. You will then see the `date_ts` field auto-selected; hit the `Create` button. From there Kibana is ready to use! 311 | 312 | Launching does several things in the following order: 313 | 314 | 1. Starts ElasticSearch (where your indexed CSV data is stored) 315 | 2. Starts Kibana (the user-interface to query the index) 316 | 3. Starts the CSV file importer 317 | 318 | When the CSV importer is running, you will see log entries showing its progress as the system imports your data from the CSV file. 319 | 320 | ## Toolbox CSV import options 321 | 322 | When running the `comms-analyzer-toolbox` image, one of the arguments is to invoke the [csv2es](https://github.com/bitsofinfo/csv2es) script, which takes the following arguments. You can adjust the `docker run` command above to pass the following flags as you please: 323 | 324 | ``` 325 | Usage: /toolbox/csv2es/csv2es.py [OPTIONS] 326 | 327 | Bulk import a delimited file into a target Elasticsearch instance. Common 328 | delimited files include things like CSV and TSV.
329 | 330 | Load a CSV file: 331 | csv2es --index-name potatoes --doc-type potato --import-file potatoes.csv 332 | 333 | For a TSV file, note the tab delimiter option 334 | csv2es --index-name tomatoes --doc-type tomato --import-file tomatoes.tsv --tab 335 | 336 | For a nifty pipe-delimited file (delimiters must be one character): 337 | csv2es --index-name pipes --doc-type pipe --import-file pipes.psv --delimiter '|' 338 | 339 | Options: 340 | --index-name TEXT Index name to load data into 341 | [required] 342 | --doc-type TEXT The document type (like user_records) 343 | [required] 344 | --import-file TEXT File to import (or '-' for stdin) 345 | [required] 346 | --mapping-file TEXT JSON mapping file for index 347 | --delimiter TEXT The field delimiter to use, defaults to CSV 348 | --tab Assume tab-separated, overrides delimiter 349 | --host TEXT The Elasticsearch host 350 | (http://127.0.0.1:9200/) 351 | --docs-per-chunk INTEGER The documents per chunk to upload (5000) 352 | --bytes-per-chunk INTEGER The bytes per chunk to upload (100000) 353 | --parallel INTEGER Parallel uploads to send at once, defaults 354 | to 1 355 | --delete-index Delete existing index if it exists 356 | --existing-index Don't create index. 357 | --quiet Minimize console output 358 | --csv-clean-fieldnames Strips double quotes and lower-cases all CSV 359 | header names for proper ElasticSearch 360 | fieldnames 361 | --csv-date-field TEXT The CSV header name that represents a date 362 | string to parsed (via python-dateutil) into 363 | an ElasticSearch epoch_millis 364 | --csv-date-field-gmt-offset INTEGER 365 | The GMT offset for the csv-date-field (i.e. 366 | +/- N hours) 367 | --tags TEXT Custom static key1=val1,key2=val2 pairs to 368 | tag all entries with 369 | --version Show the version and exit. 370 | --help Show this message and exit. 371 | ``` 372 | 373 | ## Running: analyze previously imported data 374 | 375 | Running in this mode will just launch ElasticSearch and Kibana and will not import anything. It just brings up the 376 | toolbox so you can analyze previously imported data that resides in ElasticSearch. 377 | 378 | *Note: if using Docker Toolbox for Windows*: All of the mounted volumes below should live somewhere under your home directory under `c:\Users\[your username]\...` due to permissions issues. 379 | 380 | ``` 381 | docker run --rm -ti -p 5601:5601 \ 382 | -v PATH/TO/ELASTICSEARCH_DATA_DIR:/toolbox/elasticsearch/data \ 383 | comms-analyzer-toolbox:latest \ 384 | analyze-only 385 | ``` 386 | 387 | If you want to control the default ElasticSearch JVM memory heap options, you can do so via 388 | a Docker environment variable, e.g. `-e ES_JAVA_OPTS="-Xmx1g -Xms1g"`.
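For example, to run in analyze-only mode with a 1 GB heap (the same command as above, just adding the environment variable):

```
docker run --rm -ti -p 5601:5601 \
    -e ES_JAVA_OPTS="-Xmx1g -Xms1g" \
    -v PATH/TO/ELASTICSEARCH_DATA_DIR:/toolbox/elasticsearch/data \
    comms-analyzer-toolbox:latest \
    analyze-only
```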
389 | 390 | ## Help/Resources 391 | 392 | ### Gmail 393 | * [Exporting Gmail](https://www.lifewire.com/how-to-export-your-emails-from-gmail-as-mbox-files-1171881) 394 | * [Gmail download data](https://support.google.com/accounts/answer/3024190?hl=en) 395 | 396 | ### iPhone text messages 397 | * [Exporting text messages from iPhone to CSV](https://macroplant.com/iexplorer/tutorials/how-to-transfer-and-backup-sms-and-imessages) 398 | 399 | ### Hotmail/Outlook 400 | 401 | For Hotmail/Outlook, you need to export to PST and then, as a second step, convert to MBOX: 402 | 403 | * https://support.microsoft.com/en-us/help/980534/export-windows-live-mail-email--contacts--and-calendar-data-to-outlook 404 | * https://gallery.technet.microsoft.com/Convert-PST-to-MBOX-25f4bb0e 405 | * http://www.hotmail.googleapps--backup.com/pst 406 | * https://steemit.com/hotmail/@ariyantoooo/how-to-export-hotmail-to-pst 407 | * http://www.techhit.com/outlook/convert_outlook_mbox.html 408 | * https://gallery.technet.microsoft.com/office/PST-to-MBOX-Converter-to-e5ae03ae 409 | 410 | ### Kibana, graphs, searching 411 | * [Kibana 5 tutorial](https://www.youtube.com/watch?v=mMhnGjp8oOI) 412 | * [Kibana 101](https://www.elastic.co/webinars/getting-started-kibana?baymax=default&elektra=docs&storm=top-video) 413 | * [Kibana getting started](https://www.elastic.co/guide/en/kibana/current/getting-started.html) 414 | * [Kibana introduction](https://www.timroes.de/2016/10/23/kibana5-introduction/) 415 | * [Kibana logz.io tutorial](https://logz.io/blog/kibana-tutorial/) 416 | * [Kibana search syntax](https://www.elastic.co/guide/en/kibana/current/search.html) 417 | 418 | 419 | ## Security/Privacy 420 | 421 | Using this tool is completely local to whatever machine you are running it on (i.e. your Docker host). In the case of running it on your laptop or desktop computer, it's 100% local. 422 | 423 | Data is not uploaded or transferred anywhere. 424 | 425 | The data does not go anywhere other than to local disk on the Docker host this is running on. 426 | 427 | To completely remove the data analyzed, you can `docker rm -f [container-id]` of the `comms-analyzer-toolbox` container running on your machine. 428 | 429 | If you mounted the elasticsearch data directory via a volume on the host (i.e. `-v PATH/TO/ELASTICSEARCH_DATA_DIR:/toolbox/elasticsearch/data`), that local directory is where all the indexed data resides on disk. 430 | -------------------------------------------------------------------------------- /docs/diag1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bitsofinfo/comms-analyzer-toolbox/e4e78dfedc192e22ad7b83474f88fe5e29793477/docs/diag1.png -------------------------------------------------------------------------------- /docs/diag2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bitsofinfo/comms-analyzer-toolbox/e4e78dfedc192e22ad7b83474f88fe5e29793477/docs/diag2.png -------------------------------------------------------------------------------- /entrypoint.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | if [ -z "$ES_JAVA_OPTS" ]; then 4 | # if not set, provide a default heap (the settings formerly in jvm.options were commented out; see Dockerfile) 5 | export ES_JAVA_OPTS="-Xmx2g -Xms2g" 6 | else 7 | echo 8 | echo "using: ES_JAVA_OPTS=$ES_JAVA_OPTS" 9 | fi 10 | 11 | echo 12 | echo "Starting ElasticSearch....please wait"
13 | echo 14 | su -c "export ES_JAVA_OPTS='$ES_JAVA_OPTS'; nohup /toolbox/elasticsearch/bin/elasticsearch -d -Enetwork.host=0.0.0.0 &>/toolbox/elasticsearch/logs/elasticsearch.log &" -s /bin/bash elasticsearch 15 | sleep 10 16 | timeout 30 tail -f /toolbox/elasticsearch/logs/elasticsearch.log 17 | 18 | echo 19 | echo "Starting Kibana....please wait" 20 | echo 21 | su -c "nohup /toolbox/kibana/bin/kibana -c /toolbox/kibana.yml &>/toolbox/kibana/kibana.log &" -s /bin/bash elasticsearch 22 | sleep 5 23 | timeout 30 tail -f /toolbox/kibana/kibana.log 24 | 25 | 26 | set -e 27 | 28 | #command="$1 $2 $3"; 29 | command="$1" 30 | script="$2" 31 | 32 | if [[ "$command" == "python" && "$script" == "/toolbox/elasticsearch-gmail/src/index_emails.py" ]]; then 33 | echo 34 | echo "Launching MBOX email indexer...."; 35 | echo 36 | 37 | # launch it, passing along all arguments after 'python <script>' 38 | args=( "$@" ) 39 | python2 /toolbox/elasticsearch-gmail/src/index_emails.py "${args[@]:2}" 40 | 41 | echo "" 42 | echo "MBOX email indexing is complete!" 43 | echo "" 44 | 45 | elif [[ "$command" == "python" && "$script" == "/toolbox/csv2es/csv2es.py" ]]; then 46 | echo 47 | echo "Launching CSV indexer...."; 48 | echo 49 | 50 | args=( "$@" ) 51 | python2 /toolbox/csv2es/csv2es.py "${args[@]:2}" 52 | 53 | echo "" 54 | echo "CSV indexing is complete!" 55 | echo "" 56 | 57 | elif [[ "$command" == "analyze-only" ]]; then 58 | echo 59 | echo "System started in analyze-only mode"; 60 | echo 61 | 62 | else 63 | echo 64 | echo "WARN: You should start with one of the following commands: " 65 | echo " 1. 'python /toolbox/elasticsearch-gmail/src/index_emails.py'"; 66 | echo " 2. 'python /toolbox/csv2es/csv2es.py'"; 67 | echo " 3. 'analyze-only' (default)"; 68 | 69 | echo 70 | echo "System started in analyze-only mode"; 71 | echo 72 | fi 73 | 74 | echo 75 | echo "ElasticSearch and Kibana processes....." 76 | ps aux | grep 'java\|kibana' 77 | 78 | echo 79 | echo 80 | echo "In your web browser go to http://localhost:5601" 81 | echo "" 82 | echo "On the first screen that says 'Configure an index pattern', in the field labeled 'Index name or pattern' type the index name you imported into (e.g. 'comm_data')" 83 | echo "you will then see the 'date_ts' field auto-selected, then hit the 'Create' button. From there Kibana is ready to use!" 84 | echo "" 85 | echo "Kibana 5 tutorial: https://www.youtube.com/watch?v=mMhnGjp8oOI" 86 | echo "" 87 | echo "Note: if running docker toolbox, 'localhost' above might not work, execute a 'docker-machine env default'" 88 | echo "to determine your docker host's IP address, then go to http://[machine-ip]:5601" 89 | echo 90 | echo "To quit the entire engine type ^C (ctrl C)" 91 | echo "" 92 | 93 | while true; do sleep 60; done 94 | -------------------------------------------------------------------------------- /kibana.yml: -------------------------------------------------------------------------------- 1 | server.host: "0.0.0.0" 2 | --------------------------------------------------------------------------------
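For reference, `server.host: "0.0.0.0"` makes Kibana listen on all container interfaces so that Docker port publishing (`-p 5601:5601`) works. If you want to experiment with other Kibana settings, one possible approach (a sketch, not something the project documents) is to bind-mount your own file over the bundled one, since the entrypoint starts Kibana with `-c /toolbox/kibana.yml`:

```
docker run --rm -ti -p 5601:5601 \
    -v PATH/TO/YOUR/kibana.yml:/toolbox/kibana.yml \
    -v PATH/TO/ELASTICSEARCH_DATA_DIR:/toolbox/elasticsearch/data \
    comms-analyzer-toolbox:latest \
    analyze-only
```

Any custom kibana.yml should keep the `server.host: "0.0.0.0"` line so Kibana stays reachable from outside the container.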