├── LICENSE ├── README.md ├── conf_files ├── bro │ ├── README.md │ ├── bro-conn_log.conf │ ├── bro-dns_log.conf │ ├── bro-files_log.conf │ ├── bro-http_log.conf │ ├── bro-ids_logstash.conf │ ├── bro-notice_log.conf │ ├── bro-ssh_log.conf │ ├── bro-ssl_log.conf │ ├── bro-weird_log.conf │ └── bro-x509_log.conf ├── log2timeline │ └── logstash-log2timeline.conf └── web_logs │ ├── logstash-apache-combined.conf │ ├── logstash-apache-common.conf │ ├── logstash-iis6.conf │ ├── logstash-iis7.conf │ └── logstash-iis8.conf ├── dictionaries ├── logstash-bro-conn-log.dict ├── logstash-ftp-status-codes.dict └── logstash-http-status-codes.dict ├── grok_patterns └── bro-ids └── type_mappings ├── log2timeline.type └── mhn-hpfeed.type /LICENSE: -------------------------------------------------------------------------------- 1 | The MIT License (MIT) 2 | 3 | Copyright (c) 2014 505Forensics 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy of 6 | this software and associated documentation files (the "Software"), to deal in 7 | the Software without restriction, including without limitation the rights to 8 | use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of 9 | the Software, and to permit persons to whom the Software is furnished to do so, 10 | subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS 17 | FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR 18 | COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER 19 | IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN 20 | CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 21 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | logstash-dfir 2 | ============= 3 | 4 | Logstash configuration files for analyzing various types of logs. These configuration files are provided to analyze various types of log files using logstash, elasticsearch, and kibana. 5 | 6 | Whether you are running a full-blown setup of ElasticSearch, Kibana, and log shippers, or a single instance for rapid analysis, these configuration files will help you quickly parse various log files found on system images. 
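For example, a quick one-off run against a set of Apache logs might look like the following (a minimal sketch: it assumes Logstash is installed under /opt/logstash, as in the Bro notes below, and that you have already edited the `path` entries in the chosen config file to point at your own logs):

    sudo /opt/logstash/bin/logstash -f conf_files/web_logs/logstash-apache-combined.conf

Once the events are indexed into Elasticsearch, they can be searched and visualized in Kibana.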
7 | 8 | Logstash 9 | ============= 10 | [Logstash Website](http://www.logstash.net) 11 | 12 | Other Resources 13 | ============= 14 | [sysforensics GitHub](https://github.com/sysforensics/LogstashConfigs) 15 | 16 | Related Posts 17 | ============= 18 | [I'll take some Elasticsearch/Kibana with my Plaso (Windows edition)](http://blog.kiddaland.net/2014/06/ill-take-some-elasticsearchkibana-with.html) 19 | 20 | [Finding the Needle in the Haystack with ELK](https://digital-forensics.sans.org/summit-archives/dfirprague14/Finding_the_Needle_in_the_Haystack_with_FLK_Christophe_Vandeplas.pdf) 21 | 22 | [Rapid Log Analysis](http://www.505forensics.com/rapid-log-analysis/) 23 | 24 | [Do you even Bro, bro?](http://www.505forensics.com/do-you-even-bro-bro/) 25 | 26 | [Utilizing Dictionaries with Logstash](http://www.505forensics.com/utilizing-dictionaries-with-logstash/) 27 | 28 | Changelog 29 | ============= 30 | 07 Jan 2015 - Uploaded logstash dictionaries for HTTP, FTP, and Bro IDS conn log status codes 31 | 32 | 04 Sep 2014 - Uploaded Bro IDS logs; thanks to team at http://www.appliednsm.com for laying the groundwork 33 | 34 | 02 Mar 2014 - Added log2timeline logstash config 35 | 36 | 01 Mar 2014 - Added apache-combined logstash config 37 | 38 | 22 Feb 2014 - Repository created; uploaded apache-common logstash config. 39 | -------------------------------------------------------------------------------- /conf_files/bro/README.md: -------------------------------------------------------------------------------- 1 | ## Logstash 2 | 3 | sudo nano /etc/logstash/conf.d/bro-conn_log.conf 4 | sudo -u logstash /opt/logstash/bin/logstash -f /etc/logstash/conf.d/bro-conn_log.conf --debug 5 | sudo -u logstash /opt/logstash/bin/logstash agent -f /etc/logstash/conf.d/bro-conn_log.conf --configtest 6 | sudo -u logstash /opt/logstash/bin/logstash agent -f /etc/logstash/conf.d --configtest 7 | 8 | ## Bro 9 | 10 | tail -f /nsm/bro/logs/current/conn.log 11 | 12 | ## Print out bro log headers 13 | 14 | cd /nsm/bro/logs/current/ 15 | grep -E "^#fields" *.log 16 | 17 | ## Pull down conf files 18 | 19 | cd /etc/logstash/conf.d/ 20 | sudo wget -N https://raw.githubusercontent.com/timmolter/logstash-dfir/master/conf_files/bro/bro-conn_log.conf 21 | sudo wget -N https://raw.githubusercontent.com/timmolter/logstash-dfir/master/conf_files/bro/bro-dns_log.conf 22 | sudo wget -N https://raw.githubusercontent.com/timmolter/logstash-dfir/master/conf_files/bro/bro-files_log.conf 23 | sudo wget -N https://raw.githubusercontent.com/timmolter/logstash-dfir/master/conf_files/bro/bro-http_log.conf 24 | sudo wget -N https://raw.githubusercontent.com/timmolter/logstash-dfir/master/conf_files/bro/bro-notice_log.conf 25 | sudo wget -N https://raw.githubusercontent.com/timmolter/logstash-dfir/master/conf_files/bro/bro-ssh_log.conf 26 | sudo wget -N https://raw.githubusercontent.com/timmolter/logstash-dfir/master/conf_files/bro/bro-ssl_log.conf 27 | sudo wget -N https://raw.githubusercontent.com/timmolter/logstash-dfir/master/conf_files/bro/bro-weird_log.conf 28 | sudo wget -N https://raw.githubusercontent.com/timmolter/logstash-dfir/master/conf_files/bro/bro-x509_log.conf 29 | -------------------------------------------------------------------------------- /conf_files/bro/bro-conn_log.conf: -------------------------------------------------------------------------------- 1 | ######################## 2 | # logstash Configuration Files - Bro IDS Logs 3 | # Created by 505Forensics (http://www.505forensics.com) 4 | # MIT License, so do what you 
want with it! 5 | # 6 | # For use with logstash, elasticsearch, and kibana to analyze logs 7 | # 8 | # Usage: Reference this config file for your instance of logstash to parse Bro conn logs 9 | # 10 | # Limitations: Standard Bro log delimiter is tab. 11 | # 12 | # Dependencies: Utilizing the logstash 'translate' filter requires having the logstash contrib plugins added, which are community supported and not part of the official release. Visit logstash.net to find out how to install these 13 | # 14 | ####################### 15 | 16 | input { 17 | file { 18 | type => "bro-conn_log" 19 | start_position => "end" 20 | sincedb_path => "/var/tmp/.bro_conn_sincedb" 21 | 22 | #Edit the following path to reflect the location of your log files. You can also change the extension if you use something else 23 | path => "/nsm/bro/logs/current/conn.log" 24 | } 25 | } 26 | 27 | filter { 28 | 29 | #Let's get rid of those header lines; they begin with a hash 30 | if [message] =~ /^#/ { 31 | drop { } 32 | } 33 | 34 | #Now, using the csv filter, we can define the Bro log fields 35 | if [type] == "bro-conn_log" { 36 | csv { 37 | columns => ["ts","uid","id.orig_h","id.orig_p","id.resp_h","id.resp_p","proto","service","duration","orig_bytes","resp_bytes","conn_state","local_orig","missed_bytes","history","orig_pkts","orig_ip_bytes","resp_pkts","resp_ip_bytes","tunnel_parents"] 38 | 39 | #If you use a custom delimiter, change the following value in between the quotes to your delimiter. Otherwise, insert a literal tab in between the two quotes; on your logstash system, use a text editor like nano that doesn't convert tabs to spaces. 40 | separator => "	" 41 | } 42 | 43 | #Let's convert our timestamp into the 'ts' field, so we can use Kibana features natively 44 | date { 45 | match => [ "ts", "UNIX" ] 46 | } 47 | 48 | # add geoip attributes 49 | geoip { 50 | source => "id.orig_h" 51 | target => "orig_geoip" 52 | } 53 | geoip { 54 | source => "id.resp_h" 55 | target => "resp_geoip" 56 | } 57 | 58 | #The following makes use of the translate filter (logstash contrib) to convert conn_state into human text.
Saves having to look up values for packet introspection 59 | translate { 60 | field => "conn_state" 61 | 62 | destination => "conn_state_full" 63 | 64 | dictionary => [ 65 | "S0", "Connection attempt seen, no reply", 66 | "S1", "Connection established, not terminated", 67 | "S2", "Connection established and close attempt by originator seen (but no reply from responder)", 68 | "S3", "Connection established and close attempt by responder seen (but no reply from originator)", 69 | "SF", "Normal SYN/FIN completion", 70 | "REJ", "Connection attempt rejected", 71 | "RSTO", "Connection established, originator aborted (sent a RST)", 72 | "RSTR", "Established, responder aborted", 73 | "RSTOS0", "Originator sent a SYN followed by a RST, we never saw a SYN-ACK from the responder", 74 | "RSTRH", "Responder sent a SYN ACK followed by a RST, we never saw a SYN from the (purported) originator", 75 | "SH", "Originator sent a SYN followed by a FIN, we never saw a SYN ACK from the responder (hence the connection was 'half' open)", 76 | "SHR", "Responder sent a SYN ACK followed by a FIN, we never saw a SYN from the originator", 77 | "OTH", "No SYN seen, just midstream traffic (a 'partial connection' that was not later closed)" 78 | ] 79 | } 80 | 81 | mutate { 82 | convert => [ "id.orig_p", "integer" ] 83 | convert => [ "id.resp_p", "integer" ] 84 | convert => [ "orig_bytes", "integer" ] 85 | convert => [ "duration", "float" ] 86 | convert => [ "resp_bytes", "integer" ] 87 | convert => [ "missed_bytes", "integer" ] 88 | convert => [ "orig_pkts", "integer" ] 89 | convert => [ "orig_ip_bytes", "integer" ] 90 | convert => [ "resp_pkts", "integer" ] 91 | convert => [ "resp_ip_bytes", "integer" ] 92 | rename => [ "id.orig_h", "id_orig_host" ] 93 | rename => [ "id.orig_p", "id_orig_port" ] 94 | rename => [ "id.resp_h", "id_resp_host" ] 95 | rename => [ "id.resp_p", "id_resp_port" ] 96 | } 97 | } 98 | } 99 | 100 | output { 101 | # stdout { codec => rubydebug } 102 | elasticsearch { hosts => localhost } 103 | } 104 | -------------------------------------------------------------------------------- /conf_files/bro/bro-dns_log.conf: -------------------------------------------------------------------------------- 1 | ######################## 2 | # logstash Configuration Files - Bro IDS Logs 3 | # Created by 505Forensics (http://www.505forensics.com) 4 | # MIT License, so do what you want with it! 5 | # 6 | # For use with logstash, elasticsearch, and kibana to analyze logs 7 | # 8 | # Usage: Reference this config file for your instance of logstash to parse Bro dns logs 9 | # 10 | # Limitations: Standard bro log delimiter is tab. 11 | # 12 | ####################### 13 | 14 | input { 15 | file { 16 | type => "bro-dns_log" 17 | start_position => "end" 18 | sincedb_path => "/var/tmp/.bro_dns_sincedb" 19 | 20 | #Edit the following path to reflect the location of your log files. 
You can also change the extension if you use something else 21 | path => "/nsm/bro/logs/current/dns.log" 22 | } 23 | } 24 | 25 | filter { 26 | 27 | 28 | 29 | #Let's get rid of those header lines; they begin with a hash 30 | if [message] =~ /^#/ { 31 | drop { } 32 | } 33 | 34 | #Now, using the csv filter, we can define the Bro log fields 35 | if [type] == "bro-dns_log" { 36 | csv { 37 | 38 | #dns.log:#fields ts uid id.orig_h id.orig_p id.resp_h id.resp_p proto trans_id query qclass qclass_name qtype qtype_name rcode rcode_name AA TC RD RA Z answers TTLs rejected 39 | columns => ["ts","uid","id.orig_h","id.orig_p","id.resp_h","id.resp_p","proto","trans_id","query","qclass","qclass_name","qtype","qtype_name","rcode","rcode_name","AA","TC","RD","RA","Z","answers","TTLs","rejected"] 40 | 41 | #If you use a custom delimiter, change the following value in between the quotes to your delimiter. Otherwise, leave the next line alone. 42 | separator => " " 43 | } 44 | 45 | #Let's convert our timestamp into the 'ts' field, so we can use Kibana features natively 46 | date { 47 | match => [ "ts", "UNIX" ] 48 | } 49 | 50 | # add geoip attributes 51 | geoip { 52 | source => "id.orig_h" 53 | target => "orig_geoip" 54 | } 55 | geoip { 56 | source => "id.resp_h" 57 | target => "resp_geoip" 58 | } 59 | 60 | mutate { 61 | convert => [ "id.orig_p", "integer" ] 62 | convert => [ "id.resp_p", "integer" ] 63 | convert => [ "trans_id", "integer" ] 64 | convert => [ "qclass", "integer" ] 65 | convert => [ "qtype", "integer" ] 66 | convert => [ "rcode", "integer" ] 67 | rename => [ "id.orig_h", "id_orig_host" ] 68 | rename => [ "id.orig_p", "id_orig_port" ] 69 | rename => [ "id.resp_h", "id_resp_host" ] 70 | rename => [ "id.resp_p", "id_resp_port" ] 71 | } 72 | } 73 | } 74 | 75 | output { 76 | # stdout { codec => rubydebug } 77 | elasticsearch { hosts => localhost } 78 | } 79 | -------------------------------------------------------------------------------- /conf_files/bro/bro-files_log.conf: -------------------------------------------------------------------------------- 1 | ######################## 2 | # logstash Configuration Files - Bro IDS Logs 3 | # Created by 505Forensics (http://www.505forensics.com) 4 | # MIT License, so do what you want with it! 5 | # 6 | # For use with logstash, elasticsearch, and kibana to analyze logs 7 | # 8 | # Usage: Reference this config file for your instance of logstash to parse Bro files logs 9 | # 10 | # Limitations: Standard bro log delimiter is tab. 11 | # 12 | ####################### 13 | 14 | input { 15 | file { 16 | type => "bro-files_log" 17 | start_position => "end" 18 | sincedb_path => "/var/tmp/.bro_files_sincedb" 19 | 20 | #Edit the following path to reflect the location of your log files. 
You can also change the extension if you use something else 21 | path => "/nsm/bro/logs/current/files.log" 22 | } 23 | } 24 | 25 | filter { 26 | 27 | #Let's get rid of those header lines; they begin with a hash 28 | if [message] =~ /^#/ { 29 | drop { } 30 | } 31 | 32 | #Now, using the csv filter, we can define the Bro log fields 33 | if [type] == "bro-files_log" { 34 | csv { 35 | 36 | #files.log:#fields ts fuid tx_hosts rx_hosts conn_uids source depth analyzers mime_type filename duration local_orig is_orig seen_bytes total_bytes missing_bytes overflow_bytes timedout parent_fuid md5 sha1 sha256 extracted 37 | columns => ["ts","fuid","tx_hosts","rx_hosts","conn_uids","source","depth","analyzers","mime_type","filename","duration","local_orig","is_orig","seen_bytes","total_bytes","missing_bytes","overflow_bytes","timedout","parent_fuid","md5","sha1","sha256","extracted"] 38 | 39 | #If you use a custom delimiter, change the following value in between the quotes to your delimiter. Otherwise, leave the next line alone. 40 | separator => " " 41 | } 42 | 43 | #Let's convert our timestamp into the 'ts' field, so we can use Kibana features natively 44 | date { 45 | match => [ "ts", "UNIX" ] 46 | } 47 | 48 | # add geoip attributes 49 | geoip { 50 | source => "tx_hosts" 51 | target => "tx_hosts_geoip" 52 | } 53 | geoip { 54 | source => "rx_hosts" 55 | target => "rx_hosts_geoip" 56 | } 57 | 58 | mutate { 59 | convert => [ "duration", "float" ] 60 | convert => [ "depth", "integer" ] 61 | convert => [ "seen_bytes", "integer" ] 62 | convert => [ "total_bytes", "integer" ] 63 | convert => [ "missing_bytes", "integer" ] 64 | convert => [ "overflow_bytes", "integer" ] 65 | } 66 | } 67 | } 68 | 69 | output { 70 | # stdout { codec => rubydebug } 71 | elasticsearch { hosts => localhost } 72 | } 73 | -------------------------------------------------------------------------------- /conf_files/bro/bro-http_log.conf: -------------------------------------------------------------------------------- 1 | ######################## 2 | # logstash Configuration Files - Bro IDS Logs 3 | # Created by 505Forensics (http://www.505forensics.com) 4 | # MIT License, so do what you want with it! 5 | # 6 | # For use with logstash, elasticsearch, and kibana to analyze logs 7 | # 8 | # Usage: Reference this config file for your instance of logstash to parse Bro http logs 9 | # 10 | # Limitations: Standard bro log delimiter is tab. 11 | # 12 | ####################### 13 | 14 | input { 15 | file { 16 | type => "bro-http_log" 17 | start_position => "end" 18 | sincedb_path => "/var/tmp/.bro_http_sincedb" 19 | 20 | #Edit the following path to reflect the location of your log files. 
You can also change the extension if you use something else 21 | path => "/nsm/bro/logs/current/http.log" 22 | } 23 | } 24 | 25 | filter { 26 | 27 | #Let's get rid of those header lines; they begin with a hash 28 | if [message] =~ /^#/ { 29 | drop { } 30 | } 31 | 32 | #Now, using the csv filter, we can define the Bro log fields 33 | if [type] == "bro-http_log" { 34 | csv { 35 | 36 | #http.log:#fields ts uid id.orig_h id.orig_p id.resp_h id.resp_p trans_depth method host uri referrer user_agent request_body_len response_body_len status_code status_msg info_code info_msg filename tags username password proxied orig_fuids orig_mime_types resp_fuids resp_mime_types 37 | columns => ["ts","uid","id.orig_h","id.orig_p","id.resp_h","id.resp_p","trans_depth","method","host","uri","referrer","user_agent","request_body_len","response_body_len","status_code","status_msg","info_code","info_msg","filename","tags","username","password","proxied","orig_fuids","orig_mime_types","resp_fuids","resp_mime_types"] 38 | 39 | #If you use a custom delimiter, change the following value in between the quotes to your delimiter. Otherwise, leave the next line alone. 40 | separator => " " 41 | } 42 | 43 | #Let's convert our timestamp into the 'ts' field, so we can use Kibana features natively 44 | date { 45 | match => [ "ts", "UNIX" ] 46 | } 47 | 48 | # add geoip attributes 49 | geoip { 50 | source => "id.orig_h" 51 | target => "orig_geoip" 52 | } 53 | geoip { 54 | source => "id.resp_h" 55 | target => "resp_geoip" 56 | } 57 | 58 | mutate { 59 | convert => [ "id.orig_p", "integer" ] 60 | convert => [ "id.resp_p", "integer" ] 61 | convert => [ "trans_depth", "integer" ] 62 | convert => [ "request_body_len", "integer" ] 63 | convert => [ "response_body_len", "integer" ] 64 | convert => [ "status_code", "integer" ] 65 | convert => [ "info_code", "integer" ] 66 | rename => [ "id.orig_h", "id_orig_host" ] 67 | rename => [ "id.orig_p", "id_orig_port" ] 68 | rename => [ "id.resp_h", "id_resp_host" ] 69 | rename => [ "id.resp_p", "id_resp_port" ] 70 | } 71 | } 72 | } 73 | 74 | output { 75 | # stdout { codec => rubydebug } 76 | elasticsearch { hosts => localhost } 77 | } 78 | -------------------------------------------------------------------------------- /conf_files/bro/bro-ids_logstash.conf: -------------------------------------------------------------------------------- 1 | # Bro-IDS Logstash parser 2 | 3 | # Bro-IDS Logstash parser 4 | # Parts of this taken from http://www.appliednsm.com/wp-content/uploads/logstash-SObro22-parse.conf_.txt 5 | # With help from http://brostash.herokuapp.com/ 6 | 7 | #Logs being parsed: 8 | #communication.log 9 | #conn.log 10 | #dns.log 11 | #files.log 12 | #http.log 13 | #notice.log 14 | #ssh.log 15 | #ssl.log 16 | #weird.log 17 | #x509.log 18 | 19 | input { 20 | 21 | file { 22 | type => "BRO_communicationlog" 23 | path => "/nsm/bro/logs/current/communication.log" 24 | sincedb_path => "/var/tmp/.bro_communication_sincedb" 25 | start_position => "end" 26 | } 27 | 28 | file { 29 | type => "BRO_connlog" 30 | path => "/nsm/bro/logs/current/conn.log" 31 | sincedb_path => "/var/tmp/.bro_conn_sincedb" 32 | start_position => "end" 33 | } 34 | 35 | file { 36 | type => "BRO_dnslog" 37 | path => "/nsm/bro/logs/current/dns.log" 38 | sincedb_path => "/var/tmp/.bro_dns_sincedb" 39 | start_position => "end" 40 | } 41 | 42 | file { 43 | type => "BRO_fileslog" 44 | path => "/nsm/bro/logs/current/files.log" 45 | sincedb_path => "/var/tmp/.bro_files_sincedb" 46 | start_position => "end" 47 | } 48 | 49 | file { 
50 | type => "BRO_httplog" 51 | path => "/nsm/bro/logs/current/http.log" 52 | sincedb_path => "/var/tmp/.bro_http_sincedb" 53 | start_position => "end" 54 | } 55 | 56 | file { 57 | type => "BRO_noticelog" 58 | path => "/nsm/bro/logs/current/notice.log" 59 | sincedb_path => "/var/tmp/.bro_notice_sincedb" 60 | start_position => "end" 61 | } 62 | 63 | file { 64 | type => "BRO_sshlog" 65 | path => "/nsm/bro/logs/current/ssh.log" 66 | sincedb_path => "/var/tmp/.bro_ssh_sincedb" 67 | start_position => "end" 68 | } 69 | 70 | file { 71 | type => "BRO_ssllog" 72 | path => "/nsm/bro/logs/current/ssl.log" 73 | sincedb_path => "/var/tmp/.bro_ssl_sincedb" 74 | start_position => "end" 75 | } 76 | 77 | file { 78 | type => "BRO_weirdlog" 79 | path => "/nsm/bro/logs/current/weird.log" 80 | sincedb_path => "/var/tmp/.bro_weird_sincedb" 81 | start_position => "end" 82 | } 83 | 84 | file { 85 | type => "BRO_x509log" 86 | path => "/nsm/bro/logs/current/x509.log" 87 | sincedb_path => "/var/tmp/.bro_x509_sincedb" 88 | start_position => "end" 89 | } 90 | 91 | } 92 | 93 | filter { 94 | if [message] =~ /^#/ { 95 | drop { } 96 | } 97 | else { 98 | 99 | # BRO_communicationlog ###################### 100 | if [type] == "BRO_communicationlog" { 101 | grok { 102 | match => [ 103 | "message", "%{DATA:ts}\t%{DATA:peer}\t%{DATA:src_name}\t%{DATA:connected_peer_desc}\t%{DATA:connected_peer_addr}\t%{DATA:connected_peer_port}\t%{DATA:level}\t%{DATA:message}" 104 | ] 105 | } 106 | } 107 | 108 | # BRO_connlog ###################### 109 | if [type] == "BRO_connlog" { 110 | grok { 111 | match => [ 112 | "message", "%{DATA:ts}\t%{DATA:uid}\t%{DATA:id.orig_h}\t%{DATA:id.orig_p}\t%{DATA:id.resp_h}\t%{DATA:id.resp_p}\t%{DATA:proto}\t%{DATA:service}\t%{DATA:duration}\t%{DATA:orig_bytes}\t%{DATA:resp_bytes}\t%{DATA:conn_state}\t%{DATA:local_orig}\t%{DATA:missed_bytes}\t%{DATA:history}\t%{DATA:orig_pkts}\t%{DATA:orig_ip_bytes}\t%{DATA:resp_pkts}\t%{DATA:resp_ip_bytes}\t%{DATA:tunnel_parents}" 113 | ] 114 | } 115 | } 116 | 117 | # BRO_dnslog ###################### 118 | if [type] == "BRO_dnslog" { 119 | grok { 120 | match => [ 121 | "message", "%{DATA:ts}\t%{DATA:uid}\t%{DATA:id.orig_h}\t%{DATA:id.orig_p}\t%{DATA:id.resp_h}\t%{DATA:id.resp_p}\t%{DATA:proto}\t%{DATA:trans_id}\t%{DATA:query}\t%{DATA:qclass}\t%{DATA:qclass_name}\t%{DATA:qtype}\t%{DATA:qtype_name}\t%{DATA:rcode}\t%{DATA:rcode_name}\t%{DATA:aa}\t%{DATA:tc}\t%{DATA:rd}\t%{DATA:ra}\t%{DATA:z}\t%{DATA:answers}\t%{DATA:ttls}\t%{DATA:rejected}" 122 | ] 123 | } 124 | } 125 | 126 | # BRO_fileslog ###################### 127 | if [type] == "BRO_fileslog" { 128 | grok { 129 | match => [ 130 | "message", "%{DATA:ts}\t%{DATA:fuid}\t%{DATA:tx_hosts}\t%{DATA:rx_hosts}\t%{DATA:conn_uids}\t%{DATA:source}\t%{DATA:depth}\t%{DATA:analyzers}\t%{DATA:mime_type}\t%{DATA:filename}\t%{DATA:duration}\t%{DATA:local_orig}\t%{DATA:is_orig}\t%{DATA:seen_bytes}\t%{DATA:total_bytes}\t%{DATA:missing_bytes}\t%{DATA:overflow_bytes}\t%{DATA:timedout}\t%{DATA:parent_fuid}\t%{DATA:md5}\t%{DATA:sha1}\t%{DATA:sha256}\t%{DATA:extracted}" 131 | ] 132 | } 133 | } 134 | 135 | # BRO_httplog ###################### 136 | if [type] == "BRO_httplog" { 137 | grok { 138 | match => [ 139 | "message", 
"%{DATA:ts}\t%{DATA:uid}\t%{DATA:id.orig_h}\t%{DATA:id.orig_p}\t%{DATA:id.resp_h}\t%{DATA:id.resp_p}\t%{DATA:trans_depth}\t%{DATA:method}\t%{DATA:host}\t%{DATA:uri}\t%{DATA:referrer}\t%{DATA:user_agent}\t%{DATA:request_body_len}\t%{DATA:response_body_len}\t%{DATA:status_code}\t%{DATA:status_msg}\t%{DATA:info_code}\t%{DATA:info_msg}\t%{DATA:filename}\t%{DATA:tags}\t%{DATA:username}\t%{DATA:password}\t%{DATA:proxied}\t%{DATA:orig_fuids}\t%{DATA:orig_mime_types}\t%{DATA:resp_fuids}\t%{DATA:resp_mime_types}" 140 | ] 141 | } 142 | } 143 | 144 | # BRO_noticelog ###################### 145 | if [type] == "BRO_noticelog" { 146 | grok { 147 | match => [ 148 | "message", "%{DATA:ts}\t%{DATA:uid}\t%{DATA:id.orig_h}\t%{DATA:id.orig_p}\t%{DATA:id.resp_h}\t%{DATA:id.resp_p}\t%{DATA:fuid}\t%{DATA:file_mime_type}\t%{DATA:file_desc}\t%{DATA:proto}\t%{DATA:note}\t%{DATA:msg}\t%{DATA:sub}\t%{DATA:src}\t%{DATA:dst}\t%{DATA:p}\t%{DATA:n}\t%{DATA:peer_descr}\t%{DATA:actions}\t%{DATA:suppress_for}\t%{DATA:dropped}\t%{DATA:remote_location.country_code}\t%{DATA:remote_location.region}\t%{DATA:remote_location.city}\t%{DATA:remote_location.latitude}\t%{DATA:remote_location.longitude}" 149 | ] 150 | } 151 | } 152 | 153 | # BRO_sshlog ###################### 154 | if [type] == "BRO_sshlog" { 155 | grok { 156 | match => [ 157 | "message", "%{DATA:ts}\t%{DATA:uid}\t%{DATA:id.orig_h}\t%{DATA:id.orig_p}\t%{DATA:id.resp_h}\t%{DATA:id.resp_p}\t%{DATA:status}\t%{DATA:direction}\t%{DATA:client}\t%{DATA:server}\t%{DATA:remote_location.country_code}\t%{DATA:remote_location.region}\t%{DATA:remote_location.city}\t%{DATA:remote_location.latitude}\t%{DATA:remote_location.longitude}" 158 | ] 159 | } 160 | } 161 | 162 | # BRO_ssllog ###################### 163 | if [type] == "BRO_ssllog" { 164 | grok { 165 | match => [ 166 | "message", "%{DATA:ts}\t%{DATA:uid}\t%{DATA:id.orig_h}\t%{DATA:id.orig_p}\t%{DATA:id.resp_h}\t%{DATA:id.resp_p}\t%{DATA:version}\t%{DATA:cipher}\t%{DATA:curve}\t%{DATA:server_name}\t%{DATA:session_id}\t%{DATA:last_alert}\t%{DATA:established}\t%{DATA:cert_chain_fuids}\t%{DATA:client_cert_chain_fuids}\t%{DATA:subject}\t%{DATA:issuer}\t%{DATA:client_subject}\t%{DATA:client_issuer}\t%{DATA:validation_status}" 167 | ] 168 | } 169 | } 170 | 171 | # BRO_weirdlog ###################### 172 | if [type] == "BRO_weirdlog" { 173 | grok { 174 | match => [ 175 | "message", "%{DATA:ts}\t%{DATA:uid}\t%{DATA:id.orig_h}\t%{DATA:id.orig_p}\t%{DATA:id.resp_h}\t%{DATA:id.resp_p}\t%{DATA:name}\t%{DATA:addl}\t%{DATA:notice}\t%{DATA:peer}" 176 | ] 177 | } 178 | } 179 | 180 | # BRO_x509log ###################### 181 | if [type] == "BRO_x509log" { 182 | grok { 183 | match => [ 184 | "message", "%{DATA:ts}\t%{DATA:id}\t%{DATA:certificate.version}\t%{DATA:certificate.serial}\t%{DATA:certificate.subject}\t%{DATA:certificate.issuer}\t%{DATA:certificate.not_valid_before}\t%{DATA:certificate.not_valid_after}\t%{DATA:certificate.key_alg}\t%{DATA:certificate.sig_algcertificate.key_type}\t%{DATA:certificate.key_length}\t%{DATA:certificate.exponent}\t%{DATA:certificate.curve}\t%{DATA:san.dns}\t%{DATA:san.uri}\t%{DATA:san.email}\t%{DATA:san.ip}\t%{DATA:basic_constraints.ca}\t%{DATA:basic_constraints.path_len}" 185 | ] 186 | } 187 | } 188 | 189 | } 190 | date { 191 | match => [ "ts", "UNIX" ] 192 | } 193 | } 194 | filter { 195 | if [bro_host] { 196 | mutate { 197 | replace => [ "host", "%{bro_host}" ] 198 | } 199 | } 200 | } 201 | filter { 202 | if [type] == "BRO_connlog" { 203 | #The following makes use of the translate filter (logstash contrib) to 
convert conn_state into human text. Saves having to look up values for packet introspection 204 | translate { 205 | field => "conn_state" 206 | destination => "conn_state_full" 207 | dictionary => [ 208 | "S0", "Connection attempt seen, no reply", 209 | "S1", "Connection established, not terminated", 210 | "S2", "Connection established and close attempt by originator seen (but no reply from responder)", 211 | "S3", "Connection established and close attempt by responder seen (but no reply from originator)", 212 | "SF", "Normal SYN/FIN completion", 213 | "REJ", "Connection attempt rejected", 214 | "RSTO", "Connection established, originator aborted (sent a RST)", 215 | "RSTR", "Established, responder aborted", 216 | "RSTOS0", "Originator sent a SYN followed by a RST, we never saw a SYN-ACK from the responder", 217 | "RSTRH", "Responder sent a SYN ACK followed by a RST, we never saw a SYN from the (purported) originator", 218 | "SH", "Originator sent a SYN followed by a FIN, we never saw a SYN ACK from the responder (hence the connection was 'half' open)", 219 | "SHR", "Responder sent a SYN ACK followed by a FIN, we never saw a SYN from the originator", 220 | "OTH", "No SYN seen, just midstream traffic (a 'partial connection' that was not later closed)" 221 | ] 222 | } 223 | } 224 | } 225 | 226 | filter { 227 | if "BRO" in [type] { 228 | mutate { 229 | add_tag => [ "BRO" ] 230 | } 231 | mutate { 232 | convert => [ "id.orig_p", "integer" ] 233 | convert => [ "id.resp_p", "integer" ] 234 | convert => [ "orig_bytes", "integer" ] 235 | convert => [ "resp_bytes", "integer" ] 236 | convert => [ "missed_bytes", "integer" ] 237 | convert => [ "orig_pkts", "integer" ] 238 | convert => [ "orig_ip_bytes", "integer" ] 239 | convert => [ "resp_pkts", "integer" ] 240 | convert => [ "resp_ip_bytes", "integer" ] 241 | } 242 | } 243 | } 244 | output { 245 | # stdout { codec => rubydebug } 246 | elasticsearch { hosts => localhost } 247 | } 248 | -------------------------------------------------------------------------------- /conf_files/bro/bro-notice_log.conf: -------------------------------------------------------------------------------- 1 | ######################## 2 | # logstash Configuration Files - Bro IDS Logs 3 | # Created by 505Forensics (http://www.505forensics.com) 4 | # MIT License, so do what you want with it! 5 | # 6 | # For use with logstash, elasticsearch, and kibana to analyze logs 7 | # 8 | # Usage: Reference this config file for your instance of logstash to parse Bro notice logs 9 | # 10 | # Limitations: Standard bro log delimiter is tab. 11 | # 12 | ####################### 13 | 14 | input { 15 | file { 16 | type => "bro-notice_log" 17 | start_position => "end" 18 | sincedb_path => "/var/tmp/.bro_notice_sincedb" 19 | 20 | #Edit the following path to reflect the location of your log files. 
You can also change the extension if you use something else 21 | path => "/nsm/bro/logs/current/notice.log" 22 | } 23 | } 24 | 25 | filter { 26 | 27 | #Let's get rid of those header lines; they begin with a hash 28 | if [message] =~ /^#/ { 29 | drop { } 30 | } 31 | 32 | #Now, using the csv filter, we can define the Bro log fields 33 | if [type] == "bro-notice_log" { 34 | csv { 35 | 36 | #notice.log:#fields ts uid id.orig_h id.orig_p id.resp_h id.resp_p fuid file_mime_type file_desc proto note msg sub src dst p n peer_descr actions suppress_for dropped remote_location.country_code remote_location.region remote_location.city remote_location.latitude remote_location.longitude 37 | columns => ["ts","uid","id.orig_h","id.orig_p","id.resp_h","id.resp_p","fuid","file_mime_type","file_desc","proto","note","msg","sub","src","dst","p","n","peer_descr","actions","suppress_for","dropped","remote_location.country_code","remote_location.region","remote_location.city","remote_location.latitude","remote_location.longitude"] 38 | 39 | #If you use a custom delimiter, change the following value in between the quotes to your delimiter. Otherwise, leave the next line alone. 40 | separator => " " 41 | } 42 | 43 | #Let's convert our timestamp into the 'ts' field, so we can use Kibana features natively 44 | date { 45 | match => [ "ts", "UNIX" ] 46 | } 47 | 48 | # add geoip attributes 49 | geoip { 50 | source => "id.orig_h" 51 | target => "orig_geoip" 52 | } 53 | geoip { 54 | source => "id.resp_h" 55 | target => "resp_geoip" 56 | } 57 | 58 | mutate { 59 | convert => [ "id.orig_p", "integer" ] 60 | convert => [ "id.resp_p", "integer" ] 61 | convert => [ "p", "integer" ] 62 | convert => [ "n", "integer" ] 63 | convert => [ "suppress_for", "float" ] 64 | rename => [ "id.orig_h", "id_orig_host" ] 65 | rename => [ "id.orig_p", "id_orig_port" ] 66 | rename => [ "id.resp_h", "id_resp_host" ] 67 | rename => [ "id.resp_p", "id_resp_port" ] 68 | rename => [ "remote_location.country_code", "remote_location_country_code" ] 69 | rename => [ "remote_location.region", "remote_location_region" ] 70 | rename => [ "remote_location.city", "remote_location_city" ] 71 | rename => [ "remote_location.latitude", "remote_location_latitude" ] 72 | rename => [ "remote_location.longitude", "remote_location_longitude" ] 73 | } 74 | } 75 | } 76 | 77 | output { 78 | # stdout { codec => rubydebug } 79 | elasticsearch { hosts => localhost } 80 | } 81 | -------------------------------------------------------------------------------- /conf_files/bro/bro-ssh_log.conf: -------------------------------------------------------------------------------- 1 | ######################## 2 | # logstash Configuration Files - Bro IDS Logs 3 | # Created by Knowm (http://www.knowm.org) 4 | # MIT License, so do what you want with it! 5 | # 6 | # For use with logstash, elasticsearch, and kibana to analyze logs 7 | # 8 | # Usage: Reference this config file for your instance of logstash to parse Bro ssh logs 9 | # 10 | # Limitations: Standard bro log delimiter is tab. 11 | # 12 | ####################### 13 | 14 | input { 15 | file { 16 | type => "bro-ssh_log" 17 | start_position => "end" 18 | sincedb_path => "/var/tmp/.bro_ssh_sincedb" 19 | 20 | #Edit the following path to reflect the location of your log files. 
You can also change the extension if you use something else 21 | path => "/nsm/bro/logs/current/ssh.log" 22 | } 23 | } 24 | 25 | filter { 26 | 27 | #Let's get rid of those header lines; they begin with a hash 28 | if [message] =~ /^#/ { 29 | drop { } 30 | } 31 | 32 | #Now, using the csv filter, we can define the Bro log fields 33 | if [type] == "bro-ssh_log" { 34 | csv { 35 | 36 | #ssh.log:#fields ts uid id.orig_h id.orig_p id.resp_h id.resp_p status direction client server remote_location.country_code remote_location.region remote_location.city remote_location.latitude remote_location.longitude 37 | columns => ["ts","uid","id.orig_h","id.orig_p","id.resp_h","id.resp_p","status","direction","client","server","remote_location.country_code","remote_location.region","remote_location.city","remote_location.latitude","remote_location.longitude"] 38 | 39 | #If you use a custom delimiter, change the following value in between the quotes to your delimiter. Otherwise, leave the next line alone. 40 | separator => " " 41 | } 42 | 43 | #Let's convert our timestamp into the 'ts' field, so we can use Kibana features natively 44 | date { 45 | match => [ "ts", "UNIX" ] 46 | } 47 | 48 | # add geoip attributes 49 | geoip { 50 | source => "id.orig_h" 51 | target => "orig_geoip" 52 | } 53 | geoip { 54 | source => "id.resp_h" 55 | target => "resp_geoip" 56 | } 57 | 58 | mutate { 59 | convert => [ "id.orig_p", "integer" ] 60 | convert => [ "id.resp_p", "integer" ] 61 | rename => [ "id.orig_h", "id_orig_host" ] 62 | rename => [ "id.orig_p", "id_orig_port" ] 63 | rename => [ "id.resp_h", "id_resp_host" ] 64 | rename => [ "id.resp_p", "id_resp_port" ] 65 | rename => [ "remote_location.country_code", "remote_location_country_code" ] 66 | rename => [ "remote_location.region", "remote_location_region" ] 67 | rename => [ "remote_location.city", "remote_location_city" ] 68 | rename => [ "remote_location.latitude", "remote_location_latitude" ] 69 | rename => [ "remote_location.longitude", "remote_location_longitude" ] 70 | } 71 | } 72 | } 73 | 74 | output { 75 | # stdout { codec => rubydebug } 76 | elasticsearch { hosts => localhost } 77 | } 78 | -------------------------------------------------------------------------------- /conf_files/bro/bro-ssl_log.conf: -------------------------------------------------------------------------------- 1 | ######################## 2 | # logstash Configuration Files - Bro IDS Logs 3 | # Created by Knowm (http://www.knowm.org) 4 | # MIT License, so do what you want with it! 5 | # 6 | # For use with logstash, elasticsearch, and kibana to analyze logs 7 | # 8 | # Usage: Reference this config file for your instance of logstash to parse Bro ssl logs 9 | # 10 | # Limitations: Standard bro log delimiter is tab. 11 | # 12 | ####################### 13 | 14 | input { 15 | file { 16 | type => "bro-ssl_log" 17 | start_position => "end" 18 | sincedb_path => "/var/tmp/.bro_ssl_sincedb" 19 | 20 | #Edit the following path to reflect the location of your log files. 
You can also change the extension if you use something else 21 | path => "/nsm/bro/logs/current/ssl.log" 22 | } 23 | } 24 | 25 | filter { 26 | 27 | #Let's get rid of those header lines; they begin with a hash 28 | if [message] =~ /^#/ { 29 | drop { } 30 | } 31 | 32 | #Now, using the csv filter, we can define the Bro log fields 33 | if [type] == "bro-ssl_log" { 34 | csv { 35 | 36 | #ssl.log:#fields ts uid id.orig_h id.orig_p id.resp_h id.resp_p version cipher curve server_name session_id last_alert established cert_chain_fuids client_cert_chain_fuids subject issuer client_subject client_issuer validation_status 37 | columns => ["ts","uid","id.orig_h","id.orig_p","id.resp_h","id.resp_p","version","cipher","curve","server_name","session_id","last_alert","established","cert_chain_fuids","client_cert_chain_fuids","subject","issuer","client_subject","client_issuer","validation_status"] 38 | 39 | #If you use a custom delimiter, change the following value in between the quotes to your delimiter. Otherwise, leave the next line alone. 40 | separator => " " 41 | } 42 | 43 | #Let's convert our timestamp into the 'ts' field, so we can use Kibana features natively 44 | date { 45 | match => [ "ts", "UNIX" ] 46 | } 47 | 48 | # add geoip attributes 49 | geoip { 50 | source => "id.orig_h" 51 | target => "orig_geoip" 52 | } 53 | geoip { 54 | source => "id.resp_h" 55 | target => "resp_geoip" 56 | } 57 | 58 | mutate { 59 | convert => [ "id.orig_p", "integer" ] 60 | convert => [ "id.resp_p", "integer" ] 61 | rename => [ "id.orig_h", "id_orig_host" ] 62 | rename => [ "id.orig_p", "id_orig_port" ] 63 | rename => [ "id.resp_h", "id_resp_host" ] 64 | rename => [ "id.resp_p", "id_resp_port" ] 65 | } 66 | } 67 | } 68 | 69 | output { 70 | # stdout { codec => rubydebug } 71 | elasticsearch { hosts => localhost } 72 | } 73 | -------------------------------------------------------------------------------- /conf_files/bro/bro-weird_log.conf: -------------------------------------------------------------------------------- 1 | ######################## 2 | # logstash Configuration Files - Bro IDS Logs 3 | # Created by 505Forensics (http://www.505forensics.com) 4 | # MIT License, so do what you want with it! 5 | # 6 | # For use with logstash, elasticsearch, and kibana to analyze logs 7 | # 8 | # Usage: Reference this config file for your instance of logstash to parse Bro weird logs 9 | # 10 | # Limitations: Standard bro log delimiter is tab. 11 | # 12 | ####################### 13 | 14 | input { 15 | file { 16 | type => "bro-weird_log" 17 | start_position => "end" 18 | sincedb_path => "/var/tmp/.bro_weird_sincedb" 19 | 20 | #Edit the following path to reflect the location of your log files. You can also change the extension if you use something else 21 | path => "/nsm/bro/logs/current/weird.log" 22 | } 23 | } 24 | 25 | filter { 26 | 27 | #Let's get rid of those header lines; they begin with a hash 28 | if [message] =~ /^#/ { 29 | drop { } 30 | } 31 | 32 | #Now, using the csv filter, we can define the Bro log fields 33 | if [type] == "bro-weird_log" { 34 | csv { 35 | 36 | #weird.log:#fields ts uid id.orig_h id.orig_p id.resp_h id.resp_p name addl notice peer 37 | columns => ["ts","uid","id.orig_h","id.orig_p","id.resp_h","id.resp_p","name","addl","notice","peer"] 38 | 39 | #If you use a custom delimiter, change the following value in between the quotes to your delimiter. Otherwise, leave the next line alone. 
40 | separator => " " 41 | } 42 | 43 | #Let's convert our timestamp into the 'ts' field, so we can use Kibana features natively 44 | date { 45 | match => [ "ts", "UNIX" ] 46 | } 47 | 48 | # add geoip attributes 49 | geoip { 50 | source => "id.orig_h" 51 | target => "orig_geoip" 52 | } 53 | geoip { 54 | source => "id.resp_h" 55 | target => "resp_geoip" 56 | } 57 | 58 | mutate { 59 | convert => [ "id.orig_p", "integer" ] 60 | convert => [ "id.resp_p", "integer" ] 61 | rename => [ "id.orig_h", "id_orig_host" ] 62 | rename => [ "id.orig_p", "id_orig_port" ] 63 | rename => [ "id.resp_h", "id_resp_host" ] 64 | rename => [ "id.resp_p", "id_resp_port" ] 65 | } 66 | } 67 | } 68 | 69 | output { 70 | # stdout { codec => rubydebug } 71 | elasticsearch { hosts => localhost } 72 | } 73 | -------------------------------------------------------------------------------- /conf_files/bro/bro-x509_log.conf: -------------------------------------------------------------------------------- 1 | ######################## 2 | # logstash Configuration Files - Bro IDS Logs 3 | # Created by Knowm (http://www.knowm.org) 4 | # MIT License, so do what you want with it! 5 | # 6 | # For use with logstash, elasticsearch, and kibana to analyze logs 7 | # 8 | # Usage: Reference this config file for your instance of logstash to parse Bro x509 logs 9 | # 10 | # Limitations: Standard bro log delimiter is tab. 11 | # 12 | ####################### 13 | 14 | input { 15 | file { 16 | type => "bro-x509_log" 17 | start_position => "end" 18 | sincedb_path => "/var/tmp/.bro_x509_sincedb" 19 | 20 | #Edit the following path to reflect the location of your log files. You can also change the extension if you use something else 21 | path => "/nsm/bro/logs/current/x509.log" 22 | } 23 | } 24 | 25 | filter { 26 | 27 | #Let's get rid of those header lines; they begin with a hash 28 | if [message] =~ /^#/ { 29 | drop { } 30 | } 31 | 32 | #Now, using the csv filter, we can define the Bro log fields 33 | if [type] == "bro-x509_log" { 34 | csv { 35 | 36 | #x509.log:#fields ts id certificate.version certificate.serial certificate.subject certificate.issuer certificate.not_valid_before certificate.not_valid_after certificate.key_alg certificate.sig_alg certificate.key_type certificate.key_length certificate.exponent certificate.curve san.dns san.uri san.email san.ip basic_constraints.ca basic_constraints.path_len 37 | columns => ["ts","id","certificate.version","certificate.serial","certificate.subject","icertificate.issuer","certificate.not_valid_before","certificate.not_valid_after","certificate.key_alg","certificate.sig_alg","certificate.key_type","certificate.key_length","certificate.exponent","certificate.curve","san.dns","san.uri","san.email","san.ip","basic_constraints.ca","basic_constraints.path_len"] 38 | 39 | #If you use a custom delimiter, change the following value in between the quotes to your delimiter. Otherwise, leave the next line alone. 
40 | separator => " " 41 | } 42 | 43 | #Let's convert our timestamp into the 'ts' field, so we can use Kibana features natively 44 | date { 45 | match => [ "ts", "UNIX" ] 46 | 47 | } 48 | 49 | mutate { 50 | rename => [ "certificate.version", "certificate_version" ] 51 | rename => [ "certificate.serial", "certificate_serial" ] 52 | rename => [ "certificate.subject", "certificate_subject" ] 53 | rename => [ "icertificate.issuer", "icertificate_issuer" ] 54 | rename => [ "certificate.not_valid_before", "certificate_not_valid_before" ] 55 | rename => [ "certificate.not_valid_after", "certificate_not_valid_after" ] 56 | rename => [ "certificate.key_alg", "certificate_key_alg" ] 57 | rename => [ "certificate.sig_alg", "certificate_sig_alg" ] 58 | rename => [ "certificate.key_type", "certificate_key_type" ] 59 | rename => [ "certificate.key_length", "certificate_key_length" ] 60 | rename => [ "certificate.exponent", "certificate_exponent" ] 61 | rename => [ "certificate.curve", "certificate_curve" ] 62 | rename => [ "san.dns", "san_dns" ] 63 | rename => [ "san.uri", "san_uri" ] 64 | rename => [ "san.email", "san_email" ] 65 | rename => [ "san.ip", "san_ip" ] 66 | rename => [ "basic_constraints.ca", "basic_constraints_ca" ] 67 | rename => [ "basic_constraints.path_len", "basic_constraints_path_len" ] 68 | } 69 | } 70 | } 71 | 72 | output { 73 | #stdout { codec => rubydebug } 74 | elasticsearch { hosts => localhost } 75 | } 76 | -------------------------------------------------------------------------------- /conf_files/log2timeline/logstash-log2timeline.conf: -------------------------------------------------------------------------------- 1 | ######################## 2 | # logstash Configuration Files - Log2timeline Output Files 3 | # Created by 505Forensics (http://www.505forensics.com) 4 | # MIT License, so do what you want with it! 5 | # 6 | # For use with logstash, elasticsearch, and kibana to analyze logs 7 | # 8 | # Usage: Reference this config file for your instance of logstash to parse already-created log2timeline supertimelines 9 | # 10 | # Limitations: This file will parse raw text, and there must be a delimiter provided if not the default comma 11 | # 12 | ####################### 13 | 14 | input { 15 | file { 16 | type => "log2timeline-perl" 17 | start_position => "beginning" 18 | sincedb_path => "/dev/null" 19 | 20 | #Edit the following path to reflect the location of your timeline files. You can also change the extension if you use something else 21 | path => "/path/to/your/timelines/*.csv" 22 | } 23 | } 24 | 25 | filter { 26 | if [type] == "log2timeline-perl" { 27 | csv { 28 | columns => ["date","time","timezone","MACB","source","sourcetype","type","user","host","short","desc","version","filename","inode","notes","format","extra"] 29 | 30 | #If you use a custom delimiter, change the following value in between the quotes to your delimiter. Otherwise, leave the next line alone. 
31 | separator => "," 32 | } 33 | 34 | mutate { 35 | replace => [ "date" , "%{date} %{time}" ] 36 | } 37 | 38 | if [timezone] == "timezone" { 39 | drop { } 40 | } 41 | } 42 | } 43 | 44 | output { 45 | elasticsearch { 46 | embedded => true 47 | } 48 | } 49 | -------------------------------------------------------------------------------- /conf_files/web_logs/logstash-apache-combined.conf: -------------------------------------------------------------------------------- 1 | ######################## 2 | # logstash Configuration Files - Combined Apache Files 3 | # Created by 505Forensics (http://www.505forensics.com) 4 | # MIT License, so do what you want with it! 5 | # 6 | # For use with logstash, elasticsearch, and kibana to analyze logs 7 | # 8 | # Usage: Reference this config file for your instance of logstash to parse combined Apache log files 9 | # 10 | # Limitations: This file will parse raw text, not .gz log files. For .gz files, utilize a 'tcp' input, and zcat the files to netcat 11 | # 12 | ####################### 13 | 14 | input { 15 | 16 | # Sitting file input. Comment out or delete to remove this feature 17 | file { 18 | type => "apache-combined" 19 | start_position => "beginning" 20 | sincedb_path => "/dev/null" 21 | 22 | #Edit the following line to reflect the location of your .log files 23 | path => "/path/to/*.log_files" 24 | } 25 | 26 | # Receive files via local tcp port, either via netcat or other transfer methods. Comment out or delete to remove this feature 27 | tcp { 28 | type => "apache-combined" 29 | 30 | #Edit the following line to reflect your port of choice. Note that you should avoid ports 9200-9300, which are used by ElasticSearch 31 | port => 54321 32 | } 33 | } 34 | 35 | filter { 36 | if [type] == "apache-combined" { 37 | grok { 38 | match => { "message" => "%{COMBINEDAPACHELOG}" } 39 | } 40 | 41 | date { 42 | match => [ "timestamp", "dd/MMM/yyyy:HH:mm:ss Z" ] 43 | } 44 | } 45 | } 46 | 47 | output { 48 | elasticsearch { 49 | embedded => true 50 | } 51 | } 52 | -------------------------------------------------------------------------------- /conf_files/web_logs/logstash-apache-common.conf: -------------------------------------------------------------------------------- 1 | ######################## 2 | # logstash Configuration Files - Common Apache Files 3 | # Created by 505Forensics (http://www.505forensics.com) 4 | # MIT License, so do what you want with it! 5 | # 6 | # For use with logstash, elasticsearch, and kibana to analyze logs 7 | # 8 | # Usage: Reference this config file for your instance of logstash to parse common Apache log files 9 | # 10 | # Limitations: This file will parse raw text, not .gz log files. For .gz files, utilize a 'tcp' input, and zcat the files to netcat 11 | # 12 | # Thanks to @hiddenillusion for his help with the 'file' input, which allows parsing of sitting log files 13 | ####################### 14 | 15 | input { 16 | 17 | # Sitting file input. Comment out or delete to remove this feature 18 | file { 19 | type => "apache-common" 20 | start_position => "beginning" 21 | sincedb_path => "/dev/null" 22 | 23 | #Edit the following line to reflect the location of your .log files 24 | path => "/path/to/*.log_files" 25 | } 26 | 27 | # Receive files via local tcp port, either via netcat or other transfer methods. Comment out or delete to remove this feature 28 | tcp { 29 | type => "apache-common" 30 |
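# For example (an illustrative sketch, not part of the original file): with logstash running on this config, gzipped Apache logs can be streamed into this tcp input using netcat; the path below is hypothetical, so point zcat at wherever your rotated logs actually live:
#   zcat /path/to/old/access.log.*.gz | nc localhost 54321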
31 | #Edit the following line to reflect your port of choice. Note that you should avoid ports 9200-9300, which are used by ElasticSearch 32 | port => 54321 33 | } 34 | } 35 | 36 | filter { 37 | if [type] == "apache-common" { 38 | grok { 39 | match => { "message" => "%{COMMONAPACHELOG}" } 40 | } 41 | 42 | date { 43 | match => [ "timestamp", "dd/MMM/yyyy:HH:mm:ss Z" ] 44 | } 45 | } 46 | } 47 | 48 | output { 49 | elasticsearch { 50 | embedded => true 51 | } 52 | } 53 | -------------------------------------------------------------------------------- /conf_files/web_logs/logstash-iis6.conf: -------------------------------------------------------------------------------- 1 | ######################## 2 | # logstash Configuration Files - IIS6 Extended Log Files (Internet and Intranet) 3 | # Created by 505Forensics (http://www.505forensics.com) 4 | # MIT License, so do what you want with it! 5 | # 6 | # For use with logstash, elasticsearch, and kibana to analyze logs 7 | # 8 | # Usage: Reference this config file for your instance of logstash to parse IIS6 web log files. Note that IIS6 has different log formats based on intranet or internet; comment or remove what you don't need below 9 | # 10 | # Limitations: This file will parse raw text, and there must be a delimiter provided if not the default space 11 | # 12 | ####################### 13 | 14 | input { 15 | file { 16 | type => "iis6-intranet" 17 | start_position => "beginning" 18 | sincedb_path => "/dev/null" 19 | 20 | path => "/path/to/iis6-intranet-logs/*.log" 21 | } 22 | 23 | file { 24 | type => "iis6-internet" 25 | start_position => "beginning" 26 | sincedb_path => "/dev/null" 27 | 28 | path => "/path/to/iis6-internet-logs/*.log" 29 | } 30 | } 31 | 32 | filter { 33 | if ([message] =~ /^#/) { 34 | drop { } 35 | } 36 | 37 | if [type] == "iis6-intranet" { 38 | grok { 39 | match => [ "message", "%{TIMESTAMP_ISO8601:timestamp} %{IP:client_IP} %{WORD:username} %{IP:source_IP} %{NUMBER:port} %{WORD:method} %{URI:URI_Stem} %{GREEDYDATA:URL_Query} %{NUMBER:Status} %{GREEDYDATA:User-Agent}" ] 40 | } 41 | } 42 | 43 | if [type] == "iis6-internet" { 44 | grok { 45 | match => [ "message", "%{TIMESTAMP_ISO8601:timestamp} %{IP:client_IP} %{WORD:username} %{IP:source_IP} %{NUMBER:port} %{WORD:method} %{URI:URI_Stem} %{GREEDYDATA:URI_Query} %{NUMBER:Status} %{NUMBER:Server_Bytes} %{NUMBER:Client_Bytes} %{NUMBER:Time_Taken} %{GREEDYDATA:User-Agent} %{GREEDYDATA:Referrer}" ] 46 | } 47 | } 48 | } 49 | 50 | output { 51 | elasticsearch { 52 | embedded => true 53 | } 54 | } -------------------------------------------------------------------------------- /conf_files/web_logs/logstash-iis7.conf: -------------------------------------------------------------------------------- 1 | ######################## 2 | # logstash Configuration Files - IIS 7 Log Files 3 | # Created by 505Forensics (http://www.505forensics.com) 4 | # MIT License, so do what you want with it! 5 | # 6 | # For use with logstash, elasticsearch, and kibana to analyze logs 7 | # 8 | # Usage: Reference this config file for your instance of logstash to parse IIS 7 web log files. 9 | # 10 | # Limitations: This file will parse raw text.
11 | # 12 | ####################### 13 | 14 | input { 15 | file { 16 | type => "iis7" 17 | start_position => "beginning" 18 | sincedb_path => "/dev/null" 19 | 20 | path => "/path/to/iis7-logs/*.log" 21 | } 22 | } 23 | 24 | filter { 25 | # The following lines remove any commented fields 26 | if ([message] =~ /^#/) { 27 | drop { } 28 | } 29 | 30 | if [type] == "iis7" { 31 | grok { 32 | match => [ "message", "%{TIMESTAMP_ISO8601:timestamp} %{IP:source_IP} %{WORD:method} %{URI:URI_Stem} %{GREEDYDATA:URI_Query} %{NUMBER:port} %{WORD:username} %{IP:client_IP} %{GREEDYDATA:User-Agent} %{NUMBER:Status} %{NUMBER:Sub-Status} %{NUMBER:Win32_Status} %{NUMBER:Time_Taken}" ] 33 | } 34 | } 35 | } 36 | 37 | output { 38 | elasticsearch { 39 | embedded => true 40 | } 41 | } -------------------------------------------------------------------------------- /conf_files/web_logs/logstash-iis8.conf: -------------------------------------------------------------------------------- 1 | ######################## 2 | # logstash Configuration Files - IIS 8 Log Files 3 | # Created by 505Forensics (http://www.505forensics.com) 4 | # MIT License, so do what you want with it! 5 | # 6 | # For use with logstash, elasticsearch, and kibana to analyze logs 7 | # 8 | # Usage: Reference this config file for your instance of logstash to parse IIS 8 web log files. 9 | # 10 | # Limitations: This file will parse raw text. 11 | # 12 | ####################### 13 | 14 | input { 15 | file { 16 | type => "iis8" 17 | start_position => "beginning" 18 | sincedb_path => "/dev/null" 19 | 20 | path => "/path/to/iis8-logs/*.log" 21 | } 22 | } 23 | 24 | filter { 25 | # The following lines remove any commented fields 26 | if ([message] =~ /^#/) { 27 | drop { } 28 | } 29 | 30 | if [type] == "iis8" { 31 | grok { 32 | match => [ "message", "%{TIMESTAMP_ISO8601:timestamp} %{WORD:Server_SiteName} %{WORD:ComputerName} %{IP:Server_IP} %{WORD:method} %{URI:URI_Stem} %{GREEDYDATA:URI_Query} %{NUMBER:port} %{WORD:username} %{IP:Client_IP} %{WORD:Version} %{GREEDYDATA:User-Agent} %{GREEDYDATA:Cookie} %{WORD:Referer} %{WORD:Host} %{NUMBER:Status} %{NUMBER:Sub-Status} %{NUMBER:Win32_Status} %{NUMBER:Bytes_Received} %{NUMBER:Bytes_Sent} %{NUMBER:Time_Taken}" ] 33 | } 34 | } 35 | } 36 | 37 | output { 38 | elasticsearch { 39 | embedded => true 40 | } 41 | } -------------------------------------------------------------------------------- /dictionaries/logstash-bro-conn-log.dict: -------------------------------------------------------------------------------- 1 | ######################## 2 | # logstash Lookup Dictionaries - Bro IDS Conn Log Status Codes 3 | # Created by 505Forensics (http://www.505forensics.com) 4 | # MIT License, so do what you want with it! 5 | # 6 | # For use with logstash to translate Bro IDS conn log codes into text for analysis 7 | # 8 | # Usage: Insert this dictionary into your logstash configuration file; make sure to insert source and destination fields in the correct places 9 | # 10 | # Dependencies: Utilizing the logstash 'translate' filter requires having the logstash contrib plugins added, which are community supported and not part of the official release. Visit logstash.net to find out how to install these. 11 | # 12 | ####################### 13 | 14 | translate { 15 | #Insert the source field below between the quotes 16 | field => "" 17 | 18 | #Insert the destination field below between the quotes.
This is a new field, so it can be any name you want 19 | destination => "" 20 | 21 | dictionary => [ 22 | "S0", "Connection attempt seen, no reply", 23 | "S1", "Connection established, not terminated", 24 | "S2", "Connection established and close attempt by originator seen (but no reply from responder)", 25 | "S3", "Connection established and close attempt by responder seen (but no reply from originator)", 26 | "SF", "Normal SYN/FIN completion", 27 | "REJ", "Connection attempt rejected", 28 | "RSTO", "Connection established, originator aborted (sent a RST)", 29 | "RSTR", "Established, responder aborted", 30 | "RSTOS0", "Originator sent a SYN followed by a RST, we never saw a SYN-ACK from the responder", 31 | "RSTRH", "Responder sent a SYN ACK followed by a RST, we never saw a SYN from the (purported) originator", 32 | "SH", "Originator sent a SYN followed by a FIN, we never saw a SYN ACK from the responder (hence the connection was 'half' open)", 33 | "SHR", "Responder sent a SYN ACK followed by a FIN, we never saw a SYN from the originator", 34 | "OTH", "No SYN seen, just midstream traffic (a 'partial connection' that was not later closed)" 35 | ] 36 | } -------------------------------------------------------------------------------- /dictionaries/logstash-ftp-status-codes.dict: -------------------------------------------------------------------------------- 1 | ######################## 2 | # logstash Lookup Dictionaries - FTP Status Codes 3 | # Created by 505Forensics (http://www.505forensics.com) 4 | # MIT License, so do what you want with it! 5 | # 6 | # For use with logstash to translate FTP status codes into text for analysis 7 | # 8 | # Usage: Insert this dictionary into your logstash configuration file; make sure to insert source and destination fields in the correct places 9 | # 10 | # Dependencies: Utilizing the logstash 'translate' filter requires having the logstash contrib plugins added, which are community supported and not part of the official release. Visit logstash.net to find out how to install these. 11 | # 12 | # Also note that the dictionary entries below are integers (numbers are not encapsulated with quotes). Therefore, the type must be converted to integer within Logstash/Elasticsearch. 13 | # 14 | ####################### 15 | 16 | translate { 17 | #Insert the source field below between the quotes 18 | field => "" 19 | 20 | #Insert the destination field below between the quotes. 
This is a new field, so it can be any name you want 21 | destination => "" 22 | 23 | dictionary => [ 24 | #1xx Codes are Preliminary Positive Replies 25 | 110, "Restart marker reply", 26 | 120, "Service ready in n minutes", 27 | 125, "Data connection is already open", 28 | 150, "File status is OK; opening data connection", 29 | 30 | #2xx Codes are Positive Completion Replies 31 | 202, "Command not implemented; superfluous at this site", 32 | 211, "System status or system help reply", 33 | 212, "Directory status", 34 | 213, "File status", 35 | 214, "Help message", 36 | 215, "NAME system type", 37 | 220, "Service is ready for new user", 38 | 221, "Service closing control connection", 39 | 225, "Data connection open; no transfer in progress", 40 | 226, "Closing data connection; request successful", 41 | 227, "Entering passive mode", 42 | 228, "Entering Long Passive Mode", 43 | 229, "Entering Extended Passive Mode", 44 | 230, "User is logged in", 45 | 231, "User is logged out", 46 | 232, "Logout will occur when transfer is complete", 47 | 250, "Requested file action OK, completed", 48 | 257, "Path created", 49 | 50 | #3xx Codes are Positive Intermediate Replies 51 | 331, "User name OK; need password", 52 | 332, "Login account required", 53 | 350, "Requested action pending further information", 54 | 55 | #4xx Codes are Transient Negative Completion Replies 56 | 421, "Service not available, closing connection", 57 | 425, "Can't open connection", 58 | 426, "Connection closed; transfer aborted", 59 | 430, "Invalid username or password", 60 | 434, "Requested host is unavailable", 61 | 450, "Requested file action not performed", 62 | 451, "Requested action aborted; local processing error", 63 | 452, "Requested action aborted; insufficient storage space", 64 | 65 | #5xx Codes are Permanent Negative Completion Replies 66 | 501, "Syntax error in parameters or arguments", 67 | 502, "Command not implemented", 68 | 503, "Bad sequence of commands", 69 | 504, "Command not implemented for that parameter", 70 | 530, "User not logged in", 71 | 532, "Need account for storing files", 72 | 550, "Requested action aborted; file unavailable", 73 | 551, "Requested action aborted; page type unknown", 74 | 552, "Requested action aborted; exceeded storage allocation", 75 | 553, "Requested action aborted; file name not allowed", 76 | 77 | #6xx Codes are Protected Replies. These are typically base64 encoded 78 | 631, "Integrity protected reply", 79 | 632, "Confidentiality and integrity protected reply", 80 | 633, "Confidentiality protected reply", 81 | 82 | #100xx Codes are Winsock Error Codes 83 | 10054, "Connection reset by peer; forcibly closed by remote host", 84 | 10060, "Cannot connect to remote server", 85 | 10061, "Cannot connect to remote server; connection is actively refused", 86 | 10066, "Directory not empty", 87 | 10068, "Too many users; server is full" 88 | ] 89 | } 90 | -------------------------------------------------------------------------------- /dictionaries/logstash-http-status-codes.dict: -------------------------------------------------------------------------------- 1 | ######################## 2 | # logstash Lookup Dictionaries - HTTP Status Codes 3 | # Created by 505Forensics (http://www.505forensics.com) 4 | # MIT License, so do what you want with it!
5 | # 6 | # For use with logstash to translate HTTP status codes into text for analysis 7 | # 8 | # Usage: Insert this dictionary into your logstash configuration file; make sure to insert source and destination fields in the correct places 9 | # 10 | # Dependencies: Utilizing the logstash 'translate' filter requires having the logstash contrib plugins added, which are community supported and not part of the official release. Visit logstash.net to find out how to install these. 11 | # 12 | # Also note that the dictionary entries below are integers (numbers are not encapsulated with quotes). Therefore, the type must be converted to integer within Logstash/Elasticsearch. 13 | # 14 | ####################### 15 | 16 | translate { 17 | #Insert the source field below between the quotes 18 | field => "" 19 | 20 | #Insert the destination field below between the quotes. This is a new field, so it can be any name you want 21 | destination => "" 22 | 23 | dictionary => [ 24 | #1xx Codes Are Informational 25 | 100, "Continue", 26 | 101, "Switching Protocols", 27 | 102, "Processing", 28 | 29 | #2xx Codes Are Successful; We Like These 30 | 200, "OK", 31 | 201, "Created", 32 | 202, "Accepted", 33 | 203, "Non-Authoritative Information", 34 | 204, "No Content", 35 | 205, "Reset Content", 36 | 206, "Partial Content", 37 | 207, "Multi-Status", 38 | 208, "Already Reported", 39 | 226, "Instance Manipulation Used", 40 | 41 | #3xx Codes Indicate that Further User Agent Action May be Needed 42 | 300, "Multiple Choices", 43 | 301, "Moved Permanently", 44 | 302, "Found", 45 | 303, "See Other", 46 | 304, "Not Modified", 47 | 305, "Use Proxy", 48 | #306 is no longer used, but it is still reserved 49 | 307, "Temporary Redirect", 50 | 308, "Permanent Redirect", 51 | 52 | #4xx Codes Refer to Client Errors 53 | 400, "Bad Request", 54 | 401, "Unauthorized", 55 | 402, "Payment Required", 56 | 403, "Forbidden", 57 | 404, "Not Found", 58 | 405, "Method Not Allowed", 59 | 406, "Not Acceptable", 60 | 407, "Proxy Authentication Required", 61 | 408, "Request Timeout", 62 | 409, "Conflict", 63 | 410, "Gone", 64 | 411, "Length Required", 65 | 412, "Precondition Failed", 66 | 413, "Request Entity Too Large", 67 | 414, "Request URI Too Long", 68 | 415, "Unsupported Media Type", 69 | 416, "Requested Range Not Satisfiable", 70 | 417, "Expectation Failed", 71 | 422, "Unprocessable Entity", 72 | 423, "Locked", 73 | 424, "Failed Dependency", 74 | 426, "Upgrade Required", 75 | 428, "Precondition Required", 76 | 429, "Too Many Requests", 77 | 431, "Request Header Fields Too Large", 78 | 440, "Login Timeout", 79 | 444, "No Response (This is specific to Nginx)", 80 | 450, "Blocked by Microsoft Windows Parental Controls", 81 | 82 | #5xx Codes Refer to Server Errors 83 | 500, "Internal Server Error", 84 | 501, "Not Implemented", 85 | 502, "Bad Gateway", 86 | 503, "Service Unavailable", 87 | 504, "Gateway Timeout", 88 | 505, "HTTP Version Not Supported", 89 | 506, "Variant Also Negotiates", 90 | 507, "Insufficient Storage", 91 | 508, "Loop Detected", 92 | 510, "Not Extended", 93 | 511, "Network Authentication Required" 94 | ] 95 | } 96 | -------------------------------------------------------------------------------- /grok_patterns/bro-ids: -------------------------------------------------------------------------------- 1 | ######################## 2 | # logstash Grok Pattern File - Bro IDS Logs 3 | # Created by 505Forensics (http://www.505forensics.com) 4 | # MIT License, so do what you want with it!
5 | # 6 | # For use with logstash to wrangle Bro IDS logs 7 | # 8 | # Usage: Reference this Grok Pattern File in your logstash config when importing Bro IDS logs 9 | # 10 | # Dependencies: Standard Bro log delimiter is tab. 11 | # 12 | ####################### 13 | 14 | #Bro Conn Logs 15 | BRO-CONN %{DATA:ts}\t%{DATA:uid}\t%{IP:id.orig_h}\t%{NUMBER:id.orig_p:int}\t%{IP:id.resp_h}\t%{NUMBER:id.resp_p:int}\t%{DATA:proto}\t%{DATA:service}\t%{DATA:duration}\t%{NUMBER:orig_bytes:int}\t%{NUMBER:resp_bytes:int}\t%{DATA:conn_state}\t%{DATA:local_orig}\t%{NUMBER:missed_bytes:int}\t%{DATA:history}\t%{NUMBER:orig_pkts:int}\t%{NUMBER:orig_ip_bytes:int}\t%{NUMBER:resp_pkts:int}\t%{NUMBER:resp_ip_bytes:int}\t%{DATA:tunnel_parents} -------------------------------------------------------------------------------- /type_mappings/log2timeline.type: -------------------------------------------------------------------------------- 1 | { 2 | "log2timeline": { 3 | "_timestamp": { 4 | "enabled": "true", 5 | "path": "timestamp", 6 | "default": "null" 7 | }, 8 | "_source": { 9 | "enabled": true 10 | }, 11 | "properties": { 12 | "timestamp": { 13 | "type": "date", 14 | "format" : "dateOptionalTime" 15 | }, 16 | "timezone": { 17 | "type": "string" 18 | }, 19 | "MACB": { 20 | "type": "string" 21 | }, 22 | "source": { 23 | "type": "string", 24 | "index": "not_analyzed" 25 | }, 26 | "sourcetype": { 27 | "type": "string", 28 | "index": "not_analyzed" 29 | }, 30 | "type": { 31 | "type": "string" 32 | }, 33 | "user": { 34 | "type": "string", 35 | "index": "not_analyzed" 36 | }, 37 | "host": { 38 | "type": "string", 39 | "index": "not_analyzed" 40 | }, 41 | "short": { 42 | "type": "string", 43 | "index": "not_analyzed" 44 | }, 45 | "desc": { 46 | "type": "string", 47 | "index": "not_analyzed" 48 | }, 49 | "version": { 50 | "type": "string" 51 | }, 52 | "filename": { 53 | "type": "string", 54 | "index": "not_analyzed" 55 | }, 56 | "inode": { 57 | "type": "long" 58 | }, 59 | "notes": { 60 | "type": "string", 61 | "store" : "no" 62 | }, 63 | "format": { 64 | "type": "string" 65 | }, 66 | "extra": { 67 | "type": "string", 68 | "store" : "no" 69 | } 70 | } 71 | } 72 | } -------------------------------------------------------------------------------- /type_mappings/mhn-hpfeed.type: -------------------------------------------------------------------------------- 1 | { 2 | "hpfeed": { 3 | "properties": { 4 | "channel": { 5 | "type": "string" 6 | }, 7 | "ident": { 8 | "type": "string" 9 | }, 10 | "last_error": { 11 | "type": "string" 12 | }, 13 | "last_error_timestamp": { 14 | "type": "date", 15 | "format": "dateOptionalTime" 16 | }, 17 | "normalized": { 18 | "type": "boolean" 19 | }, 20 | "payload": { 21 | "properties": { 22 | "commands": { 23 | "type": "string" 24 | }, 25 | "credentials": { 26 | "type": "string" 27 | }, 28 | "endTime": { 29 | "type": "date", 30 | "format": "dateOptionalTime" 31 | }, 32 | "hostIP": { 33 | "type": "ip", 34 | "null_value": "null" 35 | }, 36 | "hostPort": { 37 | "type": "long" 38 | }, 39 | "loggedin": { 40 | "type": "string" 41 | }, 42 | "peerIP": { 43 | "type": "ip", 44 | "null_value": "null" 45 | }, 46 | "peerPort": { 47 | "type": "long" 48 | }, 49 | "session": { 50 | "type": "string" 51 | }, 52 | "startTime": { 53 | "type": "date", 54 | "format": "dateOptionalTime" 55 | }, 56 | "ttylog": { 57 | "type": "string" 58 | }, 59 | "unknownCommands": { 60 | "type": "string" 61 | }, 62 | "version": { 63 | "type": "string" 64 | } 65 | } 66 | }, 67 | "timestamp": { 68 | "type": "date", 69 | "format": 
"dateOptionalTime" 70 | } 71 | } 72 | } 73 | } --------------------------------------------------------------------------------