├── .travis.yml ├── .gitignore ├── patterns ├── legacy │ ├── mcollective │ ├── maven │ ├── postgresql │ ├── ruby │ ├── mcollective-patterns │ ├── redis │ ├── bind │ ├── squid │ ├── mongodb │ ├── rails │ ├── httpd │ ├── linux-syslog │ ├── junos │ ├── java │ ├── exim │ ├── aws │ ├── bro │ ├── haproxy │ ├── bacula │ ├── grok-patterns │ ├── nagios │ └── firewalls └── ecs-v1 │ ├── maven │ ├── postgresql │ ├── ruby │ ├── mcollective │ ├── redis │ ├── squid │ ├── mongodb │ ├── bind │ ├── rails │ ├── linux-syslog │ ├── httpd │ ├── junos │ ├── exim │ ├── java │ ├── haproxy │ ├── bro │ ├── aws │ ├── zeek │ ├── grok-patterns │ ├── bacula │ └── nagios ├── Rakefile ├── NOTICE.TXT ├── .github ├── PULL_REQUEST_TEMPLATE.md ├── ISSUE_TEMPLATE.md └── CONTRIBUTING.md ├── Gemfile ├── lib └── logstash │ └── patterns │ └── core.rb ├── spec ├── patterns │ ├── mcollective_spec.rb │ ├── maven_spec.rb │ ├── rails3_spec.rb │ ├── bind_spec.rb │ ├── shorewall_spec.rb │ ├── junos_spec.rb │ ├── netscreen_spec.rb │ ├── squid_spec.rb │ ├── mongodb_spec.rb │ ├── redis_spec.rb │ ├── exim_spec.rb │ └── bacula_spec.rb └── spec_helper.rb ├── logstash-patterns-core.gemspec ├── CONTRIBUTORS ├── README.md ├── CHANGELOG.md └── LICENSE /.travis.yml: -------------------------------------------------------------------------------- 1 | import: 2 | - logstash-plugins/.ci:travis/travis.yml@1.x -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | *.gem 2 | Gemfile.lock 3 | .bundle 4 | /.buildpath 5 | /.project 6 | -------------------------------------------------------------------------------- /patterns/legacy/mcollective: -------------------------------------------------------------------------------- 1 | MCOLLECTIVEAUDIT %{TIMESTAMP_ISO8601:timestamp}: 2 | -------------------------------------------------------------------------------- /patterns/ecs-v1/maven: 
-------------------------------------------------------------------------------- 1 | MAVEN_VERSION (?:(\d+)\.)?(?:(\d+)\.)?(\*|\d+)(?:[.-](RELEASE|SNAPSHOT))? 2 | -------------------------------------------------------------------------------- /patterns/legacy/maven: -------------------------------------------------------------------------------- 1 | MAVEN_VERSION (?:(\d+)\.)?(?:(\d+)\.)?(\*|\d+)(?:[.-](RELEASE|SNAPSHOT))? 2 | -------------------------------------------------------------------------------- /Rakefile: -------------------------------------------------------------------------------- 1 | @files=[] 2 | 3 | task :default do 4 | system("rake -T") 5 | end 6 | 7 | require "logstash/devutils/rake" 8 | -------------------------------------------------------------------------------- /NOTICE.TXT: -------------------------------------------------------------------------------- 1 | Elasticsearch 2 | Copyright 2012-2015 Elasticsearch 3 | 4 | This product includes software developed by The Apache Software 5 | Foundation (http://www.apache.org/). -------------------------------------------------------------------------------- /.github/PULL_REQUEST_TEMPLATE.md: -------------------------------------------------------------------------------- 1 | Thanks for contributing to Logstash! 
If you haven't already signed our CLA, here's a handy link: https://www.elastic.co/contributor-agreement/ 2 | -------------------------------------------------------------------------------- /patterns/legacy/postgresql: -------------------------------------------------------------------------------- 1 | # Default postgresql pg_log format pattern 2 | POSTGRESQL %{DATESTAMP:timestamp} %{TZ} %{DATA:user_id} %{GREEDYDATA:connection_id} %{POSINT:pid} 3 | 4 | -------------------------------------------------------------------------------- /patterns/legacy/ruby: -------------------------------------------------------------------------------- 1 | RUBY_LOGLEVEL (?:DEBUG|FATAL|ERROR|WARN|INFO) 2 | RUBY_LOGGER [DFEWI], \[%{TIMESTAMP_ISO8601:timestamp} #%{POSINT:pid}\] *%{RUBY_LOGLEVEL:loglevel} -- +%{DATA:progname}: %{GREEDYDATA:message} 3 | -------------------------------------------------------------------------------- /patterns/ecs-v1/postgresql: -------------------------------------------------------------------------------- 1 | # Default postgresql pg_log format pattern 2 | POSTGRESQL %{DATESTAMP:timestamp} %{TZ:[event][timezone]} %{DATA:[user][name]} %{GREEDYDATA:[postgresql][log][connection_id]} %{POSINT:[process][pid]:int} 3 | -------------------------------------------------------------------------------- /patterns/ecs-v1/ruby: -------------------------------------------------------------------------------- 1 | RUBY_LOGLEVEL (?:DEBUG|FATAL|ERROR|WARN|INFO) 2 | RUBY_LOGGER [DFEWI], \[%{TIMESTAMP_ISO8601:timestamp} #%{POSINT:[process][pid]:int}\] *%{RUBY_LOGLEVEL:[log][level]} -- +%{DATA:[process][name]}: %{GREEDYDATA:message} 3 | -------------------------------------------------------------------------------- /patterns/legacy/mcollective-patterns: -------------------------------------------------------------------------------- 1 | # Remember, these can be multi-line events. 
2 | MCOLLECTIVE ., \[%{TIMESTAMP_ISO8601:timestamp} #%{POSINT:pid}\]%{SPACE}%{LOGLEVEL:event_level} 3 | 4 | MCOLLECTIVEAUDIT %{TIMESTAMP_ISO8601:timestamp}: 5 | -------------------------------------------------------------------------------- /patterns/ecs-v1/mcollective: -------------------------------------------------------------------------------- 1 | # Remember, these can be multi-line events. 2 | MCOLLECTIVE ., \[%{TIMESTAMP_ISO8601:timestamp} #%{POSINT:[process][pid]:int}\]%{SPACE}%{LOGLEVEL:[log][level]} 3 | 4 | MCOLLECTIVEAUDIT %{TIMESTAMP_ISO8601:timestamp}: 5 | -------------------------------------------------------------------------------- /patterns/legacy/redis: -------------------------------------------------------------------------------- 1 | REDISTIMESTAMP %{MONTHDAY} %{MONTH} %{TIME} 2 | REDISLOG \[%{POSINT:pid}\] %{REDISTIMESTAMP:timestamp} \* 3 | REDISMONLOG %{NUMBER:timestamp} \[%{INT:database} %{IP:client}:%{NUMBER:port}\] "%{WORD:command}"\s?%{GREEDYDATA:params} 4 | -------------------------------------------------------------------------------- /patterns/legacy/bind: -------------------------------------------------------------------------------- 1 | BIND9_TIMESTAMP %{MONTHDAY}[-]%{MONTH}[-]%{YEAR} %{TIME} 2 | 3 | BIND9 %{BIND9_TIMESTAMP:timestamp} queries: %{LOGLEVEL:loglevel}: client(:? @0x(?:[0-9A-Fa-f]+))? 
%{IP:clientip}#%{POSINT:clientport} \(%{GREEDYDATA:query}\): query: %{GREEDYDATA:query} IN %{GREEDYDATA:querytype} \(%{IP:dns}\) 4 | -------------------------------------------------------------------------------- /patterns/ecs-v1/redis: -------------------------------------------------------------------------------- 1 | REDISTIMESTAMP %{MONTHDAY} %{MONTH} %{TIME} 2 | REDISLOG \[%{POSINT:[process][pid]:int}\] %{REDISTIMESTAMP:timestamp} \* 3 | REDISMONLOG %{NUMBER:timestamp} \[%{INT:[redis][database][id]} %{IP:[client][ip]}:%{POSINT:[client][port]:int}\] "%{WORD:[redis][command][name]}"\s?%{GREEDYDATA:[redis][command][args]} 4 | -------------------------------------------------------------------------------- /patterns/legacy/squid: -------------------------------------------------------------------------------- 1 | # Pattern squid3 2 | # Documentation of squid3 logs formats can be found at the following link: 3 | # http://wiki.squid-cache.org/Features/LogFormat 4 | SQUID3 %{NUMBER:timestamp}\s+%{NUMBER:duration}\s%{IP:client_address}\s%{WORD:cache_result}/%{NONNEGINT:status_code}\s%{NUMBER:bytes}\s%{WORD:request_method}\s%{NOTSPACE:url}\s(%{NOTSPACE:user}|-)\s%{WORD:hierarchy_code}/(%{IPORHOST:server}|-)\s%{NOTSPACE:content_type} 5 | -------------------------------------------------------------------------------- /Gemfile: -------------------------------------------------------------------------------- 1 | source 'https://rubygems.org' 2 | 3 | gemspec 4 | 5 | logstash_path = ENV["LOGSTASH_PATH"] || "../../logstash" 6 | use_logstash_source = ENV["LOGSTASH_SOURCE"] && ENV["LOGSTASH_SOURCE"].to_s == "1" 7 | 8 | if Dir.exist?(logstash_path) && use_logstash_source 9 | gem 'logstash-core', :path => "#{logstash_path}/logstash-core" 10 | gem 'logstash-core-plugin-api', :path => "#{logstash_path}/logstash-core-plugin-api" 11 | end 12 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE.md: 
-------------------------------------------------------------------------------- 1 | Please post all product and debugging questions on our [forum](https://discuss.elastic.co/c/logstash). Your questions will reach our wider community members there, and if we confirm that there is a bug, then we can open a new issue here. 2 | 3 | For all general issues, please provide the following details for fast resolution: 4 | 5 | - Version: 6 | - Operating System: 7 | - Config File (if you have sensitive info, please remove it): 8 | - Sample Data: 9 | - Steps to Reproduce: 10 | -------------------------------------------------------------------------------- /lib/logstash/patterns/core.rb: -------------------------------------------------------------------------------- 1 | module LogStash 2 | module Patterns 3 | module Core 4 | extend self 5 | 6 | BASE_PATH = ::File.expand_path('../../../patterns', ::File.dirname(__FILE__)) 7 | private_constant :BASE_PATH 8 | 9 | def path(type = 'legacy') 10 | case type = type.to_s 11 | when 'legacy', 'ecs-v1' 12 | ::File.join(BASE_PATH, type) 13 | else 14 | raise ArgumentError, "#{type.inspect} path not supported" 15 | end 16 | end 17 | 18 | end 19 | end 20 | end 21 | -------------------------------------------------------------------------------- /patterns/legacy/mongodb: -------------------------------------------------------------------------------- 1 | MONGO_LOG %{SYSLOGTIMESTAMP:timestamp} \[%{WORD:component}\] %{GREEDYDATA:message} 2 | MONGO_QUERY \{ (?<={ ).*(?= } ntoreturn:) \} 3 | MONGO_SLOWQUERY %{WORD} %{MONGO_WORDDASH:database}\.%{MONGO_WORDDASH:collection} %{WORD}: %{MONGO_QUERY:query} %{WORD}:%{NONNEGINT:ntoreturn} %{WORD}:%{NONNEGINT:ntoskip} %{WORD}:%{NONNEGINT:nscanned}.*nreturned:%{NONNEGINT:nreturned}..+ (?[0-9]+)ms 4 | MONGO_WORDDASH \b[\w-]+\b 5 | MONGO3_SEVERITY \w 6 | MONGO3_COMPONENT %{WORD}|- 7 | MONGO3_LOG %{TIMESTAMP_ISO8601:timestamp} %{MONGO3_SEVERITY:severity} 
%{MONGO3_COMPONENT:component}%{SPACE}(?:\[%{DATA:context}\])? %{GREEDYDATA:message} 8 | -------------------------------------------------------------------------------- /patterns/ecs-v1/squid: -------------------------------------------------------------------------------- 1 | # Pattern squid3 2 | # Documentation of squid3 logs formats can be found at the following link: 3 | # http://wiki.squid-cache.org/Features/LogFormat 4 | SQUID3_STATUS (?:%{POSINT:[http][response][status_code]:int}|0|000) 5 | SQUID3 %{NUMBER:timestamp}\s+%{NUMBER:[squid][request][duration]:int}\s%{IP:[source][ip]}\s%{WORD:[event][action]}/%{SQUID3_STATUS}\s%{INT:[http][response][bytes]:int}\s%{WORD:[http][request][method]}\s%{NOTSPACE:[url][original]}\s(?:-|%{NOTSPACE:[user][name]})\s%{WORD:[squid][hierarchy_code]}/(?:-|%{IPORHOST:[destination][address]})\s(?:-|%{NOTSPACE:[http][response][mime_type]}) 6 | # :long - %{INT:[http][response][bytes]:int} 7 | -------------------------------------------------------------------------------- /patterns/ecs-v1/mongodb: -------------------------------------------------------------------------------- 1 | MONGO_LOG %{SYSLOGTIMESTAMP:timestamp} \[%{WORD:[mongodb][component]}\] %{GREEDYDATA:message} 2 | MONGO_QUERY \{ (?<={ ).*(?= } ntoreturn:) \} 3 | MONGO_SLOWQUERY %{WORD:[mongodb][profile][op]} %{MONGO_WORDDASH:[mongodb][database]}\.%{MONGO_WORDDASH:[mongodb][collection]} %{WORD}: %{MONGO_QUERY:[mongodb][query][original]} ntoreturn:%{NONNEGINT:[mongodb][profile][ntoreturn]:int} ntoskip:%{NONNEGINT:[mongodb][profile][ntoskip]:int} nscanned:%{NONNEGINT:[mongodb][profile][nscanned]:int}.*? nreturned:%{NONNEGINT:[mongodb][profile][nreturned]:int}.*? %{INT:[mongodb][profile][duration]:int}ms 4 | MONGO_WORDDASH \b[\w-]+\b 5 | MONGO3_SEVERITY \w 6 | MONGO3_COMPONENT %{WORD} 7 | MONGO3_LOG %{TIMESTAMP_ISO8601:timestamp} %{MONGO3_SEVERITY:[log][level]} (?:-|%{MONGO3_COMPONENT:[mongodb][component]})%{SPACE}(?:\[%{DATA:[mongodb][context]}\])? 
%{GREEDYDATA:message} 8 | -------------------------------------------------------------------------------- /patterns/legacy/rails: -------------------------------------------------------------------------------- 1 | RUUID \h{32} 2 | # rails controller with action 3 | RCONTROLLER (?[^#]+)#(?\w+) 4 | 5 | # this will often be the only line: 6 | RAILS3HEAD (?m)Started %{WORD:verb} "%{URIPATHPARAM:request}" for %{IPORHOST:clientip} at (?%{YEAR}-%{MONTHNUM}-%{MONTHDAY} %{HOUR}:%{MINUTE}:%{SECOND} %{ISO8601_TIMEZONE}) 7 | # for some a strange reason, params are stripped of {} - not sure that's a good idea. 8 | RPROCESSING \W*Processing by %{RCONTROLLER} as (?\S+)(?:\W*Parameters: {%{DATA:params}}\W*)? 9 | RAILS3FOOT Completed %{NUMBER:response}%{DATA} in %{NUMBER:totalms}ms %{RAILS3PROFILE}%{GREEDYDATA} 10 | RAILS3PROFILE (?:\(Views: %{NUMBER:viewms}ms \| ActiveRecord: %{NUMBER:activerecordms}ms|\(ActiveRecord: %{NUMBER:activerecordms}ms)? 11 | 12 | # putting it all together 13 | RAILS3 %{RAILS3HEAD}(?:%{RPROCESSING})?(?(?:%{DATA}\n)*)(?:%{RAILS3FOOT})? 14 | -------------------------------------------------------------------------------- /patterns/ecs-v1/bind: -------------------------------------------------------------------------------- 1 | BIND9_TIMESTAMP %{MONTHDAY}[-]%{MONTH}[-]%{YEAR} %{TIME} 2 | 3 | BIND9_DNSTYPE (?:A|AAAA|CAA|CDNSKEY|CDS|CERT|CNAME|CSYNC|DLV|DNAME|DNSKEY|DS|HINFO|LOC|MX|NAPTR|NS|NSEC|NSEC3|OPENPGPKEY|PTR|RRSIG|RP|SIG|SMIMEA|SOA|SRV|TSIG|TXT|URI) 4 | BIND9_CATEGORY (?:queries) 5 | 6 | # dns.question.class is static - only 'IN' is supported by Bind9 7 | # bind.log.question.name is expected to be a 'duplicate' (same as the dns.question.name capture) 8 | BIND9_QUERYLOGBASE client(:? @0x(?:[0-9A-Fa-f]+))? %{IP:[client][ip]}#%{POSINT:[client][port]:int} \(%{GREEDYDATA:[bind][log][question][name]}\): query: %{GREEDYDATA:[dns][question][name]} (?<[dns][question][class]>IN) %{BIND9_DNSTYPE:[dns][question][type]}(:? %{DATA:[bind][log][question][flags]})? 
\(%{IP:[server][ip]}\) 9 | 10 | # for query-logging category and severity are always fixed as "queries: info: " 11 | BIND9_QUERYLOG %{BIND9_TIMESTAMP:timestamp} %{BIND9_CATEGORY:[bind][log][category]}: %{LOGLEVEL:[log][level]}: %{BIND9_QUERYLOGBASE} 12 | 13 | BIND9 %{BIND9_QUERYLOG} 14 | -------------------------------------------------------------------------------- /spec/patterns/mcollective_spec.rb: -------------------------------------------------------------------------------- 1 | # encoding: utf-8 2 | require "spec_helper" 3 | require "logstash/patterns/core" 4 | 5 | describe_pattern "MCOLLECTIVE", ['legacy', 'ecs-v1'] do 6 | 7 | let(:message) { "I, [2010-12-29T11:15:32.321744 #11479] INFO -- : mcollectived:33 The Marionette Collective 1.1.0 started logging at info level" } 8 | 9 | it do 10 | should include("timestamp" => "2010-12-29T11:15:32.321744") 11 | end 12 | 13 | it do 14 | if ecs_compatibility? 15 | should include("process" => { "pid" => 11479 }) 16 | else 17 | should include("pid" => "11479") 18 | end 19 | end 20 | 21 | it do 22 | if ecs_compatibility? 
23 | should include("log" => hash_including("level" => "INFO")) 24 | else 25 | should include("event_level" => "INFO") 26 | end 27 | end 28 | 29 | # NOTE: pattern seems unfinished - missing match of remaining message 30 | it 'should have extracted message' do 31 | # but did not : 32 | expect( subject['message'] ).to eql message 33 | end 34 | 35 | end 36 | -------------------------------------------------------------------------------- /patterns/legacy/httpd: -------------------------------------------------------------------------------- 1 | HTTPDUSER %{EMAILADDRESS}|%{USER} 2 | HTTPDERROR_DATE %{DAY} %{MONTH} %{MONTHDAY} %{TIME} %{YEAR} 3 | 4 | # Log formats 5 | HTTPD_COMMONLOG %{IPORHOST:clientip} %{HTTPDUSER:ident} %{HTTPDUSER:auth} \[%{HTTPDATE:timestamp}\] "(?:%{WORD:verb} %{NOTSPACE:request}(?: HTTP/%{NUMBER:httpversion})?|%{DATA:rawrequest})" (?:-|%{NUMBER:response}) (?:-|%{NUMBER:bytes}) 6 | HTTPD_COMBINEDLOG %{HTTPD_COMMONLOG} %{QS:referrer} %{QS:agent} 7 | 8 | # Error logs 9 | HTTPD20_ERRORLOG \[%{HTTPDERROR_DATE:timestamp}\] \[%{LOGLEVEL:loglevel}\] (?:\[client %{IPORHOST:clientip}\] ){0,1}%{GREEDYDATA:message} 10 | HTTPD24_ERRORLOG \[%{HTTPDERROR_DATE:timestamp}\] \[(?:%{WORD:module})?:%{LOGLEVEL:loglevel}\] \[pid %{POSINT:pid}(:tid %{NUMBER:tid})?\]( \(%{POSINT:proxy_errorcode}\)%{DATA:proxy_message}:)?( \[client %{IPORHOST:clientip}:%{POSINT:clientport}\])?( %{DATA:errorcode}:)? 
%{GREEDYDATA:message} 11 | HTTPD_ERRORLOG %{HTTPD20_ERRORLOG}|%{HTTPD24_ERRORLOG} 12 | 13 | # Deprecated 14 | COMMONAPACHELOG %{HTTPD_COMMONLOG} 15 | COMBINEDAPACHELOG %{HTTPD_COMBINEDLOG} 16 | -------------------------------------------------------------------------------- /patterns/legacy/linux-syslog: -------------------------------------------------------------------------------- 1 | SYSLOG5424PRINTASCII [!-~]+ 2 | 3 | SYSLOGBASE2 (?:%{SYSLOGTIMESTAMP:timestamp}|%{TIMESTAMP_ISO8601:timestamp8601}) (?:%{SYSLOGFACILITY} )?%{SYSLOGHOST:logsource}+(?: %{SYSLOGPROG}:|) 4 | SYSLOGPAMSESSION %{SYSLOGBASE} (?=%{GREEDYDATA:message})%{WORD:pam_module}\(%{DATA:pam_caller}\): session %{WORD:pam_session_state} for user %{USERNAME:username}(?: by %{GREEDYDATA:pam_by})? 5 | 6 | CRON_ACTION [A-Z ]+ 7 | CRONLOG %{SYSLOGBASE} \(%{USER:user}\) %{CRON_ACTION:action} \(%{DATA:message}\) 8 | 9 | SYSLOGLINE %{SYSLOGBASE2} %{GREEDYDATA:message} 10 | 11 | # IETF 5424 syslog(8) format (see http://www.rfc-editor.org/info/rfc5424) 12 | SYSLOG5424PRI <%{NONNEGINT:syslog5424_pri}> 13 | SYSLOG5424SD \[%{DATA}\]+ 14 | SYSLOG5424BASE %{SYSLOG5424PRI}%{NONNEGINT:syslog5424_ver} +(?:%{TIMESTAMP_ISO8601:syslog5424_ts}|-) +(?:%{IPORHOST:syslog5424_host}|-) +(-|%{SYSLOG5424PRINTASCII:syslog5424_app}) +(-|%{SYSLOG5424PRINTASCII:syslog5424_proc}) +(-|%{SYSLOG5424PRINTASCII:syslog5424_msgid}) +(?:%{SYSLOG5424SD:syslog5424_sd}|-|) 15 | 16 | SYSLOG5424LINE %{SYSLOG5424BASE} +%{GREEDYDATA:syslog5424_msg} 17 | -------------------------------------------------------------------------------- /patterns/legacy/junos: -------------------------------------------------------------------------------- 1 | # JUNOS 11.4 RT_FLOW patterns 2 | RT_FLOW_EVENT (RT_FLOW_SESSION_CREATE|RT_FLOW_SESSION_CLOSE|RT_FLOW_SESSION_DENY) 3 | 4 | RT_FLOW1 %{RT_FLOW_EVENT:event}: %{GREEDYDATA:close-reason}: %{IP:src-ip}/%{INT:src-port}->%{IP:dst-ip}/%{INT:dst-port} %{DATA:service} 
%{IP:nat-src-ip}/%{INT:nat-src-port}->%{IP:nat-dst-ip}/%{INT:nat-dst-port} %{DATA:src-nat-rule-name} %{DATA:dst-nat-rule-name} %{INT:protocol-id} %{DATA:policy-name} %{DATA:from-zone} %{DATA:to-zone} %{INT:session-id} \d+\(%{DATA:sent}\) \d+\(%{DATA:received}\) %{INT:elapsed-time} .* 5 | 6 | RT_FLOW2 %{RT_FLOW_EVENT:event}: session created %{IP:src-ip}/%{INT:src-port}->%{IP:dst-ip}/%{INT:dst-port} %{DATA:service} %{IP:nat-src-ip}/%{INT:nat-src-port}->%{IP:nat-dst-ip}/%{INT:nat-dst-port} %{DATA:src-nat-rule-name} %{DATA:dst-nat-rule-name} %{INT:protocol-id} %{DATA:policy-name} %{DATA:from-zone} %{DATA:to-zone} %{INT:session-id} .* 7 | 8 | RT_FLOW3 %{RT_FLOW_EVENT:event}: session denied %{IP:src-ip}/%{INT:src-port}->%{IP:dst-ip}/%{INT:dst-port} %{DATA:service} %{INT:protocol-id}\(\d\) %{DATA:policy-name} %{DATA:from-zone} %{DATA:to-zone} .* 9 | 10 | -------------------------------------------------------------------------------- /patterns/ecs-v1/rails: -------------------------------------------------------------------------------- 1 | RUUID \h{32} 2 | # rails controller with action 3 | RCONTROLLER (?<[rails][controller][class]>[^#]+)#(?<[rails][controller][action]>\w+) 4 | 5 | # this will often be the only line: 6 | RAILS3HEAD (?m)Started %{WORD:[http][request][method]} "%{URIPATHPARAM:[url][original]}" for %{IPORHOST:[source][address]} at (?%{YEAR}-%{MONTHNUM}-%{MONTHDAY} %{HOUR}:%{MINUTE}:%{SECOND} %{ISO8601_TIMEZONE}) 7 | # for some a strange reason, params are stripped of {} - not sure that's a good idea. 8 | RPROCESSING \W*Processing by %{RCONTROLLER} as (?<[rails][request][format]>\S+)(?:\W*Parameters: {%{DATA:[rails][request][params]}}\W*)? 
9 | RAILS3FOOT Completed %{POSINT:[http][response][status_code]:int}%{DATA} in %{NUMBER:[rails][request][duration][total]:float}ms %{RAILS3PROFILE}%{GREEDYDATA} 10 | RAILS3PROFILE (?:\(Views: %{NUMBER:[rails][request][duration][view]:float}ms \| ActiveRecord: %{NUMBER:[rails][request][duration][active_record]:float}ms|\(ActiveRecord: %{NUMBER:[rails][request][duration][active_record]:float}ms)? 11 | 12 | # putting it all together 13 | RAILS3 %{RAILS3HEAD}(?:%{RPROCESSING})?(?<[rails][request][explain][original]>(?:%{DATA}\n)*)(?:%{RAILS3FOOT})? 14 | -------------------------------------------------------------------------------- /patterns/ecs-v1/linux-syslog: -------------------------------------------------------------------------------- 1 | SYSLOG5424PRINTASCII [!-~]+ 2 | 3 | SYSLOGBASE2 (?:%{SYSLOGTIMESTAMP:timestamp}|%{TIMESTAMP_ISO8601:timestamp})(?: %{SYSLOGFACILITY})?(?: %{SYSLOGHOST:[host][hostname]})?(?: %{SYSLOGPROG}:)? 4 | SYSLOGPAMSESSION %{SYSLOGBASE} (?=%{GREEDYDATA:message})%{WORD:[system][auth][pam][module]}\(%{DATA:[system][auth][pam][origin]}\): session %{WORD:[system][auth][pam][session_state]} for user %{USERNAME:[user][name]}(?: by %{GREEDYDATA})? 5 | 6 | CRON_ACTION [A-Z ]+ 7 | CRONLOG %{SYSLOGBASE} \(%{USER:[user][name]}\) %{CRON_ACTION:[system][cron][action]} \(%{DATA:message}\) 8 | 9 | SYSLOGLINE %{SYSLOGBASE2} %{GREEDYDATA:message} 10 | 11 | # IETF 5424 syslog(8) format (see http://www.rfc-editor.org/info/rfc5424) 12 | SYSLOG5424PRI <%{NONNEGINT:[log][syslog][priority]:int}> 13 | SYSLOG5424SD \[%{DATA}\]+ 14 | SYSLOG5424BASE %{SYSLOG5424PRI}%{NONNEGINT:[system][syslog][version]} +(?:-|%{TIMESTAMP_ISO8601:timestamp}) +(?:-|%{IPORHOST:[host][hostname]}) +(?:-|%{SYSLOG5424PRINTASCII:[process][name]}) +(?:-|%{POSINT:[process][pid]:int}) +(?:-|%{SYSLOG5424PRINTASCII:[event][code]}) +(?:-|%{SYSLOG5424SD:[system][syslog][structured_data]})? 
15 | 16 | SYSLOG5424LINE %{SYSLOG5424BASE} +%{GREEDYDATA:message} 17 | -------------------------------------------------------------------------------- /logstash-patterns-core.gemspec: -------------------------------------------------------------------------------- 1 | Gem::Specification.new do |s| 2 | 3 | s.name = 'logstash-patterns-core' 4 | s.version = '4.3.4' 5 | s.licenses = ['Apache License (2.0)'] 6 | s.summary = "Patterns to be used in logstash" 7 | s.description = "This gem is a Logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/logstash-plugin install gemname. This gem is not a stand-alone program" 8 | s.authors = ["Elastic"] 9 | s.email = 'info@elastic.co' 10 | s.homepage = "http://www.elastic.co/guide/en/logstash/current/index.html" 11 | s.require_paths = ["lib"] 12 | 13 | # Files 14 | s.files = Dir['lib/**/*','spec/**/*','patterns/**/*','*.gemspec','*.md','CONTRIBUTORS','Gemfile','LICENSE','NOTICE.TXT'] 15 | 16 | # Tests 17 | s.test_files = s.files.grep(%r{^(test|spec|features)/}) 18 | 19 | # Special flag to let us know this is actually a logstash plugin 20 | s.metadata = { "logstash_plugin" => "true" } 21 | 22 | # Gem dependencies 23 | s.add_runtime_dependency "logstash-core-plugin-api", ">= 1.60", "<= 2.99" 24 | 25 | s.add_development_dependency "logstash-devutils" 26 | s.add_development_dependency "logstash-filter-grok" 27 | end 28 | 29 | -------------------------------------------------------------------------------- /patterns/legacy/java: -------------------------------------------------------------------------------- 1 | JAVACLASS (?:[a-zA-Z$_][a-zA-Z$_0-9]*\.)*[a-zA-Z$_][a-zA-Z$_0-9]* 2 | #Space is an allowed character to match special cases like 'Native Method' or 'Unknown Source' 3 | JAVAFILE (?:[a-zA-Z$_0-9. 
-]+) 4 | #Allow special , methods 5 | JAVAMETHOD (?:(<(?:cl)?init>)|[a-zA-Z$_][a-zA-Z$_0-9]*) 6 | #Line number is optional in special cases 'Native method' or 'Unknown source' 7 | JAVASTACKTRACEPART %{SPACE}at %{JAVACLASS:class}\.%{JAVAMETHOD:method}\(%{JAVAFILE:file}(?::%{NUMBER:line})?\) 8 | # Java Logs 9 | JAVATHREAD (?:[A-Z]{2}-Processor[\d]+) 10 | JAVALOGMESSAGE (.*) 11 | # MMM dd, yyyy HH:mm:ss eg: Jan 9, 2014 7:13:13 AM 12 | CATALINA_DATESTAMP %{MONTH} %{MONTHDAY}, 20%{YEAR} %{HOUR}:?%{MINUTE}(?::?%{SECOND}) (?:AM|PM) 13 | # yyyy-MM-dd HH:mm:ss,SSS ZZZ eg: 2014-01-09 17:32:25,527 -0800 14 | TOMCAT_DATESTAMP 20%{YEAR}-%{MONTHNUM}-%{MONTHDAY} %{HOUR}:?%{MINUTE}(?::?%{SECOND}) %{ISO8601_TIMEZONE} 15 | CATALINALOG %{CATALINA_DATESTAMP:timestamp} %{JAVACLASS:class} %{JAVALOGMESSAGE:logmessage} 16 | # 2014-01-09 20:03:28,269 -0800 | ERROR | com.example.service.ExampleService - something compeletely unexpected happened... 17 | TOMCATLOG %{TOMCAT_DATESTAMP:timestamp} \| %{LOGLEVEL:level} \| %{JAVACLASS:class} - %{JAVALOGMESSAGE:logmessage} 18 | -------------------------------------------------------------------------------- /patterns/legacy/exim: -------------------------------------------------------------------------------- 1 | EXIM_MSGID [0-9A-Za-z]{6}-[0-9A-Za-z]{6}-[0-9A-Za-z]{2} 2 | EXIM_FLAGS (<=|[-=>*]>|[*]{2}|==) 3 | EXIM_DATE %{YEAR:exim_year}-%{MONTHNUM:exim_month}-%{MONTHDAY:exim_day} %{TIME:exim_time} 4 | EXIM_PID \[%{POSINT:pid}\] 5 | EXIM_QT ((\d+y)?(\d+w)?(\d+d)?(\d+h)?(\d+m)?(\d+s)?) 6 | EXIM_EXCLUDE_TERMS (Message is frozen|(Start|End) queue run| Warning: | retry time not reached | no (IP address|host name) found for (IP address|host) | unexpected disconnection while reading SMTP command | no immediate delivery: |another process is handling this message) 7 | EXIM_REMOTE_HOST (H=(%{NOTSPACE:remote_hostname} )?(\(%{NOTSPACE:remote_heloname}\) )?\[%{IP:remote_host}\])(?::%{POSINT:remote_port})? 
8 | EXIM_INTERFACE (I=\[%{IP:exim_interface}\](:%{NUMBER:exim_interface_port})) 9 | EXIM_PROTOCOL (P=%{NOTSPACE:protocol}) 10 | EXIM_MSG_SIZE (S=%{NUMBER:exim_msg_size}) 11 | EXIM_HEADER_ID (id=%{NOTSPACE:exim_header_id}) 12 | EXIM_SUBJECT (T=%{QS:exim_subject}) 13 | 14 | EXIM_UNKNOWN_FIELD (?:[A-Za-z0-9]{1,4}=%{NOTSPACE}) 15 | EXIM_NAMED_FIELDS (?: (?:%{EXIM_REMOTE_HOST}|%{EXIM_INTERFACE}|%{EXIM_PROTOCOL}|%{EXIM_MSG_SIZE}|%{EXIM_HEADER_ID}|%{EXIM_SUBJECT}|%{EXIM_UNKNOWN_FIELD}))* 16 | 17 | EXIM_MESSAGE_ARRIVAL %{EXIM_DATE:timestamp} (?:%{EXIM_PID} )?%{EXIM_MSGID:exim_msgid} (?<=) (?[a-z:] )?%{EMAILADDRESS:exim_sender_email}%{EXIM_NAMED_FIELDS}(?: for %{EMAILADDRESS:exim_recipient_email})? 18 | 19 | EXIM %{EXIM_MESSAGE_ARRIVAL} 20 | -------------------------------------------------------------------------------- /patterns/ecs-v1/httpd: -------------------------------------------------------------------------------- 1 | HTTPDUSER %{EMAILADDRESS}|%{USER} 2 | HTTPDERROR_DATE %{DAY} %{MONTH} %{MONTHDAY} %{TIME} %{YEAR} 3 | 4 | # Log formats 5 | HTTPD_COMMONLOG %{IPORHOST:[source][address]} (?:-|%{HTTPDUSER:[apache][access][user][identity]}) (?:-|%{HTTPDUSER:[user][name]}) \[%{HTTPDATE:timestamp}\] "(?:%{WORD:[http][request][method]} %{NOTSPACE:[url][original]}(?: HTTP/%{NUMBER:[http][version]})?|%{DATA})" (?:-|%{INT:[http][response][status_code]:int}) (?:-|%{INT:[http][response][body][bytes]:int}) 6 | # :long - %{INT:[http][response][body][bytes]:int} 7 | HTTPD_COMBINEDLOG %{HTTPD_COMMONLOG} "(?:-|%{DATA:[http][request][referrer]})" "(?:-|%{DATA:[user_agent][original]})" 8 | 9 | # Error logs 10 | HTTPD20_ERRORLOG \[%{HTTPDERROR_DATE:timestamp}\] \[%{LOGLEVEL:[log][level]}\] (?:\[client %{IPORHOST:[source][address]}\] )?%{GREEDYDATA:message} 11 | HTTPD24_ERRORLOG \[%{HTTPDERROR_DATE:timestamp}\] \[(?:%{WORD:[apache][error][module]})?:%{LOGLEVEL:[log][level]}\] \[pid %{POSINT:[process][pid]:int}(:tid %{INT:[process][thread][id]:int})?\](?: 
\(%{POSINT:[apache][error][proxy][error][code]?}\)%{DATA:[apache][error][proxy][error][message]}:)?(?: \[client %{IPORHOST:[source][address]}(?::%{POSINT:[source][port]:int})?\])?(?: %{DATA:[error][code]}:)? %{GREEDYDATA:message} 12 | # :long - %{INT:[process][thread][id]:int} 13 | HTTPD_ERRORLOG %{HTTPD20_ERRORLOG}|%{HTTPD24_ERRORLOG} 14 | 15 | # Deprecated 16 | COMMONAPACHELOG %{HTTPD_COMMONLOG} 17 | COMBINEDAPACHELOG %{HTTPD_COMBINEDLOG} 18 | -------------------------------------------------------------------------------- /spec/patterns/maven_spec.rb: -------------------------------------------------------------------------------- 1 | # encoding: utf-8 2 | require "spec_helper" 3 | require "logstash/patterns/core" 4 | 5 | describe "MAVEN_VERSION" do 6 | 7 | let(:pattern) { 'MAVEN_VERSION' } 8 | 9 | context "when maven version is simple" do 10 | let(:value) { '1.1.0' } 11 | 12 | it "should match the version" do 13 | expect(grok_match(pattern,value)).to pass 14 | end 15 | end 16 | 17 | context "when maven version is a bit more complex" do 18 | let(:value) { '2.35.128' } 19 | 20 | it "should match the version" do 21 | expect(grok_match(pattern,value)).to pass 22 | end 23 | end 24 | 25 | context "when maven version contains release" do 26 | let(:value) { '1.1.0.RELEASE' } 27 | 28 | it "should match the version" do 29 | expect(grok_match(pattern,value)).to pass 30 | end 31 | end 32 | 33 | context "when maven version contains shapshot" do 34 | let(:value) { '1.1.0.SNAPSHOT' } 35 | 36 | it "should match the version" do 37 | expect(grok_match(pattern,value)).to pass 38 | end 39 | end 40 | 41 | context "when maven version contains release" do 42 | context "and the version contains a dash" do 43 | let(:value) { '1.1.0-RELEASE' } 44 | 45 | it "should match the version" do 46 | expect(grok_match(pattern,value)).to pass 47 | end 48 | end 49 | end 50 | 51 | context "when maven version contains shapshot" do 52 | context "and the version contains a dash" do 53 | let(:value) { 
'1.1.0-SNAPSHOT' } 54 | 55 | it "should match the version" do 56 | expect(grok_match(pattern,value)).to pass 57 | end 58 | end 59 | end 60 | 61 | end 62 | -------------------------------------------------------------------------------- /patterns/legacy/aws: -------------------------------------------------------------------------------- 1 | S3_REQUEST_LINE (?:%{WORD:verb} %{NOTSPACE:request}(?: HTTP/%{NUMBER:httpversion})?|%{DATA:rawrequest}) 2 | 3 | S3_ACCESS_LOG %{WORD:owner} %{NOTSPACE:bucket} \[%{HTTPDATE:timestamp}\] %{IP:clientip} %{NOTSPACE:requester} %{NOTSPACE:request_id} %{NOTSPACE:operation} %{NOTSPACE:key} (?:"%{S3_REQUEST_LINE}"|-) (?:%{INT:response:int}|-) (?:-|%{NOTSPACE:error_code}) (?:%{INT:bytes:int}|-) (?:%{INT:object_size:int}|-) (?:%{INT:request_time_ms:int}|-) (?:%{INT:turnaround_time_ms:int}|-) (?:%{QS:referrer}|-) (?:"?%{QS:agent}"?|-) (?:-|%{NOTSPACE:version_id}) 4 | 5 | ELB_URIPATHPARAM %{URIPATH:path}(?:%{URIPARAM:params})? 6 | 7 | ELB_URI %{URIPROTO:proto}://(?:%{USER}(?::[^@]*)?@)?(?:%{URIHOST:urihost})?(?:%{ELB_URIPATHPARAM})? 
8 | 9 | ELB_REQUEST_LINE (?:%{WORD:verb} %{ELB_URI:request}(?: HTTP/%{NUMBER:httpversion})?|%{DATA:rawrequest}) 10 | 11 | ELB_ACCESS_LOG %{TIMESTAMP_ISO8601:timestamp} %{NOTSPACE:elb} %{IP:clientip}:%{INT:clientport:int} (?:(?:%{IP:backendip}:?:%{INT:backendport:int})|-) %{NUMBER:request_processing_time:float} %{NUMBER:backend_processing_time:float} %{NUMBER:response_processing_time:float} %{INT:response:int} (?:-|%{INT:backend_response:int}) %{INT:received_bytes:int} %{INT:bytes:int} "%{ELB_REQUEST_LINE}" 12 | 13 | CLOUDFRONT_ACCESS_LOG (?%{YEAR}-%{MONTHNUM}-%{MONTHDAY}\t%{TIME})\t%{WORD:x_edge_location}\t(?:%{NUMBER:sc_bytes:int}|-)\t%{IPORHOST:clientip}\t%{WORD:cs_method}\t%{HOSTNAME:cs_host}\t%{NOTSPACE:cs_uri_stem}\t%{NUMBER:sc_status:int}\t%{GREEDYDATA:referrer}\t%{GREEDYDATA:agent}\t%{GREEDYDATA:cs_uri_query}\t%{GREEDYDATA:cookies}\t%{WORD:x_edge_result_type}\t%{NOTSPACE:x_edge_request_id}\t%{HOSTNAME:x_host_header}\t%{URIPROTO:cs_protocol}\t%{INT:cs_bytes:int}\t%{GREEDYDATA:time_taken:float}\t%{GREEDYDATA:x_forwarded_for}\t%{GREEDYDATA:ssl_protocol}\t%{GREEDYDATA:ssl_cipher}\t%{GREEDYDATA:x_edge_response_result_type} 14 | 15 | -------------------------------------------------------------------------------- /patterns/ecs-v1/junos: -------------------------------------------------------------------------------- 1 | # JUNOS 11.4 RT_FLOW patterns 2 | RT_FLOW_TAG (?:RT_FLOW_SESSION_CREATE|RT_FLOW_SESSION_CLOSE|RT_FLOW_SESSION_DENY) 3 | # deprecated legacy name: 4 | RT_FLOW_EVENT RT_FLOW_TAG 5 | 6 | RT_FLOW1 %{RT_FLOW_TAG:[juniper][srx][tag]}: %{GREEDYDATA:[juniper][srx][reason]}: %{IP:[source][ip]}/%{INT:[source][port]:int}->%{IP:[destination][ip]}/%{INT:[destination][port]:int} %{DATA:[juniper][srx][service_name]} %{IP:[source][nat][ip]}/%{INT:[source][nat][port]:int}->%{IP:[destination][nat][ip]}/%{INT:[destination][nat][port]:int} (?:(?:None)|(?:%{DATA:[juniper][srx][src_nat_rule_name]})) (?:(?:None)|(?:%{DATA:[juniper][srx][dst_nat_rule_name]})) 
%{INT:[network][iana_number]} %{DATA:[rule][name]} %{DATA:[observer][ingress][zone]} %{DATA:[observer][egress][zone]} %{INT:[juniper][srx][session_id]} \d+\(%{INT:[source][bytes]:int}\) \d+\(%{INT:[destination][bytes]:int}\) %{INT:[juniper][srx][elapsed_time]:int} .* 7 | # :long - %{INT:[source][bytes]:int} 8 | # :long - %{INT:[destination][bytes]:int} 9 | 10 | RT_FLOW2 %{RT_FLOW_TAG:[juniper][srx][tag]}: session created %{IP:[source][ip]}/%{INT:[source][port]:int}->%{IP:[destination][ip]}/%{INT:[destination][port]:int} %{DATA:[juniper][srx][service_name]} %{IP:[source][nat][ip]}/%{INT:[source][nat][port]:int}->%{IP:[destination][nat][ip]}/%{INT:[destination][nat][port]:int} (?:(?:None)|(?:%{DATA:[juniper][srx][src_nat_rule_name]})) (?:(?:None)|(?:%{DATA:[juniper][srx][dst_nat_rule_name]})) %{INT:[network][iana_number]} %{DATA:[rule][name]} %{DATA:[observer][ingress][zone]} %{DATA:[observer][egress][zone]} %{INT:[juniper][srx][session_id]} .* 11 | 12 | RT_FLOW3 %{RT_FLOW_TAG:[juniper][srx][tag]}: session denied %{IP:[source][ip]}/%{INT:[source][port]:int}->%{IP:[destination][ip]}/%{INT:[destination][port]:int} %{DATA:[juniper][srx][service_name]} %{INT:[network][iana_number]}\(\d\) %{DATA:[rule][name]} %{DATA:[observer][ingress][zone]} %{DATA:[observer][egress][zone]} .* 13 | 14 | -------------------------------------------------------------------------------- /patterns/ecs-v1/exim: -------------------------------------------------------------------------------- 1 | EXIM_MSGID [0-9A-Za-z]{6}-[0-9A-Za-z]{6}-[0-9A-Za-z]{2} 2 | # <= message arrival 3 | # => normal message delivery 4 | # -> additional address in same delivery 5 | # *> delivery suppressed by -N 6 | # ** delivery failed; address bounced 7 | # == delivery deferred; temporary problem 8 | EXIM_FLAGS (?:<=|=>|->|\*>|\*\*|==|<>|>>) 9 | EXIM_DATE (:?%{YEAR}-%{MONTHNUM}-%{MONTHDAY} %{TIME}) 10 | EXIM_PID \[%{POSINT:[process][pid]:int}\] 11 | EXIM_QT ((\d+y)?(\d+w)?(\d+d)?(\d+h)?(\d+m)?(\d+s)?) 
12 | EXIM_EXCLUDE_TERMS (Message is frozen|(Start|End) queue run| Warning: | retry time not reached | no (IP address|host name) found for (IP address|host) | unexpected disconnection while reading SMTP command | no immediate delivery: |another process is handling this message) 13 | EXIM_REMOTE_HOST (H=(%{NOTSPACE:[source][address]} )?(\(%{NOTSPACE:[exim][log][remote_address]}\) )?\[%{IP:[source][ip]}\](?::%{POSINT:[source][port]:int})?) 14 | EXIM_INTERFACE (I=\[%{IP:[destination][ip]}\](?::%{NUMBER:[destination][port]:int})) 15 | EXIM_PROTOCOL (P=%{NOTSPACE:[network][protocol]}) 16 | EXIM_MSG_SIZE (S=%{NUMBER:[exim][log][message][size]:int}) 17 | EXIM_HEADER_ID (id=%{NOTSPACE:[exim][log][header_id]}) 18 | EXIM_QUOTED_CONTENT (?:\\.|[^\\"])* 19 | EXIM_SUBJECT (T="%{EXIM_QUOTED_CONTENT:[exim][log][message][subject]}") 20 | 21 | EXIM_UNKNOWN_FIELD (?:[A-Za-z0-9]{1,4}=(?:%{QUOTEDSTRING}|%{NOTSPACE})) 22 | EXIM_NAMED_FIELDS (?: (?:%{EXIM_REMOTE_HOST}|%{EXIM_INTERFACE}|%{EXIM_PROTOCOL}|%{EXIM_MSG_SIZE}|%{EXIM_HEADER_ID}|%{EXIM_SUBJECT}|%{EXIM_UNKNOWN_FIELD}))* 23 | 24 | EXIM_MESSAGE_ARRIVAL %{EXIM_DATE:timestamp} (?:%{EXIM_PID} )?%{EXIM_MSGID:[exim][log][message][id]} (?<[exim][log][flags]><=) (?<[exim][log][status]>[a-z:] )?%{EMAILADDRESS:[exim][log][sender][email]}%{EXIM_NAMED_FIELDS}(?:(?: from ?)? for %{EMAILADDRESS:[exim][log][recipient][email]})? 25 | 26 | EXIM %{EXIM_MESSAGE_ARRIVAL} 27 | -------------------------------------------------------------------------------- /CONTRIBUTORS: -------------------------------------------------------------------------------- 1 | The following is a list of people who have contributed ideas, code, bug 2 | reports, or in general have helped logstash along its way. 
3 | 4 | Contributors: 5 | * Aaron Blew (blewa) 6 | * Alexander Fortin (shaftoe) 7 | * Avishai Ish-Shalom (avishai-ish-shalom) 8 | * Brad Fritz (bfritz) 9 | * Brian DeFreitas (briandef) 10 | * Chris Mague (maguec) 11 | * Christian Häussler (cniweb) 12 | * Colin Surprenant (colinsurprenant) 13 | * Corry Haines (tabletcorry) 14 | * Dimitri Tischenko (timidri) 15 | * Dr. Alexander Papaspyrou (lxndrp) 16 | * Erez Zarum (erezzarum) 17 | * Eugen Dinca (decbis) 18 | * Félix Barbeira (fbarbeira) 19 | * Greg Brockman (gdb) 20 | * Greg Mefford (GregMefford) 21 | * Greg Swallow (gswallow) 22 | * Guillaume ESPANEL (quatre) 23 | * Hugo Lopes Tavares (hltbra) 24 | * Ilya Nazarov (nilya) 25 | * Jared Everett (jarsever) 26 | * Jelle Smet (smetj) 27 | * Jeremiah Shirk (jeremiahshirk) 28 | * Jordan Sissel (jordansissel) 29 | * Jose Armesto (fiunchinho) 30 | * Joseph Price (PriceChild) 31 | * Kevin Nuckolls (knuckolls) 32 | * Kurt Hurtado (kurtado) 33 | * Lorenzo González (logongas) 34 | * Marcus Pamelia (pamelia) 35 | * Matt Dainty (bodgit) 36 | * Matthew Baxa (mbaxa) 37 | * MikeSchuette 38 | * Nick Padilla (NickPadilla) 39 | * Olaf van Zandwijk (olafz) 40 | * Oluf Lorenzen (Finkregh) 41 | * Paul Myjavec (pmyjavec) 42 | * Pete Fritchman (fetep) 43 | * Peter Fern (pdf) 44 | * Philippe Weber (wiibaa) 45 | * Pier-Hugues Pellerin (ph) 46 | * Prune (prune998) 47 | * Richard Pijnenburg (electrical) 48 | * Robin Bowes (robinbowes) 49 | * Simon Mulser (simonmulser) 50 | * Tom Robinson (tjrobinson) 51 | * Tray (torrancew) 52 | * Vincent Bernat (vincentbernat) 53 | * olagache 54 | * xiaclo 55 | 56 | Note: If you've sent us patches, bug reports, or otherwise contributed to 57 | Logstash, and you aren't on the list above and want to be, please let us know 58 | and we'll make sure you're here. Contributions from folks like you are what make 59 | open source awesome. 
60 | -------------------------------------------------------------------------------- /patterns/legacy/bro: -------------------------------------------------------------------------------- 1 | # https://www.bro.org/sphinx/script-reference/log-files.html 2 | 3 | # http.log 4 | BRO_HTTP %{NUMBER:ts}\t%{NOTSPACE:uid}\t%{IP:orig_h}\t%{INT:orig_p}\t%{IP:resp_h}\t%{INT:resp_p}\t%{INT:trans_depth}\t%{GREEDYDATA:method}\t%{GREEDYDATA:domain}\t%{GREEDYDATA:uri}\t%{GREEDYDATA:referrer}\t%{GREEDYDATA:user_agent}\t%{NUMBER:request_body_len}\t%{NUMBER:response_body_len}\t%{GREEDYDATA:status_code}\t%{GREEDYDATA:status_msg}\t%{GREEDYDATA:info_code}\t%{GREEDYDATA:info_msg}\t%{GREEDYDATA:filename}\t%{GREEDYDATA:bro_tags}\t%{GREEDYDATA:username}\t%{GREEDYDATA:password}\t%{GREEDYDATA:proxied}\t%{GREEDYDATA:orig_fuids}\t%{GREEDYDATA:orig_mime_types}\t%{GREEDYDATA:resp_fuids}\t%{GREEDYDATA:resp_mime_types} 5 | 6 | # dns.log 7 | BRO_DNS %{NUMBER:ts}\t%{NOTSPACE:uid}\t%{IP:orig_h}\t%{INT:orig_p}\t%{IP:resp_h}\t%{INT:resp_p}\t%{WORD:proto}\t%{INT:trans_id}\t%{GREEDYDATA:query}\t%{GREEDYDATA:qclass}\t%{GREEDYDATA:qclass_name}\t%{GREEDYDATA:qtype}\t%{GREEDYDATA:qtype_name}\t%{GREEDYDATA:rcode}\t%{GREEDYDATA:rcode_name}\t%{GREEDYDATA:AA}\t%{GREEDYDATA:TC}\t%{GREEDYDATA:RD}\t%{GREEDYDATA:RA}\t%{GREEDYDATA:Z}\t%{GREEDYDATA:answers}\t%{GREEDYDATA:TTLs}\t%{GREEDYDATA:rejected} 8 | 9 | # conn.log 10 | BRO_CONN %{NUMBER:ts}\t%{NOTSPACE:uid}\t%{IP:orig_h}\t%{INT:orig_p}\t%{IP:resp_h}\t%{INT:resp_p}\t%{WORD:proto}\t%{GREEDYDATA:service}\t%{NUMBER:duration}\t%{NUMBER:orig_bytes}\t%{NUMBER:resp_bytes}\t%{GREEDYDATA:conn_state}\t%{GREEDYDATA:local_orig}\t%{GREEDYDATA:missed_bytes}\t%{GREEDYDATA:history}\t%{GREEDYDATA:orig_pkts}\t%{GREEDYDATA:orig_ip_bytes}\t%{GREEDYDATA:resp_pkts}\t%{GREEDYDATA:resp_ip_bytes}\t%{GREEDYDATA:tunnel_parents} 11 | 12 | # files.log 13 | BRO_FILES 
%{NUMBER:ts}\t%{NOTSPACE:fuid}\t%{IP:tx_hosts}\t%{IP:rx_hosts}\t%{NOTSPACE:conn_uids}\t%{GREEDYDATA:source}\t%{GREEDYDATA:depth}\t%{GREEDYDATA:analyzers}\t%{GREEDYDATA:mime_type}\t%{GREEDYDATA:filename}\t%{GREEDYDATA:duration}\t%{GREEDYDATA:local_orig}\t%{GREEDYDATA:is_orig}\t%{GREEDYDATA:seen_bytes}\t%{GREEDYDATA:total_bytes}\t%{GREEDYDATA:missing_bytes}\t%{GREEDYDATA:overflow_bytes}\t%{GREEDYDATA:timedout}\t%{GREEDYDATA:parent_fuid}\t%{GREEDYDATA:md5}\t%{GREEDYDATA:sha1}\t%{GREEDYDATA:sha256}\t%{GREEDYDATA:extracted} 14 | -------------------------------------------------------------------------------- /patterns/ecs-v1/java: -------------------------------------------------------------------------------- 1 | JAVACLASS (?:[a-zA-Z$_][a-zA-Z$_0-9]*\.)*[a-zA-Z$_][a-zA-Z$_0-9]* 2 | #Space is an allowed character to match special cases like 'Native Method' or 'Unknown Source' 3 | JAVAFILE (?:[a-zA-Z$_0-9. -]+) 4 | #Allow special , methods 5 | JAVAMETHOD (?:(<(?:cl)?init>)|[a-zA-Z$_][a-zA-Z$_0-9]*) 6 | #Line number is optional in special cases 'Native method' or 'Unknown source' 7 | JAVASTACKTRACEPART %{SPACE}at %{JAVACLASS:[java][log][origin][class][name]}\.%{JAVAMETHOD:[log][origin][function]}\(%{JAVAFILE:[log][origin][file][name]}(?::%{INT:[log][origin][file][line]:int})?\) 8 | # Java Logs 9 | JAVATHREAD (?:[A-Z]{2}-Processor[\d]+) 10 | JAVALOGMESSAGE (?:.*) 11 | 12 | # MMM dd, yyyy HH:mm:ss eg: Jan 9, 2014 7:13:13 AM 13 | # matches default logging configuration in Tomcat 4.1, 5.0, 5.5, 6.0, 7.0 14 | CATALINA7_DATESTAMP %{MONTH} %{MONTHDAY}, %{YEAR} %{HOUR}:%{MINUTE}:%{SECOND} (?:AM|PM) 15 | CATALINA7_LOG %{CATALINA7_DATESTAMP:timestamp} %{JAVACLASS:[java][log][origin][class][name]}(?: %{JAVAMETHOD:[log][origin][function]})?\s*(?:%{LOGLEVEL:[log][level]}:)? 
%{JAVALOGMESSAGE:message} 16 | 17 | # 31-Jul-2020 16:40:38.578 in Tomcat 8.5/9.0 18 | CATALINA8_DATESTAMP %{MONTHDAY}-%{MONTH}-%{YEAR} %{HOUR}:%{MINUTE}:%{SECOND} 19 | CATALINA8_LOG %{CATALINA8_DATESTAMP:timestamp} %{LOGLEVEL:[log][level]} \[%{DATA:[java][log][origin][thread][name]}\] %{JAVACLASS:[java][log][origin][class][name]}\.(?:%{JAVAMETHOD:[log][origin][function]})? %{JAVALOGMESSAGE:message} 20 | 21 | CATALINA_DATESTAMP (?:%{CATALINA8_DATESTAMP})|(?:%{CATALINA7_DATESTAMP}) 22 | CATALINALOG (?:%{CATALINA8_LOG})|(?:%{CATALINA7_LOG}) 23 | 24 | # in Tomcat 5.5, 6.0, 7.0 it is the same as catalina.out logging format 25 | TOMCAT7_LOG %{CATALINA7_LOG} 26 | TOMCAT8_LOG %{CATALINA8_LOG} 27 | 28 | # NOTE: a weird log we started with - not sure what TC version this should match out of the box (due the | delimiters) 29 | TOMCATLEGACY_DATESTAMP %{YEAR}-%{MONTHNUM}-%{MONTHDAY} %{HOUR}:%{MINUTE}:%{SECOND}(?: %{ISO8601_TIMEZONE})? 30 | TOMCATLEGACY_LOG %{TOMCATLEGACY_DATESTAMP:timestamp} \| %{LOGLEVEL:[log][level]} \| %{JAVACLASS:[java][log][origin][class][name]} - %{JAVALOGMESSAGE:message} 31 | 32 | TOMCAT_DATESTAMP (?:%{CATALINA8_DATESTAMP})|(?:%{CATALINA7_DATESTAMP})|(?:%{TOMCATLEGACY_DATESTAMP}) 33 | 34 | TOMCATLOG (?:%{TOMCAT8_LOG})|(?:%{TOMCAT7_LOG})|(?:%{TOMCATLEGACY_LOG}) 35 | -------------------------------------------------------------------------------- /patterns/legacy/haproxy: -------------------------------------------------------------------------------- 1 | ## These patterns were tested w/ haproxy-1.4.15 2 | 3 | ## Documentation of the haproxy log formats can be found at the following links: 4 | ## http://code.google.com/p/haproxy-docs/wiki/HTTPLogFormat 5 | ## http://code.google.com/p/haproxy-docs/wiki/TCPLogFormat 6 | 7 | HAPROXYTIME (?!<[0-9])%{HOUR:haproxy_hour}:%{MINUTE:haproxy_minute}(?::%{SECOND:haproxy_second})(?![0-9]) 8 | HAPROXYDATE 
%{MONTHDAY:haproxy_monthday}/%{MONTH:haproxy_month}/%{YEAR:haproxy_year}:%{HAPROXYTIME:haproxy_time}.%{INT:haproxy_milliseconds} 9 | 10 | # Override these default patterns to parse out what is captured in your haproxy.cfg 11 | HAPROXYCAPTUREDREQUESTHEADERS %{DATA:captured_request_headers} 12 | HAPROXYCAPTUREDRESPONSEHEADERS %{DATA:captured_response_headers} 13 | 14 | # Example: 15 | # These haproxy config lines will add data to the logs that are captured 16 | # by the patterns below. Place them in your custom patterns directory to 17 | # override the defaults. 18 | # 19 | # capture request header Host len 40 20 | # capture request header X-Forwarded-For len 50 21 | # capture request header Accept-Language len 50 22 | # capture request header Referer len 200 23 | # capture request header User-Agent len 200 24 | # 25 | # capture response header Content-Type len 30 26 | # capture response header Content-Encoding len 10 27 | # capture response header Cache-Control len 200 28 | # capture response header Last-Modified len 200 29 | # 30 | # HAPROXYCAPTUREDREQUESTHEADERS %{DATA:request_header_host}\|%{DATA:request_header_x_forwarded_for}\|%{DATA:request_header_accept_language}\|%{DATA:request_header_referer}\|%{DATA:request_header_user_agent} 31 | # HAPROXYCAPTUREDRESPONSEHEADERS %{DATA:response_header_content_type}\|%{DATA:response_header_content_encoding}\|%{DATA:response_header_cache_control}\|%{DATA:response_header_last_modified} 32 | 33 | # parse a haproxy 'httplog' line 34 | HAPROXYHTTPBASE %{IP:client_ip}:%{INT:client_port} \[%{HAPROXYDATE:accept_date}\] %{NOTSPACE:frontend_name} %{NOTSPACE:backend_name}/%{NOTSPACE:server_name} %{INT:time_request}/%{INT:time_queue}/%{INT:time_backend_connect}/%{INT:time_backend_response}/%{NOTSPACE:time_duration} %{INT:http_status_code} %{NOTSPACE:bytes_read} %{DATA:captured_request_cookie} %{DATA:captured_response_cookie} %{NOTSPACE:termination_state} %{INT:actconn}/%{INT:feconn}/%{INT:beconn}/%{INT:srvconn}/%{NOTSPACE:retries} 
%{INT:srv_queue}/%{INT:backend_queue} (\{%{HAPROXYCAPTUREDREQUESTHEADERS}\})?( )?(\{%{HAPROXYCAPTUREDRESPONSEHEADERS}\})?( )?"(|(%{WORD:http_verb} (%{URIPROTO:http_proto}://)?(?:%{USER:http_user}(?::[^@]*)?@)?(?:%{URIHOST:http_host})?(?:%{URIPATHPARAM:http_request})?( HTTP/%{NUMBER:http_version})?))?"? 35 | 36 | HAPROXYHTTP (?:%{SYSLOGTIMESTAMP:syslog_timestamp}|%{TIMESTAMP_ISO8601:timestamp8601}) %{IPORHOST:syslog_server} %{SYSLOGPROG}: %{HAPROXYHTTPBASE} 37 | 38 | # parse a haproxy 'tcplog' line 39 | HAPROXYTCP (?:%{SYSLOGTIMESTAMP:syslog_timestamp}|%{TIMESTAMP_ISO8601:timestamp8601}) %{IPORHOST:syslog_server} %{SYSLOGPROG}: %{IP:client_ip}:%{INT:client_port} \[%{HAPROXYDATE:accept_date}\] %{NOTSPACE:frontend_name} %{NOTSPACE:backend_name}/%{NOTSPACE:server_name} %{INT:time_queue}/%{INT:time_backend_connect}/%{NOTSPACE:time_duration} %{NOTSPACE:bytes_read} %{NOTSPACE:termination_state} %{INT:actconn}/%{INT:feconn}/%{INT:beconn}/%{INT:srvconn}/%{NOTSPACE:retries} %{INT:srv_queue}/%{INT:backend_queue} 40 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Logstash Plugin 2 | 3 | [![Travis Build Status](https://travis-ci.com/logstash-plugins/logstash-patterns-core.svg)](https://travis-ci.com/logstash-plugins/logstash-patterns-core) 4 | 5 | This plugin provides [pattern definitions][1] used by the [grok filter][2]. 6 | 7 | It is fully free and fully open source. The license is Apache 2.0, meaning you are pretty much free to use it however you want in whatever way. 8 | 9 | ## Documentation 10 | 11 | Logstash provides infrastructure to automatically generate documentation for this plugin. 12 | We use the asciidoc format to write documentation so any comments in the source code will be first converted into asciidoc 13 | and then into html. 
All plugin documentation is placed under one [central location](http://www.elastic.co/guide/en/logstash/current/). 14 | 15 | - For formatting code or config example, you can use the asciidoc `[source,ruby]` directive 16 | - For more asciidoc formatting tips, see the excellent reference here https://github.com/elastic/docs#asciidoc-guide 17 | 18 | ## Need Help? 19 | 20 | Need help? Try https://discuss.elastic.co/c/logstash discussion forum. 21 | 22 | ## Developing 23 | 24 | ### 1. Plugin Development and Testing 25 | 26 | #### Code 27 | 28 | - Install dependencies 29 | ```sh 30 | bundle install 31 | ``` 32 | 33 | #### Test 34 | 35 | - Update your dependencies 36 | 37 | ```sh 38 | bundle install 39 | ``` 40 | 41 | - Run tests 42 | 43 | ```sh 44 | bundle exec rspec 45 | ``` 46 | 47 | ### 2. Running your unpublished Plugin in Logstash 48 | 49 | #### 2.1 Run in a local Logstash clone 50 | 51 | - Edit Logstash `Gemfile` and add the local plugin path, for example: 52 | ```ruby 53 | gem "logstash-patterns-core", :path => "/your/local/logstash-patterns-core" 54 | ``` 55 | - Install plugin 56 | ```sh 57 | # Logstash 2.3 and higher 58 | bin/logstash-plugin install --no-verify 59 | ``` 60 | - Run Logstash with your plugin 61 | ```sh 62 | bin/logstash -e 'filter { grok { } }' 63 | ``` 64 | At this point any modifications to the plugin code will be applied to this local Logstash setup. After modifying the plugin, simply rerun Logstash.
65 | 66 | #### 2.2 Run in an installed Logstash 67 | 68 | You can use the same **2.1** method to run your plugin in an installed Logstash by editing its `Gemfile` and pointing the `:path` to your local plugin development directory or you can build the gem and install it using: 69 | 70 | - Build your plugin gem 71 | ```sh 72 | gem build logstash-patterns-core.gemspec 73 | ``` 74 | - Install the plugin from the Logstash home 75 | ```sh 76 | bin/logstash-plugin install --no-verify 77 | ``` 78 | - Start Logstash and proceed to test the plugin 79 | 80 | ## Contributing 81 | 82 | All contributions are welcome: ideas, patches, documentation, bug reports, complaints, and even something you drew up on a napkin. 83 | 84 | Programming is not a required skill. Whatever you've seen about open source and maintainers or community members saying "send patches or die" - you will not see that here. 85 | 86 | It is more important to the community that you are able to contribute. 87 | 88 | For more information about contributing, see the [CONTRIBUTING](https://github.com/elastic/logstash/blob/master/CONTRIBUTING.md) file. 89 | 90 | [1]: https://github.com/logstash-plugins/logstash-patterns-core/tree/main/patterns 91 | [2]: https://github.com/logstash-plugins/logstash-filter-grok 92 | -------------------------------------------------------------------------------- /.github/CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # Contributing to Logstash 2 | 3 | All contributions are welcome: ideas, patches, documentation, bug reports, 4 | complaints, etc! 5 | 6 | Programming is not a required skill, and there are many ways to help out! 7 | It is more important to us that you are able to contribute. 8 | 9 | That said, some basic guidelines, which you are free to ignore :) 10 | 11 | ## Want to learn? 12 | 13 | Want to lurk about and see what others are doing with Logstash? 
14 | 15 | * The irc channel (#logstash on irc.freenode.org) is a good place for this 16 | * The [forum](https://discuss.elastic.co/c/logstash) is also 17 | great for learning from others. 18 | 19 | ## Got Questions? 20 | 21 | Have a problem you want Logstash to solve for you? 22 | 23 | * You can ask a question in the [forum](https://discuss.elastic.co/c/logstash) 24 | * Alternatively, you are welcome to join the IRC channel #logstash on 25 | irc.freenode.org and ask for help there! 26 | 27 | ## Have an Idea or Feature Request? 28 | 29 | * File a ticket on [GitHub](https://github.com/elastic/logstash/issues). Please remember that GitHub is used only for issues and feature requests. If you have a general question, the [forum](https://discuss.elastic.co/c/logstash) or IRC would be the best place to ask. 30 | 31 | ## Something Not Working? Found a Bug? 32 | 33 | If you think you found a bug, it probably is a bug. 34 | 35 | * If it is a general Logstash or a pipeline issue, file it in [Logstash GitHub](https://github.com/elasticsearch/logstash/issues) 36 | * If it is specific to a plugin, please file it in the respective repository under [logstash-plugins](https://github.com/logstash-plugins) 37 | * or ask the [forum](https://discuss.elastic.co/c/logstash). 38 | 39 | # Contributing Documentation and Code Changes 40 | 41 | If you have a bugfix or new feature that you would like to contribute to 42 | logstash, and you think it will take more than a few minutes to produce the fix 43 | (i.e., write code), it is worth discussing the change with the Logstash users and developers first! You can reach us via [GitHub](https://github.com/elastic/logstash/issues), the [forum](https://discuss.elastic.co/c/logstash), or via IRC (#logstash on freenode irc). 44 | Please note that Pull Requests without tests will not be merged. If you would like to contribute but do not have experience with writing tests, please ping us on IRC/forum or create a PR and ask for our help.
45 | 46 | ## Contributing to plugins 47 | 48 | Check our [documentation](https://www.elastic.co/guide/en/logstash/current/contributing-to-logstash.html) on how to contribute to plugins or write your own! It is super easy! 49 | 50 | ## Contribution Steps 51 | 52 | 1. Test your changes! [Run](https://github.com/elastic/logstash#testing) the test suite 53 | 2. Please make sure you have signed our [Contributor License 54 | Agreement](https://www.elastic.co/contributor-agreement/). We are not 55 | asking you to assign copyright to us, but to give us the right to distribute 56 | your code without restriction. We ask this of all contributors in order to 57 | assure our users of the origin and continuing existence of the code. You 58 | only need to sign the CLA once. 59 | 3. Send a pull request! Push your changes to your fork of the repository and 60 | [submit a pull 61 | request](https://help.github.com/articles/using-pull-requests). In the pull 62 | request, describe what your changes do and mention any bugs/issues related 63 | to the pull request. 64 | 65 | 66 | -------------------------------------------------------------------------------- /patterns/ecs-v1/haproxy: -------------------------------------------------------------------------------- 1 | 2 | HAPROXYTIME (?!<[0-9])%{HOUR}:%{MINUTE}(?::%{SECOND})(?![0-9]) 3 | HAPROXYDATE %{MONTHDAY}/%{MONTH}/%{YEAR}:%{HAPROXYTIME}.%{INT} 4 | 5 | # Override these default patterns to parse out what is captured in your haproxy.cfg 6 | HAPROXYCAPTUREDREQUESTHEADERS %{DATA:[haproxy][http][request][captured_headers]} 7 | HAPROXYCAPTUREDRESPONSEHEADERS %{DATA:[haproxy][http][response][captured_headers]} 8 | 9 | # Example: 10 | # These haproxy config lines will add data to the logs that are captured 11 | # by the patterns below. Place them in your custom patterns directory to 12 | # override the defaults. 
13 | # 14 | # capture request header Host len 40 15 | # capture request header X-Forwarded-For len 50 16 | # capture request header Accept-Language len 50 17 | # capture request header Referer len 200 18 | # capture request header User-Agent len 200 19 | # 20 | # capture response header Content-Type len 30 21 | # capture response header Content-Encoding len 10 22 | # capture response header Cache-Control len 200 23 | # capture response header Last-Modified len 200 24 | # 25 | # HAPROXYCAPTUREDREQUESTHEADERS %{DATA:[haproxy][http][request][host]}\|%{DATA:[haproxy][http][request][x_forwarded_for]}\|%{DATA:[haproxy][http][request][accept_language]}\|%{DATA:[http][request][referrer]}\|%{DATA:[user_agent][original]} 26 | # HAPROXYCAPTUREDRESPONSEHEADERS %{DATA:[http][response][mime_type]}\|%{DATA:[haproxy][http][response][encoding]}\|%{DATA:[haproxy][http][response][cache_control]}\|%{DATA:[haproxy][http][response][last_modified]} 27 | 28 | HAPROXYURI (?:%{URIPROTO:[url][scheme]}://)?(?:%{USER:[url][username]}(?::[^@]*)?@)?(?:%{IPORHOST:[url][domain]}(?::%{POSINT:[url][port]:int})?)?(?:%{URIPATH:[url][path]}(?:\?%{URIQUERY:[url][query]})?)? 
29 | 30 | HAPROXYHTTPREQUESTLINE (?:|(?:%{WORD:[http][request][method]} %{HAPROXYURI:[url][original]}(?: HTTP/%{NUMBER:[http][version]})?)) 31 | 32 | # parse a haproxy 'httplog' line 33 | HAPROXYHTTPBASE %{IP:[source][address]}:%{INT:[source][port]:int} \[%{HAPROXYDATE:[haproxy][request_date]}\] %{NOTSPACE:[haproxy][frontend_name]} %{NOTSPACE:[haproxy][backend_name]}/(?:|%{NOTSPACE:[haproxy][server_name]}) (?:-1|%{INT:[haproxy][http][request][time_wait_ms]:int})/(?:-1|%{INT:[haproxy][total_waiting_time_ms]:int})/(?:-1|%{INT:[haproxy][connection_wait_time_ms]:int})/(?:-1|%{INT:[haproxy][http][request][time_wait_without_data_ms]:int})/%{NOTSPACE:[haproxy][total_time_ms]} %{INT:[http][response][status_code]:int} %{INT:[source][bytes]:int} (?:-|%{DATA:[haproxy][http][request][captured_cookie]}) (?:-|%{DATA:[haproxy][http][response][captured_cookie]}) %{NOTSPACE:[haproxy][termination_state]} %{INT:[haproxy][connections][active]:int}/%{INT:[haproxy][connections][frontend]:int}/%{INT:[haproxy][connections][backend]:int}/%{INT:[haproxy][connections][server]:int}/%{INT:[haproxy][connections][retries]:int} %{INT:[haproxy][server_queue]:int}/%{INT:[haproxy][backend_queue]:int}(?: \{%{HAPROXYCAPTUREDREQUESTHEADERS}\}(?: \{%{HAPROXYCAPTUREDRESPONSEHEADERS}\})?)?(?: "%{HAPROXYHTTPREQUESTLINE}"?)? 
34 | # :long - %{INT:[source][bytes]:int} 35 | 36 | HAPROXYHTTP (?:%{SYSLOGTIMESTAMP:timestamp}|%{TIMESTAMP_ISO8601:timestamp}) %{IPORHOST:[host][hostname]} %{SYSLOGPROG}: %{HAPROXYHTTPBASE} 37 | 38 | # parse a haproxy 'tcplog' line 39 | HAPROXYTCP (?:%{SYSLOGTIMESTAMP:timestamp}|%{TIMESTAMP_ISO8601:timestamp}) %{IPORHOST:[host][hostname]} %{SYSLOGPROG}: %{IP:[source][address]}:%{INT:[source][port]:int} \[%{HAPROXYDATE:[haproxy][request_date]}\] %{NOTSPACE:[haproxy][frontend_name]} %{NOTSPACE:[haproxy][backend_name]}/(?:|%{NOTSPACE:[haproxy][server_name]}) (?:-1|%{INT:[haproxy][total_waiting_time_ms]:int})/(?:-1|%{INT:[haproxy][connection_wait_time_ms]:int})/%{NOTSPACE:[haproxy][total_time_ms]} %{INT:[source][bytes]:int} %{NOTSPACE:[haproxy][termination_state]} %{INT:[haproxy][connections][active]:int}/%{INT:[haproxy][connections][frontend]:int}/%{INT:[haproxy][connections][backend]:int}/%{INT:[haproxy][connections][server]:int}/%{INT:[haproxy][connections][retries]:int} %{INT:[haproxy][server_queue]:int}/%{INT:[haproxy][backend_queue]:int} 40 | # :long - %{INT:[source][bytes]:int} 41 | -------------------------------------------------------------------------------- /spec/patterns/rails3_spec.rb: -------------------------------------------------------------------------------- 1 | # encoding: utf-8 2 | require "spec_helper" 3 | require "logstash/patterns/core" 4 | 5 | describe_pattern "RAILS3", ['legacy', 'ecs-v1'] do 6 | 7 | context "single-line log" do 8 | 9 | let(:message) { 'Started POST "/api/v3/internal/allowed" for 127.0.0.1 at 2015-08-05 11:37:01 +0200' } 10 | 11 | # Started 12 | it do 13 | if ecs_compatibility? 14 | should include("http" => hash_including("request" => { "method" => "POST" })) 15 | else 16 | should include("verb" => "POST") 17 | end 18 | end 19 | 20 | it do 21 | if ecs_compatibility? 22 | else 23 | should include("request" => "/api/v3/internal/allowed") 24 | end 25 | end 26 | # for 27 | it do 28 | if ecs_compatibility? 
29 | should include("source" => { "address" => "127.0.0.1" }) 30 | else 31 | should include("clientip" => "127.0.0.1") 32 | end 33 | end 34 | # at 35 | it { should include("timestamp" => "2015-08-05 11:37:01 +0200" ) } 36 | end 37 | 38 | context "multi-line log" do 39 | 40 | let(:message) { 'Started GET "/puppet/postfix/notes?target_id=162&target_type=issue&last_fetched_at=1438695732" for 127.0.0.1 at 2015-08-05 07:40:22 +0200 41 | Processing by Projects::NotesController#index as JSON 42 | Parameters: {"target_id"=>"162", "target_type"=>"issue", "last_fetched_at"=>"1438695732", "namespace_id"=>"puppet", "project_id"=>"postfix"} 43 | Completed 200 OK in 640ms (Views: 1.7ms | ActiveRecord: 91.0ms)' } 44 | 45 | # started 46 | it do 47 | if ecs_compatibility? 48 | should include("http" => hash_including("request" => { "method" => "GET" })) 49 | else 50 | should include("verb" => "GET") 51 | end 52 | end 53 | 54 | it do 55 | if ecs_compatibility? 56 | should include("url" => {"original"=>"/puppet/postfix/notes?target_id=162&target_type=issue&last_fetched_at=1438695732"}) 57 | else 58 | should include("request" => "/puppet/postfix/notes?target_id=162&target_type=issue&last_fetched_at=1438695732" ) 59 | end 60 | end 61 | # for 62 | it do 63 | if ecs_compatibility? 64 | should include("source" => { "address" => "127.0.0.1" }) 65 | else 66 | should include("clientip" => "127.0.0.1") 67 | end 68 | end 69 | # at 70 | it { should include("timestamp" => "2015-08-05 07:40:22 +0200") } 71 | # Processing by 72 | it do 73 | if ecs_compatibility? 74 | should include("rails" => hash_including("controller" => { "class"=>"Projects::NotesController", "action"=>"index" })) 75 | else 76 | should include("controller" => "Projects::NotesController") 77 | should include("action" => "index") 78 | end 79 | end 80 | # as 81 | it do 82 | if ecs_compatibility? 
83 | should include("rails" => hash_including("request" => hash_including("format" => 'JSON'))) 84 | else 85 | should include("format" => "JSON" ) 86 | end 87 | end 88 | # Parameters 89 | it do 90 | params = '"target_id"=>"162", "target_type"=>"issue", "last_fetched_at"=>"1438695732", "namespace_id"=>"puppet", "project_id"=>"postfix"' 91 | if ecs_compatibility? 92 | should include("rails" => hash_including("request" => hash_including("params" => params))) 93 | else 94 | should include("params" => params) 95 | end 96 | end 97 | # Completed 98 | it do 99 | if ecs_compatibility? 100 | should include("http" => hash_including("response" => { "status_code" => 200 })) 101 | else 102 | should include("response" => "200" ) 103 | end 104 | end 105 | # in 106 | it do 107 | if ecs_compatibility? 108 | should include("rails" => hash_including("request" => hash_including("duration" => { "total" => 640.0, "view" => 1.7, "active_record" => 91.0 }))) 109 | else 110 | should include("totalms" => "640", "viewms" => "1.7", "activerecordms" => "91.0") 111 | end 112 | end 113 | end 114 | end 115 | -------------------------------------------------------------------------------- /spec/patterns/bind_spec.rb: -------------------------------------------------------------------------------- 1 | # encoding: utf-8 2 | require "spec_helper" 3 | require "logstash/patterns/core" 4 | 5 | describe_pattern "BIND9", ['legacy', 'ecs-v1'] do 6 | 7 | let(:message) do # Bind 9.10 8 | '17-Feb-2018 23:06:56.326 queries: info: client 172.26.0.1#12345 (test.example.com): query: test.example.com IN A +E(0)K (172.26.0.3)' 9 | end 10 | 11 | it 'matches' do 12 | should include("timestamp" => "17-Feb-2018 23:06:56.326") 13 | if ecs_compatibility? 
14 | should include("log" => hash_including("level" => "info")) 15 | should include("client" => { "ip" => "172.26.0.1", "port" => 12345 }) 16 | should include("dns" => { "question" => { "name" => "test.example.com", "type" => 'A', "class" => 'IN' }}) 17 | should include("bind" => { "log" => hash_including("question" => hash_including("flags" => '+E(0)K'))}) 18 | should include("server" => { "ip" => "172.26.0.3" }) 19 | # NOTE: duplicate but still captured since we've been doing that before as well : 20 | should include("bind" => { "log" => hash_including("question" => hash_including("name" => 'test.example.com'))}) 21 | else 22 | should include("loglevel" => "info") 23 | should include("clientip" => "172.26.0.1") 24 | should include("clientport" => "12345") 25 | should include("query" => ["test.example.com", "test.example.com"]) 26 | should include("querytype" => "A +E(0)K") 27 | should include("dns" => "172.26.0.3") 28 | end 29 | end 30 | 31 | context 'with client memory address (since Bind 9.11)' do 32 | # logging format is the same <= 9.16, but if using a separate query-log all options need to be enabled : 33 | # channel query.log { 34 | # file "/var/log/named/query.log"; 35 | # severity debug 3; 36 | # //print-time YES; // @timestamp 37 | # //print-category YES; // queries: 38 | # //print-severity YES; // info: 39 | # }; 40 | 41 | let(:message) do # client @0x7f64500020ef - memory address of the data structure representing the client 42 | '30-Jun-2018 15:50:00.999 queries: info: client @0x7f64500020ef 192.168.10.48#60061 (91.2.10.170.in-addr.internal): query: 91.2.10.170.in-addr.internal IN PTR + (192.168.2.2)' 43 | end 44 | 45 | it 'matches' do 46 | should include("timestamp" => "30-Jun-2018 15:50:00.999") 47 | if ecs_compatibility? 
48 | should include("log" => hash_including("level" => "info")) 49 | should include("client" => { "ip" => "192.168.10.48", "port" => 60061 }) 50 | should include("dns" => { "question" => { "name" => "91.2.10.170.in-addr.internal", "type" => 'PTR', "class" => 'IN' }}) 51 | should include("bind" => { "log" => hash_including("question" => hash_including("flags" => '+')) }) 52 | should include("server" => { "ip" => "192.168.2.2" }) 53 | else 54 | should include("loglevel" => "info") 55 | should include("clientip" => "192.168.10.48") 56 | should include("clientport" => "60061") 57 | should include("query" => ["91.2.10.170.in-addr.internal", "91.2.10.170.in-addr.internal"]) 58 | should include("querytype" => "PTR +") 59 | should include("dns" => "192.168.2.2") 60 | end 61 | end 62 | 63 | end 64 | 65 | end 66 | 67 | describe_pattern "BIND9_QUERYLOGBASE", ['ecs-v1'] do 68 | let(:message) do 69 | 'client @0x7f85b4026ed0 127.0.0.1#42520 (ci.elastic.co): query: ci.elastic.co IN A +E(0)K (35.193.103.164)' 70 | end 71 | 72 | it 'matches' do 73 | should include("client" => { "ip" => "127.0.0.1", "port" => 42520 }) 74 | should include("dns" => { "question" => { "name" => "ci.elastic.co", "type" => 'A', "class" => 'IN' }}) 75 | should include("bind" => { "log" => hash_including("question" => hash_including("flags" => '+E(0)K') )}) 76 | should include("server" => { "ip" => "35.193.103.164" }) 77 | end 78 | end 79 | 80 | describe_pattern "BIND9_QUERYLOG", ['ecs-v1'] do 81 | let(:message) do 82 | '01-May-2019 00:27:48.084 queries: info: client @0x7f82bc11d4e0 192.168.1.111#53995 (google.com): query: google.com IN A +E(0) (10.80.1.88)' 83 | end 84 | 85 | it 'matches' do 86 | should include("client" => { "ip" => "192.168.1.111", "port" => 53995 }) 87 | should include("dns" => { "question" => { "name" => "google.com", "type" => 'A', "class" => 'IN' }}) 88 | should include("bind" => { "log" => hash_including("question" => { "flags" => '+E(0)', "name" => 'google.com' })}) 89 | should 
include("server" => { "ip" => "10.80.1.88" }) 90 | should include("log" => { "level" => "info" }) 91 | end 92 | end -------------------------------------------------------------------------------- /patterns/ecs-v1/bro: -------------------------------------------------------------------------------- 1 | # supports the 'old' BRO log files, for updated Zeek log format see the patterns/ecs-v1/zeek 2 | # https://www.bro.org/sphinx/script-reference/log-files.html 3 | 4 | BRO_BOOL [TF] 5 | BRO_DATA [^\t]+ 6 | 7 | # http.log - old format (before the Zeek rename) : 8 | BRO_HTTP %{NUMBER:timestamp}\t%{NOTSPACE:[zeek][session_id]}\t%{IP:[source][ip]}\t%{INT:[source][port]:int}\t%{IP:[destination][ip]}\t%{INT:[destination][port]:int}\t%{INT:[zeek][http][trans_depth]:int}\t(?:-|%{WORD:[http][request][method]})\t(?:-|%{BRO_DATA:[url][domain]})\t(?:-|%{BRO_DATA:[url][original]})\t(?:-|%{BRO_DATA:[http][request][referrer]})\t(?:-|%{BRO_DATA:[user_agent][original]})\t(?:-|%{NUMBER:[http][request][body][bytes]:int})\t(?:-|%{NUMBER:[http][response][body][bytes]:int})\t(?:-|%{POSINT:[http][response][status_code]:int})\t(?:-|%{DATA:[zeek][http][status_msg]})\t(?:-|%{POSINT:[zeek][http][info_code]:int})\t(?:-|%{DATA:[zeek][http][info_msg]})\t(?:-|%{BRO_DATA:[zeek][http][filename]})\t(?:\(empty\)|%{BRO_DATA:[zeek][http][tags]})\t(?:-|%{BRO_DATA:[url][username]})\t(?:-|%{BRO_DATA:[url][password]})\t(?:-|%{BRO_DATA:[zeek][http][proxied]})\t(?:-|%{BRO_DATA:[zeek][http][orig_fuids]})\t(?:-|%{BRO_DATA:[http][request][mime_type]})\t(?:-|%{BRO_DATA:[zeek][http][resp_fuids]})\t(?:-|%{BRO_DATA:[http][response][mime_type]}) 9 | # :long - %{NUMBER:[http][request][body][bytes]:int} 10 | # :long - %{NUMBER:[http][response][body][bytes]:int} 11 | 12 | # dns.log - old format 13 | BRO_DNS
%{NUMBER:timestamp}\t%{NOTSPACE:[zeek][session_id]}\t%{IP:[source][ip]}\t%{INT:[source][port]:int}\t%{IP:[destination][ip]}\t%{INT:[destination][port]:int}\t%{WORD:[network][transport]}\t(?:-|%{INT:[dns][id]:int})\t(?:-|%{BRO_DATA:[dns][question][name]})\t(?:-|%{INT:[zeek][dns][qclass]:int})\t(?:-|%{BRO_DATA:[zeek][dns][qclass_name]})\t(?:-|%{INT:[zeek][dns][qtype]:int})\t(?:-|%{BRO_DATA:[dns][question][type]})\t(?:-|%{INT:[zeek][dns][rcode]:int})\t(?:-|%{BRO_DATA:[dns][response_code]})\t(?:-|%{BRO_BOOL:[zeek][dns][AA]})\t(?:-|%{BRO_BOOL:[zeek][dns][TC]})\t(?:-|%{BRO_BOOL:[zeek][dns][RD]})\t(?:-|%{BRO_BOOL:[zeek][dns][RA]})\t(?:-|%{NONNEGINT:[zeek][dns][Z]:int})\t(?:-|%{BRO_DATA:[zeek][dns][answers]})\t(?:-|%{DATA:[zeek][dns][TTLs]})\t(?:-|%{BRO_BOOL:[zeek][dns][rejected]}) 14 | 15 | # conn.log - old bro, also supports 'newer' format (optional *zeek.connection.local_resp* flag) compared to non-ecs mode 16 | BRO_CONN %{NUMBER:timestamp}\t%{NOTSPACE:[zeek][session_id]}\t%{IP:[source][ip]}\t%{INT:[source][port]:int}\t%{IP:[destination][ip]}\t%{INT:[destination][port]:int}\t%{WORD:[network][transport]}\t(?:-|%{BRO_DATA:[network][protocol]})\t(?:-|%{NUMBER:[zeek][connection][duration]:float})\t(?:-|%{INT:[zeek][connection][orig_bytes]:int})\t(?:-|%{INT:[zeek][connection][resp_bytes]:int})\t(?:-|%{BRO_DATA:[zeek][connection][state]})\t(?:-|%{BRO_BOOL:[zeek][connection][local_orig]})\t(?:(?:-|%{BRO_BOOL:[zeek][connection][local_resp]})\t)?(?:-|%{INT:[zeek][connection][missed_bytes]:int})\t(?:-|%{BRO_DATA:[zeek][connection][history]})\t(?:-|%{INT:[source][packets]:int})\t(?:-|%{INT:[source][bytes]:int})\t(?:-|%{INT:[destination][packets]:int})\t(?:-|%{INT:[destination][bytes]:int})\t(?:\(empty\)|%{BRO_DATA:[zeek][connection][tunnel_parents]}) 17 | # :long - %{INT:[zeek][connection][orig_bytes]:int} 18 | # :long - %{INT:[zeek][connection][resp_bytes]:int} 19 | # :long - %{INT:[zeek][connection][missed_bytes]:int} 20 | # :long - %{INT:[source][packets]:int} 21 | # :long - 
%{INT:[source][bytes]:int} 22 | # :long - %{INT:[destination][packets]:int} 23 | # :long - %{INT:[destination][bytes]:int} 24 | 25 | # files.log - old format 26 | BRO_FILES %{NUMBER:timestamp}\t%{NOTSPACE:[zeek][files][fuid]}\t(?:-|%{IP:[server][ip]})\t(?:-|%{IP:[client][ip]})\t(?:-|%{BRO_DATA:[zeek][files][session_ids]})\t(?:-|%{BRO_DATA:[zeek][files][source]})\t(?:-|%{INT:[zeek][files][depth]:int})\t(?:-|%{BRO_DATA:[zeek][files][analyzers]})\t(?:-|%{BRO_DATA:[file][mime_type]})\t(?:-|%{BRO_DATA:[file][name]})\t(?:-|%{NUMBER:[zeek][files][duration]:float})\t(?:-|%{BRO_DATA:[zeek][files][local_orig]})\t(?:-|%{BRO_BOOL:[zeek][files][is_orig]})\t(?:-|%{INT:[zeek][files][seen_bytes]:int})\t(?:-|%{INT:[file][size]:int})\t(?:-|%{INT:[zeek][files][missing_bytes]:int})\t(?:-|%{INT:[zeek][files][overflow_bytes]:int})\t(?:-|%{BRO_BOOL:[zeek][files][timedout]})\t(?:-|%{BRO_DATA:[zeek][files][parent_fuid]})\t(?:-|%{BRO_DATA:[file][hash][md5]})\t(?:-|%{BRO_DATA:[file][hash][sha1]})\t(?:-|%{BRO_DATA:[file][hash][sha256]})\t(?:-|%{BRO_DATA:[zeek][files][extracted]}) 27 | # :long - %{INT:[zeek][files][seen_bytes]:int} 28 | # :long - %{INT:[file][size]:int} 29 | # :long - %{INT:[zeek][files][missing_bytes]:int} 30 | # :long - %{INT:[zeek][files][overflow_bytes]:int} 31 | -------------------------------------------------------------------------------- /spec/spec_helper.rb: -------------------------------------------------------------------------------- 1 | require "logstash/devutils/rspec/spec_helper" 2 | require 'rspec/expectations' 3 | 4 | # running the grok code outside a logstash package means 5 | # LOGSTASH_HOME will not be defined, so let's set it here 6 | # before requiring the grok filter 7 | unless LogStash::Environment.const_defined?(:LOGSTASH_HOME) 8 | LogStash::Environment::LOGSTASH_HOME = File.expand_path("../../", __FILE__) 9 | end 10 | 11 | # temporary fix to have the spec pass for an urgent mass-publish requirement.
12 | # cut & pasted from the same tmp fix in the grok spec 13 | # see https://github.com/logstash-plugins/logstash-filter-grok/issues/72 14 | # this needs to be refactored and properly fixed 15 | module LogStash::Environment 16 | # also :pattern_path method must exist so we define it too 17 | unless self.method_defined?(:pattern_path) 18 | def pattern_path(path) 19 | ::File.join(LOGSTASH_HOME, "patterns", path) 20 | end 21 | end 22 | end 23 | 24 | require "logstash/filters/grok" 25 | 26 | module GrokHelpers 27 | module PatternModeSupport 28 | @@pattern_mode = nil 29 | def pattern_mode 30 | @@pattern_mode 31 | end 32 | module_function :pattern_mode 33 | 34 | def pattern_mode=(mode) 35 | @@pattern_mode = mode 36 | end 37 | end 38 | 39 | def ecs_compatibility? 40 | case ecs_compatibility 41 | when :disabled then false 42 | when nil then nil 43 | else true 44 | end 45 | end 46 | 47 | def ecs_compatibility 48 | case mode = PatternModeSupport.pattern_mode 49 | when 'legacy' then :disabled 50 | when 'ecs-v1' then :v1 51 | when nil then nil 52 | else fail "pattern_mode: #{mode.inspect}" 53 | end 54 | end 55 | 56 | def grok_match(label, message, exact_match = false) 57 | grok_match_event(label, message, exact_match).to_hash 58 | end 59 | 60 | def grok_match_event(label, message, exact_match = false) 61 | grok = build_grok(label, exact_match) 62 | event = build_event(message) 63 | grok.filter(event) 64 | event 65 | end 66 | 67 | def grok_exact_match(label, message) 68 | grok_match(label, message, true) 69 | end 70 | 71 | def build_grok(label, exact_match = false) 72 | grok_opts = { "match" => [ "message", exact_match ? "^%{#{label}}$" : "%{#{label}}" ] } 73 | ecs_compat = ecs_compatibility # if not set use the plugin default 74 | grok_opts["ecs_compatibility"] = ecs_compat unless ecs_compat.nil? 
75 | grok = LogStash::Filters::Grok.new(grok_opts) 76 | grok.register 77 | grok 78 | end 79 | 80 | def build_event(message) 81 | LogStash::Event.new("message" => message) 82 | end 83 | end 84 | 85 | RSpec.configure do |c| 86 | c.include GrokHelpers 87 | c.include GrokHelpers::PatternModeSupport 88 | c.extend GrokHelpers::PatternModeSupport 89 | end 90 | 91 | def describe_pattern(name, pattern_modes = [ nil ], &block) 92 | pattern_modes.each do |mode| 93 | RSpec.describe "#{name}#{mode ? " (#{mode})" : nil}" do 94 | 95 | before(:each) do 96 | @restore_pattern_mode = pattern_mode 97 | self.pattern_mode = mode 98 | end 99 | after(:each) do 100 | self.pattern_mode = @restore_pattern_mode 101 | end 102 | 103 | let(:pattern) { name } 104 | let(:message) { raise 'let(:message) { ... } is missing' } 105 | let(:event) { grok_match_event(pattern, message) } 106 | let(:grok) { event.to_hash } 107 | subject(:grok_result) { grok } 108 | 109 | instance_eval(&block) 110 | end 111 | end 112 | end 113 | 114 | RSpec::Matchers.define :pass do |expected| 115 | match do |actual| 116 | !actual.include?("tags") 117 | end 118 | end 119 | 120 | RSpec::Matchers.define :match do |value| 121 | match do |grok| 122 | grok = build_grok(grok) 123 | event = build_event(value) 124 | grok.filter(event) 125 | !event.include?("tags") 126 | end 127 | end 128 | 129 | RSpec.shared_examples_for 'top-level namespaces' do |namespaces, opts| 130 | let(:internal_keys) { ['@timestamp', '@version'] } 131 | let(:allowed_keys) { namespaces } 132 | it "event is expected to only use namespaces: #{namespaces.inspect}" do 133 | if instance_exec &(opts[:if] || -> { true }) 134 | event_hash = subject.to_hash 135 | (event_hash.keys - (internal_keys + ['message'])).each do |top_level_key| 136 | fail_msg = "found event.get('#{top_level_key}') : #{event_hash[top_level_key].inspect}" 137 | expect(allowed_keys).to include(top_level_key), fail_msg 138 | end 139 | end 140 | end 141 | end 142 | 
-------------------------------------------------------------------------------- /patterns/ecs-v1/aws: -------------------------------------------------------------------------------- 1 | S3_REQUEST_LINE (?:%{WORD:[http][request][method]} %{NOTSPACE:[url][original]}(?: HTTP/%{NUMBER:[http][version]})?) 2 | 3 | S3_ACCESS_LOG %{WORD:[aws][s3access][bucket_owner]} %{NOTSPACE:[aws][s3access][bucket]} \[%{HTTPDATE:timestamp}\] (?:-|%{IP:[client][ip]}) (?:-|%{NOTSPACE:[client][user][id]}) %{NOTSPACE:[aws][s3access][request_id]} %{NOTSPACE:[aws][s3access][operation]} (?:-|%{NOTSPACE:[aws][s3access][key]}) (?:-|"%{S3_REQUEST_LINE:[aws][s3access][request_uri]}") (?:-|%{INT:[http][response][status_code]:int}) (?:-|%{NOTSPACE:[aws][s3access][error_code]}) (?:-|%{INT:[aws][s3access][bytes_sent]:int}) (?:-|%{INT:[aws][s3access][object_size]:int}) (?:-|%{INT:[aws][s3access][total_time]:int}) (?:-|%{INT:[aws][s3access][turn_around_time]:int}) "(?:-|%{DATA:[http][request][referrer]})" "(?:-|%{DATA:[user_agent][original]})" (?:-|%{NOTSPACE:[aws][s3access][version_id]})(?: (?:-|%{NOTSPACE:[aws][s3access][host_id]}) (?:-|%{NOTSPACE:[aws][s3access][signature_version]}) (?:-|%{NOTSPACE:[tls][cipher]}) (?:-|%{NOTSPACE:[aws][s3access][authentication_type]}) (?:-|%{NOTSPACE:[aws][s3access][host_header]}) (?:-|%{NOTSPACE:[aws][s3access][tls_version]}))? 4 | # :long - %{INT:[aws][s3access][bytes_sent]:int} 5 | # :long - %{INT:[aws][s3access][object_size]:int} 6 | 7 | ELB_URIHOST %{IPORHOST:[url][domain]}(?::%{POSINT:[url][port]:int})? 8 | ELB_URIPATHQUERY %{URIPATH:[url][path]}(?:\?%{URIQUERY:[url][query]})? 9 | # deprecated - old name: 10 | ELB_URIPATHPARAM %{ELB_URIPATHQUERY} 11 | ELB_URI %{URIPROTO:[url][scheme]}://(?:%{USER:[url][username]}(?::[^@]*)?@)?(?:%{ELB_URIHOST})?(?:%{ELB_URIPATHQUERY})? 12 | 13 | ELB_REQUEST_LINE (?:%{WORD:[http][request][method]} %{ELB_URI:[url][original]}(?: HTTP/%{NUMBER:[http][version]})?) 
14 | 15 | # pattern supports 'regular' HTTP ELB format 16 | ELB_V1_HTTP_LOG %{TIMESTAMP_ISO8601:timestamp} %{NOTSPACE:[aws][elb][name]} %{IP:[source][ip]}:%{INT:[source][port]:int} (?:-|(?:%{IP:[aws][elb][backend][ip]}:%{INT:[aws][elb][backend][port]:int})) (?:-1|%{NUMBER:[aws][elb][request_processing_time][sec]:float}) (?:-1|%{NUMBER:[aws][elb][backend_processing_time][sec]:float}) (?:-1|%{NUMBER:[aws][elb][response_processing_time][sec]:float}) %{INT:[http][response][status_code]:int} (?:-|%{INT:[aws][elb][backend][http][response][status_code]:int}) %{INT:[http][request][body][bytes]:int} %{INT:[http][response][body][bytes]:int} "%{ELB_REQUEST_LINE}"(?: "(?:-|%{DATA:[user_agent][original]})" (?:-|%{NOTSPACE:[tls][cipher]}) (?:-|%{NOTSPACE:[aws][elb][ssl_protocol]}))? 17 | # :long - %{INT:[http][request][body][bytes]:int} 18 | # :long - %{INT:[http][response][body][bytes]:int} 19 | 20 | ELB_ACCESS_LOG %{ELB_V1_HTTP_LOG} 21 | 22 | # Each edge location is identified by a three-letter code and an arbitrarily assigned number. 23 | # The three-letter IATA code typically represents an airport near the edge location. 24 | # examples: "LHR62-C2", "SFO5-P1", "IND6", "CPT50" 25 | CLOUDFRONT_EDGE_LOCATION [A-Z]{3}[0-9]{1,2}(?:-[A-Z0-9]{2})?
26 | 27 | # pattern used to match a shortened format, that's why we have the optional part (starting with *http.version*) at the end 28 | CLOUDFRONT_ACCESS_LOG (?<timestamp>%{YEAR}-%{MONTHNUM}-%{MONTHDAY}\t%{TIME})\t%{CLOUDFRONT_EDGE_LOCATION:[aws][cloudfront][x_edge_location]}\t(?:-|%{INT:[destination][bytes]:int})\t%{IPORHOST:[source][ip]}\t%{WORD:[http][request][method]}\t%{HOSTNAME:[url][domain]}\t%{NOTSPACE:[url][path]}\t(?:(?:000)|%{INT:[http][response][status_code]:int})\t(?:-|%{DATA:[http][request][referrer]})\t%{DATA:[user_agent][original]}\t(?:-|%{DATA:[url][query]})\t(?:-|%{DATA:[aws][cloudfront][http][request][cookie]})\t%{WORD:[aws][cloudfront][x_edge_result_type]}\t%{NOTSPACE:[aws][cloudfront][x_edge_request_id]}\t%{HOSTNAME:[aws][cloudfront][http][request][host]}\t%{URIPROTO:[network][protocol]}\t(?:-|%{INT:[source][bytes]:int})\t%{NUMBER:[aws][cloudfront][time_taken]:float}\t(?:-|%{IP:[network][forwarded_ip]})\t(?:-|%{DATA:[aws][cloudfront][ssl_protocol]})\t(?:-|%{NOTSPACE:[tls][cipher]})\t%{WORD:[aws][cloudfront][x_edge_response_result_type]}(?:\t(?:-|HTTP/%{NUMBER:[http][version]})\t(?:-|%{DATA:[aws][cloudfront][fle_status]})\t(?:-|%{DATA:[aws][cloudfront][fle_encrypted_fields]})\t%{INT:[source][port]:int}\t%{NUMBER:[aws][cloudfront][time_to_first_byte]:float}\t(?:-|%{DATA:[aws][cloudfront][x_edge_detailed_result_type]})\t(?:-|%{NOTSPACE:[http][request][mime_type]})\t(?:-|%{INT:[aws][cloudfront][http][request][size]:int})\t(?:-|%{INT:[aws][cloudfront][http][request][range][start]:int})\t(?:-|%{INT:[aws][cloudfront][http][request][range][end]:int}))?
29 | # :long - %{INT:[destination][bytes]:int} 30 | # :long - %{INT:[source][bytes]:int} 31 | # :long - %{INT:[aws][cloudfront][http][request][size]:int} 32 | # :long - %{INT:[aws][cloudfront][http][request][range][start]:int} 33 | # :long - %{INT:[aws][cloudfront][http][request][range][end]:int} 34 | -------------------------------------------------------------------------------- /patterns/legacy/bacula: -------------------------------------------------------------------------------- 1 | BACULA_TIMESTAMP %{MONTHDAY}-%{MONTH} %{HOUR}:%{MINUTE} 2 | BACULA_HOST [a-zA-Z0-9-]+ 3 | BACULA_VOLUME %{USER} 4 | BACULA_DEVICE %{USER} 5 | BACULA_DEVICEPATH %{UNIXPATH} 6 | BACULA_CAPACITY %{INT}{1,3}(,%{INT}{3})* 7 | BACULA_VERSION %{USER} 8 | BACULA_JOB %{USER} 9 | 10 | BACULA_LOG_MAX_CAPACITY User defined maximum volume capacity %{BACULA_CAPACITY} exceeded on device \"%{BACULA_DEVICE:device}\" \(%{BACULA_DEVICEPATH}\) 11 | BACULA_LOG_END_VOLUME End of medium on Volume \"%{BACULA_VOLUME:volume}\" Bytes=%{BACULA_CAPACITY} Blocks=%{BACULA_CAPACITY} at %{MONTHDAY}-%{MONTH}-%{YEAR} %{HOUR}:%{MINUTE}. 12 | BACULA_LOG_NEW_VOLUME Created new Volume \"%{BACULA_VOLUME:volume}\" in catalog. 13 | BACULA_LOG_NEW_LABEL Labeled new Volume \"%{BACULA_VOLUME:volume}\" on device \"%{BACULA_DEVICE:device}\" \(%{BACULA_DEVICEPATH}\). 14 | BACULA_LOG_WROTE_LABEL Wrote label to prelabeled Volume \"%{BACULA_VOLUME:volume}\" on device \"%{BACULA_DEVICE}\" \(%{BACULA_DEVICEPATH}\) 15 | BACULA_LOG_NEW_MOUNT New volume \"%{BACULA_VOLUME:volume}\" mounted on device \"%{BACULA_DEVICE:device}\" \(%{BACULA_DEVICEPATH}\) at %{MONTHDAY}-%{MONTH}-%{YEAR} %{HOUR}:%{MINUTE}. 16 | BACULA_LOG_NOOPEN \s*Cannot open %{DATA}: ERR=%{GREEDYDATA:berror} 17 | BACULA_LOG_NOOPENDIR \s*Could not open directory %{DATA}: ERR=%{GREEDYDATA:berror} 18 | BACULA_LOG_NOSTAT \s*Could not stat %{DATA}: ERR=%{GREEDYDATA:berror} 19 | BACULA_LOG_NOJOBS There are no more Jobs associated with Volume \"%{BACULA_VOLUME:volume}\". 
Marking it purged. 20 | BACULA_LOG_ALL_RECORDS_PRUNED All records pruned from Volume \"%{BACULA_VOLUME:volume}\"; marking it \"Purged\" 21 | BACULA_LOG_BEGIN_PRUNE_JOBS Begin pruning Jobs older than %{INT} month %{INT} days . 22 | BACULA_LOG_BEGIN_PRUNE_FILES Begin pruning Files. 23 | BACULA_LOG_PRUNED_JOBS Pruned %{INT} Jobs* for client %{BACULA_HOST:client} from catalog. 24 | BACULA_LOG_PRUNED_FILES Pruned Files from %{INT} Jobs* for client %{BACULA_HOST:client} from catalog. 25 | BACULA_LOG_ENDPRUNE End auto prune. 26 | BACULA_LOG_STARTJOB Start Backup JobId %{INT}, Job=%{BACULA_JOB:job} 27 | BACULA_LOG_STARTRESTORE Start Restore Job %{BACULA_JOB:job} 28 | BACULA_LOG_USEDEVICE Using Device \"%{BACULA_DEVICE:device}\" 29 | BACULA_LOG_DIFF_FS \s+%{UNIXPATH} is a different filesystem. Will not descend from %{UNIXPATH} into it. 30 | BACULA_LOG_JOBEND Job write elapsed time = %{DATA:elapsed}, Transfer rate = %{NUMBER} (K|M|G)? Bytes/second 31 | BACULA_LOG_NOPRUNE_JOBS No Jobs found to prune. 32 | BACULA_LOG_NOPRUNE_FILES No Files found to prune. 33 | BACULA_LOG_VOLUME_PREVWRITTEN Volume \"%{BACULA_VOLUME:volume}\" previously written, moving to end of data. 34 | BACULA_LOG_READYAPPEND Ready to append to end of Volume \"%{BACULA_VOLUME:volume}\" size=%{INT} 35 | BACULA_LOG_CANCELLING Cancelling duplicate JobId=%{INT}. 36 | BACULA_LOG_MARKCANCEL JobId %{INT}, Job %{BACULA_JOB:job} marked to be canceled. 37 | BACULA_LOG_CLIENT_RBJ shell command: run ClientRunBeforeJob \"%{GREEDYDATA:runjob}\" 38 | BACULA_LOG_VSS (Generate )?VSS (Writer)? 39 | BACULA_LOG_MAXSTART Fatal error: Job canceled because max start delay time exceeded. 40 | BACULA_LOG_DUPLICATE Fatal error: JobId %{INT:duplicate} already running. Duplicate job not allowed. 41 | BACULA_LOG_NOJOBSTAT Fatal error: No Job status returned from FD. 42 | BACULA_LOG_FATAL_CONN Fatal error: bsock.c:133 Unable to connect to (Client: %{BACULA_HOST:client}|Storage daemon) on %{HOSTNAME}:%{POSINT}. 
ERR=(?<berror>%{GREEDYDATA}) 43 | BACULA_LOG_NO_CONNECT Warning: bsock.c:127 Could not connect to (Client: %{BACULA_HOST:client}|Storage daemon) on %{HOSTNAME}:%{POSINT}. ERR=(?<berror>%{GREEDYDATA}) 44 | BACULA_LOG_NO_AUTH Fatal error: Unable to authenticate with File daemon at %{DATA}. Possible causes: 45 | BACULA_LOG_NOSUIT No prior or suitable Full backup found in catalog. Doing FULL backup. 46 | BACULA_LOG_NOPRIOR No prior Full backup Job record found. 47 | 48 | BACULA_LOG_JOB (Error: )?Bacula %{BACULA_HOST} %{BACULA_VERSION} \(%{BACULA_VERSION}\): 49 | 50 | BACULA_LOGLINE %{BACULA_TIMESTAMP:bts} %{BACULA_HOST:hostname}(?: JobId %{INT:jobid})?:? (%{BACULA_LOG_MAX_CAPACITY}|%{BACULA_LOG_END_VOLUME}|%{BACULA_LOG_NEW_VOLUME}|%{BACULA_LOG_NEW_LABEL}|%{BACULA_LOG_WROTE_LABEL}|%{BACULA_LOG_NEW_MOUNT}|%{BACULA_LOG_NOOPEN}|%{BACULA_LOG_NOOPENDIR}|%{BACULA_LOG_NOSTAT}|%{BACULA_LOG_NOJOBS}|%{BACULA_LOG_ALL_RECORDS_PRUNED}|%{BACULA_LOG_BEGIN_PRUNE_JOBS}|%{BACULA_LOG_BEGIN_PRUNE_FILES}|%{BACULA_LOG_PRUNED_JOBS}|%{BACULA_LOG_PRUNED_FILES}|%{BACULA_LOG_ENDPRUNE}|%{BACULA_LOG_STARTJOB}|%{BACULA_LOG_STARTRESTORE}|%{BACULA_LOG_USEDEVICE}|%{BACULA_LOG_DIFF_FS}|%{BACULA_LOG_JOBEND}|%{BACULA_LOG_NOPRUNE_JOBS}|%{BACULA_LOG_NOPRUNE_FILES}|%{BACULA_LOG_VOLUME_PREVWRITTEN}|%{BACULA_LOG_READYAPPEND}|%{BACULA_LOG_CANCELLING}|%{BACULA_LOG_MARKCANCEL}|%{BACULA_LOG_CLIENT_RBJ}|%{BACULA_LOG_VSS}|%{BACULA_LOG_MAXSTART}|%{BACULA_LOG_DUPLICATE}|%{BACULA_LOG_NOJOBSTAT}|%{BACULA_LOG_FATAL_CONN}|%{BACULA_LOG_NO_CONNECT}|%{BACULA_LOG_NO_AUTH}|%{BACULA_LOG_NOSUIT}|%{BACULA_LOG_JOB}|%{BACULA_LOG_NOPRIOR}) 51 | -------------------------------------------------------------------------------- /spec/patterns/shorewall_spec.rb: -------------------------------------------------------------------------------- 1 | # encoding: utf-8 2 | require "spec_helper" 3 | require "logstash/patterns/core" 4 | 5 | describe_pattern "SHOREWALL", ['legacy', 'ecs-v1'] do 6 | 7 | context "parsing a message with OUT interface" do 8 | 9 |
let(:message) do 10 | "May 28 17:23:25 myHost kernel: [3124658.791874] Shorewall:FORWARD:REJECT:" + 11 | "IN=eth2 OUT=eth2 SRC=1.2.3.4 DST=192.168.0.10 LEN=141 TOS=0x00 PREC=0x00 TTL=63 ID=55251 PROTO=UDP SPT=5353 DPT=5335 LEN=121" 12 | end 13 | 14 | it 'matches' do 15 | expect(subject).to include("timestamp" => "May 28 17:23:25") 16 | if ecs_compatibility? 17 | expect(subject).to include( 18 | "observer"=>{"hostname"=>"myHost", "ingress"=>{"interface"=>{"name"=>"eth2"}}, "egress"=>{"interface"=>{"name"=>"eth2"}}}, 19 | "shorewall"=>{'firewall'=>{"type"=>"FORWARD", "action"=>"REJECT"}}, 20 | "iptables"=>{ 21 | "length"=>141, 22 | "tos"=>"00", "precedence_bits"=>"00", 23 | "ttl"=>63, 24 | "id"=>"55251" 25 | }, 26 | "network"=>{"transport"=>"UDP"}, 27 | "source"=>{"ip"=>"1.2.3.4", "port"=>5353}, 28 | "destination"=>{"ip"=>"192.168.0.10", "port"=>5335} 29 | ) 30 | else 31 | expect(subject).to include("nf_host" => "myHost") 32 | expect(subject).to include("nf_action1" => "FORWARD") 33 | expect(subject).to include("nf_action2" => "REJECT") 34 | expect(subject).to include("nf_in_interface" => "eth2") 35 | expect(subject).to include("nf_out_interface" => "eth2") 36 | expect(subject).to include("nf_src_ip" => "1.2.3.4") 37 | expect(subject).to include("nf_dst_ip" => "192.168.0.10") 38 | expect(subject).to include("nf_len" => "141") 39 | expect(subject).to include("nf_tos" => "0x00") 40 | expect(subject).to include("nf_prec" => "0x00") 41 | expect(subject).to include("nf_ttl" => "63") 42 | expect(subject).to include("nf_id" => "55251") 43 | expect(subject).to include("nf_protocol" => "UDP") 44 | expect(subject).to include("nf_src_port" => "5353") 45 | expect(subject).to include("nf_dst_port" => "5335") 46 | end 47 | end 48 | 49 | end 50 | 51 | context "parsing a message without OUT interface" do 52 | 53 | 54 | let(:message) do 55 | "May 28 17:31:07 server Shorewall:net2fw:DROP:" + 56 | "IN=eth1 OUT= MAC=00:02:b3:c7:2f:77:38:72:c0:6e:92:9c:08:00 SRC=127.0.0.1 DST=1.2.3.4 
LEN=60 TOS=0x00 PREC=0x00 TTL=49 ID=6480 DF PROTO=TCP SPT=59088 DPT=8080 WINDOW=2920 RES=0x00 SYN URGP=0" 57 | end 58 | 59 | it 'matches' do 60 | expect(subject).to include("timestamp" => "May 28 17:31:07") 61 | if ecs_compatibility? 62 | expect(subject).to include( 63 | "observer"=>{"hostname"=>"server", "ingress"=>{"interface"=>{"name"=>"eth1"}}}, # no "output_interface" 64 | "shorewall"=>{'firewall'=>{"type"=>"net2fw", "action"=>"DROP",}}, 65 | "iptables"=>{ 66 | "length"=>60, 67 | "tos"=>"00", "precedence_bits"=>"00", 68 | "ttl"=>49, 69 | "id"=>"6480", 70 | 71 | "fragment_flags"=>"DF", 72 | "tcp"=>{"flags"=>"SYN ", "window"=>2920}, 73 | "tcp_reserved_bits"=>"00", 74 | }, 75 | "network"=>{"transport"=>"TCP"} 76 | ) 77 | expect(subject).to include("source"=>{"ip"=>"127.0.0.1", "port"=>59088, 'mac'=>"38:72:c0:6e:92:9c"}) 78 | expect(subject).to include("destination"=>{"ip"=>"1.2.3.4", "port"=>8080, 'mac'=>"00:02:b3:c7:2f:77"}) 79 | else 80 | expect(subject).to include("nf_host" => "server") 81 | expect(subject).to include("nf_action1" => "net2fw") 82 | expect(subject).to include("nf_action2" => "DROP") 83 | expect(subject).to include("nf_in_interface" => "eth1") 84 | expect(subject["nf_out_interface"]).to be nil 85 | expect(subject).to include("nf_dst_mac" => "00:02:b3:c7:2f:77") 86 | expect(subject).to include("nf_src_mac" => "38:72:c0:6e:92:9c") 87 | expect(subject).to include("nf_src_ip" => "127.0.0.1") 88 | expect(subject).to include("nf_dst_ip" => "1.2.3.4") 89 | expect(subject).to include("nf_len" => "60") 90 | expect(subject).to include("nf_tos" => "0x00") 91 | expect(subject).to include("nf_prec" => "0x00") 92 | expect(subject).to include("nf_ttl" => "49") 93 | expect(subject).to include("nf_id" => "6480") 94 | expect(subject).to include("nf_protocol" => "TCP") 95 | expect(subject).to include("nf_src_port" => "59088") 96 | expect(subject).to include("nf_dst_port" => "8080") 97 | end 98 | end 99 | 100 | end 101 | end 102 | 
-------------------------------------------------------------------------------- /patterns/ecs-v1/zeek: -------------------------------------------------------------------------------- 1 | # updated Zeek log matching, for legacy matching see the patterns/ecs-v1/bro 2 | 3 | ZEEK_BOOL [TF] 4 | ZEEK_DATA [^\t]+ 5 | 6 | # http.log - the 'new' format (compared to BRO_HTTP) 7 | # has *version* and *origin* fields added and *filename* replaced with *orig_filenames* + *resp_filenames* 8 | ZEEK_HTTP %{NUMBER:timestamp}\t%{NOTSPACE:[zeek][session_id]}\t%{IP:[source][ip]}\t%{INT:[source][port]:int}\t%{IP:[destination][ip]}\t%{INT:[destination][port]:int}\t%{INT:[zeek][http][trans_depth]:int}\t(?:-|%{WORD:[http][request][method]})\t(?:-|%{ZEEK_DATA:[url][domain]})\t(?:-|%{ZEEK_DATA:[url][original]})\t(?:-|%{ZEEK_DATA:[http][request][referrer]})\t(?:-|%{NUMBER:[http][version]})\t(?:-|%{ZEEK_DATA:[user_agent][original]})\t(?:-|%{ZEEK_DATA:[zeek][http][origin]})\t(?:-|%{NUMBER:[http][request][body][bytes]:int})\t(?:-|%{NUMBER:[http][response][body][bytes]:int})\t(?:-|%{POSINT:[http][response][status_code]:int})\t(?:-|%{DATA:[zeek][http][status_msg]})\t(?:-|%{POSINT:[zeek][http][info_code]:int})\t(?:-|%{DATA:[zeek][http][info_msg]})\t(?:\(empty\)|%{ZEEK_DATA:[zeek][http][tags]})\t(?:-|%{ZEEK_DATA:[url][username]})\t(?:-|%{ZEEK_DATA:[url][password]})\t(?:-|%{ZEEK_DATA:[zeek][http][proxied]})\t(?:-|%{ZEEK_DATA:[zeek][http][orig_fuids]})\t(?:-|%{ZEEK_DATA:[zeek][http][orig_filenames]})\t(?:-|%{ZEEK_DATA:[http][request][mime_type]})\t(?:-|%{ZEEK_DATA:[zeek][http][resp_fuids]})\t(?:-|%{ZEEK_DATA:[zeek][http][resp_filenames]})\t(?:-|%{ZEEK_DATA:[http][response][mime_type]}) 9 | # :long - %{NUMBER:[http][request][body][bytes]:int} 10 | # :long - %{NUMBER:[http][response][body][bytes]:int} 11 | 12 | # dns.log - 'updated' BRO_DNS format (added *zeek.dns.rtt*) 13 | ZEEK_DNS
%{NUMBER:timestamp}\t%{NOTSPACE:[zeek][session_id]}\t%{IP:[source][ip]}\t%{INT:[source][port]:int}\t%{IP:[destination][ip]}\t%{INT:[destination][port]:int}\t%{WORD:[network][transport]}\t(?:-|%{INT:[dns][id]:int})\t(?:-|%{NUMBER:[zeek][dns][rtt]:float})\t(?:-|%{ZEEK_DATA:[dns][question][name]})\t(?:-|%{INT:[zeek][dns][qclass]:int})\t(?:-|%{ZEEK_DATA:[zeek][dns][qclass_name]})\t(?:-|%{INT:[zeek][dns][qtype]:int})\t(?:-|%{ZEEK_DATA:[dns][question][type]})\t(?:-|%{INT:[zeek][dns][rcode]:int})\t(?:-|%{ZEEK_DATA:[dns][response_code]})\t%{ZEEK_BOOL:[zeek][dns][AA]}\t%{ZEEK_BOOL:[zeek][dns][TC]}\t%{ZEEK_BOOL:[zeek][dns][RD]}\t%{ZEEK_BOOL:[zeek][dns][RA]}\t%{NONNEGINT:[zeek][dns][Z]:int}\t(?:-|%{ZEEK_DATA:[zeek][dns][answers]})\t(?:-|%{DATA:[zeek][dns][TTLs]})\t(?:-|%{ZEEK_BOOL:[zeek][dns][rejected]}) 14 | 15 | # conn.log - the 'new' format (requires *zeek.connection.local_resp*, handles `(empty)` as `-` for tunnel_parents, and optional mac adresses) 16 | ZEEK_CONN %{NUMBER:timestamp}\t%{NOTSPACE:[zeek][session_id]}\t%{IP:[source][ip]}\t%{INT:[source][port]:int}\t%{IP:[destination][ip]}\t%{INT:[destination][port]:int}\t%{WORD:[network][transport]}\t(?:-|%{ZEEK_DATA:[network][protocol]})\t(?:-|%{NUMBER:[zeek][connection][duration]:float})\t(?:-|%{INT:[zeek][connection][orig_bytes]:int})\t(?:-|%{INT:[zeek][connection][resp_bytes]:int})\t(?:-|%{ZEEK_DATA:[zeek][connection][state]})\t(?:-|%{ZEEK_BOOL:[zeek][connection][local_orig]})\t(?:-|%{ZEEK_BOOL:[zeek][connection][local_resp]})\t(?:-|%{INT:[zeek][connection][missed_bytes]:int})\t(?:-|%{ZEEK_DATA:[zeek][connection][history]})\t(?:-|%{INT:[source][packets]:int})\t(?:-|%{INT:[source][bytes]:int})\t(?:-|%{INT:[destination][packets]:int})\t(?:-|%{INT:[destination][bytes]:int})\t(?:-|%{ZEEK_DATA:[zeek][connection][tunnel_parents]})(?:\t(?:-|%{COMMONMAC:[source][mac]})\t(?:-|%{COMMONMAC:[destination][mac]}))? 
17 | # :long - %{INT:[zeek][connection][orig_bytes]:int} 18 | # :long - %{INT:[zeek][connection][resp_bytes]:int} 19 | # :long - %{INT:[zeek][connection][missed_bytes]:int} 20 | # :long - %{INT:[source][packets]:int} 21 | # :long - %{INT:[source][bytes]:int} 22 | # :long - %{INT:[destination][packets]:int} 23 | # :long - %{INT:[destination][bytes]:int} 24 | 25 | # files.log - updated BRO_FILES format (2 new fields added at the end) 26 | ZEEK_FILES_TX_HOSTS (?:-|%{IP:[server][ip]})|(?<[zeek][files][tx_hosts]>%{IP:[server][ip]}(?:[\s,]%{IP})+) 27 | ZEEK_FILES_RX_HOSTS (?:-|%{IP:[client][ip]})|(?<[zeek][files][rx_hosts]>%{IP:[client][ip]}(?:[\s,]%{IP})+) 28 | ZEEK_FILES %{NUMBER:timestamp}\t%{NOTSPACE:[zeek][files][fuid]}\t%{ZEEK_FILES_TX_HOSTS}\t%{ZEEK_FILES_RX_HOSTS}\t(?:-|%{ZEEK_DATA:[zeek][files][session_ids]})\t(?:-|%{ZEEK_DATA:[zeek][files][source]})\t(?:-|%{INT:[zeek][files][depth]:int})\t(?:-|%{ZEEK_DATA:[zeek][files][analyzers]})\t(?:-|%{ZEEK_DATA:[file][mime_type]})\t(?:-|%{ZEEK_DATA:[file][name]})\t(?:-|%{NUMBER:[zeek][files][duration]:float})\t(?:-|%{ZEEK_DATA:[zeek][files][local_orig]})\t(?:-|%{ZEEK_BOOL:[zeek][files][is_orig]})\t(?:-|%{INT:[zeek][files][seen_bytes]:int})\t(?:-|%{INT:[file][size]:int})\t(?:-|%{INT:[zeek][files][missing_bytes]:int})\t(?:-|%{INT:[zeek][files][overflow_bytes]:int})\t(?:-|%{ZEEK_BOOL:[zeek][files][timedout]})\t(?:-|%{ZEEK_DATA:[zeek][files][parent_fuid]})\t(?:-|%{ZEEK_DATA:[file][hash][md5]})\t(?:-|%{ZEEK_DATA:[file][hash][sha1]})\t(?:-|%{ZEEK_DATA:[file][hash][sha256]})\t(?:-|%{ZEEK_DATA:[zeek][files][extracted]})(?:\t(?:-|%{ZEEK_BOOL:[zeek][files][extracted_cutoff]})\t(?:-|%{INT:[zeek][files][extracted_size]:int}))? 
29 | # :long - %{INT:[zeek][files][seen_bytes]:int} 30 | # :long - %{INT:[file][size]:int} 31 | # :long - %{INT:[zeek][files][missing_bytes]:int} 32 | # :long - %{INT:[zeek][files][overflow_bytes]:int} 33 | # :long - %{INT:[zeek][files][extracted_size]:int} 34 | -------------------------------------------------------------------------------- /spec/patterns/junos_spec.rb: -------------------------------------------------------------------------------- 1 | # encoding: utf-8 2 | require "spec_helper" 3 | require "logstash/patterns/core" 4 | 5 | # NOTE: we only support non-structured log formats for all RT_FLOW_ 6 | 7 | describe_pattern "RT_FLOW1", ['legacy', 'ecs-v1'] do 8 | 9 | let(:message) do 10 | 'Dec 17 08:05:30 RT_FLOW: RT_FLOW_SESSION_CLOSE: session closed TCP FIN: 10.10.10.2/53836->10.10.10.1/22 junos-ssh' + 11 | ' 10.10.10.2/53836->10.10.10.1/22 None None 6 log-host-traffic untrust junos-host 5 78(6657) 122(13305) 45' + 12 | ' UNKNOWN UNKNOWN N/A(N/A) ge-0/0/1.0 No ' 13 | end 14 | 15 | it 'matches' do 16 | if ecs_compatibility? 
17 | expect(subject).to include( 18 | "source"=>{"ip"=>"10.10.10.2", "port"=>53836, "nat"=>{"ip"=>"10.10.10.2", "port"=>53836}, "bytes"=>6657}, 19 | "destination"=>{"ip"=>"10.10.10.1", "port"=>22, "nat"=>{"ip"=>"10.10.10.1", "port"=>22}, "bytes"=>13305}, 20 | "observer"=>{"egress"=>{"zone"=>"junos-host"}, "ingress"=>{"zone"=>"untrust"}}, 21 | "rule"=>{"name"=>"log-host-traffic"}, 22 | "network"=>{"iana_number"=>"6"}, 23 | "juniper"=>{"srx"=>{ 24 | "tag"=>"RT_FLOW_SESSION_CLOSE", "reason"=>"session closed TCP FIN", 25 | "session_id"=>"5", "service_name"=>"junos-ssh", "elapsed_time"=>45 26 | }} 27 | ) 28 | else 29 | should include("event"=>"RT_FLOW_SESSION_CLOSE", "close-reason"=>"session closed TCP FIN", 30 | "src-ip"=>"10.10.10.2", "src-port"=>"53836", "nat-src-ip"=>"10.10.10.2", "nat-src-port"=>"53836", 31 | "dst-ip"=>"10.10.10.1", "dst-port"=>"22", "nat-dst-ip"=>"10.10.10.1", "nat-dst-port"=>"22", 32 | "src-nat-rule-name"=>"None", "dst-nat-rule-name"=>"None", 33 | "protocol-id"=>"6", "policy-name"=>"log-host-traffic", 34 | "from-zone"=>"untrust", "to-zone"=>"junos-host", 35 | "service"=>"junos-ssh", "session-id"=>"5", 36 | "sent"=>"6657", "received"=>"13305", "elapsed-time"=>"45") 37 | end 38 | end 39 | 40 | end 41 | 42 | describe_pattern "RT_FLOW2", ['legacy', 'ecs-v1'] do 43 | 44 | let(:message) do 45 | 'Dec 17 08:04:45 RT_FLOW: RT_FLOW_SESSION_CREATE: session created 10.10.10.2/53836->10.10.10.1/22' + 46 | ' junos-ssh 10.10.10.2/53836->10.10.10.1/22 None None 6 log-host-traffic untrust junos-host 5 N/A(N/A) ge-0/0/1.0' 47 | end 48 | 49 | it 'matches' do 50 | if ecs_compatibility? 
51 | expect(subject).to include( 52 | "source"=>{"ip"=>"10.10.10.2", "port"=>53836, "nat"=>{"ip"=>"10.10.10.2", "port"=>53836}}, 53 | "destination"=>{"ip" => "10.10.10.1", "port"=>22, "nat"=>{"ip"=>"10.10.10.1", "port"=>22}}, 54 | "observer"=>{"ingress"=>{"zone"=>"untrust"}, "egress"=>{"zone"=>"junos-host"}}, 55 | "network"=>{"iana_number"=>"6"}, 56 | "juniper"=>{"srx"=>{"service_name"=>"junos-ssh", "session_id"=>"5", "tag"=>"RT_FLOW_SESSION_CREATE"}}, 57 | "rule"=>{"name"=>"log-host-traffic"} 58 | ) 59 | else 60 | should include("event"=>"RT_FLOW_SESSION_CREATE", 61 | "src-ip"=>"10.10.10.2", "src-port"=>"53836", 62 | "dst-ip"=>"10.10.10.1", "dst-port"=>"22", 63 | "service"=>"junos-ssh", 64 | "nat-src-ip"=>"10.10.10.2", "nat-src-port"=>"53836", 65 | "nat-dst-ip"=>"10.10.10.1", "nat-dst-port"=>"22", 66 | "src-nat-rule-name"=>"None", "dst-nat-rule-name"=>"None", 67 | "protocol-id"=>"6", 68 | "policy-name"=>"log-host-traffic", 69 | "from-zone"=>"untrust", "to-zone"=>"junos-host", 70 | "session-id"=>"5") 71 | end 72 | end 73 | 74 | end 75 | 76 | describe_pattern "RT_FLOW3", ['legacy', 'ecs-v1'] do 77 | 78 | let(:message) do 79 | 'Sep 29 23:49:20 SRX-1 RT_FLOW: RT_FLOW_SESSION_DENY: session denied 10.0.0.1/54924->192.168.1.1/53 junos-dns-udp ' + 80 | '17(0) default-deny(global) trust trust UNKNOWN UNKNOWN N/A(N/A) ge-0/0/0.0 UNKNOWN policy deny' 81 | end 82 | 83 | it 'matches' do 84 | if ecs_compatibility? 
85 | expect(subject).to include( 86 | "source"=>{"ip"=>"10.0.0.1", "port"=>54924}, 87 | "destination"=>{"ip"=>"192.168.1.1", "port"=>53}, 88 | "juniper"=>{"srx"=>{"service_name"=>"junos-dns-udp", "tag"=>"RT_FLOW_SESSION_DENY"}}, 89 | "network"=>{"iana_number"=>"17"}, 90 | "observer"=>{"egress"=>{"zone"=>"trust"}, "ingress"=>{"zone"=>"trust"}}, 91 | "rule"=>{"name"=>"default-deny(global)"} 92 | ) 93 | else 94 | should include("event"=>"RT_FLOW_SESSION_DENY", 95 | "src-ip"=>"10.0.0.1", "dst-ip"=>"192.168.1.1", "src-port"=>"54924", "dst-port"=>"53", 96 | "protocol-id"=>"17", "from-zone"=>"trust", "to-zone"=>"trust", 97 | "service"=>"junos-dns-udp", "policy-name"=>"default-deny(global)") 98 | end 99 | end 100 | 101 | end 102 | -------------------------------------------------------------------------------- /patterns/legacy/grok-patterns: -------------------------------------------------------------------------------- 1 | USERNAME [a-zA-Z0-9._-]+ 2 | USER %{USERNAME} 3 | EMAILLOCALPART [a-zA-Z][a-zA-Z0-9_.+-=:]+ 4 | EMAILADDRESS %{EMAILLOCALPART}@%{HOSTNAME} 5 | INT (?:[+-]?(?:[0-9]+)) 6 | BASE10NUM (?[+-]?(?:(?:[0-9]+(?:\.[0-9]+)?)|(?:\.[0-9]+))) 7 | NUMBER (?:%{BASE10NUM}) 8 | BASE16NUM (?(?"(?>\\.|[^\\"]+)+"|""|(?>'(?>\\.|[^\\']+)+')|''|(?>`(?>\\.|[^\\`]+)+`)|``)) 19 | UUID [A-Fa-f0-9]{8}-(?:[A-Fa-f0-9]{4}-){3}[A-Fa-f0-9]{12} 20 | # URN, allowing use of RFC 2141 section 2.3 reserved characters 21 | URN urn:[0-9A-Za-z][0-9A-Za-z-]{0,31}:(?:%[0-9a-fA-F]{2}|[0-9A-Za-z()+,.:=@;$_!*'/?#-])+ 22 | 23 | # Networking 24 | MAC (?:%{CISCOMAC}|%{WINDOWSMAC}|%{COMMONMAC}) 25 | CISCOMAC (?:(?:[A-Fa-f0-9]{4}\.){2}[A-Fa-f0-9]{4}) 26 | WINDOWSMAC (?:(?:[A-Fa-f0-9]{2}-){5}[A-Fa-f0-9]{2}) 27 | COMMONMAC (?:(?:[A-Fa-f0-9]{2}:){5}[A-Fa-f0-9]{2}) 28 | IPV6 
((([0-9A-Fa-f]{1,4}:){7}([0-9A-Fa-f]{1,4}|:))|(([0-9A-Fa-f]{1,4}:){6}(:[0-9A-Fa-f]{1,4}|((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3})|:))|(([0-9A-Fa-f]{1,4}:){5}(((:[0-9A-Fa-f]{1,4}){1,2})|:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3})|:))|(([0-9A-Fa-f]{1,4}:){4}(((:[0-9A-Fa-f]{1,4}){1,3})|((:[0-9A-Fa-f]{1,4})?:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(([0-9A-Fa-f]{1,4}:){3}(((:[0-9A-Fa-f]{1,4}){1,4})|((:[0-9A-Fa-f]{1,4}){0,2}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(([0-9A-Fa-f]{1,4}:){2}(((:[0-9A-Fa-f]{1,4}){1,5})|((:[0-9A-Fa-f]{1,4}){0,3}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(([0-9A-Fa-f]{1,4}:){1}(((:[0-9A-Fa-f]{1,4}){1,6})|((:[0-9A-Fa-f]{1,4}){0,4}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(:(((:[0-9A-Fa-f]{1,4}){1,7})|((:[0-9A-Fa-f]{1,4}){0,5}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:)))(%.+)? 29 | IPV4 (?[A-Za-z]+:|\\)(?:\\[^\\?*]*)+ 40 | URIPROTO [A-Za-z]([A-Za-z0-9+\-.]+)+ 41 | URIHOST %{IPORHOST}(?::%{POSINT:port})? 42 | # uripath comes loosely from RFC1738, but mostly from what Firefox 43 | # doesn't turn into %XX 44 | URIPATH (?:/[A-Za-z0-9$.+!*'(){},~:;=@#%&_\-]*)+ 45 | #URIPARAM \?(?:[A-Za-z0-9]+(?:=(?:[^&]*))?(?:&(?:[A-Za-z0-9]+(?:=(?:[^&]*))?)?)*)? 46 | URIPARAM \?[A-Za-z0-9$.+!*'|(){},~@#%&/=:;_?\-\[\]<>]* 47 | URIPATHPARAM %{URIPATH}(?:%{URIPARAM})? 48 | URI %{URIPROTO}://(?:%{USER}(?::[^@]*)?@)?(?:%{URIHOST})?(?:%{URIPATHPARAM})? 
49 | 50 | # Months: January, Feb, 3, 03, 12, December 51 | MONTH \b(?:[Jj]an(?:uary|uar)?|[Ff]eb(?:ruary|ruar)?|[Mm](?:a|ä)?r(?:ch|z)?|[Aa]pr(?:il)?|[Mm]a(?:y|i)?|[Jj]un(?:e|i)?|[Jj]ul(?:y|i)?|[Aa]ug(?:ust)?|[Ss]ep(?:tember)?|[Oo](?:c|k)?t(?:ober)?|[Nn]ov(?:ember)?|[Dd]e(?:c|z)(?:ember)?)\b 52 | MONTHNUM (?:0?[1-9]|1[0-2]) 53 | MONTHNUM2 (?:0[1-9]|1[0-2]) 54 | MONTHDAY (?:(?:0[1-9])|(?:[12][0-9])|(?:3[01])|[1-9]) 55 | 56 | # Days: Monday, Tue, Thu, etc... 57 | DAY (?:Mon(?:day)?|Tue(?:sday)?|Wed(?:nesday)?|Thu(?:rsday)?|Fri(?:day)?|Sat(?:urday)?|Sun(?:day)?) 58 | 59 | # Years? 60 | YEAR (?>\d\d){1,2} 61 | HOUR (?:2[0123]|[01]?[0-9]) 62 | MINUTE (?:[0-5][0-9]) 63 | # '60' is a leap second in most time standards and thus is valid. 64 | SECOND (?:(?:[0-5]?[0-9]|60)(?:[:.,][0-9]+)?) 65 | TIME (?!<[0-9])%{HOUR}:%{MINUTE}(?::%{SECOND})(?![0-9]) 66 | # datestamp is YYYY/MM/DD-HH:MM:SS.UUUU (or something like it) 67 | DATE_US %{MONTHNUM}[/-]%{MONTHDAY}[/-]%{YEAR} 68 | DATE_EU %{MONTHDAY}[./-]%{MONTHNUM}[./-]%{YEAR} 69 | ISO8601_TIMEZONE (?:Z|[+-]%{HOUR}(?::?%{MINUTE})) 70 | ISO8601_SECOND (?:%{SECOND}|60) 71 | TIMESTAMP_ISO8601 %{YEAR}-%{MONTHNUM}-%{MONTHDAY}[T ]%{HOUR}:?%{MINUTE}(?::?%{SECOND})?%{ISO8601_TIMEZONE}? 72 | DATE %{DATE_US}|%{DATE_EU} 73 | DATESTAMP %{DATE}[- ]%{TIME} 74 | TZ (?:[APMCE][SD]T|UTC) 75 | DATESTAMP_RFC822 %{DAY} %{MONTH} %{MONTHDAY} %{YEAR} %{TIME} %{TZ} 76 | DATESTAMP_RFC2822 %{DAY}, %{MONTHDAY} %{MONTH} %{YEAR} %{TIME} %{ISO8601_TIMEZONE} 77 | DATESTAMP_OTHER %{DAY} %{MONTH} %{MONTHDAY} %{TIME} %{TZ} %{YEAR} 78 | DATESTAMP_EVENTLOG %{YEAR}%{MONTHNUM2}%{MONTHDAY}%{HOUR}%{MINUTE}%{SECOND} 79 | 80 | # Syslog Dates: Month Day HH:MM:SS 81 | SYSLOGTIMESTAMP %{MONTH} +%{MONTHDAY} %{TIME} 82 | PROG [\x21-\x5a\x5c\x5e-\x7e]+ 83 | SYSLOGPROG %{PROG:program}(?:\[%{POSINT:pid}\])? 
84 | SYSLOGHOST %{IPORHOST} 85 | SYSLOGFACILITY <%{NONNEGINT:facility}.%{NONNEGINT:priority}> 86 | HTTPDATE %{MONTHDAY}/%{MONTH}/%{YEAR}:%{TIME} %{INT} 87 | 88 | # Shortcuts 89 | QS %{QUOTEDSTRING} 90 | 91 | # Log formats 92 | SYSLOGBASE %{SYSLOGTIMESTAMP:timestamp} (?:%{SYSLOGFACILITY} )?%{SYSLOGHOST:logsource} %{SYSLOGPROG}: 93 | 94 | # Log Levels 95 | LOGLEVEL ([Aa]lert|ALERT|[Tt]race|TRACE|[Dd]ebug|DEBUG|[Nn]otice|NOTICE|[Ii]nfo?(?:rmation)?|INFO?(?:RMATION)?|[Ww]arn?(?:ing)?|WARN?(?:ING)?|[Ee]rr?(?:or)?|ERR?(?:OR)?|[Cc]rit?(?:ical)?|CRIT?(?:ICAL)?|[Ff]atal|FATAL|[Ss]evere|SEVERE|EMERG(?:ENCY)?|[Ee]merg(?:ency)?) 96 | -------------------------------------------------------------------------------- /patterns/ecs-v1/grok-patterns: -------------------------------------------------------------------------------- 1 | USERNAME [a-zA-Z0-9._-]+ 2 | USER %{USERNAME} 3 | EMAILLOCALPART [a-zA-Z0-9!#$%&'*+\-/=?^_`{|}~]{1,64}(?:\.[a-zA-Z0-9!#$%&'*+\-/=?^_`{|}~]{1,62}){0,63} 4 | EMAILADDRESS %{EMAILLOCALPART}@%{HOSTNAME} 5 | INT (?:[+-]?(?:[0-9]+)) 6 | BASE10NUM (?[+-]?(?:(?:[0-9]+(?:\.[0-9]+)?)|(?:\.[0-9]+))) 7 | NUMBER (?:%{BASE10NUM}) 8 | BASE16NUM (?(?"(?>\\.|[^\\"]+)+"|""|(?>'(?>\\.|[^\\']+)+')|''|(?>`(?>\\.|[^\\`]+)+`)|``)) 19 | UUID [A-Fa-f0-9]{8}-(?:[A-Fa-f0-9]{4}-){3}[A-Fa-f0-9]{12} 20 | # URN, allowing use of RFC 2141 section 2.3 reserved characters 21 | URN urn:[0-9A-Za-z][0-9A-Za-z-]{0,31}:(?:%[0-9a-fA-F]{2}|[0-9A-Za-z()+,.:=@;$_!*'/?#-])+ 22 | 23 | # Networking 24 | MAC (?:%{CISCOMAC}|%{WINDOWSMAC}|%{COMMONMAC}) 25 | CISCOMAC (?:(?:[A-Fa-f0-9]{4}\.){2}[A-Fa-f0-9]{4}) 26 | WINDOWSMAC (?:(?:[A-Fa-f0-9]{2}-){5}[A-Fa-f0-9]{2}) 27 | COMMONMAC (?:(?:[A-Fa-f0-9]{2}:){5}[A-Fa-f0-9]{2}) 28 | IPV6 
((([0-9A-Fa-f]{1,4}:){7}([0-9A-Fa-f]{1,4}|:))|(([0-9A-Fa-f]{1,4}:){6}(:[0-9A-Fa-f]{1,4}|((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3})|:))|(([0-9A-Fa-f]{1,4}:){5}(((:[0-9A-Fa-f]{1,4}){1,2})|:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3})|:))|(([0-9A-Fa-f]{1,4}:){4}(((:[0-9A-Fa-f]{1,4}){1,3})|((:[0-9A-Fa-f]{1,4})?:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(([0-9A-Fa-f]{1,4}:){3}(((:[0-9A-Fa-f]{1,4}){1,4})|((:[0-9A-Fa-f]{1,4}){0,2}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(([0-9A-Fa-f]{1,4}:){2}(((:[0-9A-Fa-f]{1,4}){1,5})|((:[0-9A-Fa-f]{1,4}){0,3}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(([0-9A-Fa-f]{1,4}:){1}(((:[0-9A-Fa-f]{1,4}){1,6})|((:[0-9A-Fa-f]{1,4}){0,4}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(:(((:[0-9A-Fa-f]{1,4}){1,7})|((:[0-9A-Fa-f]{1,4}){0,5}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:)))(%.+)? 29 | IPV4 (?[A-Za-z]+:|\\)(?:\\[^\\?*]*)+ 40 | URIPROTO [A-Za-z]([A-Za-z0-9+\-.]+)+ 41 | URIHOST %{IPORHOST}(?::%{POSINT})? 42 | # uripath comes loosely from RFC1738, but mostly from what Firefox doesn't turn into %XX 43 | URIPATH (?:/[A-Za-z0-9$.+!*'(){},~:;=@#%&_\-]*)+ 44 | URIQUERY [A-Za-z0-9$.+!*'|(){},~@#%&/=:;_?\-\[\]<>]* 45 | # deprecated (kept due compatibility): 46 | URIPARAM \?%{URIQUERY} 47 | URIPATHPARAM %{URIPATH}(?:\?%{URIQUERY})? 48 | URI %{URIPROTO}://(?:%{USER}(?::[^@]*)?@)?(?:%{URIHOST})?(?:%{URIPATH}(?:\?%{URIQUERY})?)? 
49 | 50 | # Months: January, Feb, 3, 03, 12, December 51 | MONTH \b(?:[Jj]an(?:uary|uar)?|[Ff]eb(?:ruary|ruar)?|[Mm](?:a|ä)?r(?:ch|z)?|[Aa]pr(?:il)?|[Mm]a(?:y|i)?|[Jj]un(?:e|i)?|[Jj]ul(?:y|i)?|[Aa]ug(?:ust)?|[Ss]ep(?:tember)?|[Oo](?:c|k)?t(?:ober)?|[Nn]ov(?:ember)?|[Dd]e(?:c|z)(?:ember)?)\b 52 | MONTHNUM (?:0?[1-9]|1[0-2]) 53 | MONTHNUM2 (?:0[1-9]|1[0-2]) 54 | MONTHDAY (?:(?:0[1-9])|(?:[12][0-9])|(?:3[01])|[1-9]) 55 | 56 | # Days: Monday, Tue, Thu, etc... 57 | DAY (?:Mon(?:day)?|Tue(?:sday)?|Wed(?:nesday)?|Thu(?:rsday)?|Fri(?:day)?|Sat(?:urday)?|Sun(?:day)?) 58 | 59 | # Years? 60 | YEAR (?>\d\d){1,2} 61 | HOUR (?:2[0123]|[01]?[0-9]) 62 | MINUTE (?:[0-5][0-9]) 63 | # '60' is a leap second in most time standards and thus is valid. 64 | SECOND (?:(?:[0-5]?[0-9]|60)(?:[:.,][0-9]+)?) 65 | TIME (?!<[0-9])%{HOUR}:%{MINUTE}(?::%{SECOND})(?![0-9]) 66 | # datestamp is YYYY/MM/DD-HH:MM:SS.UUUU (or something like it) 67 | DATE_US %{MONTHNUM}[/-]%{MONTHDAY}[/-]%{YEAR} 68 | DATE_EU %{MONTHDAY}[./-]%{MONTHNUM}[./-]%{YEAR} 69 | ISO8601_TIMEZONE (?:Z|[+-]%{HOUR}(?::?%{MINUTE})) 70 | ISO8601_SECOND %{SECOND} 71 | TIMESTAMP_ISO8601 %{YEAR}-%{MONTHNUM}-%{MONTHDAY}[T ]%{HOUR}:?%{MINUTE}(?::?%{SECOND})?%{ISO8601_TIMEZONE}? 72 | DATE %{DATE_US}|%{DATE_EU} 73 | DATESTAMP %{DATE}[- ]%{TIME} 74 | TZ (?:[APMCE][SD]T|UTC) 75 | DATESTAMP_RFC822 %{DAY} %{MONTH} %{MONTHDAY} %{YEAR} %{TIME} %{TZ} 76 | DATESTAMP_RFC2822 %{DAY}, %{MONTHDAY} %{MONTH} %{YEAR} %{TIME} %{ISO8601_TIMEZONE} 77 | DATESTAMP_OTHER %{DAY} %{MONTH} %{MONTHDAY} %{TIME} %{TZ} %{YEAR} 78 | DATESTAMP_EVENTLOG %{YEAR}%{MONTHNUM2}%{MONTHDAY}%{HOUR}%{MINUTE}%{SECOND} 79 | 80 | # Syslog Dates: Month Day HH:MM:SS 81 | SYSLOGTIMESTAMP %{MONTH} +%{MONTHDAY} %{TIME} 82 | PROG [\x21-\x5a\x5c\x5e-\x7e]+ 83 | SYSLOGPROG %{PROG:[process][name]}(?:\[%{POSINT:[process][pid]:int}\])? 
84 | SYSLOGHOST %{IPORHOST} 85 | SYSLOGFACILITY <%{NONNEGINT:[log][syslog][facility][code]:int}.%{NONNEGINT:[log][syslog][priority]:int}> 86 | HTTPDATE %{MONTHDAY}/%{MONTH}/%{YEAR}:%{TIME} %{INT} 87 | 88 | # Shortcuts 89 | QS %{QUOTEDSTRING} 90 | 91 | # Log formats 92 | SYSLOGBASE %{SYSLOGTIMESTAMP:timestamp} (?:%{SYSLOGFACILITY} )?%{SYSLOGHOST:[host][hostname]} %{SYSLOGPROG}: 93 | 94 | # Log Levels 95 | LOGLEVEL ([Aa]lert|ALERT|[Tt]race|TRACE|[Dd]ebug|DEBUG|[Nn]otice|NOTICE|[Ii]nfo?(?:rmation)?|INFO?(?:RMATION)?|[Ww]arn?(?:ing)?|WARN?(?:ING)?|[Ee]rr?(?:or)?|ERR?(?:OR)?|[Cc]rit?(?:ical)?|CRIT?(?:ICAL)?|[Ff]atal|FATAL|[Ss]evere|SEVERE|EMERG(?:ENCY)?|[Ee]merg(?:ency)?) 96 | -------------------------------------------------------------------------------- /patterns/ecs-v1/bacula: -------------------------------------------------------------------------------- 1 | BACULA_TIMESTAMP %{MONTHDAY}-%{MONTH}(?:-%{YEAR})? %{HOUR}:%{MINUTE} 2 | BACULA_HOST %{HOSTNAME} 3 | BACULA_VOLUME %{USER} 4 | BACULA_DEVICE %{USER} 5 | BACULA_DEVICEPATH %{UNIXPATH} 6 | BACULA_CAPACITY %{INT}{1,3}(,%{INT}{3})* 7 | BACULA_VERSION %{USER} 8 | BACULA_JOB %{USER} 9 | 10 | BACULA_LOG_MAX_CAPACITY User defined maximum volume capacity %{BACULA_CAPACITY:[bacula][volume][max_capacity]} exceeded on device \"%{BACULA_DEVICE:[bacula][volume][device]}\" \(%{BACULA_DEVICEPATH:[bacula][volume][path]}\).? 11 | BACULA_LOG_END_VOLUME End of medium on Volume \"%{BACULA_VOLUME:[bacula][volume][name]}\" Bytes=%{BACULA_CAPACITY:[bacula][volume][bytes]} Blocks=%{BACULA_CAPACITY:[bacula][volume][blocks]} at %{BACULA_TIMESTAMP:[bacula][timestamp]}. 12 | BACULA_LOG_NEW_VOLUME Created new Volume \"%{BACULA_VOLUME:[bacula][volume][name]}\" in catalog. 13 | BACULA_LOG_NEW_LABEL Labeled new Volume \"%{BACULA_VOLUME:[bacula][volume][name]}\" on (?:file )?device \"%{BACULA_DEVICE:[bacula][volume][device]}\" \(%{BACULA_DEVICEPATH:[bacula][volume][path]}\). 
14 | BACULA_LOG_WROTE_LABEL Wrote label to prelabeled Volume \"%{BACULA_VOLUME:[bacula][volume][name]}\" on device \"%{BACULA_DEVICE:[bacula][volume][device]}\" \(%{BACULA_DEVICEPATH:[bacula][volume][path]}\) 15 | BACULA_LOG_NEW_MOUNT New volume \"%{BACULA_VOLUME:[bacula][volume][name]}\" mounted on device \"%{BACULA_DEVICE:[bacula][volume][device]}\" \(%{BACULA_DEVICEPATH:[bacula][volume][path]}\) at %{BACULA_TIMESTAMP:[bacula][timestamp]}. 16 | BACULA_LOG_NOOPEN \s*Cannot open %{DATA}: ERR=%{GREEDYDATA:[error][message]} 17 | BACULA_LOG_NOOPENDIR \s*Could not open directory \"?%{DATA:[file][path]}\"?: ERR=%{GREEDYDATA:[error][message]} 18 | BACULA_LOG_NOSTAT \s*Could not stat %{DATA:[file][path]}: ERR=%{GREEDYDATA:[error][message]} 19 | BACULA_LOG_NOJOBS There are no more Jobs associated with Volume \"%{BACULA_VOLUME:[bacula][volume][name]}\". Marking it purged. 20 | BACULA_LOG_ALL_RECORDS_PRUNED .*?All records pruned from Volume \"%{BACULA_VOLUME:[bacula][volume][name]}\"; marking it \"Purged\" 21 | BACULA_LOG_BEGIN_PRUNE_JOBS Begin pruning Jobs older than %{INT} month %{INT} days . 22 | BACULA_LOG_BEGIN_PRUNE_FILES Begin pruning Files. 23 | BACULA_LOG_PRUNED_JOBS Pruned %{INT} Jobs* for client %{BACULA_HOST:[bacula][client][name]} from catalog. 24 | BACULA_LOG_PRUNED_FILES Pruned Files from %{INT} Jobs* for client %{BACULA_HOST:[bacula][client][name]} from catalog. 25 | BACULA_LOG_ENDPRUNE End auto prune. 26 | BACULA_LOG_STARTJOB Start Backup JobId %{INT}, Job=%{BACULA_JOB:[bacula][job][name]} 27 | BACULA_LOG_STARTRESTORE Start Restore Job %{BACULA_JOB:[bacula][job][name]} 28 | BACULA_LOG_USEDEVICE Using Device \"%{BACULA_DEVICE:[bacula][volume][device]}\" 29 | BACULA_LOG_DIFF_FS \s*%{UNIXPATH} is a different filesystem. Will not descend from %{UNIXPATH} into it. 30 | BACULA_LOG_JOBEND Job write elapsed time = %{DATA:[bacula][job][elapsed_time]}, Transfer rate = %{NUMBER} (K|M|G)? Bytes/second 31 | BACULA_LOG_NOPRUNE_JOBS No Jobs found to prune. 
32 | BACULA_LOG_NOPRUNE_FILES No Files found to prune. 33 | BACULA_LOG_VOLUME_PREVWRITTEN Volume \"?%{BACULA_VOLUME:[bacula][volume][name]}\"? previously written, moving to end of data. 34 | BACULA_LOG_READYAPPEND Ready to append to end of Volume \"%{BACULA_VOLUME:[bacula][volume][name]}\" size=%{INT:[bacula][volume][size]:int} 35 | # :long - %{INT:[bacula][volume][size]:int} 36 | BACULA_LOG_CANCELLING Cancelling duplicate JobId=%{INT:[bacula][job][other_id]}. 37 | BACULA_LOG_MARKCANCEL JobId %{INT:[bacula][job][id]}, Job %{BACULA_JOB:[bacula][job][name]} marked to be canceled. 38 | BACULA_LOG_CLIENT_RBJ shell command: run ClientRunBeforeJob \"%{GREEDYDATA:[bacula][job][client_run_before_command]}\" 39 | BACULA_LOG_VSS (Generate )?VSS (Writer)? 40 | BACULA_LOG_MAXSTART Fatal [eE]rror: Job canceled because max start delay time exceeded. 41 | BACULA_LOG_DUPLICATE Fatal [eE]rror: JobId %{INT:[bacula][job][other_id]} already running. Duplicate job not allowed. 42 | BACULA_LOG_NOJOBSTAT Fatal [eE]rror: No Job status returned from FD. 43 | BACULA_LOG_FATAL_CONN Fatal [eE]rror: bsock.c:133 Unable to connect to (Client: %{BACULA_HOST:[bacula][client][name]}|Storage daemon) on %{IPORHOST:[client][address]}:%{POSINT:[client][port]:int}. ERR=%{GREEDYDATA:[error][message]} 44 | BACULA_LOG_NO_CONNECT Warning: bsock.c:127 Could not connect to (Client: %{BACULA_HOST:[bacula][client][name]}|Storage daemon) on %{IPORHOST:[client][address]}:%{POSINT:[client][port]:int}. ERR=%{GREEDYDATA:[error][message]} 45 | BACULA_LOG_NO_AUTH Fatal error: Unable to authenticate with File daemon at \"?%{IPORHOST:[client][address]}(?::%{POSINT:[client][port]:int})?\"?. Possible causes: 46 | BACULA_LOG_NOSUIT No prior or suitable Full backup found in catalog. Doing FULL backup. 47 | BACULA_LOG_NOPRIOR No prior Full backup Job record found. 
48 | 49 | BACULA_LOG_JOB (Error: )?Bacula %{BACULA_HOST} %{BACULA_VERSION} \(%{BACULA_VERSION}\): 50 | 51 | BACULA_LOG %{BACULA_TIMESTAMP:timestamp} %{BACULA_HOST:[host][hostname]}(?: JobId %{INT:[bacula][job][id]})?:? (%{BACULA_LOG_MAX_CAPACITY}|%{BACULA_LOG_END_VOLUME}|%{BACULA_LOG_NEW_VOLUME}|%{BACULA_LOG_NEW_LABEL}|%{BACULA_LOG_WROTE_LABEL}|%{BACULA_LOG_NEW_MOUNT}|%{BACULA_LOG_NOOPEN}|%{BACULA_LOG_NOOPENDIR}|%{BACULA_LOG_NOSTAT}|%{BACULA_LOG_NOJOBS}|%{BACULA_LOG_ALL_RECORDS_PRUNED}|%{BACULA_LOG_BEGIN_PRUNE_JOBS}|%{BACULA_LOG_BEGIN_PRUNE_FILES}|%{BACULA_LOG_PRUNED_JOBS}|%{BACULA_LOG_PRUNED_FILES}|%{BACULA_LOG_ENDPRUNE}|%{BACULA_LOG_STARTJOB}|%{BACULA_LOG_STARTRESTORE}|%{BACULA_LOG_USEDEVICE}|%{BACULA_LOG_DIFF_FS}|%{BACULA_LOG_JOBEND}|%{BACULA_LOG_NOPRUNE_JOBS}|%{BACULA_LOG_NOPRUNE_FILES}|%{BACULA_LOG_VOLUME_PREVWRITTEN}|%{BACULA_LOG_READYAPPEND}|%{BACULA_LOG_CANCELLING}|%{BACULA_LOG_MARKCANCEL}|%{BACULA_LOG_CLIENT_RBJ}|%{BACULA_LOG_VSS}|%{BACULA_LOG_MAXSTART}|%{BACULA_LOG_DUPLICATE}|%{BACULA_LOG_NOJOBSTAT}|%{BACULA_LOG_FATAL_CONN}|%{BACULA_LOG_NO_CONNECT}|%{BACULA_LOG_NO_AUTH}|%{BACULA_LOG_NOSUIT}|%{BACULA_LOG_JOB}|%{BACULA_LOG_NOPRIOR}) 52 | # old (deprecated) name : 53 | BACULA_LOGLINE %{BACULA_LOG} 54 | -------------------------------------------------------------------------------- /spec/patterns/netscreen_spec.rb: -------------------------------------------------------------------------------- 1 | # encoding: utf-8 2 | require "spec_helper" 3 | require "logstash/patterns/core" 4 | 5 | describe_pattern "NETSCREENSESSIONLOG", ['legacy', 'ecs-v1'] do 6 | 7 | context "traffic denied (Juniper)" do 8 | 9 | let(:message) do 10 | 'Jun 2 14:53:31 sample-host isg1000-A2: NetScreen device_id=0000011001000011 [Root]system-notification-00257(traffic): ' + 11 | 'start_time="2015-11-11 10:02:10" duration=0 policy_id=244 service=https proto=6 src zone=Untrust dst zone=Trust ' + 12 | 'action=Permit sent=0 rcvd=0 src=74.168.138.252 dst=72.72.72.72 src_port=1732 dst_port=443 
' + 13 | 'src-xlated ip=1.255.20.1 port=22041 dst-xlated ip=1.244.136.50 port=443 session_id=488451 reason=Creation' 14 | end 15 | 16 | it 'matches' do 17 | if ecs_compatibility? 18 | expect(subject).to include("timestamp" => "Jun 2 14:53:31") 19 | expect(subject).to include("netscreen"=>{ 20 | "session"=>{"id"=>"488451", "start_time"=>"2015-11-11 10:02:10", "duration"=>0, "type"=>"traffic", "reason"=>"Creation"}, 21 | "policy_id"=>"244", "service"=>"https", "protocol_number"=>6, "device_id"=>"0000011001000011" 22 | }) 23 | expect(subject).to include("event"=>{"code"=>"00257", "action"=>"Permit"}) 24 | # expect(subject).to include("network"=>{"protocol"=>"https"}) 25 | expect(subject).to include("source"=>{"bytes"=>0, "nat"=>{"port"=>22041, "ip"=>"1.255.20.1"}, "port"=>1732, "address"=>"74.168.138.252"}) 26 | expect(subject).to include("destination"=>{"bytes"=>0, "nat"=>{"port"=>443, "ip"=>"1.244.136.50"}, "port"=>443, "address"=>"72.72.72.72"}) 27 | expect(subject).to include("observer"=>{ 28 | "ingress"=>{"zone"=>"Untrust"}, "hostname"=>"sample-host", "name"=>"isg1000-A2", "product"=>"NetScreen", 29 | "egress"=>{"zone"=>"Trust"} 30 | }) 31 | else 32 | expect(subject).to include("date" => "Jun 2 14:53:31") 33 | expect(subject).to include( 34 | "device"=>"sample-host", 35 | "device_id"=>"0000011001000011", 36 | "start_time"=>"\"2015-11-11 10:02:10\"", 37 | "duration"=>"0", 38 | "policy_id"=>"244", 39 | "service"=>"https", 40 | "proto"=>"6", 41 | "src_zone"=>"Untrust", "dst_zone"=>"Trust", 42 | "action"=>"Permit", 43 | "sent"=>"0", "rcvd"=>"0", 44 | "src_ip"=>"74.168.138.252", "dst_ip"=>"72.72.72.72", 45 | "src_port"=>"1732", "dst_port"=>"443", 46 | "src_xlated_ip"=>"1.255.20.1", "src_xlated_port"=>"22041", 47 | "dst_xlated_ip"=>"1.244.136.50", "dst_xlated_port"=>"443", 48 | "session_id"=>"488451", "reason"=>"Creation", 49 | ) 50 | end 51 | end 52 | 53 | end 54 | 55 | context "traffic denied (without port/xlated/session_id/reason suffix)" do 56 | 57 | let(:message) 
do 58 | 'Mar 18 17:56:52 192.168.56.11 lowly_lizard: NetScreen device_id=netscreen2 [Root]system-notification-00257(traffic): ' + 59 | 'start_time="2009-03-18 16:07:06" duration=0 policy_id=320001 service=msrpc Endpoint Mapper(tcp) proto=6 ' + 60 | 'src zone=Null dst zone=self action=Deny sent=0 rcvd=16384 src=21.10.90.125 dst=23.16.1.1' 61 | end 62 | 63 | it 'matches in ECS mode' do 64 | if ecs_compatibility? 65 | expect(subject).to include("timestamp" => "Mar 18 17:56:52") 66 | expect(subject).to include("netscreen"=>{ 67 | "device_id"=>"netscreen2", 68 | "policy_id"=>"320001", 69 | "service"=>"msrpc Endpoint Mapper(tcp)", 70 | "protocol_number"=>6, 71 | "session"=>{"start_time"=>"2009-03-18 16:07:06", "type"=>"traffic", "duration"=>0} 72 | }) 73 | expect(subject).to include("source"=>{"address"=>"21.10.90.125", "bytes"=>0}) 74 | expect(subject).to include("destination"=>{"address"=>"23.16.1.1", "bytes"=>16384}) 75 | else 76 | expect(grok['tags']).to include('_grokparsefailure') 77 | end 78 | end 79 | end 80 | 81 | context "'standard' traffic denied" do 82 | 83 | let(:message) do 84 | 'Jun 2 14:53:31 fire00 aka1: NetScreen device_id=aka1 [Root]system-notification-00257(traffic): start_time="2006-06-02 14:53:30" ' + 85 | 'duration=0 policy_id=120 service=udp/port:17210 proto=17 src zone=Trust dst zone=DMZ action=Deny sent=0 rcvd=0 ' + 86 | 'src=192.168.2.2 dst=1.2.3.4 src_port=53 dst_port=17210' 87 | end 88 | 89 | it 'matches (in ECS mode)' do 90 | if ecs_compatibility? 91 | expect(subject).to include("event"=>{"action"=>"Deny", "code"=>"00257"}) 92 | else 93 | expect(grok['tags']).to include('_grokparsefailure') 94 | expect(subject).to_not include("date" => "Jun 2 14:53:31") 95 | end 96 | end 97 | 98 | context "(with session id)" do 99 | 100 | let(:message) do 101 | super() + ' session_id=0 reason=Traffic Denied' 102 | end 103 | 104 | it 'matches (in ECS mode)' do 105 | if ecs_compatibility? 
106 | expect(subject).to include("netscreen"=>hash_including("device_id"=>"aka1", "service"=>"udp/port:17210", 107 | "session"=>hash_including("reason"=>"Traffic Denied"))) 108 | expect(subject).to include("observer"=>{ 109 | "ingress"=>{"zone"=>"Trust"}, 110 | "egress"=>{"zone"=>"DMZ"}, "hostname"=>"fire00", "name"=>"aka1", 111 | "product"=>"NetScreen" 112 | }) 113 | else 114 | expect(grok['tags']).to include('_grokparsefailure') 115 | expect(subject).to_not include("date" => "Jun 2 14:53:31") 116 | end 117 | end 118 | 119 | end 120 | 121 | end 122 | 123 | end 124 | -------------------------------------------------------------------------------- /spec/patterns/squid_spec.rb: -------------------------------------------------------------------------------- 1 | # encoding: utf-8 2 | require "spec_helper" 3 | require "logstash/patterns/core" 4 | 5 | describe_pattern "SQUID3", ['legacy', 'ecs-v1'] do 6 | 7 | describe 'CONNECT sample' do 8 | 9 | let(:message) do 10 | '1525344856.899 16867 10.170.72.111 TCP_TUNNEL/200 6256 CONNECT logs.ap-southeast-2.amazonaws.com:443 - HIER_DIRECT/53.140.206.134 -' 11 | end 12 | 13 | it "matches" do 14 | expect(subject).to include("timestamp" => "1525344856.899") 15 | if ecs_compatibility? 
16 | expect(subject).to include( 17 | "event" => { "action" => "TCP_TUNNEL" }, 18 | "squid" => { 19 | "request" => { "duration" => 16867 }, 20 | "hierarchy_code" => "HIER_DIRECT" 21 | }) 22 | expect(subject).to include("destination" => { "address" => "53.140.206.134" }) 23 | expect(subject).to include("http" => { 24 | "request" => { "method" => "CONNECT" }, 25 | "response" => { "bytes" => 6256, "status_code" => 200 } 26 | # does not include missing ('-') as http.response.mime_type 27 | }) 28 | expect(subject).to include("url" => { "original" => "logs.ap-southeast-2.amazonaws.com:443" }) 29 | expect(subject).to include("source" => { "ip" => "10.170.72.111" }) 30 | else 31 | expect(subject).to include( 32 | "duration" => "16867", 33 | "client_address" => "10.170.72.111", 34 | "cache_result" => "TCP_TUNNEL", 35 | "status_code" => "200", 36 | "request_method" => "CONNECT", 37 | "bytes" => "6256", 38 | "url" => "logs.ap-southeast-2.amazonaws.com:443", 39 | "user" => "-", 40 | "hierarchy_code" => "HIER_DIRECT", 41 | "server" => "53.140.206.134", 42 | "content_type" => "-", 43 | ) 44 | end 45 | end 46 | 47 | it "does not include missing ('-') user-name" do 48 | if ecs_compatibility? 49 | expect(subject.keys).to_not include("user") # "user" => { "name" => "-" } 50 | end 51 | end 52 | 53 | end 54 | 55 | describe 'GET sample' do 56 | 57 | let(:message) do 58 | "1525334330.556 3 120.65.1.1 TCP_REFRESH_MISS/200 2014 GET http://www.sample.com/hellow_world.txt public-user DIRECT/www.sample.com text/plain 902351708.872" 59 | end 60 | 61 | it "matches" do 62 | expect(subject).to include("timestamp" => "1525334330.556") 63 | if ecs_compatibility? 
64 | expect(subject).to include( 65 | "event" => { "action" => "TCP_REFRESH_MISS" }, 66 | "squid" => { 67 | "request" => { "duration" => 3 }, 68 | "hierarchy_code" => "DIRECT" 69 | }) 70 | expect(subject).to include("destination" => { "address" => "www.sample.com" }) 71 | expect(subject).to include("http" => { 72 | "request" => { "method" => "GET" }, 73 | "response" => { "bytes" => 2014, "status_code" => 200, "mime_type" => 'text/plain' } 74 | }) 75 | expect(subject).to include("url" => { "original" => "http://www.sample.com/hellow_world.txt" }) 76 | expect(subject).to include("source" => { "ip" => "120.65.1.1" }) 77 | expect(subject).to include("user" => { "name" => "public-user" }) 78 | else 79 | expect(subject).to include( 80 | "duration"=>"3", 81 | "client_address"=>"120.65.1.1", 82 | "cache_result"=>"TCP_REFRESH_MISS", 83 | "status_code"=>"200", 84 | "bytes"=>"2014", 85 | "request_method" => "GET", 86 | "url" => "http://www.sample.com/hellow_world.txt", 87 | "user"=>"public-user", 88 | "hierarchy_code"=>"DIRECT", 89 | "server"=>"www.sample.com", 90 | "content_type"=>"text/plain", 91 | ) 92 | end 93 | end 94 | 95 | it "retains message" do 96 | expect(subject).to include("message" => message) 97 | end 98 | 99 | end 100 | 101 | context 'GET with invalid status' do # code 0 means unavailable - no response received 102 | 103 | let(:message) do 104 | '1426235912.366 16 192.168.3.50 TCP_MISS_ABORTED/000 0 GET http://garfield.com/comic/2015-03-13 - HIER_DIRECT/193.135.59.44 -' 105 | end 106 | 107 | it "matches" do 108 | expect(subject).to include("timestamp" => "1426235912.366") 109 | if ecs_compatibility? 
110 | expect(subject).to include("event"=>{"action"=>"TCP_MISS_ABORTED"}, 111 | "http"=>{"response"=>{"bytes"=>0}, "request"=>{"method"=>"GET"}}) 112 | else 113 | expect(subject).to include("status_code"=>'000') 114 | end 115 | end 116 | 117 | end 118 | 119 | context 'GET with optional server ip' do 120 | 121 | let(:message) do 122 | '1066037222.352 132 144.157.100.17 TCP_MISS/504 1293 GET http://at.atremis.com/image/93101912/xyz - NONE/- -' 123 | end 124 | 125 | it "matches" do 126 | expect(subject).to include("timestamp" => "1066037222.352") 127 | if ecs_compatibility? 128 | expect(subject).to include("event"=>{"action"=>"TCP_MISS"}, 129 | "http"=>{"response"=>{"bytes"=>1293, "status_code"=>504}, "request"=>{"method"=>"GET"}}) 130 | expect(subject.keys).not_to include('destination') 131 | else 132 | expect(subject).to include("status_code"=>'504') 133 | expect(subject.keys).not_to include('server') 134 | end 135 | end 136 | 137 | end 138 | 139 | end 140 | -------------------------------------------------------------------------------- /spec/patterns/mongodb_spec.rb: -------------------------------------------------------------------------------- 1 | # encoding: utf-8 2 | require "spec_helper" 3 | require "logstash/patterns/core" 4 | 5 | describe_pattern "MONGO3_LOG", ['legacy', 'ecs-v1'] do 6 | 7 | context "parsing an standard/basic message" do 8 | 9 | let(:message) { "2014-11-03T18:28:32.450-0500 I NETWORK [initandlisten] waiting for connections on port 27017" } 10 | 11 | it { should include("timestamp" => "2014-11-03T18:28:32.450-0500") } 12 | 13 | it do 14 | if ecs_compatibility? 15 | should include("log" => { 'level' => "I" }) 16 | else 17 | should include("severity" => "I") 18 | end 19 | end 20 | 21 | it do 22 | if ecs_compatibility? 23 | should include("mongodb" => hash_including("component" => "NETWORK")) 24 | else 25 | should include("component" => "NETWORK") 26 | end 27 | end 28 | 29 | it do 30 | if ecs_compatibility? 
31 | should include("mongodb" => hash_including("context" => "initandlisten")) 32 | else 33 | should include("context" => "initandlisten") 34 | end 35 | end 36 | 37 | it "generates a message field" do 38 | expect(subject["message"]).to eql [ message, "waiting for connections on port 27017" ] 39 | end 40 | end 41 | 42 | context "parsing a message with a missing component" do 43 | 44 | let(:message) { "2015-02-24T18:17:47.148+0000 F - [conn11] Got signal: 11 (Segmentation fault)." } 45 | 46 | it 'matches' do 47 | should include("timestamp" => "2015-02-24T18:17:47.148+0000") 48 | 49 | if ecs_compatibility? 50 | expect( grok_result['mongodb'].keys ).to_not include("component") 51 | else 52 | should include("component" => "-") 53 | end 54 | 55 | if ecs_compatibility? 56 | should include("log" => { 'level' => "F" }) 57 | else 58 | should include("severity" => "F") 59 | end 60 | 61 | if ecs_compatibility? 62 | should include("mongodb" => hash_including("context" => "conn11")) 63 | else 64 | should include("context" => "conn11") 65 | end 66 | end 67 | 68 | it "generates a message field" do 69 | expect(subject["message"]).to eql [ message, "Got signal: 11 (Segmentation fault)." ] 70 | end 71 | end 72 | 73 | context "parsing a message with a multiwords context" do 74 | 75 | let(:message) { "2015-04-23T06:57:28.256+0200 I JOURNAL [journal writer] Journal writer thread started" } 76 | 77 | it 'matches' do 78 | should include("timestamp" => "2015-04-23T06:57:28.256+0200") 79 | 80 | if ecs_compatibility? 81 | should include("log" => { 'level' => "I" }) 82 | else 83 | should include("severity" => "I") 84 | end 85 | 86 | if ecs_compatibility? 87 | should include("mongodb" => hash_including("component" => "JOURNAL")) 88 | else 89 | should include("component" => "JOURNAL") 90 | end 91 | 92 | if ecs_compatibility? 
93 | should include("mongodb" => hash_including("context" => "journal writer")) 94 | else 95 | should include("context" => "journal writer") 96 | end 97 | end 98 | 99 | it "generates a message field" do 100 | expect(subject["message"]).to include("Journal writer thread started") 101 | end 102 | 103 | context '3.6 simple log line' do 104 | 105 | let(:message) do 106 | '2020-08-13T11:58:09.672+0200 I NETWORK [conn2] end connection 127.0.0.1:41258 (1 connection now open)' 107 | end 108 | 109 | it 'matches' do 110 | should include("timestamp" => "2020-08-13T11:58:09.672+0200") 111 | 112 | if ecs_compatibility? 113 | should include("mongodb" => hash_including("component" => "NETWORK")) 114 | else 115 | should include("component" => "NETWORK") 116 | end 117 | 118 | if ecs_compatibility? 119 | should include("mongodb" => hash_including("context" => "conn2")) 120 | else 121 | should include("context" => "conn2") 122 | end 123 | 124 | expect(subject["message"]).to include("end connection 127.0.0.1:41258 (1 connection now open)") 125 | end 126 | 127 | end 128 | 129 | context '3.6 long log line' do 130 | 131 | let(:command) do 132 | 'command config.$cmd command: createIndexes { createIndexes: "system.sessions", ' + 133 | 'indexes: [ { key: { lastUse: 1 }, name: "lsidTTLIndex", expireAfterSeconds: 1800 } ], $db: "config" } ' + 134 | 'numYields:0 reslen:101 locks:{ Global: { acquireCount: { r: 2, w: 2 } }, Database: { acquireCount: { w: 2 } }, ' + 135 | 'Collection: { acquireCount: { w: 1 } } } protocol:op_msg 0ms' 136 | end 137 | 138 | let(:message) do 139 | '2020-08-13T11:57:45.259+0200 I COMMAND [LogicalSessionCacheRefresh] ' + command 140 | end 141 | 142 | it 'matches' do 143 | should include("timestamp" => "2020-08-13T11:57:45.259+0200") 144 | 145 | if ecs_compatibility? 146 | should include("mongodb" => hash_including("component" => "COMMAND")) 147 | else 148 | should include("component" => "COMMAND") 149 | end 150 | 151 | if ecs_compatibility? 
152 | should include("mongodb" => hash_including("context" => "LogicalSessionCacheRefresh")) 153 | else 154 | should include("context" => "LogicalSessionCacheRefresh") 155 | end 156 | 157 | expect(subject["message"]).to eql [message, command] 158 | end 159 | 160 | end 161 | 162 | end 163 | 164 | context "parsing a message without context" do 165 | 166 | let(:message) { "2015-04-23T07:00:13.864+0200 I CONTROL Ctrl-C signal" } 167 | 168 | it 'matches' do 169 | should include("timestamp" => "2015-04-23T07:00:13.864+0200") 170 | 171 | if ecs_compatibility? 172 | should include("log" => { 'level' => "I" }) 173 | else 174 | should include("severity" => "I") 175 | end 176 | 177 | if ecs_compatibility? 178 | should include("mongodb" => hash_including("component" => "CONTROL")) 179 | else 180 | should include("component" => "CONTROL") 181 | end 182 | 183 | if ecs_compatibility? 184 | expect( grok_result['mongodb'].keys ).to_not include("context") 185 | else 186 | should_not have_key("context") 187 | end 188 | end 189 | 190 | it "generates a message field" do 191 | expect(subject["message"]).to eql [ message, "Ctrl-C signal" ] 192 | end 193 | end 194 | end 195 | 196 | describe_pattern "MONGO_SLOWQUERY", ['legacy', 'ecs-v1'] do 197 | 198 | let(:message) do 199 | "[conn11485496] query sample.User query: { clientId: 12345 } ntoreturn:0 ntoskip:0 nscanned:287011 keyUpdates:0 numYields: 2 locks(micros) r:4187700 nreturned:18 reslen:14019 2340ms" 200 | end 201 | 202 | it do 203 | if ecs_compatibility? 
204 | should include("mongodb" => { 205 | "database" => "sample", "collection" => "User", 206 | "query" => { "original"=>"{ clientId: 12345 }" }, 207 | "profile" => { 208 | "op" => "query", 209 | "ntoreturn" => 0, "ntoskip" => 0, "nscanned" => 287011, "nreturned" => 18, 210 | "duration" => 2340 211 | } 212 | }) 213 | else 214 | should include("database" => "sample", "collection" => "User") 215 | should include("ntoreturn" => '0', "ntoskip" => '0', "nscanned" => "287011", "nreturned" => "18") 216 | should include("query" => "{ clientId: 12345 }") 217 | should include("duration" => "2340") 218 | end 219 | end 220 | 221 | end 222 | -------------------------------------------------------------------------------- /spec/patterns/redis_spec.rb: -------------------------------------------------------------------------------- 1 | # encoding: utf-8 2 | require "spec_helper" 3 | require "logstash/patterns/core" 4 | 5 | describe_pattern 'REDISTIMESTAMP', [ 'legacy', 'ecs-v1' ] do 6 | 7 | let(:message) { '14 Nov 07:01:22.119'} 8 | 9 | it "a pattern pass the grok expression" do 10 | expect(grok_match(pattern, message)).to pass 11 | end 12 | 13 | end 14 | 15 | describe_pattern 'REDISLOG', [ 'legacy', 'ecs-v1' ] do 16 | 17 | let(:message) { "[4018] 14 Nov 07:01:22.119 * Background saving terminated with success" } 18 | 19 | it "a pattern pass the grok expression" do 20 | expect(grok).to pass 21 | end 22 | 23 | it "generates the pid field" do 24 | if ecs_compatibility? 
25 | expect(grok).to include("process" => { 'pid' => 4018 }) 26 | else 27 | expect(grok).to include("pid" => "4018") 28 | end 29 | end 30 | 31 | end 32 | 33 | describe_pattern 'REDISMONLOG', [ 'legacy', 'ecs-v1' ] do 34 | 35 | context "simple command" do 36 | 37 | let(:message) { "1470637867.953466 [0 195.168.1.1:52500] \"info\"" } 38 | 39 | it "a pattern pass the grok expression" do 40 | expect(grok).to pass 41 | end 42 | 43 | it "generates the timestamp field" do 44 | expect(grok).to include("timestamp" => "1470637867.953466") 45 | end 46 | 47 | it "generates the database field" do 48 | if ecs_compatibility? 49 | expect(grok).to include("redis" => hash_including('database' => { 'id' => '0' })) 50 | else 51 | expect(grok).to include("database" => "0") 52 | end 53 | end 54 | 55 | it "generates the client field" do 56 | if ecs_compatibility? 57 | expect(grok).to include("client" => hash_including('ip' => '195.168.1.1')) 58 | else 59 | expect(grok).to include("client" => "195.168.1.1") 60 | end 61 | end 62 | 63 | it "generates the port field" do 64 | if ecs_compatibility? 65 | expect(grok).to include("client" => hash_including('port' => 52500)) 66 | else 67 | expect(grok).to include("port" => "52500") 68 | end 69 | end 70 | 71 | it "generates the command field" do 72 | if ecs_compatibility? 73 | expect(grok).to include("redis" => hash_including('command' => { 'name' => 'info' })) 74 | else 75 | expect(grok).to include("command" => "info") 76 | end 77 | end 78 | 79 | end 80 | 81 | context "one param command" do 82 | 83 | let(:message) { "1339518083.107412 [0 127.0.0.1:60866] \"keys\" \"*\"" } 84 | 85 | it "a pattern pass the grok expression" do 86 | expect(grok).to pass 87 | end 88 | 89 | it "generates the timestamp field" do 90 | expect(grok).to include("timestamp" => "1339518083.107412") 91 | end 92 | 93 | it "generates the database field" do 94 | if ecs_compatibility? 
95 | expect(grok).to include("redis" => hash_including('database' => { 'id' => '0' })) 96 | else 97 | expect(grok).to include("database" => "0") 98 | end 99 | end 100 | 101 | it "generates the client field" do 102 | if ecs_compatibility? 103 | expect(grok).to include("client" => hash_including('ip' => '127.0.0.1')) 104 | else 105 | expect(grok).to include("client" => "127.0.0.1") 106 | end 107 | end 108 | 109 | it "generates the port field" do 110 | if ecs_compatibility? 111 | expect(grok).to include("client" => hash_including('port' => 60866)) 112 | else 113 | expect(grok).to include("port" => "60866") 114 | end 115 | end 116 | 117 | it "generates the command field" do 118 | if ecs_compatibility? 119 | expect(grok).to include("redis" => hash_including('command' => hash_including('name' => 'keys'))) 120 | else 121 | expect(grok).to include("command" => "keys") 122 | end 123 | end 124 | 125 | it "generates the params field" do 126 | if ecs_compatibility? 127 | expect(grok).to include("redis" => hash_including('command' => hash_including('args' => '"*"'))) 128 | else 129 | expect(grok).to include("params" => "\"*\"") 130 | end 131 | end 132 | 133 | end 134 | 135 | end 136 | 137 | describe_pattern "REDISMONLOG", [ 'legacy', 'ecs-v1' ] do 138 | 139 | context 'two param command' do 140 | 141 | let(:message) { "1470637925.186681 [0 127.0.0.1:39404] \"rpush\" \"my:special:key\" \"{\\\"data\\\":\"cdr\\\",\\\"payload\\\":\\\"json\\\"}\"" } 142 | 143 | it "a pattern pass the grok expression" do 144 | expect(grok).to pass 145 | end 146 | 147 | it "generates the timestamp field" do 148 | expect(grok).to include("timestamp" => "1470637925.186681") 149 | end 150 | 151 | it "generates the database field" do 152 | if ecs_compatibility? 
153 | expect(grok).to include("redis" => hash_including('database' => hash_including('id' => '0'))) 154 | else 155 | expect(grok).to include("database" => "0") 156 | end 157 | end 158 | 159 | it "generates the client field" do 160 | if ecs_compatibility? 161 | expect(grok).to include("client" => hash_including('ip' => '127.0.0.1')) 162 | else 163 | expect(grok).to include("client" => "127.0.0.1") 164 | end 165 | end 166 | 167 | it "generates the port field" do 168 | if ecs_compatibility? 169 | expect(grok).to include("client" => hash_including('port' => 39404)) 170 | else 171 | expect(grok).to include("port" => "39404") 172 | end 173 | end 174 | 175 | it "generates the command field" do 176 | if ecs_compatibility? 177 | expect(grok).to include("redis" => hash_including('command' => hash_including('name' => 'rpush'))) 178 | else 179 | expect(grok).to include("command" => "rpush") 180 | end 181 | end 182 | 183 | it "generates the params field" do 184 | if ecs_compatibility? 185 | expect(grok).to include("redis" => hash_including('command' => hash_including('args' => "\"my:special:key\" \"{\\\"data\\\":\"cdr\\\",\\\"payload\\\":\\\"json\\\"}\""))) 186 | else 187 | expect(grok).to include("params" => "\"my:special:key\" \"{\\\"data\\\":\"cdr\\\",\\\"payload\\\":\\\"json\\\"}\"") 188 | end 189 | end 190 | 191 | end 192 | 193 | context "variadic command" do 194 | 195 | let(:message) { "1470637875.777457 [15 195.168.1.1:52500] \"intentionally\" \"broken\" \"variadic\" \"log\" \"entry\"" } 196 | 197 | it "a pattern pass the grok expression" do 198 | expect(grok).to pass 199 | end 200 | 201 | it "generates the timestamp field" do 202 | expect(grok).to include("timestamp" => "1470637875.777457") 203 | end 204 | 205 | it "generates the database field" do 206 | if ecs_compatibility? 
207 | expect(grok).to include("redis" => hash_including('database' => hash_including('id' => '15'))) 208 | else 209 | expect(grok).to include("database" => "15") 210 | end 211 | end 212 | 213 | it "generates the client field" do 214 | if ecs_compatibility? 215 | expect(grok).to include("client" => hash_including('ip' => '195.168.1.1')) 216 | else 217 | expect(grok).to include("client" => "195.168.1.1") 218 | end 219 | end 220 | 221 | it "generates the port field" do 222 | if ecs_compatibility? 223 | expect(grok).to include("client" => hash_including('port' => 52500)) 224 | else 225 | expect(grok).to include("port" => "52500") 226 | end 227 | end 228 | 229 | it "generates the command field" do 230 | if ecs_compatibility? 231 | expect(grok).to include("redis" => hash_including('command' => hash_including('name' => 'intentionally'))) 232 | else 233 | expect(grok).to include("command" => "intentionally") 234 | end 235 | end 236 | 237 | it "generates the params field" do 238 | if ecs_compatibility? 
239 | expect(grok).to include("redis" => hash_including('command' => hash_including('args' => "\"broken\" \"variadic\" \"log\" \"entry\""))) 240 | else 241 | expect(grok).to include("params" => "\"broken\" \"variadic\" \"log\" \"entry\"") 242 | end 243 | end 244 | 245 | end 246 | 247 | end 248 | -------------------------------------------------------------------------------- /spec/patterns/exim_spec.rb: -------------------------------------------------------------------------------- 1 | # encoding: utf-8 2 | require "spec_helper" 3 | require "logstash/patterns/core" 4 | 5 | describe_pattern 'EXIM', ['legacy', 'ecs-v1'] do 6 | 7 | context 'message arrival (old)' do 8 | 9 | let(:message) do 10 | "1995-10-31 08:57:53 0tACW1-0005MB-00 <= kryten@dwarf.fict.example H=mailer.fict.example [192.168.123.123] " + 11 | "U=exim P=smtp S=5678 id=f828ca60127d8646a0fa75cbf8db9ba3@dwarf.fict.example" 12 | end 13 | 14 | it "matches" do 15 | expect(grok).to include("timestamp" => "1995-10-31 08:57:53") 16 | 17 | if ecs_compatibility? 
18 | expect(grok).to include("exim"=>{"log"=>{ 19 | "flags"=>"<=", 20 | "sender"=>{"email"=>"kryten@dwarf.fict.example"}, 21 | "message"=>{"id"=>"0tACW1-0005MB-00", "size"=>5678}, 22 | "header_id"=>"f828ca60127d8646a0fa75cbf8db9ba3@dwarf.fict.example" 23 | }}) 24 | expect(grok).to include("source"=>{"address"=>"mailer.fict.example", "ip"=>"192.168.123.123"}) 25 | expect(grok).to include("network"=>{"protocol"=>"smtp"}) 26 | else 27 | expect(grok).to include("exim_year" => "1995", "exim_month" => "10", "exim_day" => "31", "@version" => "1", "exim_time" => "08:57:53") 28 | expect(grok.keys).to_not include("pid") 29 | expect(grok).to include("exim_sender_email" => "kryten@dwarf.fict.example") 30 | expect(grok).to include("exim_flags" => "<=") 31 | expect(grok).to include("exim_msg_size" => "5678") 32 | expect(grok).to include("exim_msgid" => "0tACW1-0005MB-00") 33 | expect(grok).to include("remote_hostname" => "mailer.fict.example", "remote_host" => "192.168.123.123") 34 | expect(grok).to include("protocol" => "smtp") 35 | expect(grok).to include("exim_header_id" => "f828ca60127d8646a0fa75cbf8db9ba3@dwarf.fict.example") 36 | end 37 | 38 | expect(grok).to include("message" => message) 39 | end 40 | 41 | end 42 | 43 | context 'message arrival (new)' do 44 | let(:message) do 45 | '2010-09-13 05:00:13 [1487] 1Ov4tU-0000Nz-Rm <= mailling.list@domain.com ' + 46 | 'H=mailhost.domain.com [208.42.54.2]:51792 I=[67.215.162.175]:25 P=esmtps X=TLSv1:AES256-SHA:256 CV=no S=21778 ' + 47 | 'id=384a86a39e83be0d9b3a94d1feb3119f@domain.com T="Daily List: Chameleon" for user@example.com' 48 | end 49 | 50 | it "matches" do 51 | expect(grok).to include("timestamp" => "2010-09-13 05:00:13") # new in legacy mode 52 | 53 | if ecs_compatibility? 
54 | expect(grok).to include("process"=>{"pid"=>1487}) 55 | expect(grok).to include("exim"=>{"log"=>hash_including( 56 | "message"=>{"id"=>"1Ov4tU-0000Nz-Rm", "size"=>21778, "subject"=>"Daily List: Chameleon"}, 57 | "flags"=>"<=", 58 | "header_id"=>"384a86a39e83be0d9b3a94d1feb3119f@domain.com", 59 | "sender"=>{"email"=>"mailling.list@domain.com"}, 60 | "recipient"=>{"email"=>"user@example.com"}, 61 | )}) 62 | expect(grok).to include("source"=>{"address"=>"mailhost.domain.com", "ip"=>"208.42.54.2", "port"=>51792}) 63 | expect(grok).to include("destination"=>{"ip"=>"67.215.162.175", "port"=>25}) 64 | else 65 | 66 | expect(grok).to include("exim_year" => "2010", "exim_month" => "09", "exim_day" => "13", "exim_time" => "05:00:13") 67 | expect(grok).to include("pid" => "1487") # new 68 | expect(grok).to include("exim_sender_email" => "mailling.list@domain.com") # new 69 | expect(grok).to include("remote_hostname" => "mailhost.domain.com", "remote_host" => "208.42.54.2", "remote_port" => "51792") # (remote_port) new 70 | expect(grok).to include("exim_interface" => "67.215.162.175", "exim_interface_port" => "25") 71 | expect(grok).to include("protocol" => "esmtps") 72 | expect(grok).to include("exim_msg_size" => "21778") 73 | expect(grok).to include("exim_header_id" => "384a86a39e83be0d9b3a94d1feb3119f@domain.com") 74 | expect(grok).to include("exim_subject" => '"Daily List: Chameleon"') 75 | expect(grok).to include("exim_recipient_email" => "user@example.com") # new 76 | end 77 | 78 | expect(grok).to include("message" => message) 79 | end 80 | 81 | end 82 | 83 | context 'message arrival (simple)' do 84 | 85 | let(:message) do 86 | '2020-02-11 17:09:46 1j1Z2g-00Faoy-Uh <= example@strawberry.active-ns.com U=example P=local ' + 87 | 'T="[Examples Galore] Please moderate: \"Hello world!\"" for admin@example.net' 88 | end 89 | 90 | it "matches" do 91 | expect(grok).to include("timestamp"=>"2020-02-11 17:09:46") 92 | if ecs_compatibility? 
93 | expect(grok).to include("exim"=>{"log"=>{ 94 | "flags"=>"<=", 95 | "message"=>{"id"=>"1j1Z2g-00Faoy-Uh", "subject"=>'[Examples Galore] Please moderate: \\"Hello world!\\"'}, 96 | "sender"=>{"email"=>"example@strawberry.active-ns.com"}, 97 | "recipient"=>{"email"=>"admin@example.net"} 98 | }}) 99 | expect(grok).to include("network"=>{"protocol"=>"local"}) 100 | else 101 | expect(grok).to include( 102 | "exim_msgid"=>"1j1Z2g-00Faoy-Uh", 103 | "exim_sender_email"=>"example@strawberry.active-ns.com", 104 | "exim_flags"=>"<=", 105 | "protocol"=>"local", 106 | "exim_subject"=>"\"[Examples Galore] Please moderate: \\\"Hello world!\\\"\"" 107 | ) 108 | end 109 | end 110 | 111 | end 112 | 113 | context 'message arrival with quoted hostname' do 114 | 115 | let(:message) do 116 | '2013-03-20 12:44:02 1UIIN7-0004t9-8R <= root@example.com ' + 117 | 'H=localhost (hostname.example.com) [127.0.0.1] ' + 118 | 'P=esmtps X=TLSv1:DHE-RSA-AES256-SHA:256 S=811 id=201303201244.r2KCi11V018784@hostname.example.com' 119 | end 120 | 121 | it "matches" do 122 | expect(grok).to include("timestamp"=>"2013-03-20 12:44:02") 123 | 124 | if ecs_compatibility? 
125 | expect(grok).to include("exim"=>{"log"=>hash_including( 126 | "sender"=>{ "email"=>"root@example.com" }, 127 | "message"=>{ "id"=>"1UIIN7-0004t9-8R", "size"=>811 }, 128 | "header_id"=>"201303201244.r2KCi11V018784@hostname.example.com", 129 | "remote_address"=>"hostname.example.com")}) 130 | expect(grok).to include("source"=>{"address"=>"localhost", "ip"=>"127.0.0.1"}) 131 | expect(grok).to include("network"=>{"protocol"=>"esmtps"}) 132 | else 133 | expect(grok).to include( 134 | "exim_msgid"=>"1UIIN7-0004t9-8R", 135 | "exim_sender_email"=>"root@example.com", 136 | "exim_flags"=>"<=", 137 | "protocol"=>"esmtps", 138 | "exim_header_id"=>"201303201244.r2KCi11V018784@hostname.example.com" 139 | ) 140 | expect(grok).to include( 141 | "remote_host"=>"127.0.0.1", 142 | "remote_heloname"=>"hostname.example.com", 143 | "remote_hostname"=>"localhost" 144 | ) 145 | end 146 | end 147 | 148 | end 149 | 150 | 151 | context 'message arrival with quoted hostname and port' do 152 | 153 | let(:message) do 154 | '2014-08-10 11:18:35 [28107] 1gsu1C-003dCu-Hb <= aaron@domain.com ' + 155 | 'H=localhost (10.5.40.204) [127.0.0.1]:39753 I=[127.0.0.1]:25 ' + 156 | 'P=esmtpa A=dovecot_plain:aaron@domain.com S=4315 M8S=0 id=d2b648f00f1a1b0813c483d552778dc6@domain.com ' + 157 | "T=\"what's up?!? ;-)\" from for aaron+forward@domain.com" 158 | end 159 | 160 | it "matches" do 161 | if ecs_compatibility? 162 | expect(grok).to include("exim"=>{"log"=>hash_including( 163 | "message"=>hash_including("id"=>"1gsu1C-003dCu-Hb", "subject"=>"what's up?!? 
;-)"), 164 | )}) 165 | expect(grok).to include("destination"=>{"ip"=>"127.0.0.1", "port"=>25}) 166 | expect(grok).to include("exim"=>{"log"=>hash_including( 167 | "sender"=>{"email"=>"aaron@domain.com", 'original'=>'aaron@domain.com'} 168 | )}) 169 | expect(grok).to include("exim"=>{"log"=>hash_including("recipient"=>{"email"=>"aaron+forward@domain.com"})}) 170 | expect(grok).to include("source"=>{"address"=>"localhost", "ip"=>"127.0.0.1", "port"=>39753}) 171 | expect(grok).to include("exim"=>{"log" => hash_including("remote_address"=>'10.5.40.204')}) 172 | else 173 | expect(grok).to include( 174 | "exim_msgid"=>"1gsu1C-003dCu-Hb", 175 | "exim_sender_email"=>"aaron@domain.com", 176 | "exim_flags"=>"<=", 177 | "protocol"=>"esmtpa" 178 | ) 179 | expect(grok).to include( 180 | "remote_host"=>"127.0.0.1", "remote_port"=>"39753", 181 | "remote_heloname"=>"10.5.40.204", 182 | "remote_hostname"=>"localhost" 183 | ) 184 | end 185 | end 186 | 187 | end 188 | 189 | context 'delivery failed' do 190 | 191 | let(:message) do 192 | '2020-02-11 17:09:47 1j1Z2g-00Faoy-Uh ** admin@example.net R=virtual_aliases: No such person at this address.' 193 | end 194 | 195 | it "does not parse" do # matching not implemented 196 | expect(grok['tags']).to include("_grokparsefailure") 197 | end 198 | 199 | end 200 | 201 | end 202 | -------------------------------------------------------------------------------- /patterns/legacy/nagios: -------------------------------------------------------------------------------- 1 | ################################################################################## 2 | ################################################################################## 3 | # Chop Nagios log files to smithereens! 4 | # 5 | # A set of GROK filters to process logfiles generated by Nagios. 6 | # While it does not, this set intends to cover all possible Nagios logs. 
7 | # 8 | # Some more work needs to be done to cover all External Commands: 9 | # http://old.nagios.org/developerinfo/externalcommands/commandlist.php 10 | # 11 | # If you need some support on these rules please contact: 12 | # Jelle Smet http://smetj.net 13 | # 14 | ################################################################################# 15 | ################################################################################# 16 | 17 | NAGIOSTIME \[%{NUMBER:nagios_epoch}\] 18 | 19 | ############################################### 20 | ######## Begin nagios log types 21 | ############################################### 22 | NAGIOS_TYPE_CURRENT_SERVICE_STATE CURRENT SERVICE STATE 23 | NAGIOS_TYPE_CURRENT_HOST_STATE CURRENT HOST STATE 24 | 25 | NAGIOS_TYPE_SERVICE_NOTIFICATION SERVICE NOTIFICATION 26 | NAGIOS_TYPE_HOST_NOTIFICATION HOST NOTIFICATION 27 | 28 | NAGIOS_TYPE_SERVICE_ALERT SERVICE ALERT 29 | NAGIOS_TYPE_HOST_ALERT HOST ALERT 30 | 31 | NAGIOS_TYPE_SERVICE_FLAPPING_ALERT SERVICE FLAPPING ALERT 32 | NAGIOS_TYPE_HOST_FLAPPING_ALERT HOST FLAPPING ALERT 33 | 34 | NAGIOS_TYPE_SERVICE_DOWNTIME_ALERT SERVICE DOWNTIME ALERT 35 | NAGIOS_TYPE_HOST_DOWNTIME_ALERT HOST DOWNTIME ALERT 36 | 37 | NAGIOS_TYPE_PASSIVE_SERVICE_CHECK PASSIVE SERVICE CHECK 38 | NAGIOS_TYPE_PASSIVE_HOST_CHECK PASSIVE HOST CHECK 39 | 40 | NAGIOS_TYPE_SERVICE_EVENT_HANDLER SERVICE EVENT HANDLER 41 | NAGIOS_TYPE_HOST_EVENT_HANDLER HOST EVENT HANDLER 42 | 43 | NAGIOS_TYPE_EXTERNAL_COMMAND EXTERNAL COMMAND 44 | NAGIOS_TYPE_TIMEPERIOD_TRANSITION TIMEPERIOD TRANSITION 45 | ############################################### 46 | ######## End nagios log types 47 | ############################################### 48 | 49 | ############################################### 50 | ######## Begin external check types 51 | ############################################### 52 | NAGIOS_EC_DISABLE_SVC_CHECK DISABLE_SVC_CHECK 53 | NAGIOS_EC_ENABLE_SVC_CHECK ENABLE_SVC_CHECK 54 | NAGIOS_EC_DISABLE_HOST_CHECK 
DISABLE_HOST_CHECK 55 | NAGIOS_EC_ENABLE_HOST_CHECK ENABLE_HOST_CHECK 56 | NAGIOS_EC_PROCESS_SERVICE_CHECK_RESULT PROCESS_SERVICE_CHECK_RESULT 57 | NAGIOS_EC_PROCESS_HOST_CHECK_RESULT PROCESS_HOST_CHECK_RESULT 58 | NAGIOS_EC_SCHEDULE_SERVICE_DOWNTIME SCHEDULE_SERVICE_DOWNTIME 59 | NAGIOS_EC_SCHEDULE_HOST_DOWNTIME SCHEDULE_HOST_DOWNTIME 60 | NAGIOS_EC_DISABLE_HOST_SVC_NOTIFICATIONS DISABLE_HOST_SVC_NOTIFICATIONS 61 | NAGIOS_EC_ENABLE_HOST_SVC_NOTIFICATIONS ENABLE_HOST_SVC_NOTIFICATIONS 62 | NAGIOS_EC_DISABLE_HOST_NOTIFICATIONS DISABLE_HOST_NOTIFICATIONS 63 | NAGIOS_EC_ENABLE_HOST_NOTIFICATIONS ENABLE_HOST_NOTIFICATIONS 64 | NAGIOS_EC_DISABLE_SVC_NOTIFICATIONS DISABLE_SVC_NOTIFICATIONS 65 | NAGIOS_EC_ENABLE_SVC_NOTIFICATIONS ENABLE_SVC_NOTIFICATIONS 66 | ############################################### 67 | ######## End external check types 68 | ############################################### 69 | NAGIOS_WARNING Warning:%{SPACE}%{GREEDYDATA:nagios_message} 70 | 71 | NAGIOS_CURRENT_SERVICE_STATE %{NAGIOS_TYPE_CURRENT_SERVICE_STATE:nagios_type}: %{DATA:nagios_hostname};%{DATA:nagios_service};%{DATA:nagios_state};%{DATA:nagios_statetype};%{DATA:nagios_statecode};%{GREEDYDATA:nagios_message} 72 | NAGIOS_CURRENT_HOST_STATE %{NAGIOS_TYPE_CURRENT_HOST_STATE:nagios_type}: %{DATA:nagios_hostname};%{DATA:nagios_state};%{DATA:nagios_statetype};%{DATA:nagios_statecode};%{GREEDYDATA:nagios_message} 73 | 74 | NAGIOS_SERVICE_NOTIFICATION %{NAGIOS_TYPE_SERVICE_NOTIFICATION:nagios_type}: %{DATA:nagios_notifyname};%{DATA:nagios_hostname};%{DATA:nagios_service};%{DATA:nagios_state};%{DATA:nagios_contact};%{GREEDYDATA:nagios_message} 75 | NAGIOS_HOST_NOTIFICATION %{NAGIOS_TYPE_HOST_NOTIFICATION:nagios_type}: %{DATA:nagios_notifyname};%{DATA:nagios_hostname};%{DATA:nagios_state};%{DATA:nagios_contact};%{GREEDYDATA:nagios_message} 76 | 77 | NAGIOS_SERVICE_ALERT %{NAGIOS_TYPE_SERVICE_ALERT:nagios_type}: 
%{DATA:nagios_hostname};%{DATA:nagios_service};%{DATA:nagios_state};%{DATA:nagios_statelevel};%{NUMBER:nagios_attempt};%{GREEDYDATA:nagios_message} 78 | NAGIOS_HOST_ALERT %{NAGIOS_TYPE_HOST_ALERT:nagios_type}: %{DATA:nagios_hostname};%{DATA:nagios_state};%{DATA:nagios_statelevel};%{NUMBER:nagios_attempt};%{GREEDYDATA:nagios_message} 79 | 80 | NAGIOS_SERVICE_FLAPPING_ALERT %{NAGIOS_TYPE_SERVICE_FLAPPING_ALERT:nagios_type}: %{DATA:nagios_hostname};%{DATA:nagios_service};%{DATA:nagios_state};%{GREEDYDATA:nagios_message} 81 | NAGIOS_HOST_FLAPPING_ALERT %{NAGIOS_TYPE_HOST_FLAPPING_ALERT:nagios_type}: %{DATA:nagios_hostname};%{DATA:nagios_state};%{GREEDYDATA:nagios_message} 82 | 83 | NAGIOS_SERVICE_DOWNTIME_ALERT %{NAGIOS_TYPE_SERVICE_DOWNTIME_ALERT:nagios_type}: %{DATA:nagios_hostname};%{DATA:nagios_service};%{DATA:nagios_state};%{GREEDYDATA:nagios_comment} 84 | NAGIOS_HOST_DOWNTIME_ALERT %{NAGIOS_TYPE_HOST_DOWNTIME_ALERT:nagios_type}: %{DATA:nagios_hostname};%{DATA:nagios_state};%{GREEDYDATA:nagios_comment} 85 | 86 | NAGIOS_PASSIVE_SERVICE_CHECK %{NAGIOS_TYPE_PASSIVE_SERVICE_CHECK:nagios_type}: %{DATA:nagios_hostname};%{DATA:nagios_service};%{DATA:nagios_state};%{GREEDYDATA:nagios_comment} 87 | NAGIOS_PASSIVE_HOST_CHECK %{NAGIOS_TYPE_PASSIVE_HOST_CHECK:nagios_type}: %{DATA:nagios_hostname};%{DATA:nagios_state};%{GREEDYDATA:nagios_comment} 88 | 89 | NAGIOS_SERVICE_EVENT_HANDLER %{NAGIOS_TYPE_SERVICE_EVENT_HANDLER:nagios_type}: %{DATA:nagios_hostname};%{DATA:nagios_service};%{DATA:nagios_state};%{DATA:nagios_statelevel};%{DATA:nagios_event_handler_name} 90 | NAGIOS_HOST_EVENT_HANDLER %{NAGIOS_TYPE_HOST_EVENT_HANDLER:nagios_type}: %{DATA:nagios_hostname};%{DATA:nagios_state};%{DATA:nagios_statelevel};%{DATA:nagios_event_handler_name} 91 | 92 | NAGIOS_TIMEPERIOD_TRANSITION %{NAGIOS_TYPE_TIMEPERIOD_TRANSITION:nagios_type}: %{DATA:nagios_service};%{NUMBER:nagios_unknown1};%{NUMBER:nagios_unknown2} 93 | 94 | #################### 95 | #### External checks 96 | 
#################### 97 | 98 | #Disable host & service check 99 | NAGIOS_EC_LINE_DISABLE_SVC_CHECK %{NAGIOS_TYPE_EXTERNAL_COMMAND:nagios_type}: %{NAGIOS_EC_DISABLE_SVC_CHECK:nagios_command};%{DATA:nagios_hostname};%{DATA:nagios_service} 100 | NAGIOS_EC_LINE_DISABLE_HOST_CHECK %{NAGIOS_TYPE_EXTERNAL_COMMAND:nagios_type}: %{NAGIOS_EC_DISABLE_HOST_CHECK:nagios_command};%{DATA:nagios_hostname} 101 | 102 | #Enable host & service check 103 | NAGIOS_EC_LINE_ENABLE_SVC_CHECK %{NAGIOS_TYPE_EXTERNAL_COMMAND:nagios_type}: %{NAGIOS_EC_ENABLE_SVC_CHECK:nagios_command};%{DATA:nagios_hostname};%{DATA:nagios_service} 104 | NAGIOS_EC_LINE_ENABLE_HOST_CHECK %{NAGIOS_TYPE_EXTERNAL_COMMAND:nagios_type}: %{NAGIOS_EC_ENABLE_HOST_CHECK:nagios_command};%{DATA:nagios_hostname} 105 | 106 | #Process host & service check 107 | NAGIOS_EC_LINE_PROCESS_SERVICE_CHECK_RESULT %{NAGIOS_TYPE_EXTERNAL_COMMAND:nagios_type}: %{NAGIOS_EC_PROCESS_SERVICE_CHECK_RESULT:nagios_command};%{DATA:nagios_hostname};%{DATA:nagios_service};%{DATA:nagios_state};%{GREEDYDATA:nagios_check_result} 108 | NAGIOS_EC_LINE_PROCESS_HOST_CHECK_RESULT %{NAGIOS_TYPE_EXTERNAL_COMMAND:nagios_type}: %{NAGIOS_EC_PROCESS_HOST_CHECK_RESULT:nagios_command};%{DATA:nagios_hostname};%{DATA:nagios_state};%{GREEDYDATA:nagios_check_result} 109 | 110 | #Disable host & service notifications 111 | NAGIOS_EC_LINE_DISABLE_HOST_SVC_NOTIFICATIONS %{NAGIOS_TYPE_EXTERNAL_COMMAND:nagios_type}: %{NAGIOS_EC_DISABLE_HOST_SVC_NOTIFICATIONS:nagios_command};%{GREEDYDATA:nagios_hostname} 112 | NAGIOS_EC_LINE_DISABLE_HOST_NOTIFICATIONS %{NAGIOS_TYPE_EXTERNAL_COMMAND:nagios_type}: %{NAGIOS_EC_DISABLE_HOST_NOTIFICATIONS:nagios_command};%{GREEDYDATA:nagios_hostname} 113 | NAGIOS_EC_LINE_DISABLE_SVC_NOTIFICATIONS %{NAGIOS_TYPE_EXTERNAL_COMMAND:nagios_type}: %{NAGIOS_EC_DISABLE_SVC_NOTIFICATIONS:nagios_command};%{DATA:nagios_hostname};%{GREEDYDATA:nagios_service} 114 | 115 | #Enable host & service notifications 116 | NAGIOS_EC_LINE_ENABLE_HOST_SVC_NOTIFICATIONS 
%{NAGIOS_TYPE_EXTERNAL_COMMAND:nagios_type}: %{NAGIOS_EC_ENABLE_HOST_SVC_NOTIFICATIONS:nagios_command};%{GREEDYDATA:nagios_hostname} 117 | NAGIOS_EC_LINE_ENABLE_HOST_NOTIFICATIONS %{NAGIOS_TYPE_EXTERNAL_COMMAND:nagios_type}: %{NAGIOS_EC_ENABLE_HOST_NOTIFICATIONS:nagios_command};%{GREEDYDATA:nagios_hostname} 118 | NAGIOS_EC_LINE_ENABLE_SVC_NOTIFICATIONS %{NAGIOS_TYPE_EXTERNAL_COMMAND:nagios_type}: %{NAGIOS_EC_ENABLE_SVC_NOTIFICATIONS:nagios_command};%{DATA:nagios_hostname};%{GREEDYDATA:nagios_service} 119 | 120 | #Schedule host & service downtime 121 | NAGIOS_EC_LINE_SCHEDULE_HOST_DOWNTIME %{NAGIOS_TYPE_EXTERNAL_COMMAND:nagios_type}: %{NAGIOS_EC_SCHEDULE_HOST_DOWNTIME:nagios_command};%{DATA:nagios_hostname};%{NUMBER:nagios_start_time};%{NUMBER:nagios_end_time};%{NUMBER:nagios_fixed};%{NUMBER:nagios_trigger_id};%{NUMBER:nagios_duration};%{DATA:author};%{DATA:comment} 122 | 123 | #End matching line 124 | NAGIOSLOGLINE %{NAGIOSTIME} (?:%{NAGIOS_WARNING}|%{NAGIOS_CURRENT_SERVICE_STATE}|%{NAGIOS_CURRENT_HOST_STATE}|%{NAGIOS_SERVICE_NOTIFICATION}|%{NAGIOS_HOST_NOTIFICATION}|%{NAGIOS_SERVICE_ALERT}|%{NAGIOS_HOST_ALERT}|%{NAGIOS_SERVICE_FLAPPING_ALERT}|%{NAGIOS_HOST_FLAPPING_ALERT}|%{NAGIOS_SERVICE_DOWNTIME_ALERT}|%{NAGIOS_HOST_DOWNTIME_ALERT}|%{NAGIOS_PASSIVE_SERVICE_CHECK}|%{NAGIOS_PASSIVE_HOST_CHECK}|%{NAGIOS_SERVICE_EVENT_HANDLER}|%{NAGIOS_HOST_EVENT_HANDLER}|%{NAGIOS_TIMEPERIOD_TRANSITION}|%{NAGIOS_EC_LINE_DISABLE_SVC_CHECK}|%{NAGIOS_EC_LINE_ENABLE_SVC_CHECK}|%{NAGIOS_EC_LINE_DISABLE_HOST_CHECK}|%{NAGIOS_EC_LINE_ENABLE_HOST_CHECK}|%{NAGIOS_EC_LINE_PROCESS_HOST_CHECK_RESULT}|%{NAGIOS_EC_LINE_PROCESS_SERVICE_CHECK_RESULT}|%{NAGIOS_EC_LINE_SCHEDULE_HOST_DOWNTIME}|%{NAGIOS_EC_LINE_DISABLE_HOST_SVC_NOTIFICATIONS}|%{NAGIOS_EC_LINE_ENABLE_HOST_SVC_NOTIFICATIONS}|%{NAGIOS_EC_LINE_DISABLE_HOST_NOTIFICATIONS}|%{NAGIOS_EC_LINE_ENABLE_HOST_NOTIFICATIONS}|%{NAGIOS_EC_LINE_DISABLE_SVC_NOTIFICATIONS}|%{NAGIOS_EC_LINE_ENABLE_SVC_NOTIFICATIONS}) 125 | 
-------------------------------------------------------------------------------- /patterns/legacy/firewalls: -------------------------------------------------------------------------------- 1 | # NetScreen firewall logs 2 | NETSCREENSESSIONLOG %{SYSLOGTIMESTAMP:date} %{IPORHOST:device} %{IPORHOST}: NetScreen device_id=%{WORD:device_id}%{DATA}: start_time=%{QUOTEDSTRING:start_time} duration=%{INT:duration} policy_id=%{INT:policy_id} service=%{DATA:service} proto=%{INT:proto} src zone=%{WORD:src_zone} dst zone=%{WORD:dst_zone} action=%{WORD:action} sent=%{INT:sent} rcvd=%{INT:rcvd} src=%{IPORHOST:src_ip} dst=%{IPORHOST:dst_ip} src_port=%{INT:src_port} dst_port=%{INT:dst_port} src-xlated ip=%{IPORHOST:src_xlated_ip} port=%{INT:src_xlated_port} dst-xlated ip=%{IPORHOST:dst_xlated_ip} port=%{INT:dst_xlated_port} session_id=%{INT:session_id} reason=%{GREEDYDATA:reason} 3 | 4 | #== Cisco ASA == 5 | CISCO_TAGGED_SYSLOG ^<%{POSINT:syslog_pri}>%{CISCOTIMESTAMP:timestamp}( %{SYSLOGHOST:sysloghost})? ?: %%{CISCOTAG:ciscotag}: 6 | CISCOTIMESTAMP %{MONTH} +%{MONTHDAY}(?: %{YEAR})? %{TIME} 7 | CISCOTAG [A-Z0-9]+-%{INT}-(?:[A-Z0-9_]+) 8 | # Common Particles 9 | CISCO_ACTION Built|Teardown|Deny|Denied|denied|requested|permitted|denied by ACL|discarded|est-allowed|Dropping|created|deleted 10 | CISCO_REASON Duplicate TCP SYN|Failed to locate egress interface|Invalid transport field|No matching connection|DNS Response|DNS Query|(?:%{WORD}\s*)* 11 | CISCO_DIRECTION Inbound|inbound|Outbound|outbound 12 | CISCO_INTERVAL first hit|%{INT}-second interval 13 | CISCO_XLATE_TYPE static|dynamic 14 | # ASA-1-104001 15 | CISCOFW104001 \((?:Primary|Secondary)\) Switching to ACTIVE - %{GREEDYDATA:switch_reason} 16 | # ASA-1-104002 17 | CISCOFW104002 \((?:Primary|Secondary)\) Switching to STANDBY - %{GREEDYDATA:switch_reason} 18 | # ASA-1-104003 19 | CISCOFW104003 \((?:Primary|Secondary)\) Switching to FAILED\. 20 | # ASA-1-104004 21 | CISCOFW104004 \((?:Primary|Secondary)\) Switching to OK\. 
22 | # ASA-1-105003 23 | CISCOFW105003 \((?:Primary|Secondary)\) Monitoring on [Ii]nterface %{GREEDYDATA:interface_name} waiting 24 | # ASA-1-105004 25 | CISCOFW105004 \((?:Primary|Secondary)\) Monitoring on [Ii]nterface %{GREEDYDATA:interface_name} normal 26 | # ASA-1-105005 27 | CISCOFW105005 \((?:Primary|Secondary)\) Lost Failover communications with mate on [Ii]nterface %{GREEDYDATA:interface_name} 28 | # ASA-1-105008 29 | CISCOFW105008 \((?:Primary|Secondary)\) Testing [Ii]nterface %{GREEDYDATA:interface_name} 30 | # ASA-1-105009 31 | CISCOFW105009 \((?:Primary|Secondary)\) Testing on [Ii]nterface %{GREEDYDATA:interface_name} (?:Passed|Failed) 32 | # ASA-2-106001 33 | CISCOFW106001 %{CISCO_DIRECTION:direction} %{WORD:protocol} connection %{CISCO_ACTION:action} from %{IP:src_ip}/%{INT:src_port} to %{IP:dst_ip}/%{INT:dst_port} flags %{GREEDYDATA:tcp_flags} on interface %{GREEDYDATA:interface} 34 | # ASA-2-106006, ASA-2-106007, ASA-2-106010 35 | CISCOFW106006_106007_106010 %{CISCO_ACTION:action} %{CISCO_DIRECTION:direction} %{WORD:protocol} (?:from|src) %{IP:src_ip}/%{INT:src_port}(\(%{DATA:src_fwuser}\))? (?:to|dst) %{IP:dst_ip}/%{INT:dst_port}(\(%{DATA:dst_fwuser}\))? (?:on interface %{DATA:interface}|due to %{CISCO_REASON:reason}) 36 | # ASA-3-106014 37 | CISCOFW106014 %{CISCO_ACTION:action} %{CISCO_DIRECTION:direction} %{WORD:protocol} src %{DATA:src_interface}:%{IP:src_ip}(\(%{DATA:src_fwuser}\))? dst %{DATA:dst_interface}:%{IP:dst_ip}(\(%{DATA:dst_fwuser}\))? 
\(type %{INT:icmp_type}, code %{INT:icmp_code}\) 38 | # ASA-6-106015 39 | CISCOFW106015 %{CISCO_ACTION:action} %{WORD:protocol} \(%{DATA:policy_id}\) from %{IP:src_ip}/%{INT:src_port} to %{IP:dst_ip}/%{INT:dst_port} flags %{DATA:tcp_flags} on interface %{GREEDYDATA:interface} 40 | # ASA-1-106021 41 | CISCOFW106021 %{CISCO_ACTION:action} %{WORD:protocol} reverse path check from %{IP:src_ip} to %{IP:dst_ip} on interface %{GREEDYDATA:interface} 42 | # ASA-4-106023 43 | CISCOFW106023 %{CISCO_ACTION:action}( protocol)? %{WORD:protocol} src %{DATA:src_interface}:%{DATA:src_ip}(/%{INT:src_port})?(\(%{DATA:src_fwuser}\))? dst %{DATA:dst_interface}:%{DATA:dst_ip}(/%{INT:dst_port})?(\(%{DATA:dst_fwuser}\))?( \(type %{INT:icmp_type}, code %{INT:icmp_code}\))? by access-group "?%{DATA:policy_id}"? \[%{DATA:hashcode1}, %{DATA:hashcode2}\] 44 | # ASA-4-106100, ASA-4-106102, ASA-4-106103 45 | CISCOFW106100_2_3 access-list %{NOTSPACE:policy_id} %{CISCO_ACTION:action} %{WORD:protocol} for user '%{DATA:src_fwuser}' %{DATA:src_interface}/%{IP:src_ip}\(%{INT:src_port}\) -> %{DATA:dst_interface}/%{IP:dst_ip}\(%{INT:dst_port}\) hit-cnt %{INT:hit_count} %{CISCO_INTERVAL:interval} \[%{DATA:hashcode1}, %{DATA:hashcode2}\] 46 | # ASA-5-106100 47 | CISCOFW106100 access-list %{NOTSPACE:policy_id} %{CISCO_ACTION:action} %{WORD:protocol} %{DATA:src_interface}/%{IP:src_ip}\(%{INT:src_port}\)(\(%{DATA:src_fwuser}\))? -> %{DATA:dst_interface}/%{IP:dst_ip}\(%{INT:dst_port}\)(\(%{DATA:src_fwuser}\))? hit-cnt %{INT:hit_count} %{CISCO_INTERVAL:interval} \[%{DATA:hashcode1}, %{DATA:hashcode2}\] 48 | # ASA-5-304001 49 | CISCOFW304001 %{IP:src_ip}(\(%{DATA:src_fwuser}\))? 
Accessed URL %{IP:dst_ip}:%{GREEDYDATA:dst_url} 50 | # ASA-6-110002 51 | CISCOFW110002 %{CISCO_REASON:reason} for %{WORD:protocol} from %{DATA:src_interface}:%{IP:src_ip}/%{INT:src_port} to %{IP:dst_ip}/%{INT:dst_port} 52 | # ASA-6-302010 53 | CISCOFW302010 %{INT:connection_count} in use, %{INT:connection_count_max} most used 54 | # ASA-6-302013, ASA-6-302014, ASA-6-302015, ASA-6-302016 55 | CISCOFW302013_302014_302015_302016 %{CISCO_ACTION:action}(?: %{CISCO_DIRECTION:direction})? %{WORD:protocol} connection %{INT:connection_id} for %{DATA:src_interface}:%{IP:src_ip}/%{INT:src_port}( \(%{IP:src_mapped_ip}/%{INT:src_mapped_port}\))?(\(%{DATA:src_fwuser}\))? to %{DATA:dst_interface}:%{IP:dst_ip}/%{INT:dst_port}( \(%{IP:dst_mapped_ip}/%{INT:dst_mapped_port}\))?(\(%{DATA:dst_fwuser}\))?( duration %{TIME:duration} bytes %{INT:bytes})?(?: %{CISCO_REASON:reason})?( \(%{DATA:user}\))? 56 | # ASA-6-302020, ASA-6-302021 57 | CISCOFW302020_302021 %{CISCO_ACTION:action}(?: %{CISCO_DIRECTION:direction})? %{WORD:protocol} connection for faddr %{IP:dst_ip}/%{INT:icmp_seq_num}(?:\(%{DATA:fwuser}\))? gaddr %{IP:src_xlated_ip}/%{INT:icmp_code_xlated} laddr %{IP:src_ip}/%{INT:icmp_code}( \(%{DATA:user}\))? 58 | # ASA-6-305011 59 | CISCOFW305011 %{CISCO_ACTION:action} %{CISCO_XLATE_TYPE:xlate_type} %{WORD:protocol} translation from %{DATA:src_interface}:%{IP:src_ip}(/%{INT:src_port})?(\(%{DATA:src_fwuser}\))? to %{DATA:src_xlated_interface}:%{IP:src_xlated_ip}/%{DATA:src_xlated_port} 60 | # ASA-3-313001, ASA-3-313004, ASA-3-313008 61 | CISCOFW313001_313004_313008 %{CISCO_ACTION:action} %{WORD:protocol} type=%{INT:icmp_type}, code=%{INT:icmp_code} from %{IP:src_ip} on interface %{DATA:interface}( to %{IP:dst_ip})? 62 | # ASA-4-313005 63 | CISCOFW313005 %{CISCO_REASON:reason} for %{WORD:protocol} error message: %{WORD:err_protocol} src %{DATA:err_src_interface}:%{IP:err_src_ip}(\(%{DATA:err_src_fwuser}\))? dst %{DATA:err_dst_interface}:%{IP:err_dst_ip}(\(%{DATA:err_dst_fwuser}\))? 
\(type %{INT:err_icmp_type}, code %{INT:err_icmp_code}\) on %{DATA:interface} interface\. Original IP payload: %{WORD:protocol} src %{IP:orig_src_ip}/%{INT:orig_src_port}(\(%{DATA:orig_src_fwuser}\))? dst %{IP:orig_dst_ip}/%{INT:orig_dst_port}(\(%{DATA:orig_dst_fwuser}\))? 64 | # ASA-5-321001 65 | CISCOFW321001 Resource '%{WORD:resource_name}' limit of %{POSINT:resource_limit} reached for system 66 | # ASA-4-402117 67 | CISCOFW402117 %{WORD:protocol}: Received a non-IPSec packet \(protocol= %{WORD:orig_protocol}\) from %{IP:src_ip} to %{IP:dst_ip} 68 | # ASA-4-402119 69 | CISCOFW402119 %{WORD:protocol}: Received an %{WORD:orig_protocol} packet \(SPI= %{DATA:spi}, sequence number= %{DATA:seq_num}\) from %{IP:src_ip} \(user= %{DATA:user}\) to %{IP:dst_ip} that failed anti-replay checking 70 | # ASA-4-419001 71 | CISCOFW419001 %{CISCO_ACTION:action} %{WORD:protocol} packet from %{DATA:src_interface}:%{IP:src_ip}/%{INT:src_port} to %{DATA:dst_interface}:%{IP:dst_ip}/%{INT:dst_port}, reason: %{GREEDYDATA:reason} 72 | # ASA-4-419002 73 | CISCOFW419002 %{CISCO_REASON:reason} from %{DATA:src_interface}:%{IP:src_ip}/%{INT:src_port} to %{DATA:dst_interface}:%{IP:dst_ip}/%{INT:dst_port} with different initial sequence number 74 | # ASA-4-500004 75 | CISCOFW500004 %{CISCO_REASON:reason} for protocol=%{WORD:protocol}, from %{IP:src_ip}/%{INT:src_port} to %{IP:dst_ip}/%{INT:dst_port} 76 | # ASA-6-602303, ASA-6-602304 77 | CISCOFW602303_602304 %{WORD:protocol}: An %{CISCO_DIRECTION:direction} %{GREEDYDATA:tunnel_type} SA \(SPI= %{DATA:spi}\) between %{IP:src_ip} and %{IP:dst_ip} \(user= %{DATA:user}\) has been %{CISCO_ACTION:action} 78 | # ASA-7-710001, ASA-7-710002, ASA-7-710003, ASA-7-710005, ASA-7-710006 79 | CISCOFW710001_710002_710003_710005_710006 %{WORD:protocol} (?:request|access) %{CISCO_ACTION:action} from %{IP:src_ip}/%{INT:src_port} to %{DATA:dst_interface}:%{IP:dst_ip}/%{INT:dst_port} 80 | # ASA-6-713172 81 | CISCOFW713172 Group = %{GREEDYDATA:group}, IP = 
%{IP:src_ip}, Automatic NAT Detection Status:\s+Remote end\s*%{DATA:is_remote_natted}\s*behind a NAT device\s+This\s+end\s*%{DATA:is_local_natted}\s*behind a NAT device 82 | # ASA-4-733100 83 | CISCOFW733100 \[\s*%{DATA:drop_type}\s*\] drop %{DATA:drop_rate_id} exceeded. Current burst rate is %{INT:drop_rate_current_burst} per second, max configured rate is %{INT:drop_rate_max_burst}; Current average rate is %{INT:drop_rate_current_avg} per second, max configured rate is %{INT:drop_rate_max_avg}; Cumulative total count is %{INT:drop_total_count} 84 | #== End Cisco ASA == 85 | 86 | # Shorewall firewall logs 87 | SHOREWALL (%{SYSLOGTIMESTAMP:timestamp}) (%{WORD:nf_host}) .*Shorewall:(%{WORD:nf_action1})?:(%{WORD:nf_action2})?.*IN=(%{USERNAME:nf_in_interface})?.*(OUT= *MAC=(%{COMMONMAC:nf_dst_mac}):(%{COMMONMAC:nf_src_mac})?|OUT=%{USERNAME:nf_out_interface}).*SRC=(%{IPV4:nf_src_ip}).*DST=(%{IPV4:nf_dst_ip}).*LEN=(%{WORD:nf_len}).?*TOS=(%{WORD:nf_tos}).?*PREC=(%{WORD:nf_prec}).?*TTL=(%{INT:nf_ttl}).?*ID=(%{INT:nf_id}).?*PROTO=(%{WORD:nf_protocol}).?*SPT=(%{INT:nf_src_port}?.*DPT=%{INT:nf_dst_port}?.*) 88 | #== End Shorewall 89 | #== SuSE Firewall 2 == 90 | SFW2 ((%{SYSLOGTIMESTAMP:timestamp})|(%{TIMESTAMP_ISO8601:timestamp}))\s*%{HOSTNAME}\s*kernel\S+\s*(?:%{NAGIOSTIME}\s*)?SFW2\-INext\-%{NOTSPACE:nf_action}\s*IN=%{USERNAME:nf_in_interface}.*OUT=(\s*%{USERNAME:nf_out_interface})?\s*MAC=((%{COMMONMAC:nf_dst_mac}:%{COMMONMAC:nf_src_mac})|(\s*)).*SRC=%{IP:nf_src_ip}\s*DST=%{IP:nf_dst_ip}.*PROTO=%{WORD:nf_protocol}((.*SPT=%{INT:nf_src_port}.*DPT=%{INT:nf_dst_port}.*)|()) 91 | #== End SuSE == 92 | -------------------------------------------------------------------------------- /patterns/ecs-v1/nagios: -------------------------------------------------------------------------------- 1 | ################################################################################## 2 | ################################################################################## 3 | # Chop Nagios log 
files to smithereens! 4 | # 5 | # A set of GROK filters to process logfiles generated by Nagios. 6 | # While it does not yet do so, this set intends to cover all possible Nagios logs. 7 | # 8 | # Some more work needs to be done to cover all External Commands: 9 | # http://old.nagios.org/developerinfo/externalcommands/commandlist.php 10 | # 11 | # If you need some support on these rules please contact: 12 | # Jelle Smet http://smetj.net 13 | # 14 | ################################################################################# 15 | ################################################################################# 16 | 17 | NAGIOSTIME \[%{NUMBER:timestamp}\] 18 | 19 | ############################################### 20 | ######## Begin nagios log types 21 | ############################################### 22 | NAGIOS_TYPE_CURRENT_SERVICE_STATE CURRENT SERVICE STATE 23 | NAGIOS_TYPE_CURRENT_HOST_STATE CURRENT HOST STATE 24 | 25 | NAGIOS_TYPE_SERVICE_NOTIFICATION SERVICE NOTIFICATION 26 | NAGIOS_TYPE_HOST_NOTIFICATION HOST NOTIFICATION 27 | 28 | NAGIOS_TYPE_SERVICE_ALERT SERVICE ALERT 29 | NAGIOS_TYPE_HOST_ALERT HOST ALERT 30 | 31 | NAGIOS_TYPE_SERVICE_FLAPPING_ALERT SERVICE FLAPPING ALERT 32 | NAGIOS_TYPE_HOST_FLAPPING_ALERT HOST FLAPPING ALERT 33 | 34 | NAGIOS_TYPE_SERVICE_DOWNTIME_ALERT SERVICE DOWNTIME ALERT 35 | NAGIOS_TYPE_HOST_DOWNTIME_ALERT HOST DOWNTIME ALERT 36 | 37 | NAGIOS_TYPE_PASSIVE_SERVICE_CHECK PASSIVE SERVICE CHECK 38 | NAGIOS_TYPE_PASSIVE_HOST_CHECK PASSIVE HOST CHECK 39 | 40 | NAGIOS_TYPE_SERVICE_EVENT_HANDLER SERVICE EVENT HANDLER 41 | NAGIOS_TYPE_HOST_EVENT_HANDLER HOST EVENT HANDLER 42 | 43 | NAGIOS_TYPE_EXTERNAL_COMMAND EXTERNAL COMMAND 44 | NAGIOS_TYPE_TIMEPERIOD_TRANSITION TIMEPERIOD TRANSITION 45 | ############################################### 46 | ######## End nagios log types 47 | ############################################### 48 | 49 | ############################################### 50 | ######## Begin external check types 51 | 
############################################### 52 | NAGIOS_EC_DISABLE_SVC_CHECK DISABLE_SVC_CHECK 53 | NAGIOS_EC_ENABLE_SVC_CHECK ENABLE_SVC_CHECK 54 | NAGIOS_EC_DISABLE_HOST_CHECK DISABLE_HOST_CHECK 55 | NAGIOS_EC_ENABLE_HOST_CHECK ENABLE_HOST_CHECK 56 | NAGIOS_EC_PROCESS_SERVICE_CHECK_RESULT PROCESS_SERVICE_CHECK_RESULT 57 | NAGIOS_EC_PROCESS_HOST_CHECK_RESULT PROCESS_HOST_CHECK_RESULT 58 | NAGIOS_EC_SCHEDULE_SERVICE_DOWNTIME SCHEDULE_SERVICE_DOWNTIME 59 | NAGIOS_EC_SCHEDULE_HOST_DOWNTIME SCHEDULE_HOST_DOWNTIME 60 | NAGIOS_EC_DISABLE_HOST_SVC_NOTIFICATIONS DISABLE_HOST_SVC_NOTIFICATIONS 61 | NAGIOS_EC_ENABLE_HOST_SVC_NOTIFICATIONS ENABLE_HOST_SVC_NOTIFICATIONS 62 | NAGIOS_EC_DISABLE_HOST_NOTIFICATIONS DISABLE_HOST_NOTIFICATIONS 63 | NAGIOS_EC_ENABLE_HOST_NOTIFICATIONS ENABLE_HOST_NOTIFICATIONS 64 | NAGIOS_EC_DISABLE_SVC_NOTIFICATIONS DISABLE_SVC_NOTIFICATIONS 65 | NAGIOS_EC_ENABLE_SVC_NOTIFICATIONS ENABLE_SVC_NOTIFICATIONS 66 | ############################################### 67 | ######## End external check types 68 | ############################################### 69 | NAGIOS_WARNING Warning:%{SPACE}%{GREEDYDATA:message} 70 | 71 | NAGIOS_CURRENT_SERVICE_STATE %{NAGIOS_TYPE_CURRENT_SERVICE_STATE:[nagios][log][type]}: %{DATA:[host][hostname]};%{DATA:[service][name]};%{DATA:[service][state]};%{DATA:[nagios][log][state_type]};%{INT:[nagios][log][attempt]:int};%{GREEDYDATA:message} 72 | NAGIOS_CURRENT_HOST_STATE %{NAGIOS_TYPE_CURRENT_HOST_STATE:[nagios][log][type]}: %{DATA:[host][hostname]};%{DATA:[service][state]};%{DATA:[nagios][log][state_type]};%{INT:[nagios][log][attempt]:int};%{GREEDYDATA:message} 73 | 74 | NAGIOS_SERVICE_NOTIFICATION %{NAGIOS_TYPE_SERVICE_NOTIFICATION:[nagios][log][type]}: %{DATA:[user][name]};%{DATA:[host][hostname]};%{DATA:[service][name]};%{DATA:[service][state]};%{DATA:[nagios][log][notification_command]};%{GREEDYDATA:message} 75 | NAGIOS_HOST_NOTIFICATION %{NAGIOS_TYPE_HOST_NOTIFICATION:[nagios][log][type]}: 
%{DATA:[user][name]};%{DATA:[host][hostname]};%{DATA:[service][state]};%{DATA:[nagios][log][notification_command]};%{GREEDYDATA:message} 76 | 77 | NAGIOS_SERVICE_ALERT %{NAGIOS_TYPE_SERVICE_ALERT:[nagios][log][type]}: %{DATA:[host][hostname]};%{DATA:[service][name]};%{DATA:[service][state]};%{DATA:[nagios][log][state_type]};%{INT:[nagios][log][attempt]:int};%{GREEDYDATA:message} 78 | NAGIOS_HOST_ALERT %{NAGIOS_TYPE_HOST_ALERT:[nagios][log][type]}: %{DATA:[host][hostname]};%{DATA:[service][state]};%{DATA:[nagios][log][state_type]};%{INT:[nagios][log][attempt]:int};%{GREEDYDATA:message} 79 | 80 | NAGIOS_SERVICE_FLAPPING_ALERT %{NAGIOS_TYPE_SERVICE_FLAPPING_ALERT:[nagios][log][type]}: %{DATA:[host][hostname]};%{DATA:[service][name]};%{DATA:[service][state]};%{GREEDYDATA:message} 81 | NAGIOS_HOST_FLAPPING_ALERT %{NAGIOS_TYPE_HOST_FLAPPING_ALERT:[nagios][log][type]}: %{DATA:[host][hostname]};%{DATA:[service][state]};%{GREEDYDATA:message} 82 | 83 | NAGIOS_SERVICE_DOWNTIME_ALERT %{NAGIOS_TYPE_SERVICE_DOWNTIME_ALERT:[nagios][log][type]}: %{DATA:[host][hostname]};%{DATA:[service][name]};%{DATA:[service][state]};%{GREEDYDATA:[nagios][log][comment]} 84 | NAGIOS_HOST_DOWNTIME_ALERT %{NAGIOS_TYPE_HOST_DOWNTIME_ALERT:[nagios][log][type]}: %{DATA:[host][hostname]};%{DATA:[service][state]};%{GREEDYDATA:[nagios][log][comment]} 85 | 86 | NAGIOS_PASSIVE_SERVICE_CHECK %{NAGIOS_TYPE_PASSIVE_SERVICE_CHECK:[nagios][log][type]}: %{DATA:[host][hostname]};%{DATA:[service][name]};%{DATA:[service][state]};%{GREEDYDATA:[nagios][log][comment]} 87 | NAGIOS_PASSIVE_HOST_CHECK %{NAGIOS_TYPE_PASSIVE_HOST_CHECK:[nagios][log][type]}: %{DATA:[host][hostname]};%{DATA:[service][state]};%{GREEDYDATA:[nagios][log][comment]} 88 | 89 | NAGIOS_SERVICE_EVENT_HANDLER %{NAGIOS_TYPE_SERVICE_EVENT_HANDLER:[nagios][log][type]}: %{DATA:[host][hostname]};%{DATA:[service][name]};%{DATA:[service][state]};%{DATA:[nagios][log][state_type]};%{DATA:[nagios][log][event_handler_name]} 90 | NAGIOS_HOST_EVENT_HANDLER 
%{NAGIOS_TYPE_HOST_EVENT_HANDLER:[nagios][log][type]}: %{DATA:[host][hostname]};%{DATA:[service][state]};%{DATA:[nagios][log][state_type]};%{DATA:[nagios][log][event_handler_name]} 91 | 92 | NAGIOS_TIMEPERIOD_TRANSITION %{NAGIOS_TYPE_TIMEPERIOD_TRANSITION:[nagios][log][type]}: %{DATA:[service][name]};%{NUMBER:[nagios][log][period_from]:int};%{NUMBER:[nagios][log][period_to]:int} 93 | 94 | #################### 95 | #### External checks 96 | #################### 97 | 98 | #Disable host & service check 99 | NAGIOS_EC_LINE_DISABLE_SVC_CHECK %{NAGIOS_TYPE_EXTERNAL_COMMAND:[nagios][log][type]}: %{NAGIOS_EC_DISABLE_SVC_CHECK:[nagios][log][command]};%{DATA:[host][hostname]};%{DATA:[service][name]} 100 | NAGIOS_EC_LINE_DISABLE_HOST_CHECK %{NAGIOS_TYPE_EXTERNAL_COMMAND:[nagios][log][type]}: %{NAGIOS_EC_DISABLE_HOST_CHECK:[nagios][log][command]};%{DATA:[host][hostname]} 101 | 102 | #Enable host & service check 103 | NAGIOS_EC_LINE_ENABLE_SVC_CHECK %{NAGIOS_TYPE_EXTERNAL_COMMAND:[nagios][log][type]}: %{NAGIOS_EC_ENABLE_SVC_CHECK:[nagios][log][command]};%{DATA:[host][hostname]};%{DATA:[service][name]} 104 | NAGIOS_EC_LINE_ENABLE_HOST_CHECK %{NAGIOS_TYPE_EXTERNAL_COMMAND:[nagios][log][type]}: %{NAGIOS_EC_ENABLE_HOST_CHECK:[nagios][log][command]};%{DATA:[host][hostname]} 105 | 106 | #Process host & service check 107 | NAGIOS_EC_LINE_PROCESS_SERVICE_CHECK_RESULT %{NAGIOS_TYPE_EXTERNAL_COMMAND:[nagios][log][type]}: %{NAGIOS_EC_PROCESS_SERVICE_CHECK_RESULT:[nagios][log][command]};%{DATA:[host][hostname]};%{DATA:[service][name]};%{DATA:[service][state]};%{GREEDYDATA:[nagios][log][check_result]} 108 | NAGIOS_EC_LINE_PROCESS_HOST_CHECK_RESULT %{NAGIOS_TYPE_EXTERNAL_COMMAND:[nagios][log][type]}: %{NAGIOS_EC_PROCESS_HOST_CHECK_RESULT:[nagios][log][command]};%{DATA:[host][hostname]};%{DATA:[service][state]};%{GREEDYDATA:[nagios][log][check_result]} 109 | 110 | #Disable host & service notifications 111 | NAGIOS_EC_LINE_DISABLE_HOST_SVC_NOTIFICATIONS 
%{NAGIOS_TYPE_EXTERNAL_COMMAND:[nagios][log][type]}: %{NAGIOS_EC_DISABLE_HOST_SVC_NOTIFICATIONS:[nagios][log][command]};%{GREEDYDATA:[host][hostname]} 112 | NAGIOS_EC_LINE_DISABLE_HOST_NOTIFICATIONS %{NAGIOS_TYPE_EXTERNAL_COMMAND:[nagios][log][type]}: %{NAGIOS_EC_DISABLE_HOST_NOTIFICATIONS:[nagios][log][command]};%{GREEDYDATA:[host][hostname]} 113 | NAGIOS_EC_LINE_DISABLE_SVC_NOTIFICATIONS %{NAGIOS_TYPE_EXTERNAL_COMMAND:[nagios][log][type]}: %{NAGIOS_EC_DISABLE_SVC_NOTIFICATIONS:[nagios][log][command]};%{DATA:[host][hostname]};%{GREEDYDATA:[service][name]} 114 | 115 | #Enable host & service notifications 116 | NAGIOS_EC_LINE_ENABLE_HOST_SVC_NOTIFICATIONS %{NAGIOS_TYPE_EXTERNAL_COMMAND:[nagios][log][type]}: %{NAGIOS_EC_ENABLE_HOST_SVC_NOTIFICATIONS:[nagios][log][command]};%{GREEDYDATA:[host][hostname]} 117 | NAGIOS_EC_LINE_ENABLE_HOST_NOTIFICATIONS %{NAGIOS_TYPE_EXTERNAL_COMMAND:[nagios][log][type]}: %{NAGIOS_EC_ENABLE_HOST_NOTIFICATIONS:[nagios][log][command]};%{GREEDYDATA:[host][hostname]} 118 | NAGIOS_EC_LINE_ENABLE_SVC_NOTIFICATIONS %{NAGIOS_TYPE_EXTERNAL_COMMAND:[nagios][log][type]}: %{NAGIOS_EC_ENABLE_SVC_NOTIFICATIONS:[nagios][log][command]};%{DATA:[host][hostname]};%{GREEDYDATA:[service][name]} 119 | 120 | #Schedule host & service downtime 121 | NAGIOS_EC_LINE_SCHEDULE_HOST_DOWNTIME %{NAGIOS_TYPE_EXTERNAL_COMMAND:[nagios][log][type]}: %{NAGIOS_EC_SCHEDULE_HOST_DOWNTIME:[nagios][log][command]};%{DATA:[host][hostname]};%{NUMBER:[nagios][log][start_time]};%{NUMBER:[nagios][log][end_time]};%{NUMBER:[nagios][log][fixed]};%{NUMBER:[nagios][log][trigger_id]};%{NUMBER:[nagios][log][duration]:int};%{DATA:[user][name]};%{DATA:[nagios][log][comment]} 122 | 123 | #End matching line 124 | NAGIOSLOGLINE %{NAGIOSTIME} 
(?:%{NAGIOS_WARNING}|%{NAGIOS_CURRENT_SERVICE_STATE}|%{NAGIOS_CURRENT_HOST_STATE}|%{NAGIOS_SERVICE_NOTIFICATION}|%{NAGIOS_HOST_NOTIFICATION}|%{NAGIOS_SERVICE_ALERT}|%{NAGIOS_HOST_ALERT}|%{NAGIOS_SERVICE_FLAPPING_ALERT}|%{NAGIOS_HOST_FLAPPING_ALERT}|%{NAGIOS_SERVICE_DOWNTIME_ALERT}|%{NAGIOS_HOST_DOWNTIME_ALERT}|%{NAGIOS_PASSIVE_SERVICE_CHECK}|%{NAGIOS_PASSIVE_HOST_CHECK}|%{NAGIOS_SERVICE_EVENT_HANDLER}|%{NAGIOS_HOST_EVENT_HANDLER}|%{NAGIOS_TIMEPERIOD_TRANSITION}|%{NAGIOS_EC_LINE_DISABLE_SVC_CHECK}|%{NAGIOS_EC_LINE_ENABLE_SVC_CHECK}|%{NAGIOS_EC_LINE_DISABLE_HOST_CHECK}|%{NAGIOS_EC_LINE_ENABLE_HOST_CHECK}|%{NAGIOS_EC_LINE_PROCESS_HOST_CHECK_RESULT}|%{NAGIOS_EC_LINE_PROCESS_SERVICE_CHECK_RESULT}|%{NAGIOS_EC_LINE_SCHEDULE_HOST_DOWNTIME}|%{NAGIOS_EC_LINE_DISABLE_HOST_SVC_NOTIFICATIONS}|%{NAGIOS_EC_LINE_ENABLE_HOST_SVC_NOTIFICATIONS}|%{NAGIOS_EC_LINE_DISABLE_HOST_NOTIFICATIONS}|%{NAGIOS_EC_LINE_ENABLE_HOST_NOTIFICATIONS}|%{NAGIOS_EC_LINE_DISABLE_SVC_NOTIFICATIONS}|%{NAGIOS_EC_LINE_ENABLE_SVC_NOTIFICATIONS}) 125 | -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | ## 4.3.4 2 | - Fix: typo in CISCOFW302013_302014_302015_302016 grok pattern [#313](https://github.com/logstash-plugins/logstash-patterns-core/pull/313) 3 | 4 | ## 4.3.3 5 | 6 | - Fix: parsing x-edge-location in CLOUDFRONT_ACCESS_LOG (ECS mode) [#311](https://github.com/logstash-plugins/logstash-patterns-core/pull/311) 7 | 8 | ## 4.3.2 9 | 10 | - Fix: typo in BIN9_QUERYLOG pattern (in ECS mode) [#307](https://github.com/logstash-plugins/logstash-patterns-core/pull/307) 11 | 12 | ## 4.3.1 13 | 14 | - Fix: incorrect syslog (priority) field name [#303](https://github.com/logstash-plugins/logstash-patterns-core/pull/303) 15 | - Fix: missed `ciscotag` field ECS-ification (`cisco.asa.tag`) for the `CISCO_TAGGED_SYSLOG` pattern 16 | 17 | ## 4.3.0 18 | 19 | With **4.3.0** we're introducing a 
new set of pattern definitions compliant with Elastic Common Schema (ECS), in numerous 20 | places patterns are capturing names prescribed by the schema or use custom namespaces that do not conflict with ECS ones. 21 | 22 | Changes are backwards compatible as much as possible and also include improvements to some of the existing patterns. 23 | 24 | Besides fields having new names, values for numeric (integer or floating point) types are usually converted to their 25 | numeric representation to ease further event processing (e.g. `http.response.status_code` is now stored as an integer). 26 | 27 | NOTE: to leverage the new ECS pattern set in Logstash a grok filter upgrade to version >= 4.4.0 is required. 28 | 29 | - **aws** 30 | * in ECS mode we dropped the (incomplete) attempt to capture `rawrequest` from `S3_REQUEST_LINE` 31 | * `S3_ACCESS_LOG` will handle up-to-date S3 access-log formats (6 'new' field captures at the end) 32 | Host Id -> Signature Version -> Cipher Suite -> Authentication Type -> Host Header -> TLS version 33 | * `ELB_ACCESS_LOG` will handle optional (`-`) in legacy mode 34 | * null values such as `-` or `-1` time values (e.g. 
`ELB_ACCESS_LOG`'s `request_processing_time`) 35 | are not captured in ECS mode 36 | 37 | - **bacula** 38 | - Fix: improve matching of `BACULA_HOST` as `HOSTNAME` 39 | - Fix: legacy `BACULA_` patterns to handle (optional) spaces 40 | - Fix: handle `BACULA_LOG` 'Job Id: X' prefix as optional 41 | - Fix: legacy matching of BACULA fatal error lines 42 | 43 | - **bind** 44 | - `BIND9`'s legacy `querytype` was further split into multiple fields as: 45 | `dns.question.type` and `bind.log.question.flags` 46 | - `BIND9` patterns (legacy as well) were adjusted to handle Bind9 >= 9.11 compatibility 47 | - `BIND9_QUERYLOGBASE` was introduced for potential re-use 48 | 49 | - **bro** 50 | * `BRO_` patterns are stricter in ECS mode - won't mistakenly match newer BRO/Zeek formats 51 | * place holders such as `(empty)` tags and `-` null values won't be captured 52 | * each `BRO_` pattern has a newer `ZEEK_` variant that supports latest Zeek 3.x versions 53 | e.g. `ZEEK_HTTP` as a replacement for `BRO_HTTP` (in ECS mode only), 54 | there's a new file **zeek** where all of the `ZEEK_XXX` pattern variants live 55 | 56 | - **exim** 57 | * introduced `EXIM` (`EXIM_MESSAGE_ARRIVAL`) to match message arrival log lines - in ECS mode! 58 | 59 | - **firewalls** 60 | * introduced `IPTABLES` pattern which is re-used within `SHOREWALL` and `SFW2` 61 | * `SHOREWALL` now supports IPv6 addresses (in ECS mode - due `IPTABLES` pattern) 62 | * `timestamp` fields will be captured for `SHOREWALL` and `SFW2` in legacy mode as well 63 | * `SHOREWALL` became less strict in containing the `kernel:` sub-string 64 | * `NETSCREENSESSIONLOG` properly handles optional `session_id=... 
reason=...` suffix 65 | * `interval` and `xlate_type` (legacy) CISCO fields are not captured in ECS mode 66 | 67 | - **core** (grok-patterns) 68 | * `SYSLOGFACILITY` type casts facility code and priority in ECS mode 69 | * `SYSLOGTIMESTAMP` will be captured (from `SYSLOGBASE`) as `timestamp` 70 | * Fix: e-mail address's local part to match according to RFC (#273) 71 | 72 | - **haproxy** 73 | * several ECS-ified fields will be type-casted to integer in ECS mode e.g. *haproxy.bytes_read* 74 | * fields containing null value (`-`) are no longer captured 75 | (e.g. in legacy mode `captured_request_cookie` gets captured even if `"-"`) 76 | 77 | - **httpd** 78 | * optional fields (e.g. `http.request.referrer` or `user_agent`) are only captured when not null (`-`) 79 | * `source.port` (`clientport` in legacy mode) is considered optional 80 | * dropped raw data (`rawrequest` legacy field) in ECS mode 81 | * Fix: HTTPD_ERRORLOG should match when module missing (#299) 82 | 83 | - **java** 84 | * `JAVASTACKTRACEPART`'s matched line number will be converted to an integer 85 | * `CATALINALOG` matching was updated to handle Tomcat 7/8/9 logging format 86 | * `TOMCATLOG` handles the default Tomcat 7/8/9 logging format 87 | * old (custom) legacy TOMCAT format is handled by the added `TOMCATLEGACY_LOG` 88 | * `TOMCATLOG` and `TOMCAT_DATESTAMP` still match the legacy format, 89 | however this might change at a later point - if you rely on the old format use `TOMCATLEGACY_` patterns 90 | 91 | - **junos** 92 | * integer fields (e.g. 
`juniper.srx.elapsed_time`) are captured as integer values 93 | 94 | - **linux-syslog** 95 | * `SYSLOG5424LINE` captures (overwrites) the `message` field instead of using a custom field name 96 | * regardless of the format used, in ECS mode, timestamps are always captured as `timestamp` 97 | * fields such as `log.syslog.facility.code` and `process.pid` are converted to integers 98 | 99 | - **mcollective** 100 | * *mcollective-patterns* file was removed, it's all one *mcollective* in ECS mode 101 | * `MCOLLECTIVE`'s `process.pid` (`pid` previously) is now type-casted to an integer 102 | 103 | - **nagios** 104 | * numeric fields such as `nagios.log.attempt` are converted to integer values in ECS mode 105 | 106 | - **rails** 107 | * request duration times from `RAILS3` log will be converted to floating point values 108 | 109 | - **squid** 110 | * `SQUID3`'s `duration` http.response `status_code` and `bytes` are type-casted to int 111 | * `SQUID3` pattern won't capture null ('-') `user.name` or `squid.response.content_type` 112 | * Fix: allow to parse SQUID log with status 0 (#298) 113 | * Fix: handle optional server address (#298) 114 | 115 | ## 4.2.0 116 | - Fix: Java stack trace's JAVAFILE to better match generated names 117 | - Fix: match Information/INFORMATION in LOGLEVEL [#274](https://github.com/logstash-plugins/logstash-patterns-core/pull/274) 118 | - Fix: NAGIOS TIMEPERIOD unknown (from/to) field matching [#275](https://github.com/logstash-plugins/logstash-patterns-core/pull/275) 119 | - Fix: HTTPD access log parse failure on missing response [#282](https://github.com/logstash-plugins/logstash-patterns-core/pull/282) 120 | - Fix: UNIXPATH to avoid DoS on long paths with unmatching chars [#292](https://github.com/logstash-plugins/logstash-patterns-core/pull/292) 121 | 122 | For longer paths, a non-matching character towards the end of the path would cause the RegExp engine to take a long time to abort. 
123 | With this change we're also explicit about not supporting relative paths (using the `PATH` pattern); these won't be properly matched. 124 | 125 | - Feat: allow UNIXPATH to match non-ascii chars [#291](https://github.com/logstash-plugins/logstash-patterns-core/pull/291) 126 | 127 | ## 4.1.2 128 | - Fix some documentation issues 129 | 130 | ## 4.1.0 131 | - Added SYSLOG5424LINE and test ipv4/ipv6/hostname as syslog5424_host rfc5424 132 | - According to rfc5424 IP address should be accepted 133 | - HTTPDATE is used by patterns/aws 134 | - HTTPD (formerly APACHE) deserves its own pattern and test files. See #45 135 | - httpd: sync names between httpd20 and httpd24 136 | - Adding maven version to the list of default Grok patterns 137 | - Added Redis Monitor Log format 138 | - Remove extra space in ASA-6-106015 rule 139 | - fix COMMONAPACHELOG specs 140 | - Added SuSEfirewall2 pattern 141 | - switch USER to HTTPDUSER for "auth" field (match email addresses) 142 | - bind9 pattern 143 | - Pattern for squid3 native format 144 | - Parse Cisco ASA-5-304001 145 | - use underscores instead of hyphens in field names 146 | - fix timestamp expect 147 | - fix cs_protocol pattern name 148 | - fix cs_protocol and cs_uri_query names 149 | - added cloudfront spec test 150 | - add pattern for cloudfront access log 151 | - Java Patterns: JAVASTACKTRACEPART was duplicate 152 | 153 | ## 4.0.2 154 | - Relax constraint on logstash-core-plugin-api to >= 1.60 <= 2.99 155 | 156 | ## 4.0.1 157 | - Republish all the gems under jruby. 158 | 159 | ## 4.0.0 160 | - Update the plugin to the version 2.0 of the plugin api, this change is required for Logstash 5.0 compatibility. 
See https://github.com/elastic/logstash/issues/5141 161 | 162 | ## 2.0.5 163 | - Specs fixes, see https://github.com/logstash-plugins/logstash-patterns-core/pull/137 164 | 165 | ## 2.0.4 166 | - Depend on logstash-core-plugin-api instead of logstash-core, removing the need to mass update plugins on major releases of logstash 167 | 168 | ## 2.0.3 169 | - New dependency requirements for logstash-core for the 5.0 release 170 | 171 | ## 2.0.0 172 | - Plugins were updated to follow the new shutdown semantic, this mainly allows Logstash to instruct input plugins to terminate gracefully, 173 | instead of using Thread.raise on the plugins' threads. Ref: https://github.com/elastic/logstash/pull/3895 174 | - Dependency on logstash-core update to 2.0 175 | 176 | ## 0.4.0 177 | - Added grok patterns for nagios notifications 178 | - Added common exim patterns 179 | - Allow optional space between sysloghost and colon, fixes https://github.com/elastic/logstash/issues/2101 for Cisco ASA devices. 180 | - Make progname optional (not always provided) for the syslog base pattern. 181 | - Improve pattern matching performance for IPV4 patterns. 182 | - Fixes: UNIXPATH pattern does not combine well with comma delimitation, https://github.com/logstash-plugins/logstash-patterns-core/issues/13 183 | - Add new valid characters for URIs in HTML5 patterns. 184 | - Make IPORHOST pattern match first an IP and then a HOST as the name 185 | implies. 186 | - Added patterns for ASA-4-106100, ASA-4-106102, ASA-4-106103 CISCO 187 | firewalls. 188 | - Update CISCOFW106023 rule to match values from FWSM 189 | - Add basic apache httpd error log format 190 | - Support TIMESTAMP_ISO8601 in HAProxy patterns, useful for rsyslog and other systems that can be configured to use this format. 
Fixes https://github.com/logstash-plugins/logstash-patterns-core/pull/80 191 | 192 | ## 0.3.0 193 | - Updated the AWS S3 patterns 194 | - Added patterns for rails 3 195 | - Added patterns for haproxy 196 | - Added patterns for bro http.log 197 | - Added shorewall patterns 198 | ## 0.2.0 199 | - Added patterns for S3 and ELB access logs amazon services 200 | ## 0.1.12 201 | - add some missing Cisco ASA firewall system log patterns 202 | - fix cisco firewall policy_id regex for policies with '-' in the name 203 | ## 0.1.11 204 | - Added Catalina and Tomcat patterns 205 | - Added German month names 206 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | 2 | Apache License 3 | Version 2.0, January 2004 4 | http://www.apache.org/licenses/ 5 | 6 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 7 | 8 | 1. Definitions. 9 | 10 | "License" shall mean the terms and conditions for use, reproduction, 11 | and distribution as defined by Sections 1 through 9 of this document. 12 | 13 | "Licensor" shall mean the copyright owner or entity authorized by 14 | the copyright owner that is granting the License. 15 | 16 | "Legal Entity" shall mean the union of the acting entity and all 17 | other entities that control, are controlled by, or are under common 18 | control with that entity. For the purposes of this definition, 19 | "control" means (i) the power, direct or indirect, to cause the 20 | direction or management of such entity, whether by contract or 21 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 22 | outstanding shares, or (iii) beneficial ownership of such entity. 23 | 24 | "You" (or "Your") shall mean an individual or Legal Entity 25 | exercising permissions granted by this License. 
26 | 27 | "Source" form shall mean the preferred form for making modifications, 28 | including but not limited to software source code, documentation 29 | source, and configuration files. 30 | 31 | "Object" form shall mean any form resulting from mechanical 32 | transformation or translation of a Source form, including but 33 | not limited to compiled object code, generated documentation, 34 | and conversions to other media types. 35 | 36 | "Work" shall mean the work of authorship, whether in Source or 37 | Object form, made available under the License, as indicated by a 38 | copyright notice that is included in or attached to the work 39 | (an example is provided in the Appendix below). 40 | 41 | "Derivative Works" shall mean any work, whether in Source or Object 42 | form, that is based on (or derived from) the Work and for which the 43 | editorial revisions, annotations, elaborations, or other modifications 44 | represent, as a whole, an original work of authorship. For the purposes 45 | of this License, Derivative Works shall not include works that remain 46 | separable from, or merely link (or bind by name) to the interfaces of, 47 | the Work and Derivative Works thereof. 48 | 49 | "Contribution" shall mean any work of authorship, including 50 | the original version of the Work and any modifications or additions 51 | to that Work or Derivative Works thereof, that is intentionally 52 | submitted to Licensor for inclusion in the Work by the copyright owner 53 | or by an individual or Legal Entity authorized to submit on behalf of 54 | the copyright owner. 
For the purposes of this definition, "submitted" 55 | means any form of electronic, verbal, or written communication sent 56 | to the Licensor or its representatives, including but not limited to 57 | communication on electronic mailing lists, source code control systems, 58 | and issue tracking systems that are managed by, or on behalf of, the 59 | Licensor for the purpose of discussing and improving the Work, but 60 | excluding communication that is conspicuously marked or otherwise 61 | designated in writing by the copyright owner as "Not a Contribution." 62 | 63 | "Contributor" shall mean Licensor and any individual or Legal Entity 64 | on behalf of whom a Contribution has been received by Licensor and 65 | subsequently incorporated within the Work. 66 | 67 | 2. Grant of Copyright License. Subject to the terms and conditions of 68 | this License, each Contributor hereby grants to You a perpetual, 69 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 70 | copyright license to reproduce, prepare Derivative Works of, 71 | publicly display, publicly perform, sublicense, and distribute the 72 | Work and such Derivative Works in Source or Object form. 73 | 74 | 3. Grant of Patent License. Subject to the terms and conditions of 75 | this License, each Contributor hereby grants to You a perpetual, 76 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 77 | (except as stated in this section) patent license to make, have made, 78 | use, offer to sell, sell, import, and otherwise transfer the Work, 79 | where such license applies only to those patent claims licensable 80 | by such Contributor that are necessarily infringed by their 81 | Contribution(s) alone or by combination of their Contribution(s) 82 | with the Work to which such Contribution(s) was submitted. 
If You 83 | institute patent litigation against any entity (including a 84 | cross-claim or counterclaim in a lawsuit) alleging that the Work 85 | or a Contribution incorporated within the Work constitutes direct 86 | or contributory patent infringement, then any patent licenses 87 | granted to You under this License for that Work shall terminate 88 | as of the date such litigation is filed. 89 | 90 | 4. Redistribution. You may reproduce and distribute copies of the 91 | Work or Derivative Works thereof in any medium, with or without 92 | modifications, and in Source or Object form, provided that You 93 | meet the following conditions: 94 | 95 | (a) You must give any other recipients of the Work or 96 | Derivative Works a copy of this License; and 97 | 98 | (b) You must cause any modified files to carry prominent notices 99 | stating that You changed the files; and 100 | 101 | (c) You must retain, in the Source form of any Derivative Works 102 | that You distribute, all copyright, patent, trademark, and 103 | attribution notices from the Source form of the Work, 104 | excluding those notices that do not pertain to any part of 105 | the Derivative Works; and 106 | 107 | (d) If the Work includes a "NOTICE" text file as part of its 108 | distribution, then any Derivative Works that You distribute must 109 | include a readable copy of the attribution notices contained 110 | within such NOTICE file, excluding those notices that do not 111 | pertain to any part of the Derivative Works, in at least one 112 | of the following places: within a NOTICE text file distributed 113 | as part of the Derivative Works; within the Source form or 114 | documentation, if provided along with the Derivative Works; or, 115 | within a display generated by the Derivative Works, if and 116 | wherever such third-party notices normally appear. The contents 117 | of the NOTICE file are for informational purposes only and 118 | do not modify the License. 
You may add Your own attribution 119 | notices within Derivative Works that You distribute, alongside 120 | or as an addendum to the NOTICE text from the Work, provided 121 | that such additional attribution notices cannot be construed 122 | as modifying the License. 123 | 124 | You may add Your own copyright statement to Your modifications and 125 | may provide additional or different license terms and conditions 126 | for use, reproduction, or distribution of Your modifications, or 127 | for any such Derivative Works as a whole, provided Your use, 128 | reproduction, and distribution of the Work otherwise complies with 129 | the conditions stated in this License. 130 | 131 | 5. Submission of Contributions. Unless You explicitly state otherwise, 132 | any Contribution intentionally submitted for inclusion in the Work 133 | by You to the Licensor shall be under the terms and conditions of 134 | this License, without any additional terms or conditions. 135 | Notwithstanding the above, nothing herein shall supersede or modify 136 | the terms of any separate license agreement you may have executed 137 | with Licensor regarding such Contributions. 138 | 139 | 6. Trademarks. This License does not grant permission to use the trade 140 | names, trademarks, service marks, or product names of the Licensor, 141 | except as required for reasonable and customary use in describing the 142 | origin of the Work and reproducing the content of the NOTICE file. 143 | 144 | 7. Disclaimer of Warranty. Unless required by applicable law or 145 | agreed to in writing, Licensor provides the Work (and each 146 | Contributor provides its Contributions) on an "AS IS" BASIS, 147 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 148 | implied, including, without limitation, any warranties or conditions 149 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 150 | PARTICULAR PURPOSE. 
You are solely responsible for determining the 151 | appropriateness of using or redistributing the Work and assume any 152 | risks associated with Your exercise of permissions under this License. 153 | 154 | 8. Limitation of Liability. In no event and under no legal theory, 155 | whether in tort (including negligence), contract, or otherwise, 156 | unless required by applicable law (such as deliberate and grossly 157 | negligent acts) or agreed to in writing, shall any Contributor be 158 | liable to You for damages, including any direct, indirect, special, 159 | incidental, or consequential damages of any character arising as a 160 | result of this License or out of the use or inability to use the 161 | Work (including but not limited to damages for loss of goodwill, 162 | work stoppage, computer failure or malfunction, or any and all 163 | other commercial damages or losses), even if such Contributor 164 | has been advised of the possibility of such damages. 165 | 166 | 9. Accepting Warranty or Additional Liability. While redistributing 167 | the Work or Derivative Works thereof, You may choose to offer, 168 | and charge a fee for, acceptance of support, warranty, indemnity, 169 | or other liability obligations and/or rights consistent with this 170 | License. However, in accepting such obligations, You may act only 171 | on Your own behalf and on Your sole responsibility, not on behalf 172 | of any other Contributor, and only if You agree to indemnify, 173 | defend, and hold each Contributor harmless for any liability 174 | incurred by, or claims asserted against, such Contributor by reason 175 | of your accepting any such warranty or additional liability. 176 | 177 | END OF TERMS AND CONDITIONS 178 | 179 | APPENDIX: How to apply the Apache License to your work. 180 | 181 | To apply the Apache License to your work, attach the following 182 | boilerplate notice, with the fields enclosed by brackets "[]" 183 | replaced with your own identifying information. 
(Don't include 184 | the brackets!) The text should be enclosed in the appropriate 185 | comment syntax for the file format. We also recommend that a 186 | file or class name and description of purpose be included on the 187 | same "printed page" as the copyright notice for easier 188 | identification within third-party archives. 189 | 190 | Copyright 2020 Elastic and contributors 191 | 192 | Licensed under the Apache License, Version 2.0 (the "License"); 193 | you may not use this file except in compliance with the License. 194 | You may obtain a copy of the License at 195 | 196 | http://www.apache.org/licenses/LICENSE-2.0 197 | 198 | Unless required by applicable law or agreed to in writing, software 199 | distributed under the License is distributed on an "AS IS" BASIS, 200 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 201 | See the License for the specific language governing permissions and 202 | limitations under the License. 203 | -------------------------------------------------------------------------------- /spec/patterns/bacula_spec.rb: -------------------------------------------------------------------------------- 1 | # encoding: utf-8 2 | require "spec_helper" 3 | require "logstash/patterns/core" # loads the bundled grok pattern definitions (lib/logstash/patterns/core.rb) 4 | 5 | describe_pattern "BACULA_LOG_MAX_CAPACITY", ['legacy', 'ecs-v1'] do 6 | 7 | let(:message) do 8 | 'User defined maximum volume capacity 108,372,182,400 exceeded on device "FStorage" (/var/lib/bac/storage).' 9 | end 10 | 11 | it 'matches' do 12 | if ecs_compatibility? 13 | should include "bacula"=>{"volume"=>{"max_capacity"=>"108,372,182,400", "device"=>"FStorage", "path"=>"/var/lib/bac/storage"}} 14 | else 15 | should include("device"=>"FStorage") 16 | end 17 | end 18 | 19 | end 20 | 21 | describe_pattern "BACULA_LOG_END_VOLUME", ['legacy', 'ecs-v1'] do 22 | 23 | let(:message) do 24 | 'End of medium on Volume "TestShortZN0014" Bytes=5,228,777 Blocks=82 at 21-Dec-2016 12:30.'
25 | end 26 | 27 | it 'matches' do 28 | if ecs_compatibility? 29 | should include "bacula"=>hash_including("volume"=>{"name"=>"TestShortZN0014", "bytes"=>"5,228,777", "blocks"=>"82"}) 30 | # bacula.timestamp is 'duplicate' information when the full BACULA_LOGLINE is matched 31 | # we're keeping it as it includes year and might be slightly off the matched timestamp 32 | should include "bacula"=>hash_including("timestamp"=>"21-Dec-2016 12:30") 33 | else 34 | should include("volume"=>"TestShortZN0014") 35 | end 36 | end 37 | 38 | end 39 | 40 | describe_pattern "BACULA_LOGLINE", ['legacy', 'ecs-v1'] do # BACULA_LOG_NEW_VOLUME 41 | 42 | let(:message) do 43 | '09-Jan 19:54 bacula-host JobId 265896: Created new Volume "FullAuto-8812" in catalog.' 44 | # NOTE: we do not match the full message log format that looks like: 45 | # 'Created new Volume="FullAuto-8812", Pool="FullFile", MediaType="FullFile" in catalog.' 46 | end 47 | 48 | it 'matches' do 49 | should include (ecs_compatibility? ? "timestamp" : "bts") => '09-Jan 19:54' 50 | if ecs_compatibility? 51 | should include "bacula"=>{"volume"=>{"name"=>"FullAuto-8812"}, "job"=>{"id"=>"265896"}} 52 | should include "host" => {"hostname"=>"bacula-host"} 53 | else 54 | should include("volume"=>"FullAuto-8812") 55 | end 56 | end 57 | 58 | end 59 | 60 | describe_pattern "BACULA_LOGLINE", ['legacy', 'ecs-v1'] do # BACULA_LOG_NEW_LABEL 61 | 62 | let(:message) do 63 | '25-Aug 10:50 bacula-sd JobId 24: Labeled new Volume "Vol-0018" on device "FileChgr1-Dev1" (/opt/bacula/disk).' 64 | end 65 | 66 | it 'matches' do 67 | should include (ecs_compatibility? ? "timestamp" : "bts") => '25-Aug 10:50' 68 | if ecs_compatibility?
69 | should include "bacula"=>hash_including("volume"=>{"name"=>"Vol-0018", "device"=>"FileChgr1-Dev1", "path"=>"/opt/bacula/disk"}) 70 | should include "bacula"=>hash_including("job"=>{"id"=>"24"}) 71 | should include "host" => {"hostname"=>"bacula-sd"} 72 | else 73 | should include("volume"=>"Vol-0018", "device" => "FileChgr1-Dev1") 74 | end 75 | end 76 | 77 | end 78 | 79 | describe_pattern "BACULA_LOGLINE", ['legacy', 'ecs-v1'] do # BACULA_LOG_WROTE_LABEL 80 | 81 | let(:message) do 82 | '25-Aug 10:50 bacula-sd JobId 24: Wrote label to prelabeled Volume "Volume01" on device "Device01" (/dev/nst0)' 83 | end 84 | 85 | it 'matches' do 86 | should include (ecs_compatibility? ? "timestamp" : "bts") => '25-Aug 10:50' 87 | if ecs_compatibility? 88 | should include "bacula"=>hash_including("volume"=>{"name"=>"Volume01", "device"=>"Device01", "path"=>"/dev/nst0"}) 89 | else 90 | should include("jobid"=>"24") # legacy mode: only the JobId is asserted for this message 91 | end 92 | end 93 | 94 | end 95 | 96 | describe_pattern "BACULA_LOGLINE", ['legacy', 'ecs-v1'] do # BACULA_LOG_NEW_MOUNT 97 | 98 | let(:message) do 99 | '24-Aug 01:54 crey-sd JobId 215534: New volume "DiffAuto-4861" mounted on device "vDrive-1" (/usr/local/bac/volumes) at 24-Aug-2015 01:54.' 100 | end 101 | 102 | it 'matches' do 103 | should include (ecs_compatibility? ? "timestamp" : "bts") => '24-Aug 01:54' 104 | if ecs_compatibility? 105 | should include "bacula"=>hash_including("volume"=>{"name"=>"DiffAuto-4861", "device"=>"vDrive-1", "path"=>"/usr/local/bac/volumes"}) 106 | else 107 | should include("device"=>"vDrive-1", "volume"=>"DiffAuto-4861", "hostname"=>"crey-sd", "jobid"=>"215534") 108 | end 109 | end 110 | 111 | end 112 | 113 | describe_pattern "BACULA_LOGLINE", ['legacy', 'ecs-v1'] do # BACULA_LOG_NOOPENDIR 114 | 115 | let(:message) do 116 | '24-Feb 16:36 starfury-fd JobId 3: Could not open directory "/root": ERR=Permission denied' 117 | end 118 | 119 | it 'matches' do 120 | should include (ecs_compatibility? ?
"timestamp" : "bts") => '24-Feb 16:36' 121 | if ecs_compatibility? 122 | should include "file"=>{"path"=>"/root"} 123 | should include "error"=>{"message"=>"Permission denied"} 124 | else 125 | should include("berror"=>"Permission denied") 126 | end 127 | end 128 | 129 | end 130 | 131 | describe_pattern "BACULA_LOGLINE", ['legacy', 'ecs-v1'] do # BACULA_LOG_NOSTAT 132 | 133 | let(:message) do 134 | '15-Dec 17:50 u22.com JobId 13: Could not stat /var/lib/bacula/bacula.sql: ERR=No such file or directory' 135 | end 136 | 137 | it 'matches' do 138 | if ecs_compatibility? 139 | should include "timestamp" => '15-Dec 17:50' 140 | should include "file"=>{"path"=>"/var/lib/bacula/bacula.sql"} 141 | should include "error"=>{"message"=>"No such file or directory"} 142 | else 143 | # NOTE: not matching due BACULA_HOST 144 | # should include "bts" => '15-Dec 17:50' 145 | # should include "berror"=>"No such file or directory" 146 | end 147 | end 148 | 149 | end 150 | 151 | describe_pattern "BACULA_LOGLINE", ['legacy', 'ecs-v1'] do # BACULA_LOG_ALL_RECORDS_PRUNED 152 | 153 | let(:message) do 154 | '12-Apr 14:23 VU0EM005: All records pruned from Volume "06D125L3"; marking it "Purged"' 155 | end 156 | 157 | it 'matches' do 158 | should include (ecs_compatibility? ? "timestamp" : "bts") => '12-Apr 14:23' 159 | if ecs_compatibility? 160 | should include "bacula"=>{"volume"=>{"name"=>"06D125L3"}}, 161 | "host"=>{"hostname"=>"VU0EM005"} 162 | else 163 | should include "hostname"=>"VU0EM005", "volume"=>"06D125L3" 164 | end 165 | end 166 | 167 | end 168 | 169 | describe_pattern "BACULA_LOGLINE", ['legacy', 'ecs-v1'] do # BACULA_LOG_PRUNED_JOBS 170 | 171 | let(:message) do 172 | '29-Jan 04:16 lbu02-dir: Pruned 24 Jobs for client uni-horn from catalog.' 173 | end 174 | 175 | it 'matches' do 176 | should include (ecs_compatibility? ? "timestamp" : "bts") => '29-Jan 04:16' 177 | if ecs_compatibility? 
178 | should include "bacula"=>{"client"=>{"name"=>"uni-horn"}}, "host"=>{"hostname"=>"lbu02-dir"} 179 | else 180 | should include "hostname"=>"lbu02-dir", "client"=>"uni-horn" 181 | end 182 | end 183 | 184 | end 185 | 186 | describe_pattern "BACULA_LOGLINE", ['legacy', 'ecs-v1'] do # BACULA_LOG_STARTJOB 187 | 188 | let(:message) do 189 | '06-Mar 20:00 srvbkp-dir JobId 1075: Start Backup JobId 1075, Job=srv1-bind.2018-03-06_20.00.01_05' 190 | end 191 | 192 | it 'matches' do 193 | should include (ecs_compatibility? ? "timestamp" : "bts") => '06-Mar 20:00' 194 | if ecs_compatibility? 195 | should include "bacula"=>{"job"=>{"name"=>"srv1-bind.2018-03-06_20.00.01_05", "id"=>"1075"}} 196 | else 197 | should include "job"=>"srv1-bind.2018-03-06_20.00.01_05", "jobid"=>"1075" 198 | end 199 | end 200 | 201 | end 202 | 203 | describe_pattern "BACULA_LOGLINE", ['legacy', 'ecs-v1'] do # BACULA_LOG_DIFF_FS 204 | 205 | let(:message) do 206 | '01-Feb 00:34 ohms-fd JobId 1662: /var/spool/bareos is a different filesystem. Will not descend from /var into it.' 207 | end 208 | 209 | it 'matches' do 210 | should include (ecs_compatibility? ? "timestamp" : "bts") => '01-Feb 00:34' # only the timestamp is asserted for this message 211 | end 212 | 213 | end 214 | 215 | describe_pattern "BACULA_LOGLINE", ['legacy', 'ecs-v1'] do # BACULA_LOG_JOBEND 216 | 217 | let(:message) do 218 | '28-Aug 21:55 bacula-sd JobId 16: Job write elapsed time = 00:00:01, Transfer rate = 0 Bytes/second' 219 | end 220 | 221 | it 'matches' do 222 | should include (ecs_compatibility? ? "timestamp" : "bts") => '28-Aug 21:55' 223 | if ecs_compatibility? 224 | should include "bacula"=>{"job"=>{"elapsed_time"=>"00:00:01", "id"=>"16"}} 225 | else 226 | should include "jobid"=>"16", "elapsed" => "00:00:01" 227 | end 228 | end 229 | 230 | end 231 | 232 | describe_pattern "BACULA_LOGLINE", ['legacy', 'ecs-v1'] do # BACULA_LOG_VOLUME_PREVWRITTEN 233 | 234 | let(:message) do 235 | '17-Jan-2003 16:45 home-sd: Volume test01 previously written, moving to end of data.'
236 | end 237 | 238 | it 'matches' do 239 | if ecs_compatibility? 240 | should include "timestamp" => '17-Jan-2003 16:45' 241 | should include "bacula"=>{"volume"=>{"name"=>"test01"}} 242 | else 243 | # fails to match (due to the timestamp format — legacy pattern has no year component) 244 | end 245 | end 246 | 247 | end 248 | 249 | describe_pattern "BACULA_LOG_READYAPPEND", ['legacy', 'ecs-v1'] do 250 | 251 | let(:message) do 252 | 'Ready to append to end of Volume "F-0032" size=97835302' 253 | end 254 | 255 | it 'matches' do 256 | if ecs_compatibility? 257 | should include "bacula"=>{"volume"=>{"name"=>"F-0032", "size"=>97835302}} 258 | else 259 | should include "volume"=>"F-0032" 260 | end 261 | end 262 | 263 | end 264 | 265 | describe_pattern "BACULA_LOGLINE", ['legacy', 'ecs-v1'] do # BACULA_LOG_CLIENT_RBJ 266 | 267 | let(:message) do 268 | '01-Aug 13:30 toe-fd JobId 686: shell command: run ClientRunBeforeJob "/etc/bacula/cbe_hanfs.sh /mnt/baxter/fs1"' 269 | end 270 | 271 | it 'matches' do 272 | if ecs_compatibility? 273 | should include "bacula"=>{"job"=>{"id"=>"686", "client_run_before_command"=>'/etc/bacula/cbe_hanfs.sh /mnt/baxter/fs1'}} 274 | else 275 | should include "jobid"=>"686", "runjob"=>"/etc/bacula/cbe_hanfs.sh /mnt/baxter/fs1" 276 | end 277 | end 278 | 279 | end 280 | 281 | describe_pattern "BACULA_LOGLINE", ['legacy', 'ecs-v1'] do # BACULA_LOG_FATAL_CONN 282 | 283 | let(:message) do 284 | '11-Nov 13:28 bacula-dir JobId 11: Fatal error: bsock.c:133 Unable to connect to Client: dc0-fd on dc0.teamworld.com:9102. ERR=Connection refused' 285 | end 286 | 287 | it 'matches' do 288 | if ecs_compatibility?
289 | should include "client"=>{"address"=>"dc0.teamworld.com", "port"=>9102}, 290 | "bacula"=>hash_including("client"=>{"name"=>"dc0-fd"}), 291 | "error"=>{"message"=>"Connection refused"} 292 | else 293 | should include "client"=>"dc0-fd", "berror"=>"Connection refused" 294 | end 295 | end 296 | 297 | end 298 | 299 | describe_pattern "BACULA_LOGLINE", ['legacy', 'ecs-v1'] do # BACULA_LOG_NO_AUTH 300 | 301 | let(:message) do 302 | '16-May 11:59 samy-dir JobId 0: Fatal error: Unable to authenticate with File daemon at "cardam.home.domain:9102". Possible causes:' 303 | end 304 | 305 | it 'matches' do 306 | if ecs_compatibility? 307 | # NOTE: due to a grok bug, port:int type-casting does not work: 308 | #should include "client"=>{"address"=>"cardam.home.domain", "port"=>9102} 309 | expect( subject['client'] ).to be_a Hash 310 | expect( subject['client']['address'] ).to eql 'cardam.home.domain' 311 | expect( subject['client']['port'].to_i ).to eql 9102 312 | else 313 | # does not match due to the client address:port suffix 314 | end 315 | end 316 | 317 | end 318 | 319 | describe_pattern "BACULA_LOGLINE", ['legacy', 'ecs-v1'] do # BACULA_LOG_CANCELLING 320 | 321 | let(:message) do 322 | '03-Aug 06:20 DIRECTOR JobId 316677: Cancelling duplicate JobId=316646.' 323 | end 324 | 325 | it 'matches' do 326 | if ecs_compatibility? 327 | expect( subject ).to include "bacula" => hash_including("job" => {'id' => '316677', 'other_id' => '316646'}) 328 | else 329 | expect( subject ).to include "jobid" => "316677" 330 | end 331 | end 332 | 333 | end 334 | 335 | describe_pattern "BACULA_LOGLINE", ['legacy', 'ecs-v1'] do # BACULA_LOG_MARKCANCEL 336 | 337 | let(:message) do 338 | '09-Aug 15:14 InternetServer-sd JobId 122971, Job nyi_maildir.2013-03-03_22.00.00_51 marked to be canceled.' 339 | end 340 | 341 | it 'matches' do 342 | if ecs_compatibility?
343 | expect( subject ).to include "bacula" => hash_including( 344 | "job" => {'id' => '122971', 'name' => 'nyi_maildir.2013-03-03_22.00.00_51'}) 345 | else 346 | expect( subject ).to include "job" => "nyi_maildir.2013-03-03_22.00.00_51" 347 | end 348 | end 349 | 350 | end 351 | 352 | 353 | describe_pattern "BACULA_LOGLINE", ['legacy', 'ecs-v1'] do # BACULA_LOG_DUPLICATE (duplicate-job error; was mislabeled BACULA_LOG_FATAL_CONN) 354 | 355 | let(:message) do 356 | '25-Aug 09:02 marlin2-dir JobId 10783: Fatal Error: JobId 10782 already running. Duplicate job not allowed.' 357 | end 358 | 359 | it 'matches' do 360 | if ecs_compatibility? 361 | expect( subject ).to include "bacula" => hash_including("job" => {'id' => '10783', 'other_id' => '10782'}) 362 | else 363 | # NOTE: not matching due to the legacy pattern expecting 'error' instead of 'Error' in "Fatal Error: JobId ..." 364 | end 365 | end 366 | 367 | end 368 | --------------------------------------------------------------------------------