├── .github
│   ├── dependabot.yml
│   └── workflows
│       └── unit-test.yml
├── .gitmodules
├── Gemfile
├── LICENSE
├── README.md
├── Rakefile
├── fluent-plugin-grok-parser.gemspec
├── lib
│   └── fluent
│       └── plugin
│           ├── grok.rb
│           ├── parser_grok.rb
│           └── parser_multiline_grok.rb
├── patterns
│   ├── ecs-v1
│   │   ├── aws
│   │   ├── bacula
│   │   ├── bind
│   │   ├── bro
│   │   ├── exim
│   │   ├── firewalls
│   │   ├── grok-patterns
│   │   ├── haproxy
│   │   ├── httpd
│   │   ├── java
│   │   ├── junos
│   │   ├── linux-syslog
│   │   ├── maven
│   │   ├── mcollective
│   │   ├── mongodb
│   │   ├── nagios
│   │   ├── postgresql
│   │   ├── rails
│   │   ├── redis
│   │   ├── ruby
│   │   ├── squid
│   │   └── zeek
│   └── legacy
│       ├── aws
│       ├── bacula
│       ├── bind
│       ├── bro
│       ├── exim
│       ├── firewalls
│       ├── grok-patterns
│       ├── haproxy
│       ├── httpd
│       ├── java
│       ├── junos
│       ├── linux-syslog
│       ├── maven
│       ├── mcollective
│       ├── mcollective-patterns
│       ├── mongodb
│       ├── nagios
│       ├── postgresql
│       ├── rails
│       ├── redis
│       ├── ruby
│       └── squid
└── test
    ├── fixtures
    │   └── my_pattern
    ├── helper.rb
    ├── test_grok_parser.rb
    ├── test_grok_parser_in_tcp.rb
    └── test_multiline_grok_parser.rb
/.github/dependabot.yml:
--------------------------------------------------------------------------------
1 | version: 2
2 | updates:
3 | - package-ecosystem: 'github-actions'
4 | directory: '/'
5 | schedule:
6 | interval: 'weekly'
7 |
--------------------------------------------------------------------------------
/.github/workflows/unit-test.yml:
--------------------------------------------------------------------------------
1 | name: Test
2 | on:
3 | push:
4 | branches: [master]
5 | pull_request:
6 | branches: [master]
7 | jobs:
8 | build:
9 | runs-on: ${{ matrix.os }}
10 | strategy:
11 | fail-fast: false
12 | matrix:
13 | ruby: [ '3.2', '3.1', '3.0', '2.7' ]
14 | os: [ 'ubuntu-latest', 'macOS-latest', 'windows-latest' ]
15 | # It should be removed after RubyInstaller 3.2 is available on setup-ruby
16 | exclude:
17 | - os: windows-latest
18 | ruby: 3.2
19 | name: Ruby ${{ matrix.ruby }} unit testing on ${{ matrix.os }}
20 | steps:
21 | - uses: actions/checkout@v4
22 | - uses: ruby/setup-ruby@v1
23 | with:
24 | ruby-version: ${{ matrix.ruby }}
25 | - name: unit testing
26 | env:
27 | CI: true
28 | run: |
29 | gem install bundler rake
30 | bundle install --jobs 4 --retry 3
31 | bundle exec rake test
32 |
--------------------------------------------------------------------------------
/.gitmodules:
--------------------------------------------------------------------------------
1 | [submodule "vendor/logstash-patterns-core"]
2 | path = vendor/logstash-patterns-core
3 | url = https://github.com/logstash-plugins/logstash-patterns-core.git
4 |
--------------------------------------------------------------------------------
/Gemfile:
--------------------------------------------------------------------------------
1 | source "https://rubygems.org"
2 |
3 | gemspec
4 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | Copyright 2014 Kiyoto Tamura
2 |
3 | Licensed under the Apache License, Version 2.0 (the "License");
4 | you may not use this file except in compliance with the License.
5 | You may obtain a copy of the License at
6 |
7 | http://www.apache.org/licenses/LICENSE-2.0
8 |
9 | Unless required by applicable law or agreed to in writing, software
10 | distributed under the License is distributed on an "AS IS" BASIS,
11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | See the License for the specific language governing permissions and
13 | limitations under the License.
14 |
15 | For Grok's Ruby implementation found in parser_grok.rb
16 |
17 | Copyright 2009-2013 Jordan Sissel
18 |
19 | Licensed under the Apache License, Version 2.0 (the "License");
20 | you may not use this file except in compliance with the License.
21 | You may obtain a copy of the License at
22 |
23 | http://www.apache.org/licenses/LICENSE-2.0
24 |
25 | Unless required by applicable law or agreed to in writing, software
26 | distributed under the License is distributed on an "AS IS" BASIS,
27 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
28 | See the License for the specific language governing permissions and
29 | limitations under the License.
30 |
31 | For Grok patterns (under patterns/):
32 |
33 | Copyright 2009-2013 Jordan Sissel, Pete Fritchman, and contributors.
34 | Licensed under the Apache License, Version 2.0 (the "License");
35 | you may not use this file except in compliance with the License.
36 | You may obtain a copy of the License at
37 |
38 | http://www.apache.org/licenses/LICENSE-2.0
39 |
40 | Unless required by applicable law or agreed to in writing, software
41 | distributed under the License is distributed on an "AS IS" BASIS,
42 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
43 | See the License for the specific language governing permissions and
44 | limitations under the License.
45 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Grok Parser for Fluentd
2 |
3 | 
4 | 
5 |
6 | This is a Fluentd plugin to enable Logstash's Grok-like parsing logic.
7 |
8 | ## Requirements
9 |
10 | | fluent-plugin-grok-parser | fluentd | ruby |
11 | |---------------------------|------------|--------|
12 | | >= 2.0.0 | >= v0.14.0 | >= 2.1 |
13 | | < 2.0.0 | >= v0.12.0 | >= 1.9 |
14 |
15 |
16 | ## What's Grok?
17 |
18 | Grok is a macro to simplify and reuse regexes, originally developed by [Jordan Sissel](http://github.com/jordansissel).
19 |
20 | This is a partial implementation of Grok's grammar that should meet most needs.
21 |
22 | ## How It Works
23 |
24 | You can use it wherever you would use the `format` parameter to parse text. In the following example, it
25 | extracts the first IP address that matches in the log.
26 |
27 | ```aconf
28 | <source>
29 |   @type tail
30 |   path /path/to/log
31 |   tag grokked_log
32 |   <parse>
33 |     @type grok
34 |     grok_pattern %{IP:ip_address}
35 |   </parse>
36 | </source>
37 | ```
38 |
39 | **If you want to try multiple grok patterns and use the first matched one**, you can use the following syntax:
40 |
41 | ```aconf
42 | <source>
43 |   @type tail
44 |   path /path/to/log
45 |   tag grokked_log
46 |   <parse>
47 |     @type grok
48 |     <grok>
49 |       pattern %{HTTPD_COMBINEDLOG}
50 |       time_format "%d/%b/%Y:%H:%M:%S %z"
51 |     </grok>
52 |     <grok>
53 |       pattern %{IP:ip_address}
54 |     </grok>
55 |     <grok>
56 |       pattern %{GREEDYDATA:message}
57 |     </grok>
58 |   </parse>
59 | </source>
60 | ```
61 |
62 | ### Multiline support
63 |
64 | You can parse multi-line text.
65 |
66 | ```aconf
67 | <source>
68 |   @type tail
69 |   path /path/to/log
70 |   tag grokked_log
71 |   <parse>
72 |     @type multiline_grok
73 |     grok_pattern %{IP:ip_address}%{GREEDYDATA:message}
74 |     multiline_start_regexp /^[^\s]/
75 |   </parse>
76 | </source>
77 | ```
78 |
79 | You can use multiple grok patterns to parse your data.
80 |
81 | ```aconf
82 | <source>
83 |   @type tail
84 |   path /path/to/log
85 |   tag grokked_log
86 |   <parse>
87 |     @type multiline_grok
88 |     <grok>
89 |       pattern Started %{WORD:verb} "%{URIPATH:pathinfo}" for %{IP:ip} at %{TIMESTAMP_ISO8601:timestamp}\nProcessing by %{WORD:controller}#%{WORD:action} as %{WORD:format}%{DATA:message}Completed %{NUMBER:response} %{WORD} in %{NUMBER:elapsed} (%{DATA:elapsed_details})
90 |     </grok>
91 |   </parse>
92 | </source>
93 | ```
94 |
95 | When no pattern matches, Fluentd keeps accumulating data in the buffer indefinitely, waiting until it can parse the complete data.
96 |
97 | You can use this parser without `multiline_start_regexp` when you know your data structure perfectly.
98 |
99 | ## Configurations
100 |
101 | * See also: [Config: Parse Section - Fluentd](https://docs.fluentd.org/configuration/parse-section)
102 |
103 | * **time_format** (string) (optional): The format of the time field.
104 | * **grok_pattern** (string) (optional): The grok pattern. You cannot specify multiple grok patterns with this option.
105 | * **custom_pattern_path** (string) (optional): Path to a file or directory that includes custom grok patterns.
106 | * **grok_failure_key** (string) (optional): The key to store the grok failure reason.
107 | * **grok_name_key** (string) (optional): The key name to store the matched grok section's name.
108 | * **multiline_start_regexp** (string) (optional): The regexp that matches the beginning of a multiline block. This is only for "multiline_grok".
109 | * **grok_pattern_series** (enum) (optional): Specify the grok pattern series set, either `legacy` or `ecs-v1` (see the example below).
110 |   * Default value: `legacy`.
111 |
112 | ### \<grok\> section (optional) (multiple)
113 |
114 | * **name** (string) (optional): The name of this grok section.
115 | * **pattern** (string) (required): The grok pattern.
116 | * **keep_time_key** (bool) (optional): If true, keep the time field in the record.
117 | * **time_key** (string) (optional): Specify the time field for the event time. If the event doesn't have this field, the current time is used.
118 |   * Default value: `time`.
119 | * **time_format** (string) (optional): Process the value using the specified format. This is available only when `time_type` is `string`.
120 | * **timezone** (string) (optional): Use the specified timezone to parse/format the time value.
121 |
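For example, here is a minimal sketch of selecting the `ecs-v1` series (the log path is just a placeholder; `ecs-v1` patterns capture into ECS-style keys such as `http.request.method` and `url.original`):

```aconf
<source>
  @type tail
  path /path/to/access.log
  tag grokked_log
  <parse>
    @type grok
    grok_pattern_series ecs-v1
    grok_pattern %{HTTPD_COMBINEDLOG}
  </parse>
</source>
```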
122 |
123 | ## Examples
124 |
125 | ### Using grok\_failure\_key
126 |
127 | ```aconf
128 | <source>
129 |   @type dummy
130 |   @label @dummy
131 |   dummy [
132 |     { "message1": "no grok pattern matched!", "prog": "foo" },
133 |     { "message1": "/", "prog": "bar" }
134 |   ]
135 |   tag dummy.log
136 | </source>
137 |
138 | <label @dummy>
139 |   <filter dummy.log>
140 |     @type parser
141 |     key_name message1
142 |     reserve_data true
143 |     <parse>
144 |       @type grok
145 |       grok_failure_key grokfailure
146 |       <grok>
147 |         pattern %{PATH:path}
148 |       </grok>
149 |     </parse>
150 |   </filter>
151 |   <match dummy.log>
152 |     @type stdout
153 |   </match>
154 | </label>
155 |
156 | ```
157 |
158 | This generates the following events:
159 |
160 | ```
161 | 2016-11-28 13:07:08.009131727 +0900 dummy.log: {"message1":"no grok pattern matched!","prog":"foo","message":"no grok pattern matched!","grokfailure":"No grok pattern matched"}
162 | 2016-11-28 13:07:09.010400923 +0900 dummy.log: {"message1":"/","prog":"bar","path":"/"}
163 | ```
164 |
165 | ### Using grok\_name\_key
166 |
167 | ```aconf
168 | <source>
169 |   @type tail
170 |   path /path/to/log
171 |   tag grokked_log
172 |   <parse>
173 |     @type grok
174 |     grok_name_key grok_name
175 |     grok_failure_key grokfailure
176 |     <grok>
177 |       name apache_log
178 |       pattern %{HTTPD_COMBINEDLOG}
179 |       time_format "%d/%b/%Y:%H:%M:%S %z"
180 |     </grok>
181 |     <grok>
182 |       name ip_address
183 |       pattern %{IP:ip_address}
184 |     </grok>
185 |     <grok>
186 |       name rest_message
187 |       pattern %{GREEDYDATA:message}
188 |     </grok>
189 |   </parse>
190 | </source>
191 | ```
192 |
193 | This will add keys like the following:
194 |
195 | * Add `grok_name: "apache_log"` if the record matches `HTTPD_COMBINEDLOG`
196 | * Add `grok_name: "ip_address"` if the record matches `IP`
197 | * Add `grok_name: "rest_message"` if the record matches `GREEDYDATA`
198 |
199 | The `grokfailure` key is added to the record if it does not match any grok pattern.
200 | See also test code for more details.
201 |
202 | ## How to parse time value using specific timezone
203 |
204 | ```aconf
205 | <source>
206 |   @type tail
207 |   path /path/to/log
208 |   tag grokked_log
209 |   <parse>
210 |     @type grok
211 |     <grok>
212 |       name mylog-without-timezone
213 |       pattern %{DATESTAMP:time} %{GREEDYDATA:message}
214 |       timezone Asia/Tokyo
215 |     </grok>
216 |   </parse>
217 | </source>
218 | ```
219 |
220 | This will parse the `time` value in the "Asia/Tokyo" timezone.
221 |
222 | See [Config: Parse Section - Fluentd](https://docs.fluentd.org/configuration/parse-section) for more details about timezone.
223 |
224 | ## How to write Grok patterns
225 |
226 | Grok patterns look like `%{PATTERN_NAME:name}` where ":name" is optional. If "name" is provided, then it
227 | becomes a named capture. So, for example, if you have the grok pattern
228 |
229 | ```
230 | %{IP} %{HOST:host}
231 | ```
232 |
233 | it matches
234 |
235 | ```
236 | 127.0.0.1 foo.example
237 | ```
238 |
239 | but only extracts "foo.example" as `{"host": "foo.example"}`.
240 |
241 | Please see `patterns/*` for the patterns that are supported out of the box.
242 |
243 | ## How to add your own Grok pattern
244 |
245 | You can add your own Grok patterns by creating your own Grok file and telling the plugin to read it.
246 | This is what the `custom_pattern_path` parameter is for.
247 |
248 | ```aconf
249 | <source>
250 |   @type tail
251 |   path /path/to/log
252 |   <parse>
253 |     @type grok
254 |     grok_pattern %{MY_SUPER_PATTERN}
255 |     custom_pattern_path /path/to/my_pattern
256 |   </parse>
257 | </source>
258 | ```
259 |
260 | `custom_pattern_path` can be either a directory or file. If it's a directory, it reads all the files in it.
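A pattern file is a plain-text list of `NAME regexp-or-grok-expression` pairs, one per line; lines starting with `#` and blank lines are ignored. As an illustration only, `/path/to/my_pattern` could look like this (`MY_SUPER_PATTERN` is a hypothetical name):

```
# hypothetical custom pattern file
MY_SUPER_PATTERN %{TIMESTAMP_ISO8601:timestamp} %{LOGLEVEL:level} %{GREEDYDATA:message}
```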
261 |
262 | ## FAQs
263 |
264 | ### 1. How can I convert types of the matched patterns like Logstash's Grok?
265 |
266 | Although every parsed field has type `string` by default, you can specify other types. This is useful when filtering particular fields numerically or storing data with sensible type information.
267 |
268 | The syntax is
269 |
270 | ```
271 | grok_pattern %{GROK_PATTERN:NAME:TYPE}...
272 | ```
273 |
274 | e.g.,
275 |
276 | ```
277 | grok_pattern %{INT:foo:integer}
278 | ```
279 |
280 | Unspecified fields are parsed as the default string type.
281 |
282 | The supported types are listed below:
283 |
284 | * `string`
285 | * `bool`
286 | * `integer` ("int" would NOT work!)
287 | * `float`
288 | * `time`
289 | * `array`
290 |
291 | For the `time` and `array` types, there is an optional 4th field after the type name. For the "time" type, you can specify a time format like you would in `time_format`.
292 |
293 | For the "array" type, the third field specifies the delimiter (the default is ","). For example, if a field called "item\_ids" contains the value "3,4,5", `types item_ids:array` parses it as ["3", "4", "5"]. Alternatively, if the value is "Adam|Alice|Bob", `types item_ids:array:|` parses it as ["Adam", "Alice", "Bob"].
294 |
295 | Here is a sample config using the Grok parser with `in_tail` and the `types` parameter:
296 |
297 | ```aconf
298 | <source>
299 |   @type tail
300 |   path /path/to/log
301 |   format grok
302 |   grok_pattern %{INT:user_id:integer} paid %{NUMBER:paid_amount:float}
303 |   tag payment
304 | </source>
305 | ```
306 |
307 | ## Notice
308 |
309 | If you want to use this plugin with Fluentd v0.12.x or earlier, use v1.x of this plugin.
310 |
311 | See also: [Plugin Management | Fluentd](https://docs.fluentd.org/deployment/plugin-management)
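For example, you can pin the 1.x series at install time (the version constraint below is illustrative):

```
fluent-gem install fluent-plugin-grok-parser -v "~> 1.0"
```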
312 |
313 | ## License
314 |
315 | Apache 2.0 License
316 |
--------------------------------------------------------------------------------
/Rakefile:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env rake
2 | require "bundler/gem_tasks"
3 | require "rake/testtask"
4 | require "rake/clean"
5 |
6 | task :test => [:base_test]
7 |
8 | desc "Run test_unit based test"
9 | Rake::TestTask.new(:base_test) do |t|
10 | t.libs << "test"
11 | t.test_files = (Dir["test/test_*.rb"] + Dir["test/plugin/test_*.rb"] - ["helper.rb"]).sort
12 | t.verbose = true
13 | # t.warning = false
14 | end
15 |
16 | desc "Import patterns from submodules"
17 | task "patterns:import" do
18 | ["legacy", "ecs-v1"].each do |series|
19 | `git submodule --quiet foreach pwd`.split($\).each do |submodule_path|
20 | Dir.glob(File.join(submodule_path, "patterns/#{series}/*")) do |pattern|
21 | cp(pattern, "patterns/#{series}", verbose: true)
22 | end
23 | end
24 | end
25 |
26 | # copied from "./lib/fluent/plugin/grok"
27 | pattern_re =
28 | /%\{ # match '%{' not prefixed with '\'
29 | (?<name> # match the pattern name
30 | (?<pattern>[A-z0-9]+)
31 | (?::(?<subname>[@\[\]A-z0-9_:.-]+?)
32 | (?::(?<type>(?:string|bool|integer|float|int|
33 | time(?::.+)?|
34 | array(?::.)?)))?)?
35 | )
36 | \}/x
37 | ["legacy", "ecs-v1"].each do |series|
38 | Dir.glob("patterns/#{series}/*") do |pattern_file|
39 | new_lines = ""
40 | File.readlines(pattern_file).each do |line|
41 | case
42 | when line.strip.empty?
43 | new_lines << line
44 | when line.start_with?("#")
45 | new_lines << line
46 | else
47 | name, pattern = line.split(/\s+/, 2)
48 | new_pattern = pattern.gsub(pattern_re) do |m|
49 | matched = $~
50 | if matched[:type] == "int"
51 | "%{#{matched[:pattern]}:#{matched[:subname]}:integer}"
52 | else
53 | m
54 | end
55 | end
56 | new_lines << "#{name} #{new_pattern}"
57 | end
58 | end
59 | File.write(pattern_file, new_lines)
60 | end
61 | end
62 | end
63 |
64 | task :default => [:test, :build]
65 |
--------------------------------------------------------------------------------
/fluent-plugin-grok-parser.gemspec:
--------------------------------------------------------------------------------
1 | # coding: utf-8
2 | lib = File.expand_path("../lib", __FILE__)
3 | $LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
4 |
5 | Gem::Specification.new do |spec|
6 | spec.name = "fluent-plugin-grok-parser"
7 | spec.version = "2.6.2"
8 | spec.authors = ["kiyoto", "Kenji Okimoto"]
9 | spec.email = ["kiyoto@treasure-data.com", "okimoto@clear-code.com"]
10 | spec.summary = %q{Fluentd plugin to support Logstash-inspired Grok format for parsing logs}
11 | spec.homepage = "https://github.com/fluent/fluent-plugin-grok-parser"
12 | spec.license = "Apache-2.0"
13 |
14 | spec.files = `git ls-files`.split($/)
15 | spec.executables = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
16 | spec.test_files = spec.files.grep(%r{^(test|spec|features)/})
17 | spec.require_paths = ["lib"]
18 |
19 | spec.add_development_dependency "bundler"
20 | spec.add_development_dependency "rake"
21 | spec.add_development_dependency "test-unit", ">=3.1.5"
22 | spec.add_runtime_dependency "fluentd", ">=0.14.6", "< 2"
23 | end
24 |
--------------------------------------------------------------------------------
/lib/fluent/plugin/grok.rb:
--------------------------------------------------------------------------------
1 | require "fluent/plugin/parser_regexp"
2 |
3 | module Fluent
4 | class Grok
5 | class GrokPatternNotFoundError < StandardError
6 | end
7 |
8 | # Much of the Grok implementation is based on Jordan Sissel's jls-grok
9 | # See https://github.com/jordansissel/ruby-grok/blob/master/lib/grok-pure.rb
10 | PATTERN_RE = \
11 | /%\{ # match '%{' not prefixed with '\'
12 | (?<name>     # match the pattern name
13 | (?<pattern>[A-z0-9]+)
14 | (?::(?<subname>[@\[\]A-z0-9_:.-]+?)
15 | (?::(?<type>(?:string|bool|integer|float|
16 | time(?::.+?)?|
17 | array(?::.)?)))?)?
18 | )
19 | \}/x
20 |
21 | attr_reader :parsers
22 | attr_reader :multiline_start_regexp
23 |
24 | def initialize(plugin, conf)
25 | @pattern_map = {}
26 | @parsers = {}
27 | @multiline_mode = false
28 | @conf = conf
29 | @plugin = plugin
30 | @time_format = nil
31 | @timezone = nil
32 | if @plugin.respond_to?(:firstline?)
33 | @multiline_mode = true
34 | end
35 | if @plugin.respond_to?(:multiline_start_regexp) && @plugin.multiline_start_regexp
36 | @multiline_start_regexp = Regexp.compile(@plugin.multiline_start_regexp[1..-2])
37 | end
38 | if @plugin.respond_to?(:keep_time_key)
39 | @keep_time_key = @plugin.keep_time_key
40 | end
41 | if @plugin.respond_to?(:time_format)
42 | @time_format = @plugin.time_format
43 | end
44 | if @plugin.respond_to?(:timezone)
45 | @timezone = @plugin.timezone
46 | end
47 | end
48 |
49 | def add_patterns_from_file(path)
50 | File.open(path, "r:utf-8:utf-8").each_line do |line|
51 | next if line[0] == "#" || /^$/ =~ line
52 | name, pat = line.chomp.split(/\s+/, 2)
53 | @pattern_map[name] = pat
54 | end
55 | end
56 |
57 | def setup
58 | if @plugin.grok_pattern
59 | @parsers[:grok_pattern] = expand_pattern_expression_grok_pattern(@plugin.grok_pattern, @conf)
60 | else
61 | @plugin.grok_confs.each.with_index do |grok_conf, index|
62 | @parsers[grok_conf.name || index] = expand_pattern_expression_grok_section(grok_conf)
63 | end
64 | end
65 | @parsers.reject! do |key, parser|
66 | parser.nil?
67 | end
68 | if @parsers.empty?
69 | raise Fluent::ConfigError, 'no grok patterns. Check configuration, e.g. typo, configuration syntax, etc'
70 | end
71 | end
72 |
73 | private
74 |
75 | def expand_pattern_expression_grok_pattern(grok_pattern, conf)
76 | regexp, types = expand_pattern(grok_pattern)
77 | $log.info "Expanded the pattern #{grok_pattern} into #{regexp}"
78 | _conf = conf.to_h
79 | unless types.empty?
80 | _conf["types"] = types.map{|subname,type| "#{subname}:#{type}" }.join(",")
81 | end
82 | _conf = _conf.merge("expression" => regexp, "multiline" => @multiline_mode, "keep_time_key" => @keep_time_key)
83 | config = Fluent::Config::Element.new("parse", nil, _conf, [])
84 | parser = Fluent::Plugin::RegexpParser.new
85 | parser.configure(config)
86 | parser
87 | rescue GrokPatternNotFoundError => e
88 | raise e
89 | rescue => e
90 | $log.error(error: e)
91 | nil
92 | end
93 |
94 | def expand_pattern_expression_grok_section(conf)
95 | regexp, types = expand_pattern(conf.pattern)
96 | $log.info "Expanded the pattern #{conf.pattern} into #{regexp}"
97 | _conf = conf.to_h
98 | unless types.empty?
99 | _conf["types"] = types.map{|subname,type| "#{subname}:#{type}" }.join(",")
100 | end
101 | if conf["multiline"] || @multiline_mode
102 | _conf["multiline"] = conf["multiline"] || @multiline_mode
103 | end
104 | if conf["keep_time_key"] || @keep_time_key
105 | _conf["keep_time_key"] = conf["keep_time_key"] || @keep_time_key
106 | end
107 | if conf["time_key"]
108 | _conf["time_key"] = conf["time_key"]
109 | end
110 | if conf["time_format"] || @time_format
111 | _conf["time_format"] = conf["time_format"] || @time_format
112 | end
113 | if conf["timezone"] || @timezone
114 | _conf["timezone"] = conf["timezone"] || @timezone
115 | end
116 | _conf["expression"] = regexp
117 | config = Fluent::Config::Element.new("parse", "", _conf, [])
118 | parser = Fluent::Plugin::RegexpParser.new
119 | parser.configure(config)
120 | parser
121 | rescue GrokPatternNotFoundError => e
122 | raise e
123 | rescue => e
124 | $log.error(error: e)
125 | nil
126 | end
127 |
128 | def expand_pattern(pattern)
129 | # It's okay to modify in place. no need to expand it more than once.
130 | type_map = {}
131 | while true
132 | m = PATTERN_RE.match(pattern)
133 | break unless m
134 | curr_pattern = @pattern_map[m["pattern"]]
135 | raise GrokPatternNotFoundError, "grok pattern not found: #{pattern}" unless curr_pattern
136 | if m["subname"]
137 | ecs = /(?<ecs-key>(^\[.*\]$))/.match(m["subname"])
138 | subname = if ecs
139 | # remove starting "[" and trailing "]" on matched data
140 | ecs["ecs-key"][1..-2].split("][").join('.')
141 | else
142 | m["subname"]
143 | end
144 | replacement_pattern = "(?<#{subname}>#{curr_pattern})"
145 | type_map[subname] = m["type"] || "string"
146 | else
147 | replacement_pattern = "(?:#{curr_pattern})"
148 | end
149 | pattern = pattern.sub(m[0]) do |s|
150 | replacement_pattern
151 | end
152 | end
153 |
154 | [pattern, type_map]
155 | end
156 | end
157 | end
158 |
--------------------------------------------------------------------------------
/lib/fluent/plugin/parser_grok.rb:
--------------------------------------------------------------------------------
1 | require "fluent/plugin/grok"
2 | require "fluent/plugin/parser_none"
3 |
4 | module Fluent
5 | module Plugin
6 | class GrokParser < Parser
7 | Fluent::Plugin.register_parser("grok", self)
8 |
9 | desc "The format of the time field."
10 | config_param :time_format, :string, default: nil
11 | desc "The pattern of grok"
12 | config_param :grok_pattern, :string, default: nil
13 | desc "Path to the file that includes custom grok patterns"
14 | config_param :custom_pattern_path, :string, default: nil
15 | desc "The key has grok failure reason"
16 | config_param :grok_failure_key, :string, default: nil
17 | desc "The key name to store grok section's name"
18 | config_param :grok_name_key, :string, default: nil
19 | desc "Specify grok pattern series set"
20 | config_param :grok_pattern_series, :enum, list: [:legacy, :"ecs-v1"], default: :legacy
21 |
22 | config_section :grok, param_name: "grok_confs", multi: true do
23 | desc "The name of this grok section"
24 | config_param :name, :string, default: nil
25 | desc "The pattern of grok"
26 | config_param :pattern, :string
27 | desc "If true, keep time field in the record."
28 | config_param :keep_time_key, :bool, default: false
29 | desc "Specify time field for event time. If the event doesn't have this field, current time is used."
30 | config_param :time_key, :string, default: "time"
31 | desc "Process value using specified format. This is available only when time_type is string"
32 | config_param :time_format, :string, default: nil
33 | desc "Use specified timezone. one can parse/format the time value in the specified timezone."
34 | config_param :timezone, :string, default: nil
35 | end
36 |
37 | def initialize
38 | super
39 | @default_parser = Fluent::Plugin::NoneParser.new
40 | end
41 |
42 | def configure(conf={})
43 | super
44 |
45 | @grok = Grok.new(self, conf)
46 |
47 | default_pattern_dir = File.expand_path("../../../../patterns/#{@grok_pattern_series}/*", __FILE__)
48 | Dir.glob(default_pattern_dir) do |pattern_file_path|
49 | @grok.add_patterns_from_file(pattern_file_path)
50 | end
51 |
52 | if @custom_pattern_path
53 | if Dir.exist? @custom_pattern_path
54 | Dir.glob(@custom_pattern_path + "/*") do |pattern_file_path|
55 | @grok.add_patterns_from_file(pattern_file_path)
56 | end
57 | elsif File.exist? @custom_pattern_path
58 | @grok.add_patterns_from_file(@custom_pattern_path)
59 | end
60 | end
61 |
62 | @grok.setup
63 | end
64 |
65 | def parse(text)
66 | @grok.parsers.each do |name_or_index, parser|
67 | parser.parse(text) do |time, record|
68 | if time and record
69 | record[@grok_name_key] = name_or_index if @grok_name_key
70 | yield time, record
71 | return
72 | end
73 | end
74 | end
75 | @default_parser.parse(text) do |time, record|
76 | record[@grok_failure_key] = "No grok pattern matched" if @grok_failure_key
77 | yield time, record
78 | end
79 | end
80 | end
81 | end
82 | end
83 |
--------------------------------------------------------------------------------
/lib/fluent/plugin/parser_multiline_grok.rb:
--------------------------------------------------------------------------------
1 | require "fluent/plugin/parser_grok"
2 |
3 | module Fluent
4 | module Plugin
5 | class MultilineGrokParser < GrokParser
6 | Fluent::Plugin.register_parser("multiline_grok", self)
7 |
8 | desc "The regexp to match beginning of multiline"
9 | config_param :multiline_start_regexp, :string, default: nil
10 |
11 | def has_firstline?
12 | !!@multiline_start_regexp
13 | end
14 |
15 | def firstline?(text)
16 | @multiline_start_regexp && !!@grok.multiline_start_regexp.match(text)
17 | end
18 |
19 | def parse(text)
20 | @grok.parsers.each do |name_or_index, parser|
21 | parser.parse(text) do |time, record|
22 | if time and record
23 | record[@grok_name_key] = name_or_index if @grok_name_key
24 | yield time, record
25 | return
26 | end
27 | end
28 | end
29 | @default_parser.parse(text) do |time, record|
30 | record[@grok_failure_key] = "No grok pattern matched" if @grok_failure_key
31 | yield time, record
32 | end
33 | end
34 | end
35 | end
36 | end
37 |
--------------------------------------------------------------------------------
/patterns/ecs-v1/aws:
--------------------------------------------------------------------------------
1 | S3_REQUEST_LINE (?:%{WORD:[http][request][method]} %{NOTSPACE:[url][original]}(?: HTTP/%{NUMBER:[http][version]})?)
2 |
3 | S3_ACCESS_LOG %{WORD:[aws][s3access][bucket_owner]} %{NOTSPACE:[aws][s3access][bucket]} \[%{HTTPDATE:timestamp}\] (?:-|%{IP:[client][ip]}) (?:-|%{NOTSPACE:[client][user][id]}) %{NOTSPACE:[aws][s3access][request_id]} %{NOTSPACE:[aws][s3access][operation]} (?:-|%{NOTSPACE:[aws][s3access][key]}) (?:-|"%{S3_REQUEST_LINE:[aws][s3access][request_uri]}") (?:-|%{INT:[http][response][status_code]:integer}) (?:-|%{NOTSPACE:[aws][s3access][error_code]}) (?:-|%{INT:[aws][s3access][bytes_sent]:integer}) (?:-|%{INT:[aws][s3access][object_size]:integer}) (?:-|%{INT:[aws][s3access][total_time]:integer}) (?:-|%{INT:[aws][s3access][turn_around_time]:integer}) "(?:-|%{DATA:[http][request][referrer]})" "(?:-|%{DATA:[user_agent][original]})" (?:-|%{NOTSPACE:[aws][s3access][version_id]})(?: (?:-|%{NOTSPACE:[aws][s3access][host_id]}) (?:-|%{NOTSPACE:[aws][s3access][signature_version]}) (?:-|%{NOTSPACE:[tls][cipher]}) (?:-|%{NOTSPACE:[aws][s3access][authentication_type]}) (?:-|%{NOTSPACE:[aws][s3access][host_header]}) (?:-|%{NOTSPACE:[aws][s3access][tls_version]}))?
4 | # :long - %{INT:[aws][s3access][bytes_sent]:int}
5 | # :long - %{INT:[aws][s3access][object_size]:int}
6 |
7 | ELB_URIHOST %{IPORHOST:[url][domain]}(?::%{POSINT:[url][port]:integer})?
8 | ELB_URIPATHQUERY %{URIPATH:[url][path]}(?:\?%{URIQUERY:[url][query]})?
9 | # deprecated - old name:
10 | ELB_URIPATHPARAM %{ELB_URIPATHQUERY}
11 | ELB_URI %{URIPROTO:[url][scheme]}://(?:%{USER:[url][username]}(?::[^@]*)?@)?(?:%{ELB_URIHOST})?(?:%{ELB_URIPATHQUERY})?
12 |
13 | ELB_REQUEST_LINE (?:%{WORD:[http][request][method]} %{ELB_URI:[url][original]}(?: HTTP/%{NUMBER:[http][version]})?)
14 |
15 | # pattern supports 'regular' HTTP ELB format
16 | ELB_V1_HTTP_LOG %{TIMESTAMP_ISO8601:timestamp} %{NOTSPACE:[aws][elb][name]} %{IP:[source][ip]}:%{INT:[source][port]:integer} (?:-|(?:%{IP:[aws][elb][backend][ip]}:%{INT:[aws][elb][backend][port]:integer})) (?:-1|%{NUMBER:[aws][elb][request_processing_time][sec]:float}) (?:-1|%{NUMBER:[aws][elb][backend_processing_time][sec]:float}) (?:-1|%{NUMBER:[aws][elb][response_processing_time][sec]:float}) %{INT:[http][response][status_code]:integer} (?:-|%{INT:[aws][elb][backend][http][response][status_code]:integer}) %{INT:[http][request][body][bytes]:integer} %{INT:[http][response][body][bytes]:integer} "%{ELB_REQUEST_LINE}"(?: "(?:-|%{DATA:[user_agent][original]})" (?:-|%{NOTSPACE:[tls][cipher]}) (?:-|%{NOTSPACE:[aws][elb][ssl_protocol]}))?
17 | # :long - %{INT:[http][request][body][bytes]:int}
18 | # :long - %{INT:[http][response][body][bytes]:int}
19 |
20 | ELB_ACCESS_LOG %{ELB_V1_HTTP_LOG}
21 |
22 | # pattern used to match a shorted format, that's why we have the optional part (starting with *http.version*) at the end
23 | CLOUDFRONT_ACCESS_LOG (?<timestamp>%{YEAR}-%{MONTHNUM}-%{MONTHDAY}\t%{TIME})\t%{WORD:[aws][cloudfront][x_edge_location]}\t(?:-|%{INT:[destination][bytes]:integer})\t%{IPORHOST:[source][ip]}\t%{WORD:[http][request][method]}\t%{HOSTNAME:[url][domain]}\t%{NOTSPACE:[url][path]}\t(?:(?:000)|%{INT:[http][response][status_code]:integer})\t(?:-|%{DATA:[http][request][referrer]})\t%{DATA:[user_agent][original]}\t(?:-|%{DATA:[url][query]})\t(?:-|%{DATA:[aws][cloudfront][http][request][cookie]})\t%{WORD:[aws][cloudfront][x_edge_result_type]}\t%{NOTSPACE:[aws][cloudfront][x_edge_request_id]}\t%{HOSTNAME:[aws][cloudfront][http][request][host]}\t%{URIPROTO:[network][protocol]}\t(?:-|%{INT:[source][bytes]:integer})\t%{NUMBER:[aws][cloudfront][time_taken]:float}\t(?:-|%{IP:[network][forwarded_ip]})\t(?:-|%{DATA:[aws][cloudfront][ssl_protocol]})\t(?:-|%{NOTSPACE:[tls][cipher]})\t%{WORD:[aws][cloudfront][x_edge_response_result_type]}(?:\t(?:-|HTTP/%{NUMBER:[http][version]})\t(?:-|%{DATA:[aws][cloudfront][fle_status]})\t(?:-|%{DATA:[aws][cloudfront][fle_encrypted_fields]})\t%{INT:[source][port]:integer}\t%{NUMBER:[aws][cloudfront][time_to_first_byte]:float}\t(?:-|%{DATA:[aws][cloudfront][x_edge_detailed_result_type]})\t(?:-|%{NOTSPACE:[http][request][mime_type]})\t(?:-|%{INT:[aws][cloudfront][http][request][size]:integer})\t(?:-|%{INT:[aws][cloudfront][http][request][range][start]:integer})\t(?:-|%{INT:[aws][cloudfront][http][request][range][end]:integer}))?
24 | # :long - %{INT:[destination][bytes]:int}
25 | # :long - %{INT:[source][bytes]:int}
26 | # :long - %{INT:[aws][cloudfront][http][request][size]:int}
27 | # :long - %{INT:[aws][cloudfront][http][request][range][start]:int}
28 | # :long - %{INT:[aws][cloudfront][http][request][range][end]:int}
29 |
--------------------------------------------------------------------------------
/patterns/ecs-v1/bacula:
--------------------------------------------------------------------------------
1 | BACULA_TIMESTAMP %{MONTHDAY}-%{MONTH}(?:-%{YEAR})? %{HOUR}:%{MINUTE}
2 | BACULA_HOST %{HOSTNAME}
3 | BACULA_VOLUME %{USER}
4 | BACULA_DEVICE %{USER}
5 | BACULA_DEVICEPATH %{UNIXPATH}
6 | BACULA_CAPACITY %{INT}{1,3}(,%{INT}{3})*
7 | BACULA_VERSION %{USER}
8 | BACULA_JOB %{USER}
9 |
10 | BACULA_LOG_MAX_CAPACITY User defined maximum volume capacity %{BACULA_CAPACITY:[bacula][volume][max_capacity]} exceeded on device \"%{BACULA_DEVICE:[bacula][volume][device]}\" \(%{BACULA_DEVICEPATH:[bacula][volume][path]}\).?
11 | BACULA_LOG_END_VOLUME End of medium on Volume \"%{BACULA_VOLUME:[bacula][volume][name]}\" Bytes=%{BACULA_CAPACITY:[bacula][volume][bytes]} Blocks=%{BACULA_CAPACITY:[bacula][volume][blocks]} at %{BACULA_TIMESTAMP:[bacula][timestamp]}.
12 | BACULA_LOG_NEW_VOLUME Created new Volume \"%{BACULA_VOLUME:[bacula][volume][name]}\" in catalog.
13 | BACULA_LOG_NEW_LABEL Labeled new Volume \"%{BACULA_VOLUME:[bacula][volume][name]}\" on (?:file )?device \"%{BACULA_DEVICE:[bacula][volume][device]}\" \(%{BACULA_DEVICEPATH:[bacula][volume][path]}\).
14 | BACULA_LOG_WROTE_LABEL Wrote label to prelabeled Volume \"%{BACULA_VOLUME:[bacula][volume][name]}\" on device \"%{BACULA_DEVICE:[bacula][volume][device]}\" \(%{BACULA_DEVICEPATH:[bacula][volume][path]}\)
15 | BACULA_LOG_NEW_MOUNT New volume \"%{BACULA_VOLUME:[bacula][volume][name]}\" mounted on device \"%{BACULA_DEVICE:[bacula][volume][device]}\" \(%{BACULA_DEVICEPATH:[bacula][volume][path]}\) at %{BACULA_TIMESTAMP:[bacula][timestamp]}.
16 | BACULA_LOG_NOOPEN \s*Cannot open %{DATA}: ERR=%{GREEDYDATA:[error][message]}
17 | BACULA_LOG_NOOPENDIR \s*Could not open directory \"?%{DATA:[file][path]}\"?: ERR=%{GREEDYDATA:[error][message]}
18 | BACULA_LOG_NOSTAT \s*Could not stat %{DATA:[file][path]}: ERR=%{GREEDYDATA:[error][message]}
19 | BACULA_LOG_NOJOBS There are no more Jobs associated with Volume \"%{BACULA_VOLUME:[bacula][volume][name]}\". Marking it purged.
20 | BACULA_LOG_ALL_RECORDS_PRUNED .*?All records pruned from Volume \"%{BACULA_VOLUME:[bacula][volume][name]}\"; marking it \"Purged\"
21 | BACULA_LOG_BEGIN_PRUNE_JOBS Begin pruning Jobs older than %{INT} month %{INT} days .
22 | BACULA_LOG_BEGIN_PRUNE_FILES Begin pruning Files.
23 | BACULA_LOG_PRUNED_JOBS Pruned %{INT} Jobs* for client %{BACULA_HOST:[bacula][client][name]} from catalog.
24 | BACULA_LOG_PRUNED_FILES Pruned Files from %{INT} Jobs* for client %{BACULA_HOST:[bacula][client][name]} from catalog.
25 | BACULA_LOG_ENDPRUNE End auto prune.
26 | BACULA_LOG_STARTJOB Start Backup JobId %{INT}, Job=%{BACULA_JOB:[bacula][job][name]}
27 | BACULA_LOG_STARTRESTORE Start Restore Job %{BACULA_JOB:[bacula][job][name]}
28 | BACULA_LOG_USEDEVICE Using Device \"%{BACULA_DEVICE:[bacula][volume][device]}\"
29 | BACULA_LOG_DIFF_FS \s*%{UNIXPATH} is a different filesystem. Will not descend from %{UNIXPATH} into it.
30 | BACULA_LOG_JOBEND Job write elapsed time = %{DATA:[bacula][job][elapsed_time]}, Transfer rate = %{NUMBER} (K|M|G)? Bytes/second
31 | BACULA_LOG_NOPRUNE_JOBS No Jobs found to prune.
32 | BACULA_LOG_NOPRUNE_FILES No Files found to prune.
33 | BACULA_LOG_VOLUME_PREVWRITTEN Volume \"?%{BACULA_VOLUME:[bacula][volume][name]}\"? previously written, moving to end of data.
34 | BACULA_LOG_READYAPPEND Ready to append to end of Volume \"%{BACULA_VOLUME:[bacula][volume][name]}\" size=%{INT:[bacula][volume][size]:integer}
35 | # :long - %{INT:[bacula][volume][size]:int}
36 | BACULA_LOG_CANCELLING Cancelling duplicate JobId=%{INT:[bacula][job][other_id]}.
37 | BACULA_LOG_MARKCANCEL JobId %{INT:[bacula][job][id]}, Job %{BACULA_JOB:[bacula][job][name]} marked to be canceled.
38 | BACULA_LOG_CLIENT_RBJ shell command: run ClientRunBeforeJob \"%{GREEDYDATA:[bacula][job][client_run_before_command]}\"
39 | BACULA_LOG_VSS (Generate )?VSS (Writer)?
40 | BACULA_LOG_MAXSTART Fatal [eE]rror: Job canceled because max start delay time exceeded.
41 | BACULA_LOG_DUPLICATE Fatal [eE]rror: JobId %{INT:[bacula][job][other_id]} already running. Duplicate job not allowed.
42 | BACULA_LOG_NOJOBSTAT Fatal [eE]rror: No Job status returned from FD.
43 | BACULA_LOG_FATAL_CONN Fatal [eE]rror: bsock.c:133 Unable to connect to (Client: %{BACULA_HOST:[bacula][client][name]}|Storage daemon) on %{IPORHOST:[client][address]}:%{POSINT:[client][port]:integer}. ERR=%{GREEDYDATA:[error][message]}
44 | BACULA_LOG_NO_CONNECT Warning: bsock.c:127 Could not connect to (Client: %{BACULA_HOST:[bacula][client][name]}|Storage daemon) on %{IPORHOST:[client][address]}:%{POSINT:[client][port]:integer}. ERR=%{GREEDYDATA:[error][message]}
45 | BACULA_LOG_NO_AUTH Fatal error: Unable to authenticate with File daemon at \"?%{IPORHOST:[client][address]}(?::%{POSINT:[client][port]:integer})?\"?. Possible causes:
46 | BACULA_LOG_NOSUIT No prior or suitable Full backup found in catalog. Doing FULL backup.
47 | BACULA_LOG_NOPRIOR No prior Full backup Job record found.
48 |
49 | BACULA_LOG_JOB (Error: )?Bacula %{BACULA_HOST} %{BACULA_VERSION} \(%{BACULA_VERSION}\):
50 |
51 | BACULA_LOG %{BACULA_TIMESTAMP:timestamp} %{BACULA_HOST:[host][hostname]}(?: JobId %{INT:[bacula][job][id]})?:? (%{BACULA_LOG_MAX_CAPACITY}|%{BACULA_LOG_END_VOLUME}|%{BACULA_LOG_NEW_VOLUME}|%{BACULA_LOG_NEW_LABEL}|%{BACULA_LOG_WROTE_LABEL}|%{BACULA_LOG_NEW_MOUNT}|%{BACULA_LOG_NOOPEN}|%{BACULA_LOG_NOOPENDIR}|%{BACULA_LOG_NOSTAT}|%{BACULA_LOG_NOJOBS}|%{BACULA_LOG_ALL_RECORDS_PRUNED}|%{BACULA_LOG_BEGIN_PRUNE_JOBS}|%{BACULA_LOG_BEGIN_PRUNE_FILES}|%{BACULA_LOG_PRUNED_JOBS}|%{BACULA_LOG_PRUNED_FILES}|%{BACULA_LOG_ENDPRUNE}|%{BACULA_LOG_STARTJOB}|%{BACULA_LOG_STARTRESTORE}|%{BACULA_LOG_USEDEVICE}|%{BACULA_LOG_DIFF_FS}|%{BACULA_LOG_JOBEND}|%{BACULA_LOG_NOPRUNE_JOBS}|%{BACULA_LOG_NOPRUNE_FILES}|%{BACULA_LOG_VOLUME_PREVWRITTEN}|%{BACULA_LOG_READYAPPEND}|%{BACULA_LOG_CANCELLING}|%{BACULA_LOG_MARKCANCEL}|%{BACULA_LOG_CLIENT_RBJ}|%{BACULA_LOG_VSS}|%{BACULA_LOG_MAXSTART}|%{BACULA_LOG_DUPLICATE}|%{BACULA_LOG_NOJOBSTAT}|%{BACULA_LOG_FATAL_CONN}|%{BACULA_LOG_NO_CONNECT}|%{BACULA_LOG_NO_AUTH}|%{BACULA_LOG_NOSUIT}|%{BACULA_LOG_JOB}|%{BACULA_LOG_NOPRIOR})
52 | # old (deprecated) name :
53 | BACULA_LOGLINE %{BACULA_LOG}
54 |
--------------------------------------------------------------------------------
/patterns/ecs-v1/bind:
--------------------------------------------------------------------------------
1 | BIND9_TIMESTAMP %{MONTHDAY}[-]%{MONTH}[-]%{YEAR} %{TIME}
2 |
3 | BIND9_DNSTYPE (?:A|AAAA|CAA|CDNSKEY|CDS|CERT|CNAME|CSYNC|DLV|DNAME|DNSKEY|DS|HINFO|LOC|MX|NAPTR|NS|NSEC|NSEC3|OPENPGPKEY|PTR|RRSIG|RP|SIG|SMIMEA|SOA|SRV|TSIG|TXT|URI)
4 | BIND9_CATEGORY (?:queries)
5 |
6 | # dns.question.class is static - only 'IN' is supported by Bind9
7 | # bind.log.question.name is expected to be a 'duplicate' (same as the dns.question.name capture)
8 | BIND9_QUERYLOGBASE client(:? @0x(?:[0-9A-Fa-f]+))? %{IP:[client][ip]}#%{POSINT:[client][port]:integer} \(%{GREEDYDATA:[bind][log][question][name]}\): query: %{GREEDYDATA:[dns][question][name]} (?<[dns][question][class]>IN) %{BIND9_DNSTYPE:[dns][question][type]}(:? %{DATA:[bind][log][question][flags]})? \(%{IP:[server][ip]}\)
9 |
10 | # for query-logging category and severity are always fixed as "queries: info: "
11 | BIND9_QUERYLOG %{BIND9_TIMESTAMP:timestamp} %{BIND9_CATEGORY:[bind][log][category]}: %{LOGLEVEL:[log][level]}: %{BIND9_QUERYLOGBASE}
12 |
13 | BIND9 %{BIND9_QUERYLOG}
14 |
--------------------------------------------------------------------------------
/patterns/ecs-v1/bro:
--------------------------------------------------------------------------------
1 | # supports the 'old' BRO log files, for the updated Zeek log format see patterns/ecs-v1/zeek
2 | # https://www.bro.org/sphinx/script-reference/log-files.html
3 |
4 | BRO_BOOL [TF]
5 | BRO_DATA [^\t]+
6 |
7 | # http.log - old format (before the Zeek rename) :
8 | BRO_HTTP %{NUMBER:timestamp}\t%{NOTSPACE:[zeek][session_id]}\t%{IP:[source][ip]}\t%{INT:[source][port]:integer}\t%{IP:[destination][ip]}\t%{INT:[destination][port]:integer}\t%{INT:[zeek][http][trans_depth]:integer}\t(?:-|%{WORD:[http][request][method]})\t(?:-|%{BRO_DATA:[url][domain]})\t(?:-|%{BRO_DATA:[url][original]})\t(?:-|%{BRO_DATA:[http][request][referrer]})\t(?:-|%{BRO_DATA:[user_agent][original]})\t(?:-|%{NUMBER:[http][request][body][bytes]:integer})\t(?:-|%{NUMBER:[http][response][body][bytes]:integer})\t(?:-|%{POSINT:[http][response][status_code]:integer})\t(?:-|%{DATA:[zeek][http][status_msg]})\t(?:-|%{POSINT:[zeek][http][info_code]:integer})\t(?:-|%{DATA:[zeek][http][info_msg]})\t(?:-|%{BRO_DATA:[zeek][http][filename]})\t(?:\(empty\)|%{BRO_DATA:[zeek][http][tags]})\t(?:-|%{BRO_DATA:[url][username]})\t(?:-|%{BRO_DATA:[url][password]})\t(?:-|%{BRO_DATA:[zeek][http][proxied]})\t(?:-|%{BRO_DATA:[zeek][http][orig_fuids]})\t(?:-|%{BRO_DATA:[http][request][mime_type]})\t(?:-|%{BRO_DATA:[zeek][http][resp_fuids]})\t(?:-|%{BRO_DATA:[http][response][mime_type]})
9 | # :long - %{NUMBER:[http][request][body][bytes]:int}
10 | # :long - %{NUMBER:[http][response][body][bytes]:int}
11 |
12 | # dns.log - old format
13 | BRO_DNS %{NUMBER:timestamp}\t%{NOTSPACE:[zeek][session_id]}\t%{IP:[source][ip]}\t%{INT:[source][port]:integer}\t%{IP:[destination][ip]}\t%{INT:[destination][port]:integer}\t%{WORD:[network][transport]}\t(?:-|%{INT:[dns][id]:integer})\t(?:-|%{BRO_DATA:[dns][question][name]})\t(?:-|%{INT:[zeek][dns][qclass]:integer})\t(?:-|%{BRO_DATA:[zeek][dns][qclass_name]})\t(?:-|%{INT:[zeek][dns][qtype]:integer})\t(?:-|%{BRO_DATA:[dns][question][type]})\t(?:-|%{INT:[zeek][dns][rcode]:integer})\t(?:-|%{BRO_DATA:[dns][response_code]})\t(?:-|%{BRO_BOOL:[zeek][dns][AA]})\t(?:-|%{BRO_BOOL:[zeek][dns][TC]})\t(?:-|%{BRO_BOOL:[zeek][dns][RD]})\t(?:-|%{BRO_BOOL:[zeek][dns][RA]})\t(?:-|%{NONNEGINT:[zeek][dns][Z]:integer})\t(?:-|%{BRO_DATA:[zeek][dns][answers]})\t(?:-|%{DATA:[zeek][dns][TTLs]})\t(?:-|%{BRO_BOOL:[zeek][dns][rejected]})
14 |
15 | # conn.log - old bro, also supports 'newer' format (optional *zeek.connection.local_resp* flag) compared to non-ecs mode
16 | BRO_CONN %{NUMBER:timestamp}\t%{NOTSPACE:[zeek][session_id]}\t%{IP:[source][ip]}\t%{INT:[source][port]:integer}\t%{IP:[destination][ip]}\t%{INT:[destination][port]:integer}\t%{WORD:[network][transport]}\t(?:-|%{BRO_DATA:[network][protocol]})\t(?:-|%{NUMBER:[zeek][connection][duration]:float})\t(?:-|%{INT:[zeek][connection][orig_bytes]:integer})\t(?:-|%{INT:[zeek][connection][resp_bytes]:integer})\t(?:-|%{BRO_DATA:[zeek][connection][state]})\t(?:-|%{BRO_BOOL:[zeek][connection][local_orig]})\t(?:(?:-|%{BRO_BOOL:[zeek][connection][local_resp]})\t)?(?:-|%{INT:[zeek][connection][missed_bytes]:integer})\t(?:-|%{BRO_DATA:[zeek][connection][history]})\t(?:-|%{INT:[source][packets]:integer})\t(?:-|%{INT:[source][bytes]:integer})\t(?:-|%{INT:[destination][packets]:integer})\t(?:-|%{INT:[destination][bytes]:integer})\t(?:\(empty\)|%{BRO_DATA:[zeek][connection][tunnel_parents]})
17 | # :long - %{INT:[zeek][connection][orig_bytes]:int}
18 | # :long - %{INT:[zeek][connection][resp_bytes]:int}
19 | # :long - %{INT:[zeek][connection][missed_bytes]:int}
20 | # :long - %{INT:[source][packets]:int}
21 | # :long - %{INT:[source][bytes]:int}
22 | # :long - %{INT:[destination][packets]:int}
23 | # :long - %{INT:[destination][bytes]:int}
24 |
25 | # files.log - old format
26 | BRO_FILES %{NUMBER:timestamp}\t%{NOTSPACE:[zeek][files][fuid]}\t(?:-|%{IP:[server][ip]})\t(?:-|%{IP:[client][ip]})\t(?:-|%{BRO_DATA:[zeek][files][session_ids]})\t(?:-|%{BRO_DATA:[zeek][files][source]})\t(?:-|%{INT:[zeek][files][depth]:integer})\t(?:-|%{BRO_DATA:[zeek][files][analyzers]})\t(?:-|%{BRO_DATA:[file][mime_type]})\t(?:-|%{BRO_DATA:[file][name]})\t(?:-|%{NUMBER:[zeek][files][duration]:float})\t(?:-|%{BRO_DATA:[zeek][files][local_orig]})\t(?:-|%{BRO_BOOL:[zeek][files][is_orig]})\t(?:-|%{INT:[zeek][files][seen_bytes]:integer})\t(?:-|%{INT:[file][size]:integer})\t(?:-|%{INT:[zeek][files][missing_bytes]:integer})\t(?:-|%{INT:[zeek][files][overflow_bytes]:integer})\t(?:-|%{BRO_BOOL:[zeek][files][timedout]})\t(?:-|%{BRO_DATA:[zeek][files][parent_fuid]})\t(?:-|%{BRO_DATA:[file][hash][md5]})\t(?:-|%{BRO_DATA:[file][hash][sha1]})\t(?:-|%{BRO_DATA:[file][hash][sha256]})\t(?:-|%{BRO_DATA:[zeek][files][extracted]})
27 | # :long - %{INT:[zeek][files][seen_bytes]:int}
28 | # :long - %{INT:[file][size]:int}
29 | # :long - %{INT:[zeek][files][missing_bytes]:int}
30 | # :long - %{INT:[zeek][files][overflow_bytes]:int}
31 |
--------------------------------------------------------------------------------
/patterns/ecs-v1/exim:
--------------------------------------------------------------------------------
1 | EXIM_MSGID [0-9A-Za-z]{6}-[0-9A-Za-z]{6}-[0-9A-Za-z]{2}
2 | # <= message arrival
3 | # => normal message delivery
4 | # -> additional address in same delivery
5 | # *> delivery suppressed by -N
6 | # ** delivery failed; address bounced
7 | # == delivery deferred; temporary problem
8 | EXIM_FLAGS (?:<=|=>|->|\*>|\*\*|==|<>|>>)
9 | EXIM_DATE (:?%{YEAR}-%{MONTHNUM}-%{MONTHDAY} %{TIME})
10 | EXIM_PID \[%{POSINT:[process][pid]:integer}\]
11 | EXIM_QT ((\d+y)?(\d+w)?(\d+d)?(\d+h)?(\d+m)?(\d+s)?)
12 | EXIM_EXCLUDE_TERMS (Message is frozen|(Start|End) queue run| Warning: | retry time not reached | no (IP address|host name) found for (IP address|host) | unexpected disconnection while reading SMTP command | no immediate delivery: |another process is handling this message)
13 | EXIM_REMOTE_HOST (H=(%{NOTSPACE:[source][address]} )?(\(%{NOTSPACE:[exim][log][remote_address]}\) )?\[%{IP:[source][ip]}\](?::%{POSINT:[source][port]:integer})?)
14 | EXIM_INTERFACE (I=\[%{IP:[destination][ip]}\](?::%{NUMBER:[destination][port]:integer}))
15 | EXIM_PROTOCOL (P=%{NOTSPACE:[network][protocol]})
16 | EXIM_MSG_SIZE (S=%{NUMBER:[exim][log][message][size]:integer})
17 | EXIM_HEADER_ID (id=%{NOTSPACE:[exim][log][header_id]})
18 | EXIM_QUOTED_CONTENT (?:\\.|[^\\"])*
19 | EXIM_SUBJECT (T="%{EXIM_QUOTED_CONTENT:[exim][log][message][subject]}")
20 |
21 | EXIM_UNKNOWN_FIELD (?:[A-Za-z0-9]{1,4}=(?:%{QUOTEDSTRING}|%{NOTSPACE}))
22 | EXIM_NAMED_FIELDS (?: (?:%{EXIM_REMOTE_HOST}|%{EXIM_INTERFACE}|%{EXIM_PROTOCOL}|%{EXIM_MSG_SIZE}|%{EXIM_HEADER_ID}|%{EXIM_SUBJECT}|%{EXIM_UNKNOWN_FIELD}))*
23 |
24 | EXIM_MESSAGE_ARRIVAL %{EXIM_DATE:timestamp} (?:%{EXIM_PID} )?%{EXIM_MSGID:[exim][log][message][id]} (?<[exim][log][flags]><=) (?<[exim][log][status]>[a-z:] )?%{EMAILADDRESS:[exim][log][sender][email]}%{EXIM_NAMED_FIELDS}(?:(?: from %{DATA:[exim][log][sender][original]}>?)? for %{EMAILADDRESS:[exim][log][recipient][email]})?
25 |
26 | EXIM %{EXIM_MESSAGE_ARRIVAL}
27 |
--------------------------------------------------------------------------------
/patterns/ecs-v1/firewalls:
--------------------------------------------------------------------------------
1 | # NetScreen firewall logs
2 | NETSCREENSESSIONLOG %{SYSLOGTIMESTAMP:timestamp} %{IPORHOST:[observer][hostname]} %{NOTSPACE:[observer][name]}: (?<[observer][product]>NetScreen) device_id=%{WORD:[netscreen][device_id]} .*?(system-\w+-%{NONNEGINT:[event][code]}\(%{WORD:[netscreen][session][type]}\))?: start_time="%{DATA:[netscreen][session][start_time]}" duration=%{INT:[netscreen][session][duration]:integer} policy_id=%{INT:[netscreen][policy_id]} service=%{DATA:[netscreen][service]} proto=%{INT:[netscreen][protocol_number]:integer} src zone=%{WORD:[observer][ingress][zone]} dst zone=%{WORD:[observer][egress][zone]} action=%{WORD:[event][action]} sent=%{INT:[source][bytes]:integer} rcvd=%{INT:[destination][bytes]:integer} src=%{IPORHOST:[source][address]} dst=%{IPORHOST:[destination][address]}(?: src_port=%{INT:[source][port]:integer} dst_port=%{INT:[destination][port]:integer})?(?: src-xlated ip=%{IP:[source][nat][ip]} port=%{INT:[source][nat][port]:integer} dst-xlated ip=%{IP:[destination][nat][ip]} port=%{INT:[destination][nat][port]:integer})?(?: session_id=%{INT:[netscreen][session][id]} reason=%{GREEDYDATA:[netscreen][session][reason]})?
3 | # :long - %{INT:[source][bytes]:int}
4 | # :long - %{INT:[destination][bytes]:int}
5 |
6 | #== Cisco ASA ==
7 | CISCO_TAGGED_SYSLOG ^<%{POSINT:[log][syslog][priority]:integer}>%{CISCOTIMESTAMP:timestamp}( %{SYSLOGHOST:[host][hostname]})? ?: %%{CISCOTAG:[cisco][asa][tag]}:
8 | CISCOTIMESTAMP %{MONTH} +%{MONTHDAY}(?: %{YEAR})? %{TIME}
9 | CISCOTAG [A-Z0-9]+-%{INT}-(?:[A-Z0-9_]+)
10 | # Common Particles
11 | CISCO_ACTION Built|Teardown|Deny|Denied|denied|requested|permitted|denied by ACL|discarded|est-allowed|Dropping|created|deleted
12 | CISCO_REASON Duplicate TCP SYN|Failed to locate egress interface|Invalid transport field|No matching connection|DNS Response|DNS Query|(?:%{WORD}\s*)*
13 | CISCO_DIRECTION Inbound|inbound|Outbound|outbound
14 | CISCO_INTERVAL first hit|%{INT}-second interval
15 | CISCO_XLATE_TYPE static|dynamic
16 | # helpers
17 | CISCO_HITCOUNT_INTERVAL hit-cnt %{INT:[cisco][asa][hit_count]:integer} (?:first hit|%{INT:[cisco][asa][interval]:integer}-second interval)
18 | CISCO_SRC_IP_USER %{NOTSPACE:[observer][ingress][interface][name]}:%{IP:[source][ip]}(?:\(%{DATA:[source][user][name]}\))?
19 | CISCO_DST_IP_USER %{NOTSPACE:[observer][egress][interface][name]}:%{IP:[destination][ip]}(?:\(%{DATA:[destination][user][name]}\))?
20 | CISCO_SRC_HOST_PORT_USER %{NOTSPACE:[observer][ingress][interface][name]}:(?:(?:%{IP:[source][ip]})|(?:%{HOSTNAME:[source][address]}))(?:/%{INT:[source][port]:integer})?(?:\(%{DATA:[source][user][name]}\))?
21 | CISCO_DST_HOST_PORT_USER %{NOTSPACE:[observer][egress][interface][name]}:(?:(?:%{IP:[destination][ip]})|(?:%{HOSTNAME:[destination][address]}))(?:/%{INT:[destination][port]:integer})?(?:\(%{DATA:[destination][user][name]}\))?
22 | # ASA-1-104001
23 | CISCOFW104001 \((?:Primary|Secondary)\) Switching to ACTIVE - %{GREEDYDATA:[event][reason]}
24 | # ASA-1-104002
25 | CISCOFW104002 \((?:Primary|Secondary)\) Switching to STANDBY - %{GREEDYDATA:[event][reason]}
26 | # ASA-1-104003
27 | CISCOFW104003 \((?:Primary|Secondary)\) Switching to FAILED\.
28 | # ASA-1-104004
29 | CISCOFW104004 \((?:Primary|Secondary)\) Switching to OK\.
30 | # ASA-1-105003
31 | CISCOFW105003 \((?:Primary|Secondary)\) Monitoring on [Ii]nterface %{NOTSPACE:[network][interface][name]} waiting
32 | # ASA-1-105004
33 | CISCOFW105004 \((?:Primary|Secondary)\) Monitoring on [Ii]nterface %{NOTSPACE:[network][interface][name]} normal
34 | # ASA-1-105005
35 | CISCOFW105005 \((?:Primary|Secondary)\) Lost Failover communications with mate on [Ii]nterface %{NOTSPACE:[network][interface][name]}
36 | # ASA-1-105008
37 | CISCOFW105008 \((?:Primary|Secondary)\) Testing [Ii]nterface %{NOTSPACE:[network][interface][name]}
38 | # ASA-1-105009
39 | CISCOFW105009 \((?:Primary|Secondary)\) Testing on [Ii]nterface %{NOTSPACE:[network][interface][name]} (?:Passed|Failed)
40 | # ASA-2-106001
41 | CISCOFW106001 %{CISCO_DIRECTION:[cisco][asa][network][direction]} %{WORD:[cisco][asa][network][transport]} connection %{CISCO_ACTION:[cisco][asa][outcome]} from %{IP:[source][ip]}/%{INT:[source][port]:integer} to %{IP:[destination][ip]}/%{INT:[destination][port]:integer} flags %{DATA:[cisco][asa][tcp_flags]} on interface %{NOTSPACE:[observer][egress][interface][name]}
42 | # ASA-2-106006, ASA-2-106007, ASA-2-106010
43 | CISCOFW106006_106007_106010 %{CISCO_ACTION:[cisco][asa][outcome]} %{CISCO_DIRECTION:[cisco][asa][network][direction]} %{WORD:[cisco][asa][network][transport]} (?:from|src) %{IP:[source][ip]}/%{INT:[source][port]:integer}(?:\(%{DATA:[source][user][name]}\))? (?:to|dst) %{IP:[destination][ip]}/%{INT:[destination][port]:integer}(?:\(%{DATA:[destination][user][name]}\))? (?:(?:on interface %{NOTSPACE:[observer][egress][interface][name]})|(?:due to %{CISCO_REASON:[event][reason]}))
44 | # ASA-3-106014
45 | CISCOFW106014 %{CISCO_ACTION:[cisco][asa][outcome]} %{CISCO_DIRECTION:[cisco][asa][network][direction]} %{WORD:[cisco][asa][network][transport]} src %{CISCO_SRC_IP_USER} dst %{CISCO_DST_IP_USER}\s?\(type %{INT:[cisco][asa][icmp_type]:integer}, code %{INT:[cisco][asa][icmp_code]:integer}\)
46 | # ASA-6-106015
47 | CISCOFW106015 %{CISCO_ACTION:[cisco][asa][outcome]} %{WORD:[cisco][asa][network][transport]} \(%{DATA:[cisco][asa][rule_name]}\) from %{IP:[source][ip]}/%{INT:[source][port]:integer} to %{IP:[destination][ip]}/%{INT:[destination][port]:integer} flags %{DATA:[cisco][asa][tcp_flags]} on interface %{NOTSPACE:[observer][egress][interface][name]}
48 | # ASA-1-106021
49 | CISCOFW106021 %{CISCO_ACTION:[cisco][asa][outcome]} %{WORD:[cisco][asa][network][transport]} reverse path check from %{IP:[source][ip]} to %{IP:[destination][ip]} on interface %{NOTSPACE:[observer][egress][interface][name]}
50 | # ASA-4-106023
51 | CISCOFW106023 %{CISCO_ACTION:[cisco][asa][outcome]}(?: protocol)? %{WORD:[cisco][asa][network][transport]} src %{CISCO_SRC_HOST_PORT_USER} dst %{CISCO_DST_HOST_PORT_USER}( \(type %{INT:[cisco][asa][icmp_type]:integer}, code %{INT:[cisco][asa][icmp_code]:integer}\))? by access-group "?%{DATA:[cisco][asa][rule_name]}"? \[%{DATA:[@metadata][cisco][asa][hashcode1]}, %{DATA:[@metadata][cisco][asa][hashcode2]}\]
52 | # ASA-4-106100, ASA-4-106102, ASA-4-106103
53 | CISCOFW106100_2_3 access-list %{NOTSPACE:[cisco][asa][rule_name]} %{CISCO_ACTION:[cisco][asa][outcome]} %{WORD:[cisco][asa][network][transport]} for user '%{DATA:[user][name]}' %{DATA:[observer][ingress][interface][name]}/%{IP:[source][ip]}\(%{INT:[source][port]:integer}\) -> %{DATA:[observer][egress][interface][name]}/%{IP:[destination][ip]}\(%{INT:[destination][port]:integer}\) %{CISCO_HITCOUNT_INTERVAL} \[%{DATA:[@metadata][cisco][asa][hashcode1]}, %{DATA:[@metadata][cisco][asa][hashcode2]}\]
54 | # ASA-5-106100
55 | CISCOFW106100 access-list %{NOTSPACE:[cisco][asa][rule_name]} %{CISCO_ACTION:[cisco][asa][outcome]} %{WORD:[cisco][asa][network][transport]} %{DATA:[observer][ingress][interface][name]}/%{IP:[source][ip]}\(%{INT:[source][port]:integer}\)(?:\(%{DATA:[source][user][name]}\))? -> %{DATA:[observer][egress][interface][name]}/%{IP:[destination][ip]}\(%{INT:[destination][port]:integer}\)(?:\(%{DATA:[source][user][name]}\))? hit-cnt %{INT:[cisco][asa][hit_count]:integer} %{CISCO_INTERVAL} \[%{DATA:[@metadata][cisco][asa][hashcode1]}, %{DATA:[@metadata][cisco][asa][hashcode2]}\]
56 | # ASA-5-304001
57 | CISCOFW304001 %{IP:[source][ip]}(?:\(%{DATA:[source][user][name]}\))? Accessed URL %{IP:[destination][ip]}:%{GREEDYDATA:[url][original]}
58 | # ASA-6-110002
59 | CISCOFW110002 %{CISCO_REASON:[event][reason]} for %{WORD:[cisco][asa][network][transport]} from %{DATA:[observer][ingress][interface][name]}:%{IP:[source][ip]}/%{INT:[source][port]:integer} to %{IP:[destination][ip]}/%{INT:[destination][port]:integer}
60 | # ASA-6-302010
61 | CISCOFW302010 %{INT:[cisco][asa][connections][in_use]:integer} in use, %{INT:[cisco][asa][connections][most_used]:integer} most used
62 | # ASA-6-302013, ASA-6-302014, ASA-6-302015, ASA-6-302016
63 | CISCOFW302013_302014_302015_302016 %{CISCO_ACTION:[cisco][asa][outcome]}(?: %{CISCO_DIRECTION:[cisco][asa][network][direction]})? %{WORD:[cisco][asa][network][transport]} connection %{INT:[cisco][asa][connection_id]} for %{NOTSPACE:[observer][ingress][interface][name]}:%{IP:[source][ip]}/%{INT:[source][port]:integer}(?: \(%{IP:[source][nat][ip]}/%{INT:[source][nat][port]:integer}\))?(?:\(%{DATA:[source][user][name?]}\))? to %{NOTSPACE:[observer][egress][interface][name]}:%{IP:[destination][ip]}/%{INT:[destination][port]:integer}( \(%{IP:[destination][nat][ip]}/%{INT:[destination][nat][port]:integer}\))?(?:\(%{DATA:[destination][user][name]}\))?( duration %{TIME:[cisco][asa][duration]} bytes %{INT:[network][bytes]:integer})?(?: %{CISCO_REASON:[event][reason]})?(?: \(%{DATA:[user][name]}\))?
64 | # :long - %{INT:[network][bytes]:int}
65 | # ASA-6-302020, ASA-6-302021
66 | CISCOFW302020_302021 %{CISCO_ACTION:[cisco][asa][outcome]}(?: %{CISCO_DIRECTION:[cisco][asa][network][direction]})? %{WORD:[cisco][asa][network][transport]} connection for faddr %{IP:[destination][ip]}/%{INT:[cisco][asa][icmp_seq]:integer}(?:\(%{DATA:[destination][user][name]}\))? gaddr %{IP:[source][nat][ip]}/%{INT:[cisco][asa][icmp_type]:integer} laddr %{IP:[source][ip]}/%{INT}(?: \(%{DATA:[source][user][name]}\))?
67 | # ASA-6-305011
68 | CISCOFW305011 %{CISCO_ACTION:[cisco][asa][outcome]} %{CISCO_XLATE_TYPE} %{WORD:[cisco][asa][network][transport]} translation from %{DATA:[observer][ingress][interface][name]}:%{IP:[source][ip]}(/%{INT:[source][port]:integer})?(?:\(%{DATA:[source][user][name]}\))? to %{DATA:[observer][egress][interface][name]}:%{IP:[destination][ip]}/%{INT:[destination][port]:integer}
69 | # ASA-3-313001, ASA-3-313004, ASA-3-313008
70 | CISCOFW313001_313004_313008 %{CISCO_ACTION:[cisco][asa][outcome]} %{WORD:[cisco][asa][network][transport]} type=%{INT:[cisco][asa][icmp_type]:integer}, code=%{INT:[cisco][asa][icmp_code]:integer} from %{IP:[source][ip]} on interface %{NOTSPACE:[observer][egress][interface][name]}(?: to %{IP:[destination][ip]})?
71 | # ASA-4-313005
72 | CISCOFW313005 %{CISCO_REASON:[event][reason]} for %{WORD:[cisco][asa][network][transport]} error message: %{WORD} src %{CISCO_SRC_IP_USER} dst %{CISCO_DST_IP_USER} \(type %{INT:[cisco][asa][icmp_type]:integer}, code %{INT:[cisco][asa][icmp_code]:integer}\) on %{NOTSPACE} interface\.\s+Original IP payload: %{WORD:[cisco][asa][original_ip_payload][network][transport]} src %{IP:[cisco][asa][original_ip_payload][source][ip]}/%{INT:[cisco][asa][original_ip_payload][source][port]:integer}(?:\(%{DATA:[cisco][asa][original_ip_payload][source][user][name]}\))? dst %{IP:[cisco][asa][original_ip_payload][destination][ip]}/%{INT:[cisco][asa][original_ip_payload][destination][port]:integer}(?:\(%{DATA:[cisco][asa][original_ip_payload][destination][user][name]}\))?
73 | # ASA-5-321001
74 | CISCOFW321001 Resource '%{DATA:[cisco][asa][resource][name]}' limit of %{POSINT:[cisco][asa][resource][limit]:integer} reached for system
75 | # ASA-4-402117
76 | CISCOFW402117 %{WORD:[cisco][asa][network][type]}: Received a non-IPSec packet \(protocol=\s?%{WORD:[cisco][asa][network][transport]}\) from %{IP:[source][ip]} to %{IP:[destination][ip]}\.?
77 | # ASA-4-402119
78 | CISCOFW402119 %{WORD:[cisco][asa][network][type]}: Received an %{WORD:[cisco][asa][ipsec][protocol]} packet \(SPI=\s?%{DATA:[cisco][asa][ipsec][spi]}, sequence number=\s?%{DATA:[cisco][asa][ipsec][seq_num]}\) from %{IP:[source][ip]} \(user=\s?%{DATA:[source][user][name]}\) to %{IP:[destination][ip]} that failed anti-replay checking\.?
79 | # ASA-4-419001
80 | CISCOFW419001 %{CISCO_ACTION:[cisco][asa][outcome]} %{WORD:[cisco][asa][network][transport]} packet from %{NOTSPACE:[observer][ingress][interface][name]}:%{IP:[source][ip]}/%{INT:[source][port]:integer} to %{NOTSPACE:[observer][egress][interface][name]}:%{IP:[destination][ip]}/%{INT:[destination][port]:integer}, reason: %{GREEDYDATA:[event][reason]}
81 | # ASA-4-419002
82 | CISCOFW419002 %{CISCO_REASON:[event][reason]} from %{DATA:[observer][ingress][interface][name]}:%{IP:[source][ip]}/%{INT:[source][port]:integer} to %{DATA:[observer][egress][interface][name]}:%{IP:[destination][ip]}/%{INT:[destination][port]:integer} with different initial sequence number
83 | # ASA-4-500004
84 | CISCOFW500004 %{CISCO_REASON:[event][reason]} for protocol=%{WORD:[cisco][asa][network][transport]}, from %{IP:[source][ip]}/%{INT:[source][port]:integer} to %{IP:[destination][ip]}/%{INT:[destination][port]:integer}
85 | # ASA-6-602303, ASA-6-602304
86 | CISCOFW602303_602304 %{WORD:[cisco][asa][network][type]}: An %{CISCO_DIRECTION:[cisco][asa][network][direction]} %{DATA:[cisco][asa][ipsec][tunnel_type]} SA \(SPI=\s?%{DATA:[cisco][asa][ipsec][spi]}\) between %{IP:[source][ip]} and %{IP:[destination][ip]} \(user=\s?%{DATA:[source][user][name]}\) has been %{CISCO_ACTION:[cisco][asa][outcome]}
87 | # ASA-7-710001, ASA-7-710002, ASA-7-710003, ASA-7-710005, ASA-7-710006
88 | CISCOFW710001_710002_710003_710005_710006 %{WORD:[cisco][asa][network][transport]} (?:request|access) %{CISCO_ACTION:[cisco][asa][outcome]} from %{IP:[source][ip]}/%{INT:[source][port]:integer} to %{DATA:[observer][egress][interface][name]}:%{IP:[destination][ip]}/%{INT:[destination][port]:integer}
89 | # ASA-6-713172
90 | CISCOFW713172 Group = %{DATA:[cisco][asa][source][group]}, IP = %{IP:[source][ip]}, Automatic NAT Detection Status:\s+Remote end\s*%{DATA:[@metadata][cisco][asa][remote_nat]}\s*behind a NAT device\s+This\s+end\s*%{DATA:[@metadata][cisco][asa][local_nat]}\s*behind a NAT device
91 | # ASA-4-733100
92 | CISCOFW733100 \[\s*%{DATA:[cisco][asa][burst][object]}\s*\] drop %{DATA:[cisco][asa][burst][id]} exceeded. Current burst rate is %{INT:[cisco][asa][burst][current_rate]:integer} per second, max configured rate is %{INT:[cisco][asa][burst][configured_rate]:integer}; Current average rate is %{INT:[cisco][asa][burst][avg_rate]:integer} per second, max configured rate is %{INT:[cisco][asa][burst][configured_avg_rate]:integer}; Cumulative total count is %{INT:[cisco][asa][burst][cumulative_count]:integer}
93 | #== End Cisco ASA ==
94 |
95 |
96 | IPTABLES_TCP_FLAGS (CWR |ECE |URG |ACK |PSH |RST |SYN |FIN )*
97 | IPTABLES_TCP_PART (?:SEQ=%{INT:[iptables][tcp][seq]:integer}\s+)?(?:ACK=%{INT:[iptables][tcp][ack]:integer}\s+)?WINDOW=%{INT:[iptables][tcp][window]:integer}\s+RES=0x%{BASE16NUM:[iptables][tcp_reserved_bits]}\s+%{IPTABLES_TCP_FLAGS:[iptables][tcp][flags]}
98 |
99 | IPTABLES4_FRAG (?:(?<= )(?:CE|DF|MF))*
100 | IPTABLES4_PART SRC=%{IPV4:[source][ip]}\s+DST=%{IPV4:[destination][ip]}\s+LEN=(?:%{INT:[iptables][length]:integer})?\s+TOS=(?:0|0x%{BASE16NUM:[iptables][tos]})?\s+PREC=(?:0x%{BASE16NUM:[iptables][precedence_bits]})?\s+TTL=(?:%{INT:[iptables][ttl]:integer})?\s+ID=(?:%{INT:[iptables][id]})?\s+(?:%{IPTABLES4_FRAG:[iptables][fragment_flags]})?(?:\s+FRAG: %{INT:[iptables][fragment_offset]:integer})?
101 | IPTABLES6_PART SRC=%{IPV6:[source][ip]}\s+DST=%{IPV6:[destination][ip]}\s+LEN=(?:%{INT:[iptables][length]:integer})?\s+TC=(?:0|0x%{BASE16NUM:[iptables][tos]})?\s+HOPLIMIT=(?:%{INT:[iptables][ttl]:integer})?\s+FLOWLBL=(?:%{INT:[iptables][flow_label]})?
102 |
103 | IPTABLES IN=(?:%{NOTSPACE:[observer][ingress][interface][name]})?\s+OUT=(?:%{NOTSPACE:[observer][egress][interface][name]})?\s+(?:MAC=(?:%{COMMONMAC:[destination][mac]})?(?::%{COMMONMAC:[source][mac]})?(?::[A-Fa-f0-9]{2}:[A-Fa-f0-9]{2})?\s+)?(:?%{IPTABLES4_PART}|%{IPTABLES6_PART}).*?PROTO=(?:%{WORD:[network][transport]})?\s+SPT=(?:%{INT:[source][port]:integer})?\s+DPT=(?:%{INT:[destination][port]:integer})?\s+(?:%{IPTABLES_TCP_PART})?
104 |
105 | # Shorewall firewall logs
106 | SHOREWALL (?:%{SYSLOGTIMESTAMP:timestamp}) (?:%{WORD:[observer][hostname]}) .*Shorewall:(?:%{WORD:[shorewall][firewall][type]})?:(?:%{WORD:[shorewall][firewall][action]})?.*%{IPTABLES}
107 | #== End Shorewall
108 | #== SuSE Firewall 2 ==
109 | SFW2_LOG_PREFIX SFW2\-INext\-%{NOTSPACE:[suse][firewall][action]}
110 | SFW2 ((?:%{SYSLOGTIMESTAMP:timestamp})|(?:%{TIMESTAMP_ISO8601:timestamp}))\s*%{HOSTNAME:[observer][hostname]}.*?%{SFW2_LOG_PREFIX:[suse][firewall][log_prefix]}\s*%{IPTABLES}
111 | #== End SuSE ==
112 |
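113 | # Illustrative note (not part of the pattern set): the IPTABLES sub-pattern
114 | # above is reused by both SHOREWALL and SFW2. For raw kernel netfilter lines it
115 | # could also be combined with a syslog prefix, e.g. a grok_pattern along the
116 | # lines of
117 | #   %{SYSLOGTIMESTAMP:timestamp} %{HOSTNAME:host} kernel: .*?%{IPTABLES}
118 | # though the exact prefix depends on how your syslog daemon formats kernel
119 | # messages.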
--------------------------------------------------------------------------------
/patterns/ecs-v1/grok-patterns:
--------------------------------------------------------------------------------
1 | USERNAME [a-zA-Z0-9._-]+
2 | USER %{USERNAME}
3 | EMAILLOCALPART [a-zA-Z0-9!#$%&'*+\-/=?^_`{|}~]{1,64}(?:\.[a-zA-Z0-9!#$%&'*+\-/=?^_`{|}~]{1,62}){0,63}
4 | EMAILADDRESS %{EMAILLOCALPART}@%{HOSTNAME}
5 | INT (?:[+-]?(?:[0-9]+))
6 | BASE10NUM (?<![0-9.+-])(?>[+-]?(?:(?:[0-9]+(?:\.[0-9]+)?)|(?:\.[0-9]+)))
7 | NUMBER (?:%{BASE10NUM})
8 | BASE16NUM (?<![0-9A-Fa-f])(?:[+-]?(?:0x)?(?:[0-9A-Fa-f]+))
9 | BASE16FLOAT \b(?<![0-9A-Fa-f.])(?:[+-]?(?:0x)?(?:(?:[0-9A-Fa-f]+(?:\.[0-9A-Fa-f]*)?)|(?:\.[0-9A-Fa-f]+)))\b
10 |
11 | POSINT \b(?:[1-9][0-9]*)\b
12 | NONNEGINT \b(?:[0-9]+)\b
13 | WORD \b\w+\b
14 | NOTSPACE \S+
15 | SPACE \s*
16 | DATA .*?
17 | GREEDYDATA .*
18 | QUOTEDSTRING (?>(?<!\\)(?>"(?>\\.|[^\\"]+)+"|""|(?>'(?>\\.|[^\\']+)+')|''|(?>`(?>\\.|[^\\`]+)+`)|``))
19 | UUID [A-Fa-f0-9]{8}-(?:[A-Fa-f0-9]{4}-){3}[A-Fa-f0-9]{12}
20 | # URN, allowing use of RFC 2141 section 2.3 reserved characters
21 | URN urn:[0-9A-Za-z][0-9A-Za-z-]{0,31}:(?:%[0-9a-fA-F]{2}|[0-9A-Za-z()+,.:=@;$_!*'/?#-])+
22 |
23 | # Networking
24 | MAC (?:%{CISCOMAC}|%{WINDOWSMAC}|%{COMMONMAC})
25 | CISCOMAC (?:(?:[A-Fa-f0-9]{4}\.){2}[A-Fa-f0-9]{4})
26 | WINDOWSMAC (?:(?:[A-Fa-f0-9]{2}-){5}[A-Fa-f0-9]{2})
27 | COMMONMAC (?:(?:[A-Fa-f0-9]{2}:){5}[A-Fa-f0-9]{2})
28 | IPV6 ((([0-9A-Fa-f]{1,4}:){7}([0-9A-Fa-f]{1,4}|:))|(([0-9A-Fa-f]{1,4}:){6}(:[0-9A-Fa-f]{1,4}|((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3})|:))|(([0-9A-Fa-f]{1,4}:){5}(((:[0-9A-Fa-f]{1,4}){1,2})|:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3})|:))|(([0-9A-Fa-f]{1,4}:){4}(((:[0-9A-Fa-f]{1,4}){1,3})|((:[0-9A-Fa-f]{1,4})?:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(([0-9A-Fa-f]{1,4}:){3}(((:[0-9A-Fa-f]{1,4}){1,4})|((:[0-9A-Fa-f]{1,4}){0,2}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(([0-9A-Fa-f]{1,4}:){2}(((:[0-9A-Fa-f]{1,4}){1,5})|((:[0-9A-Fa-f]{1,4}){0,3}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(([0-9A-Fa-f]{1,4}:){1}(((:[0-9A-Fa-f]{1,4}){1,6})|((:[0-9A-Fa-f]{1,4}){0,4}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(:(((:[0-9A-Fa-f]{1,4}){1,7})|((:[0-9A-Fa-f]{1,4}){0,5}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:)))(%.+)?
29 | IPV4 (?<![0-9])(?:(?:[0-1]?[0-9]{1,2}|2[0-4][0-9]|25[0-5])[.](?:[0-1]?[0-9]{1,2}|2[0-4][0-9]|25[0-5])[.](?:[0-1]?[0-9]{1,2}|2[0-4][0-9]|25[0-5])[.](?:[0-1]?[0-9]{1,2}|2[0-4][0-9]|25[0-5]))(?![0-9])
30 | IP (?:%{IPV6}|%{IPV4})
31 | HOSTNAME \b(?:[0-9A-Za-z][0-9A-Za-z-]{0,62})(?:\.(?:[0-9A-Za-z][0-9A-Za-z-]{0,62}))*(\.?|\b)
32 | IPORHOST (?:%{IP}|%{HOSTNAME})
33 | HOSTPORT %{IPORHOST}:%{POSINT}
34 |
35 | # paths
36 | PATH (?:%{UNIXPATH}|%{WINPATH})
37 | UNIXPATH (/([\w_%!$@:.,+~-]+|\\.)*)+
38 | TTY (?:/dev/(pts|tty([pq])?)(\w+)?/?(?:[0-9]+))
39 | WINPATH (?>[A-Za-z]+:|\\)(?:\\[^\\?*]*)+
40 | URIPROTO [A-Za-z]([A-Za-z0-9+\-.]+)+
41 | URIHOST %{IPORHOST}(?::%{POSINT})?
42 | # uripath comes loosely from RFC1738, but mostly from what Firefox doesn't turn into %XX
43 | URIPATH (?:/[A-Za-z0-9$.+!*'(){},~:;=@#%&_\-]*)+
44 | URIQUERY [A-Za-z0-9$.+!*'|(){},~@#%&/=:;_?\-\[\]<>]*
45 | # deprecated (kept for compatibility):
46 | URIPARAM \?%{URIQUERY}
47 | URIPATHPARAM %{URIPATH}(?:\?%{URIQUERY})?
48 | URI %{URIPROTO}://(?:%{USER}(?::[^@]*)?@)?(?:%{URIHOST})?(?:%{URIPATH}(?:\?%{URIQUERY})?)?
49 |
50 | # Months: January, Feb, 3, 03, 12, December
51 | MONTH \b(?:[Jj]an(?:uary|uar)?|[Ff]eb(?:ruary|ruar)?|[Mm](?:a|ä)?r(?:ch|z)?|[Aa]pr(?:il)?|[Mm]a(?:y|i)?|[Jj]un(?:e|i)?|[Jj]ul(?:y|i)?|[Aa]ug(?:ust)?|[Ss]ep(?:tember)?|[Oo](?:c|k)?t(?:ober)?|[Nn]ov(?:ember)?|[Dd]e(?:c|z)(?:ember)?)\b
52 | MONTHNUM (?:0?[1-9]|1[0-2])
53 | MONTHNUM2 (?:0[1-9]|1[0-2])
54 | MONTHDAY (?:(?:0[1-9])|(?:[12][0-9])|(?:3[01])|[1-9])
55 |
56 | # Days: Monday, Tue, Thu, etc...
57 | DAY (?:Mon(?:day)?|Tue(?:sday)?|Wed(?:nesday)?|Thu(?:rsday)?|Fri(?:day)?|Sat(?:urday)?|Sun(?:day)?)
58 |
59 | # Years?
60 | YEAR (?>\d\d){1,2}
61 | HOUR (?:2[0123]|[01]?[0-9])
62 | MINUTE (?:[0-5][0-9])
63 | # '60' is a leap second in most time standards and thus is valid.
64 | SECOND (?:(?:[0-5]?[0-9]|60)(?:[:.,][0-9]+)?)
65 | TIME (?!<[0-9])%{HOUR}:%{MINUTE}(?::%{SECOND})(?![0-9])
66 | # datestamp is YYYY/MM/DD-HH:MM:SS.UUUU (or something like it)
67 | DATE_US %{MONTHNUM}[/-]%{MONTHDAY}[/-]%{YEAR}
68 | DATE_EU %{MONTHDAY}[./-]%{MONTHNUM}[./-]%{YEAR}
69 | ISO8601_TIMEZONE (?:Z|[+-]%{HOUR}(?::?%{MINUTE}))
70 | ISO8601_SECOND %{SECOND}
71 | TIMESTAMP_ISO8601 %{YEAR}-%{MONTHNUM}-%{MONTHDAY}[T ]%{HOUR}:?%{MINUTE}(?::?%{SECOND})?%{ISO8601_TIMEZONE}?
72 | DATE %{DATE_US}|%{DATE_EU}
73 | DATESTAMP %{DATE}[- ]%{TIME}
74 | TZ (?:[APMCE][SD]T|UTC)
75 | DATESTAMP_RFC822 %{DAY} %{MONTH} %{MONTHDAY} %{YEAR} %{TIME} %{TZ}
76 | DATESTAMP_RFC2822 %{DAY}, %{MONTHDAY} %{MONTH} %{YEAR} %{TIME} %{ISO8601_TIMEZONE}
77 | DATESTAMP_OTHER %{DAY} %{MONTH} %{MONTHDAY} %{TIME} %{TZ} %{YEAR}
78 | DATESTAMP_EVENTLOG %{YEAR}%{MONTHNUM2}%{MONTHDAY}%{HOUR}%{MINUTE}%{SECOND}
79 |
80 | # Syslog Dates: Month Day HH:MM:SS
81 | SYSLOGTIMESTAMP %{MONTH} +%{MONTHDAY} %{TIME}
82 | PROG [\x21-\x5a\x5c\x5e-\x7e]+
83 | SYSLOGPROG %{PROG:[process][name]}(?:\[%{POSINT:[process][pid]:integer}\])?
84 | SYSLOGHOST %{IPORHOST}
85 | SYSLOGFACILITY <%{NONNEGINT:[log][syslog][facility][code]:integer}.%{NONNEGINT:[log][syslog][priority]:integer}>
86 | HTTPDATE %{MONTHDAY}/%{MONTH}/%{YEAR}:%{TIME} %{INT}
87 |
88 | # Shortcuts
89 | QS %{QUOTEDSTRING}
90 |
91 | # Log formats
92 | SYSLOGBASE %{SYSLOGTIMESTAMP:timestamp} (?:%{SYSLOGFACILITY} )?%{SYSLOGHOST:[host][hostname]} %{SYSLOGPROG}:
93 |
94 | # Log Levels
95 | LOGLEVEL ([Aa]lert|ALERT|[Tt]race|TRACE|[Dd]ebug|DEBUG|[Nn]otice|NOTICE|[Ii]nfo?(?:rmation)?|INFO?(?:RMATION)?|[Ww]arn?(?:ing)?|WARN?(?:ING)?|[Ee]rr?(?:or)?|ERR?(?:OR)?|[Cc]rit?(?:ical)?|CRIT?(?:ICAL)?|[Ff]atal|FATAL|[Ss]evere|SEVERE|EMERG(?:ENCY)?|[Ee]merg(?:ency)?)
96 |
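97 | # Illustrative example (not part of the pattern set): a syslog prefix such as
98 | #   Mar  1 12:34:56 myhost sshd[4242]:
99 | # is matched by %{SYSLOGBASE}, yielding timestamp => "Mar  1 12:34:56",
100 | # [host][hostname] => "myhost", [process][name] => "sshd" and
101 | # [process][pid] => 4242. The trailing ":integer" in a capture like
102 | # %{POSINT:[process][pid]:integer} is a type hint asking the consuming grok
103 | # parser to cast the value, assuming it supports typed captures.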
--------------------------------------------------------------------------------
/patterns/ecs-v1/haproxy:
--------------------------------------------------------------------------------
1 |
2 | HAPROXYTIME (?!<[0-9])%{HOUR}:%{MINUTE}(?::%{SECOND})(?![0-9])
3 | HAPROXYDATE %{MONTHDAY}/%{MONTH}/%{YEAR}:%{HAPROXYTIME}.%{INT}
4 |
5 | # Override these default patterns to parse out what is captured in your haproxy.cfg
6 | HAPROXYCAPTUREDREQUESTHEADERS %{DATA:[haproxy][http][request][captured_headers]}
7 | HAPROXYCAPTUREDRESPONSEHEADERS %{DATA:[haproxy][http][response][captured_headers]}
8 |
9 | # Example:
10 | # These haproxy config lines will add data to the logs that are captured
11 | # by the patterns below. Place them in your custom patterns directory to
12 | # override the defaults.
13 | #
14 | # capture request header Host len 40
15 | # capture request header X-Forwarded-For len 50
16 | # capture request header Accept-Language len 50
17 | # capture request header Referer len 200
18 | # capture request header User-Agent len 200
19 | #
20 | # capture response header Content-Type len 30
21 | # capture response header Content-Encoding len 10
22 | # capture response header Cache-Control len 200
23 | # capture response header Last-Modified len 200
24 | #
25 | # HAPROXYCAPTUREDREQUESTHEADERS %{DATA:[haproxy][http][request][host]}\|%{DATA:[haproxy][http][request][x_forwarded_for]}\|%{DATA:[haproxy][http][request][accept_language]}\|%{DATA:[http][request][referrer]}\|%{DATA:[user_agent][original]}
26 | # HAPROXYCAPTUREDRESPONSEHEADERS %{DATA:[http][response][mime_type]}\|%{DATA:[haproxy][http][response][encoding]}\|%{DATA:[haproxy][http][response][cache_control]}\|%{DATA:[haproxy][http][response][last_modified]}
27 |
28 | HAPROXYURI (?:%{URIPROTO:[url][scheme]}://)?(?:%{USER:[url][username]}(?::[^@]*)?@)?(?:%{IPORHOST:[url][domain]}(?::%{POSINT:[url][port]:integer})?)?(?:%{URIPATH:[url][path]}(?:\?%{URIQUERY:[url][query]})?)?
29 |
30 | HAPROXYHTTPREQUESTLINE (?:|(?:%{WORD:[http][request][method]} %{HAPROXYURI:[url][original]}(?: HTTP/%{NUMBER:[http][version]})?))
31 |
32 | # parse a haproxy 'httplog' line
33 | HAPROXYHTTPBASE %{IP:[source][address]}:%{INT:[source][port]:integer} \[%{HAPROXYDATE:[haproxy][request_date]}\] %{NOTSPACE:[haproxy][frontend_name]} %{NOTSPACE:[haproxy][backend_name]}/(?:|%{NOTSPACE:[haproxy][server_name]}) (?:-1|%{INT:[haproxy][http][request][time_wait_ms]:integer})/(?:-1|%{INT:[haproxy][total_waiting_time_ms]:integer})/(?:-1|%{INT:[haproxy][connection_wait_time_ms]:integer})/(?:-1|%{INT:[haproxy][http][request][time_wait_without_data_ms]:integer})/%{NOTSPACE:[haproxy][total_time_ms]} %{INT:[http][response][status_code]:integer} %{INT:[source][bytes]:integer} (?:-|%{DATA:[haproxy][http][request][captured_cookie]}) (?:-|%{DATA:[haproxy][http][response][captured_cookie]}) %{NOTSPACE:[haproxy][termination_state]} %{INT:[haproxy][connections][active]:integer}/%{INT:[haproxy][connections][frontend]:integer}/%{INT:[haproxy][connections][backend]:integer}/%{INT:[haproxy][connections][server]:integer}/%{INT:[haproxy][connections][retries]:integer} %{INT:[haproxy][server_queue]:integer}/%{INT:[haproxy][backend_queue]:integer}(?: \{%{HAPROXYCAPTUREDREQUESTHEADERS}\}(?: \{%{HAPROXYCAPTUREDRESPONSEHEADERS}\})?)?(?: "%{HAPROXYHTTPREQUESTLINE}"?)?
34 | # :long - %{INT:[source][bytes]:int}
35 |
36 | HAPROXYHTTP (?:%{SYSLOGTIMESTAMP:timestamp}|%{TIMESTAMP_ISO8601:timestamp}) %{IPORHOST:[host][hostname]} %{SYSLOGPROG}: %{HAPROXYHTTPBASE}
37 |
38 | # parse a haproxy 'tcplog' line
39 | HAPROXYTCP (?:%{SYSLOGTIMESTAMP:timestamp}|%{TIMESTAMP_ISO8601:timestamp}) %{IPORHOST:[host][hostname]} %{SYSLOGPROG}: %{IP:[source][address]}:%{INT:[source][port]:integer} \[%{HAPROXYDATE:[haproxy][request_date]}\] %{NOTSPACE:[haproxy][frontend_name]} %{NOTSPACE:[haproxy][backend_name]}/(?:|%{NOTSPACE:[haproxy][server_name]}) (?:-1|%{INT:[haproxy][total_waiting_time_ms]:integer})/(?:-1|%{INT:[haproxy][connection_wait_time_ms]:integer})/%{NOTSPACE:[haproxy][total_time_ms]} %{INT:[source][bytes]:integer} %{NOTSPACE:[haproxy][termination_state]} %{INT:[haproxy][connections][active]:integer}/%{INT:[haproxy][connections][frontend]:integer}/%{INT:[haproxy][connections][backend]:integer}/%{INT:[haproxy][connections][server]:integer}/%{INT:[haproxy][connections][retries]:integer} %{INT:[haproxy][server_queue]:integer}/%{INT:[haproxy][backend_queue]:integer}
40 | # :long - %{INT:[source][bytes]:int}
41 |
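42 | # Illustrative sketch (not part of the pattern set): one way to load an
43 | # override such as the HAPROXYCAPTUREDREQUESTHEADERS redefinition shown above
44 | # is to place it in a separate pattern file and point the grok parser at it
45 | # from the Fluentd side, roughly:
46 | #
47 | #   <parse>
48 | #     @type grok
49 | #     grok_pattern %{HAPROXYHTTP}
50 | #     custom_pattern_path /etc/fluent/patterns/haproxy_overrides
51 | #   </parse>
52 | #
53 | # The file path is only an example; check the plugin README for the parser
54 | # options supported by your version.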
--------------------------------------------------------------------------------
/patterns/ecs-v1/httpd:
--------------------------------------------------------------------------------
1 | HTTPDUSER %{EMAILADDRESS}|%{USER}
2 | HTTPDERROR_DATE %{DAY} %{MONTH} %{MONTHDAY} %{TIME} %{YEAR}
3 |
4 | # Log formats
5 | HTTPD_COMMONLOG %{IPORHOST:[source][address]} (?:-|%{HTTPDUSER:[apache][access][user][identity]}) (?:-|%{HTTPDUSER:[user][name]}) \[%{HTTPDATE:timestamp}\] "(?:%{WORD:[http][request][method]} %{NOTSPACE:[url][original]}(?: HTTP/%{NUMBER:[http][version]})?|%{DATA})" (?:-|%{INT:[http][response][status_code]:integer}) (?:-|%{INT:[http][response][body][bytes]:integer})
6 | # :long - %{INT:[http][response][body][bytes]:int}
7 | HTTPD_COMBINEDLOG %{HTTPD_COMMONLOG} "(?:-|%{DATA:[http][request][referrer]})" "(?:-|%{DATA:[user_agent][original]})"
8 |
9 | # Error logs
10 | HTTPD20_ERRORLOG \[%{HTTPDERROR_DATE:timestamp}\] \[%{LOGLEVEL:[log][level]}\] (?:\[client %{IPORHOST:[source][address]}\] )?%{GREEDYDATA:message}
11 | HTTPD24_ERRORLOG \[%{HTTPDERROR_DATE:timestamp}\] \[(?:%{WORD:[apache][error][module]})?:%{LOGLEVEL:[log][level]}\] \[pid %{POSINT:[process][pid]:integer}(:tid %{INT:[process][thread][id]:integer})?\](?: \(%{POSINT:[apache][error][proxy][error][code]?}\)%{DATA:[apache][error][proxy][error][message]}:)?(?: \[client %{IPORHOST:[source][address]}(?::%{POSINT:[source][port]:integer})?\])?(?: %{DATA:[error][code]}:)? %{GREEDYDATA:message}
12 | # :long - %{INT:[process][thread][id]:int}
13 | HTTPD_ERRORLOG %{HTTPD20_ERRORLOG}|%{HTTPD24_ERRORLOG}
14 |
15 | # Deprecated
16 | COMMONAPACHELOG %{HTTPD_COMMONLOG}
17 | COMBINEDAPACHELOG %{HTTPD_COMBINEDLOG}
18 |
--------------------------------------------------------------------------------
/patterns/ecs-v1/java:
--------------------------------------------------------------------------------
1 | JAVACLASS (?:[a-zA-Z$_][a-zA-Z$_0-9]*\.)*[a-zA-Z$_][a-zA-Z$_0-9]*
2 | #Space is an allowed character to match special cases like 'Native Method' or 'Unknown Source'
3 | JAVAFILE (?:[a-zA-Z$_0-9. -]+)
4 | #Allow special <init>, <clinit> methods
5 | JAVAMETHOD (?:(<(?:cl)?init>)|[a-zA-Z$_][a-zA-Z$_0-9]*)
6 | #Line number is optional in special cases 'Native method' or 'Unknown source'
7 | JAVASTACKTRACEPART %{SPACE}at %{JAVACLASS:[java][log][origin][class][name]}\.%{JAVAMETHOD:[log][origin][function]}\(%{JAVAFILE:[log][origin][file][name]}(?::%{INT:[log][origin][file][line]:integer})?\)
8 | # Java Logs
9 | JAVATHREAD (?:[A-Z]{2}-Processor[\d]+)
10 | JAVALOGMESSAGE (?:.*)
11 |
12 | # MMM dd, yyyy HH:mm:ss eg: Jan 9, 2014 7:13:13 AM
13 | # matches default logging configuration in Tomcat 4.1, 5.0, 5.5, 6.0, 7.0
14 | CATALINA7_DATESTAMP %{MONTH} %{MONTHDAY}, %{YEAR} %{HOUR}:%{MINUTE}:%{SECOND} (?:AM|PM)
15 | CATALINA7_LOG %{CATALINA7_DATESTAMP:timestamp} %{JAVACLASS:[java][log][origin][class][name]}(?: %{JAVAMETHOD:[log][origin][function]})?\s*(?:%{LOGLEVEL:[log][level]}:)? %{JAVALOGMESSAGE:message}
16 |
17 | # 31-Jul-2020 16:40:38.578 in Tomcat 8.5/9.0
18 | CATALINA8_DATESTAMP %{MONTHDAY}-%{MONTH}-%{YEAR} %{HOUR}:%{MINUTE}:%{SECOND}
19 | CATALINA8_LOG %{CATALINA8_DATESTAMP:timestamp} %{LOGLEVEL:[log][level]} \[%{DATA:[java][log][origin][thread][name]}\] %{JAVACLASS:[java][log][origin][class][name]}\.(?:%{JAVAMETHOD:[log][origin][function]})? %{JAVALOGMESSAGE:message}
20 |
21 | CATALINA_DATESTAMP (?:%{CATALINA8_DATESTAMP})|(?:%{CATALINA7_DATESTAMP})
22 | CATALINALOG (?:%{CATALINA8_LOG})|(?:%{CATALINA7_LOG})
23 |
24 | # in Tomcat 5.5, 6.0, 7.0 it is the same as catalina.out logging format
25 | TOMCAT7_LOG %{CATALINA7_LOG}
26 | TOMCAT8_LOG %{CATALINA8_LOG}
27 |
28 | # NOTE: a weird log we started with - not sure what Tomcat version this should match out of the box (due to the | delimiters)
29 | TOMCATLEGACY_DATESTAMP %{YEAR}-%{MONTHNUM}-%{MONTHDAY} %{HOUR}:%{MINUTE}:%{SECOND}(?: %{ISO8601_TIMEZONE})?
30 | TOMCATLEGACY_LOG %{TOMCATLEGACY_DATESTAMP:timestamp} \| %{LOGLEVEL:[log][level]} \| %{JAVACLASS:[java][log][origin][class][name]} - %{JAVALOGMESSAGE:message}
31 |
32 | TOMCAT_DATESTAMP (?:%{CATALINA8_DATESTAMP})|(?:%{CATALINA7_DATESTAMP})|(?:%{TOMCATLEGACY_DATESTAMP})
33 |
34 | TOMCATLOG (?:%{TOMCAT8_LOG})|(?:%{TOMCAT7_LOG})|(?:%{TOMCATLEGACY_LOG})
35 |
--------------------------------------------------------------------------------
/patterns/ecs-v1/junos:
--------------------------------------------------------------------------------
1 | # JUNOS 11.4 RT_FLOW patterns
2 | RT_FLOW_TAG (?:RT_FLOW_SESSION_CREATE|RT_FLOW_SESSION_CLOSE|RT_FLOW_SESSION_DENY)
3 | # deprecated legacy name:
4 | RT_FLOW_EVENT RT_FLOW_TAG
5 |
6 | RT_FLOW1 %{RT_FLOW_TAG:[juniper][srx][tag]}: %{GREEDYDATA:[juniper][srx][reason]}: %{IP:[source][ip]}/%{INT:[source][port]:integer}->%{IP:[destination][ip]}/%{INT:[destination][port]:integer} %{DATA:[juniper][srx][service_name]} %{IP:[source][nat][ip]}/%{INT:[source][nat][port]:integer}->%{IP:[destination][nat][ip]}/%{INT:[destination][nat][port]:integer} (?:(?:None)|(?:%{DATA:[juniper][srx][src_nat_rule_name]})) (?:(?:None)|(?:%{DATA:[juniper][srx][dst_nat_rule_name]})) %{INT:[network][iana_number]} %{DATA:[rule][name]} %{DATA:[observer][ingress][zone]} %{DATA:[observer][egress][zone]} %{INT:[juniper][srx][session_id]} \d+\(%{INT:[source][bytes]:integer}\) \d+\(%{INT:[destination][bytes]:integer}\) %{INT:[juniper][srx][elapsed_time]:integer} .*
7 | # :long - %{INT:[source][bytes]:int}
8 | # :long - %{INT:[destination][bytes]:int}
9 |
10 | RT_FLOW2 %{RT_FLOW_TAG:[juniper][srx][tag]}: session created %{IP:[source][ip]}/%{INT:[source][port]:integer}->%{IP:[destination][ip]}/%{INT:[destination][port]:integer} %{DATA:[juniper][srx][service_name]} %{IP:[source][nat][ip]}/%{INT:[source][nat][port]:integer}->%{IP:[destination][nat][ip]}/%{INT:[destination][nat][port]:integer} (?:(?:None)|(?:%{DATA:[juniper][srx][src_nat_rule_name]})) (?:(?:None)|(?:%{DATA:[juniper][srx][dst_nat_rule_name]})) %{INT:[network][iana_number]} %{DATA:[rule][name]} %{DATA:[observer][ingress][zone]} %{DATA:[observer][egress][zone]} %{INT:[juniper][srx][session_id]} .*
11 |
12 | RT_FLOW3 %{RT_FLOW_TAG:[juniper][srx][tag]}: session denied %{IP:[source][ip]}/%{INT:[source][port]:integer}->%{IP:[destination][ip]}/%{INT:[destination][port]:integer} %{DATA:[juniper][srx][service_name]} %{INT:[network][iana_number]}\(\d\) %{DATA:[rule][name]} %{DATA:[observer][ingress][zone]} %{DATA:[observer][egress][zone]} .*
13 |
14 |
--------------------------------------------------------------------------------
/patterns/ecs-v1/linux-syslog:
--------------------------------------------------------------------------------
1 | SYSLOG5424PRINTASCII [!-~]+
2 |
3 | SYSLOGBASE2 (?:%{SYSLOGTIMESTAMP:timestamp}|%{TIMESTAMP_ISO8601:timestamp})(?: %{SYSLOGFACILITY})?(?: %{SYSLOGHOST:[host][hostname]})?(?: %{SYSLOGPROG}:)?
4 | SYSLOGPAMSESSION %{SYSLOGBASE} (?=%{GREEDYDATA:message})%{WORD:[system][auth][pam][module]}\(%{DATA:[system][auth][pam][origin]}\): session %{WORD:[system][auth][pam][session_state]} for user %{USERNAME:[user][name]}(?: by %{GREEDYDATA})?
5 |
6 | CRON_ACTION [A-Z ]+
7 | CRONLOG %{SYSLOGBASE} \(%{USER:[user][name]}\) %{CRON_ACTION:[system][cron][action]} \(%{DATA:message}\)
8 |
9 | SYSLOGLINE %{SYSLOGBASE2} %{GREEDYDATA:message}
10 |
11 | # IETF 5424 syslog(8) format (see http://www.rfc-editor.org/info/rfc5424)
12 | SYSLOG5424PRI <%{NONNEGINT:[log][syslog][priority]:integer}>
13 | SYSLOG5424SD \[%{DATA}\]+
14 | SYSLOG5424BASE %{SYSLOG5424PRI}%{NONNEGINT:[system][syslog][version]} +(?:-|%{TIMESTAMP_ISO8601:timestamp}) +(?:-|%{IPORHOST:[host][hostname]}) +(?:-|%{SYSLOG5424PRINTASCII:[process][name]}) +(?:-|%{POSINT:[process][pid]:integer}) +(?:-|%{SYSLOG5424PRINTASCII:[event][code]}) +(?:-|%{SYSLOG5424SD:[system][syslog][structured_data]})?
15 |
16 | SYSLOG5424LINE %{SYSLOG5424BASE} +%{GREEDYDATA:message}
17 |
--------------------------------------------------------------------------------
/patterns/ecs-v1/maven:
--------------------------------------------------------------------------------
1 | MAVEN_VERSION (?:(\d+)\.)?(?:(\d+)\.)?(\*|\d+)(?:[.-](RELEASE|SNAPSHOT))?
2 |
--------------------------------------------------------------------------------
/patterns/ecs-v1/mcollective:
--------------------------------------------------------------------------------
1 | # Remember, these can be multi-line events.
2 | MCOLLECTIVE ., \[%{TIMESTAMP_ISO8601:timestamp} #%{POSINT:[process][pid]:integer}\]%{SPACE}%{LOGLEVEL:[log][level]}
3 |
4 | MCOLLECTIVEAUDIT %{TIMESTAMP_ISO8601:timestamp}:
5 |
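6 | # Illustrative sketch (not part of the pattern set): since MCollective entries
7 | # can span several lines, they are a natural fit for the multiline grok parser
8 | # shipped with this plugin, along the lines of:
9 | #
10 | #   <parse>
11 | #     @type multiline_grok
12 | #     grok_pattern %{MCOLLECTIVE}%{GREEDYDATA:message}
13 | #     multiline_start_regexp /^., \[/
14 | #   </parse>
15 | #
16 | # Parameter names can differ between plugin versions; verify them against the
17 | # README of the version you run.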
--------------------------------------------------------------------------------
/patterns/ecs-v1/mongodb:
--------------------------------------------------------------------------------
1 | MONGO_LOG %{SYSLOGTIMESTAMP:timestamp} \[%{WORD:[mongodb][component]}\] %{GREEDYDATA:message}
2 | MONGO_QUERY \{ (?<={ ).*(?= } ntoreturn:) \}
3 | MONGO_SLOWQUERY %{WORD:[mongodb][profile][op]} %{MONGO_WORDDASH:[mongodb][database]}\.%{MONGO_WORDDASH:[mongodb][collection]} %{WORD}: %{MONGO_QUERY:[mongodb][query][original]} ntoreturn:%{NONNEGINT:[mongodb][profile][ntoreturn]:integer} ntoskip:%{NONNEGINT:[mongodb][profile][ntoskip]:integer} nscanned:%{NONNEGINT:[mongodb][profile][nscanned]:integer}.*? nreturned:%{NONNEGINT:[mongodb][profile][nreturned]:integer}.*? %{INT:[mongodb][profile][duration]:integer}ms
4 | MONGO_WORDDASH \b[\w-]+\b
5 | MONGO3_SEVERITY \w
6 | MONGO3_COMPONENT %{WORD}
7 | MONGO3_LOG %{TIMESTAMP_ISO8601:timestamp} %{MONGO3_SEVERITY:[log][level]} (?:-|%{MONGO3_COMPONENT:[mongodb][component]})%{SPACE}(?:\[%{DATA:[mongodb][context]}\])? %{GREEDYDATA:message}
8 |
--------------------------------------------------------------------------------
/patterns/ecs-v1/nagios:
--------------------------------------------------------------------------------
1 | ##################################################################################
2 | ##################################################################################
3 | # Chop Nagios log files to smithereens!
4 | #
5 | # A set of GROK filters to process logfiles generated by Nagios.
6 | # While it does not do so yet, this set intends to cover all possible Nagios logs.
7 | #
8 | # Some more work needs to be done to cover all External Commands:
9 | # http://old.nagios.org/developerinfo/externalcommands/commandlist.php
10 | #
11 | # If you need some support on these rules please contact:
12 | # Jelle Smet http://smetj.net
13 | #
14 | #################################################################################
15 | #################################################################################
16 |
17 | NAGIOSTIME \[%{NUMBER:timestamp}\]
18 |
19 | ###############################################
20 | ######## Begin nagios log types
21 | ###############################################
22 | NAGIOS_TYPE_CURRENT_SERVICE_STATE CURRENT SERVICE STATE
23 | NAGIOS_TYPE_CURRENT_HOST_STATE CURRENT HOST STATE
24 |
25 | NAGIOS_TYPE_SERVICE_NOTIFICATION SERVICE NOTIFICATION
26 | NAGIOS_TYPE_HOST_NOTIFICATION HOST NOTIFICATION
27 |
28 | NAGIOS_TYPE_SERVICE_ALERT SERVICE ALERT
29 | NAGIOS_TYPE_HOST_ALERT HOST ALERT
30 |
31 | NAGIOS_TYPE_SERVICE_FLAPPING_ALERT SERVICE FLAPPING ALERT
32 | NAGIOS_TYPE_HOST_FLAPPING_ALERT HOST FLAPPING ALERT
33 |
34 | NAGIOS_TYPE_SERVICE_DOWNTIME_ALERT SERVICE DOWNTIME ALERT
35 | NAGIOS_TYPE_HOST_DOWNTIME_ALERT HOST DOWNTIME ALERT
36 |
37 | NAGIOS_TYPE_PASSIVE_SERVICE_CHECK PASSIVE SERVICE CHECK
38 | NAGIOS_TYPE_PASSIVE_HOST_CHECK PASSIVE HOST CHECK
39 |
40 | NAGIOS_TYPE_SERVICE_EVENT_HANDLER SERVICE EVENT HANDLER
41 | NAGIOS_TYPE_HOST_EVENT_HANDLER HOST EVENT HANDLER
42 |
43 | NAGIOS_TYPE_EXTERNAL_COMMAND EXTERNAL COMMAND
44 | NAGIOS_TYPE_TIMEPERIOD_TRANSITION TIMEPERIOD TRANSITION
45 | ###############################################
46 | ######## End nagios log types
47 | ###############################################
48 |
49 | ###############################################
50 | ######## Begin external check types
51 | ###############################################
52 | NAGIOS_EC_DISABLE_SVC_CHECK DISABLE_SVC_CHECK
53 | NAGIOS_EC_ENABLE_SVC_CHECK ENABLE_SVC_CHECK
54 | NAGIOS_EC_DISABLE_HOST_CHECK DISABLE_HOST_CHECK
55 | NAGIOS_EC_ENABLE_HOST_CHECK ENABLE_HOST_CHECK
56 | NAGIOS_EC_PROCESS_SERVICE_CHECK_RESULT PROCESS_SERVICE_CHECK_RESULT
57 | NAGIOS_EC_PROCESS_HOST_CHECK_RESULT PROCESS_HOST_CHECK_RESULT
58 | NAGIOS_EC_SCHEDULE_SERVICE_DOWNTIME SCHEDULE_SERVICE_DOWNTIME
59 | NAGIOS_EC_SCHEDULE_HOST_DOWNTIME SCHEDULE_HOST_DOWNTIME
60 | NAGIOS_EC_DISABLE_HOST_SVC_NOTIFICATIONS DISABLE_HOST_SVC_NOTIFICATIONS
61 | NAGIOS_EC_ENABLE_HOST_SVC_NOTIFICATIONS ENABLE_HOST_SVC_NOTIFICATIONS
62 | NAGIOS_EC_DISABLE_HOST_NOTIFICATIONS DISABLE_HOST_NOTIFICATIONS
63 | NAGIOS_EC_ENABLE_HOST_NOTIFICATIONS ENABLE_HOST_NOTIFICATIONS
64 | NAGIOS_EC_DISABLE_SVC_NOTIFICATIONS DISABLE_SVC_NOTIFICATIONS
65 | NAGIOS_EC_ENABLE_SVC_NOTIFICATIONS ENABLE_SVC_NOTIFICATIONS
66 | ###############################################
67 | ######## End external check types
68 | ###############################################
69 | NAGIOS_WARNING Warning:%{SPACE}%{GREEDYDATA:message}
70 |
71 | NAGIOS_CURRENT_SERVICE_STATE %{NAGIOS_TYPE_CURRENT_SERVICE_STATE:[nagios][log][type]}: %{DATA:[host][hostname]};%{DATA:[service][name]};%{DATA:[service][state]};%{DATA:[nagios][log][state_type]};%{INT:[nagios][log][attempt]:integer};%{GREEDYDATA:message}
72 | NAGIOS_CURRENT_HOST_STATE %{NAGIOS_TYPE_CURRENT_HOST_STATE:[nagios][log][type]}: %{DATA:[host][hostname]};%{DATA:[service][state]};%{DATA:[nagios][log][state_type]};%{INT:[nagios][log][attempt]:integer};%{GREEDYDATA:message}
73 |
74 | NAGIOS_SERVICE_NOTIFICATION %{NAGIOS_TYPE_SERVICE_NOTIFICATION:[nagios][log][type]}: %{DATA:[user][name]};%{DATA:[host][hostname]};%{DATA:[service][name]};%{DATA:[service][state]};%{DATA:[nagios][log][notification_command]};%{GREEDYDATA:message}
75 | NAGIOS_HOST_NOTIFICATION %{NAGIOS_TYPE_HOST_NOTIFICATION:[nagios][log][type]}: %{DATA:[user][name]};%{DATA:[host][hostname]};%{DATA:[service][state]};%{DATA:[nagios][log][notification_command]};%{GREEDYDATA:message}
76 |
77 | NAGIOS_SERVICE_ALERT %{NAGIOS_TYPE_SERVICE_ALERT:[nagios][log][type]}: %{DATA:[host][hostname]};%{DATA:[service][name]};%{DATA:[service][state]};%{DATA:[nagios][log][state_type]};%{INT:[nagios][log][attempt]:integer};%{GREEDYDATA:message}
78 | NAGIOS_HOST_ALERT %{NAGIOS_TYPE_HOST_ALERT:[nagios][log][type]}: %{DATA:[host][hostname]};%{DATA:[service][state]};%{DATA:[nagios][log][state_type]};%{INT:[nagios][log][attempt]:integer};%{GREEDYDATA:message}
79 |
80 | NAGIOS_SERVICE_FLAPPING_ALERT %{NAGIOS_TYPE_SERVICE_FLAPPING_ALERT:[nagios][log][type]}: %{DATA:[host][hostname]};%{DATA:[service][name]};%{DATA:[service][state]};%{GREEDYDATA:message}
81 | NAGIOS_HOST_FLAPPING_ALERT %{NAGIOS_TYPE_HOST_FLAPPING_ALERT:[nagios][log][type]}: %{DATA:[host][hostname]};%{DATA:[service][state]};%{GREEDYDATA:message}
82 |
83 | NAGIOS_SERVICE_DOWNTIME_ALERT %{NAGIOS_TYPE_SERVICE_DOWNTIME_ALERT:[nagios][log][type]}: %{DATA:[host][hostname]};%{DATA:[service][name]};%{DATA:[service][state]};%{GREEDYDATA:[nagios][log][comment]}
84 | NAGIOS_HOST_DOWNTIME_ALERT %{NAGIOS_TYPE_HOST_DOWNTIME_ALERT:[nagios][log][type]}: %{DATA:[host][hostname]};%{DATA:[service][state]};%{GREEDYDATA:[nagios][log][comment]}
85 |
86 | NAGIOS_PASSIVE_SERVICE_CHECK %{NAGIOS_TYPE_PASSIVE_SERVICE_CHECK:[nagios][log][type]}: %{DATA:[host][hostname]};%{DATA:[service][name]};%{DATA:[service][state]};%{GREEDYDATA:[nagios][log][comment]}
87 | NAGIOS_PASSIVE_HOST_CHECK %{NAGIOS_TYPE_PASSIVE_HOST_CHECK:[nagios][log][type]}: %{DATA:[host][hostname]};%{DATA:[service][state]};%{GREEDYDATA:[nagios][log][comment]}
88 |
89 | NAGIOS_SERVICE_EVENT_HANDLER %{NAGIOS_TYPE_SERVICE_EVENT_HANDLER:[nagios][log][type]}: %{DATA:[host][hostname]};%{DATA:[service][name]};%{DATA:[service][state]};%{DATA:[nagios][log][state_type]};%{DATA:[nagios][log][event_handler_name]}
90 | NAGIOS_HOST_EVENT_HANDLER %{NAGIOS_TYPE_HOST_EVENT_HANDLER:[nagios][log][type]}: %{DATA:[host][hostname]};%{DATA:[service][state]};%{DATA:[nagios][log][state_type]};%{DATA:[nagios][log][event_handler_name]}
91 |
92 | NAGIOS_TIMEPERIOD_TRANSITION %{NAGIOS_TYPE_TIMEPERIOD_TRANSITION:[nagios][log][type]}: %{DATA:[service][name]};%{NUMBER:[nagios][log][period_from]:integer};%{NUMBER:[nagios][log][period_to]:integer}
93 |
94 | ####################
95 | #### External checks
96 | ####################
97 |
98 | #Disable host & service check
99 | NAGIOS_EC_LINE_DISABLE_SVC_CHECK %{NAGIOS_TYPE_EXTERNAL_COMMAND:[nagios][log][type]}: %{NAGIOS_EC_DISABLE_SVC_CHECK:[nagios][log][command]};%{DATA:[host][hostname]};%{DATA:[service][name]}
100 | NAGIOS_EC_LINE_DISABLE_HOST_CHECK %{NAGIOS_TYPE_EXTERNAL_COMMAND:[nagios][log][type]}: %{NAGIOS_EC_DISABLE_HOST_CHECK:[nagios][log][command]};%{DATA:[host][hostname]}
101 |
102 | #Enable host & service check
103 | NAGIOS_EC_LINE_ENABLE_SVC_CHECK %{NAGIOS_TYPE_EXTERNAL_COMMAND:[nagios][log][type]}: %{NAGIOS_EC_ENABLE_SVC_CHECK:[nagios][log][command]};%{DATA:[host][hostname]};%{DATA:[service][name]}
104 | NAGIOS_EC_LINE_ENABLE_HOST_CHECK %{NAGIOS_TYPE_EXTERNAL_COMMAND:[nagios][log][type]}: %{NAGIOS_EC_ENABLE_HOST_CHECK:[nagios][log][command]};%{DATA:[host][hostname]}
105 |
106 | #Process host & service check
107 | NAGIOS_EC_LINE_PROCESS_SERVICE_CHECK_RESULT %{NAGIOS_TYPE_EXTERNAL_COMMAND:[nagios][log][type]}: %{NAGIOS_EC_PROCESS_SERVICE_CHECK_RESULT:[nagios][log][command]};%{DATA:[host][hostname]};%{DATA:[service][name]};%{DATA:[service][state]};%{GREEDYDATA:[nagios][log][check_result]}
108 | NAGIOS_EC_LINE_PROCESS_HOST_CHECK_RESULT %{NAGIOS_TYPE_EXTERNAL_COMMAND:[nagios][log][type]}: %{NAGIOS_EC_PROCESS_HOST_CHECK_RESULT:[nagios][log][command]};%{DATA:[host][hostname]};%{DATA:[service][state]};%{GREEDYDATA:[nagios][log][check_result]}
109 |
110 | #Disable host & service notifications
111 | NAGIOS_EC_LINE_DISABLE_HOST_SVC_NOTIFICATIONS %{NAGIOS_TYPE_EXTERNAL_COMMAND:[nagios][log][type]}: %{NAGIOS_EC_DISABLE_HOST_SVC_NOTIFICATIONS:[nagios][log][command]};%{GREEDYDATA:[host][hostname]}
112 | NAGIOS_EC_LINE_DISABLE_HOST_NOTIFICATIONS %{NAGIOS_TYPE_EXTERNAL_COMMAND:[nagios][log][type]}: %{NAGIOS_EC_DISABLE_HOST_NOTIFICATIONS:[nagios][log][command]};%{GREEDYDATA:[host][hostname]}
113 | NAGIOS_EC_LINE_DISABLE_SVC_NOTIFICATIONS %{NAGIOS_TYPE_EXTERNAL_COMMAND:[nagios][log][type]}: %{NAGIOS_EC_DISABLE_SVC_NOTIFICATIONS:[nagios][log][command]};%{DATA:[host][hostname]};%{GREEDYDATA:[service][name]}
114 |
115 | #Enable host & service notifications
116 | NAGIOS_EC_LINE_ENABLE_HOST_SVC_NOTIFICATIONS %{NAGIOS_TYPE_EXTERNAL_COMMAND:[nagios][log][type]}: %{NAGIOS_EC_ENABLE_HOST_SVC_NOTIFICATIONS:[nagios][log][command]};%{GREEDYDATA:[host][hostname]}
117 | NAGIOS_EC_LINE_ENABLE_HOST_NOTIFICATIONS %{NAGIOS_TYPE_EXTERNAL_COMMAND:[nagios][log][type]}: %{NAGIOS_EC_ENABLE_HOST_NOTIFICATIONS:[nagios][log][command]};%{GREEDYDATA:[host][hostname]}
118 | NAGIOS_EC_LINE_ENABLE_SVC_NOTIFICATIONS %{NAGIOS_TYPE_EXTERNAL_COMMAND:[nagios][log][type]}: %{NAGIOS_EC_ENABLE_SVC_NOTIFICATIONS:[nagios][log][command]};%{DATA:[host][hostname]};%{GREEDYDATA:[service][name]}
119 |
120 | #Schedule host & service downtime
121 | NAGIOS_EC_LINE_SCHEDULE_HOST_DOWNTIME %{NAGIOS_TYPE_EXTERNAL_COMMAND:[nagios][log][type]}: %{NAGIOS_EC_SCHEDULE_HOST_DOWNTIME:[nagios][log][command]};%{DATA:[host][hostname]};%{NUMBER:[nagios][log][start_time]};%{NUMBER:[nagios][log][end_time]};%{NUMBER:[nagios][log][fixed]};%{NUMBER:[nagios][log][trigger_id]};%{NUMBER:[nagios][log][duration]:integer};%{DATA:[user][name]};%{DATA:[nagios][log][comment]}
122 |
123 | #End matching line
124 | NAGIOSLOGLINE %{NAGIOSTIME} (?:%{NAGIOS_WARNING}|%{NAGIOS_CURRENT_SERVICE_STATE}|%{NAGIOS_CURRENT_HOST_STATE}|%{NAGIOS_SERVICE_NOTIFICATION}|%{NAGIOS_HOST_NOTIFICATION}|%{NAGIOS_SERVICE_ALERT}|%{NAGIOS_HOST_ALERT}|%{NAGIOS_SERVICE_FLAPPING_ALERT}|%{NAGIOS_HOST_FLAPPING_ALERT}|%{NAGIOS_SERVICE_DOWNTIME_ALERT}|%{NAGIOS_HOST_DOWNTIME_ALERT}|%{NAGIOS_PASSIVE_SERVICE_CHECK}|%{NAGIOS_PASSIVE_HOST_CHECK}|%{NAGIOS_SERVICE_EVENT_HANDLER}|%{NAGIOS_HOST_EVENT_HANDLER}|%{NAGIOS_TIMEPERIOD_TRANSITION}|%{NAGIOS_EC_LINE_DISABLE_SVC_CHECK}|%{NAGIOS_EC_LINE_ENABLE_SVC_CHECK}|%{NAGIOS_EC_LINE_DISABLE_HOST_CHECK}|%{NAGIOS_EC_LINE_ENABLE_HOST_CHECK}|%{NAGIOS_EC_LINE_PROCESS_HOST_CHECK_RESULT}|%{NAGIOS_EC_LINE_PROCESS_SERVICE_CHECK_RESULT}|%{NAGIOS_EC_LINE_SCHEDULE_HOST_DOWNTIME}|%{NAGIOS_EC_LINE_DISABLE_HOST_SVC_NOTIFICATIONS}|%{NAGIOS_EC_LINE_ENABLE_HOST_SVC_NOTIFICATIONS}|%{NAGIOS_EC_LINE_DISABLE_HOST_NOTIFICATIONS}|%{NAGIOS_EC_LINE_ENABLE_HOST_NOTIFICATIONS}|%{NAGIOS_EC_LINE_DISABLE_SVC_NOTIFICATIONS}|%{NAGIOS_EC_LINE_ENABLE_SVC_NOTIFICATIONS})
125 |
--------------------------------------------------------------------------------
/patterns/ecs-v1/postgresql:
--------------------------------------------------------------------------------
1 | # Default postgresql pg_log format pattern
2 | POSTGRESQL %{DATESTAMP:timestamp} %{TZ:[event][timezone]} %{DATA:[user][name]} %{GREEDYDATA:[postgresql][log][connection_id]} %{POSINT:[process][pid]:integer}
3 |
--------------------------------------------------------------------------------
/patterns/ecs-v1/rails:
--------------------------------------------------------------------------------
1 | RUUID \h{32}
2 | # rails controller with action
3 | RCONTROLLER (?<[rails][controller][class]>[^#]+)#(?<[rails][controller][action]>\w+)
4 |
5 | # this will often be the only line:
6 | RAILS3HEAD (?m)Started %{WORD:[http][request][method]} "%{URIPATHPARAM:[url][original]}" for %{IPORHOST:[source][address]} at (?<timestamp>%{YEAR}-%{MONTHNUM}-%{MONTHDAY} %{HOUR}:%{MINUTE}:%{SECOND} %{ISO8601_TIMEZONE})
7 | # for some strange reason, params are stripped of {} - not sure that's a good idea.
8 | RPROCESSING \W*Processing by %{RCONTROLLER} as (?<[rails][request][format]>\S+)(?:\W*Parameters: {%{DATA:[rails][request][params]}}\W*)?
9 | RAILS3FOOT Completed %{POSINT:[http][response][status_code]:integer}%{DATA} in %{NUMBER:[rails][request][duration][total]:float}ms %{RAILS3PROFILE}%{GREEDYDATA}
10 | RAILS3PROFILE (?:\(Views: %{NUMBER:[rails][request][duration][view]:float}ms \| ActiveRecord: %{NUMBER:[rails][request][duration][active_record]:float}ms|\(ActiveRecord: %{NUMBER:[rails][request][duration][active_record]:float}ms)?
11 |
12 | # putting it all together
13 | RAILS3 %{RAILS3HEAD}(?:%{RPROCESSING})?(?<[rails][request][explain][original]>(?:%{DATA}\n)*)(?:%{RAILS3FOOT})?
14 |
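15 | # Illustrative example (not part of the pattern set): RAILS3HEAD targets the
16 | # request header line, e.g.
17 | #   Started GET "/users/1" for 127.0.0.1 at 2021-03-04 10:15:30 +0000
18 | # while RPROCESSING and RAILS3FOOT pick up the subsequent "Processing by ..."
19 | # and "Completed 200 OK in 12.3ms ..." lines, so RAILS3 is normally fed a whole
20 | # multi-line request block rather than a single line.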
--------------------------------------------------------------------------------
/patterns/ecs-v1/redis:
--------------------------------------------------------------------------------
1 | REDISTIMESTAMP %{MONTHDAY} %{MONTH} %{TIME}
2 | REDISLOG \[%{POSINT:[process][pid]:integer}\] %{REDISTIMESTAMP:timestamp} \*
3 | REDISMONLOG %{NUMBER:timestamp} \[%{INT:[redis][database][id]} %{IP:[client][ip]}:%{POSINT:[client][port]:integer}\] "%{WORD:[redis][command][name]}"\s?%{GREEDYDATA:[redis][command][args]}
4 |
--------------------------------------------------------------------------------
/patterns/ecs-v1/ruby:
--------------------------------------------------------------------------------
1 | RUBY_LOGLEVEL (?:DEBUG|FATAL|ERROR|WARN|INFO)
2 | RUBY_LOGGER [DFEWI], \[%{TIMESTAMP_ISO8601:timestamp} #%{POSINT:[process][pid]:integer}\] *%{RUBY_LOGLEVEL:[log][level]} -- +%{DATA:[process][name]}: %{GREEDYDATA:message}
3 |
--------------------------------------------------------------------------------
/patterns/ecs-v1/squid:
--------------------------------------------------------------------------------
1 | # Pattern squid3
2 | # Documentation of squid3 logs formats can be found at the following link:
3 | # http://wiki.squid-cache.org/Features/LogFormat
4 | SQUID3_STATUS (?:%{POSINT:[http][response][status_code]:integer}|0|000)
5 | SQUID3 %{NUMBER:timestamp}\s+%{NUMBER:[squid][request][duration]:integer}\s%{IP:[source][ip]}\s%{WORD:[event][action]}/%{SQUID3_STATUS}\s%{INT:[http][response][bytes]:integer}\s%{WORD:[http][request][method]}\s%{NOTSPACE:[url][original]}\s(?:-|%{NOTSPACE:[user][name]})\s%{WORD:[squid][hierarchy_code]}/(?:-|%{IPORHOST:[destination][address]})\s(?:-|%{NOTSPACE:[http][response][mime_type]})
6 | # :long - %{INT:[http][response][bytes]:int}
7 |
--------------------------------------------------------------------------------
/patterns/ecs-v1/zeek:
--------------------------------------------------------------------------------
1 | # updated Zeek log matching, for legacy matching see the patterns/ecs-v1/bro
2 |
3 | ZEEK_BOOL [TF]
4 | ZEEK_DATA [^\t]+
5 |
6 | # http.log - the 'new' format (compared to BRO_HTTP)
7 | # has *version* and *origin* fields added and *filename* replaced with *orig_filenames* + *resp_filenames*
8 | ZEEK_HTTP %{NUMBER:timestamp}\t%{NOTSPACE:[zeek][session_id]}\t%{IP:[source][ip]}\t%{INT:[source][port]:integer}\t%{IP:[destination][ip]}\t%{INT:[destination][port]:integer}\t%{INT:[zeek][http][trans_depth]:integer}\t(?:-|%{WORD:[http][request][method]})\t(?:-|%{ZEEK_DATA:[url][domain]})\t(?:-|%{ZEEK_DATA:[url][original]})\t(?:-|%{ZEEK_DATA:[http][request][referrer]})\t(?:-|%{NUMBER:[http][version]})\t(?:-|%{ZEEK_DATA:[user_agent][original]})\t(?:-|%{ZEEK_DATA:[zeek][http][origin]})\t(?:-|%{NUMBER:[http][request][body][bytes]:integer})\t(?:-|%{NUMBER:[http][response][body][bytes]:integer})\t(?:-|%{POSINT:[http][response][status_code]:integer})\t(?:-|%{DATA:[zeek][http][status_msg]})\t(?:-|%{POSINT:[zeek][http][info_code]:integer})\t(?:-|%{DATA:[zeek][http][info_msg]})\t(?:\(empty\)|%{ZEEK_DATA:[zeek][http][tags]})\t(?:-|%{ZEEK_DATA:[url][username]})\t(?:-|%{ZEEK_DATA:[url][password]})\t(?:-|%{ZEEK_DATA:[zeek][http][proxied]})\t(?:-|%{ZEEK_DATA:[zeek][http][orig_fuids]})\t(?:-|%{ZEEK_DATA:[zeek][http][orig_filenames]})\t(?:-|%{ZEEK_DATA:[http][request][mime_type]})\t(?:-|%{ZEEK_DATA:[zeek][http][resp_fuids]})\t(?:-|%{ZEEK_DATA:[zeek][http][resp_filenames]})\t(?:-|%{ZEEK_DATA:[http][response][mime_type]})
9 | # :long - %{NUMBER:[http][request][body][bytes]:int}
10 | # :long - %{NUMBER:[http][response][body][bytes]:int}
11 |
12 | # dns.log - 'updated' BRO_DNS format (added *zeek.dns.rtt*)
13 | ZEEK_DNS %{NUMBER:timestamp}\t%{NOTSPACE:[zeek][session_id]}\t%{IP:[source][ip]}\t%{INT:[source][port]:integer}\t%{IP:[destination][ip]}\t%{INT:[destination][port]:integer}\t%{WORD:[network][transport]}\t(?:-|%{INT:[dns][id]:integer})\t(?:-|%{NUMBER:[zeek][dns][rtt]:float})\t(?:-|%{ZEEK_DATA:[dns][question][name]})\t(?:-|%{INT:[zeek][dns][qclass]:integer})\t(?:-|%{ZEEK_DATA:[zeek][dns][qclass_name]})\t(?:-|%{INT:[zeek][dns][qtype]:integer})\t(?:-|%{ZEEK_DATA:[dns][question][type]})\t(?:-|%{INT:[zeek][dns][rcode]:integer})\t(?:-|%{ZEEK_DATA:[dns][response_code]})\t%{ZEEK_BOOL:[zeek][dns][AA]}\t%{ZEEK_BOOL:[zeek][dns][TC]}\t%{ZEEK_BOOL:[zeek][dns][RD]}\t%{ZEEK_BOOL:[zeek][dns][RA]}\t%{NONNEGINT:[zeek][dns][Z]:integer}\t(?:-|%{ZEEK_DATA:[zeek][dns][answers]})\t(?:-|%{DATA:[zeek][dns][TTLs]})\t(?:-|%{ZEEK_BOOL:[zeek][dns][rejected]})
14 |
15 | # conn.log - the 'new' format (requires *zeek.connection.local_resp*, handles `(empty)` as `-` for tunnel_parents, and optional MAC addresses)
16 | ZEEK_CONN %{NUMBER:timestamp}\t%{NOTSPACE:[zeek][session_id]}\t%{IP:[source][ip]}\t%{INT:[source][port]:integer}\t%{IP:[destination][ip]}\t%{INT:[destination][port]:integer}\t%{WORD:[network][transport]}\t(?:-|%{ZEEK_DATA:[network][protocol]})\t(?:-|%{NUMBER:[zeek][connection][duration]:float})\t(?:-|%{INT:[zeek][connection][orig_bytes]:integer})\t(?:-|%{INT:[zeek][connection][resp_bytes]:integer})\t(?:-|%{ZEEK_DATA:[zeek][connection][state]})\t(?:-|%{ZEEK_BOOL:[zeek][connection][local_orig]})\t(?:-|%{ZEEK_BOOL:[zeek][connection][local_resp]})\t(?:-|%{INT:[zeek][connection][missed_bytes]:integer})\t(?:-|%{ZEEK_DATA:[zeek][connection][history]})\t(?:-|%{INT:[source][packets]:integer})\t(?:-|%{INT:[source][bytes]:integer})\t(?:-|%{INT:[destination][packets]:integer})\t(?:-|%{INT:[destination][bytes]:integer})\t(?:-|%{ZEEK_DATA:[zeek][connection][tunnel_parents]})(?:\t(?:-|%{COMMONMAC:[source][mac]})\t(?:-|%{COMMONMAC:[destination][mac]}))?
17 | # :long - %{INT:[zeek][connection][orig_bytes]:int}
18 | # :long - %{INT:[zeek][connection][resp_bytes]:int}
19 | # :long - %{INT:[zeek][connection][missed_bytes]:int}
20 | # :long - %{INT:[source][packets]:int}
21 | # :long - %{INT:[source][bytes]:int}
22 | # :long - %{INT:[destination][packets]:int}
23 | # :long - %{INT:[destination][bytes]:int}
24 |
25 | # files.log - updated BRO_FILES format (2 new fields added at the end)
26 | ZEEK_FILES_TX_HOSTS (?:-|%{IP:[server][ip]})|(?<[zeek][files][tx_hosts]>%{IP:[server][ip]}(?:[\s,]%{IP})+)
27 | ZEEK_FILES_RX_HOSTS (?:-|%{IP:[client][ip]})|(?<[zeek][files][rx_hosts]>%{IP:[client][ip]}(?:[\s,]%{IP})+)
28 | ZEEK_FILES %{NUMBER:timestamp}\t%{NOTSPACE:[zeek][files][fuid]}\t%{ZEEK_FILES_TX_HOSTS}\t%{ZEEK_FILES_RX_HOSTS}\t(?:-|%{ZEEK_DATA:[zeek][files][session_ids]})\t(?:-|%{ZEEK_DATA:[zeek][files][source]})\t(?:-|%{INT:[zeek][files][depth]:integer})\t(?:-|%{ZEEK_DATA:[zeek][files][analyzers]})\t(?:-|%{ZEEK_DATA:[file][mime_type]})\t(?:-|%{ZEEK_DATA:[file][name]})\t(?:-|%{NUMBER:[zeek][files][duration]:float})\t(?:-|%{ZEEK_DATA:[zeek][files][local_orig]})\t(?:-|%{ZEEK_BOOL:[zeek][files][is_orig]})\t(?:-|%{INT:[zeek][files][seen_bytes]:integer})\t(?:-|%{INT:[file][size]:integer})\t(?:-|%{INT:[zeek][files][missing_bytes]:integer})\t(?:-|%{INT:[zeek][files][overflow_bytes]:integer})\t(?:-|%{ZEEK_BOOL:[zeek][files][timedout]})\t(?:-|%{ZEEK_DATA:[zeek][files][parent_fuid]})\t(?:-|%{ZEEK_DATA:[file][hash][md5]})\t(?:-|%{ZEEK_DATA:[file][hash][sha1]})\t(?:-|%{ZEEK_DATA:[file][hash][sha256]})\t(?:-|%{ZEEK_DATA:[zeek][files][extracted]})(?:\t(?:-|%{ZEEK_BOOL:[zeek][files][extracted_cutoff]})\t(?:-|%{INT:[zeek][files][extracted_size]:integer}))?
29 | # :long - %{INT:[zeek][files][seen_bytes]:int}
30 | # :long - %{INT:[file][size]:int}
31 | # :long - %{INT:[zeek][files][missing_bytes]:int}
32 | # :long - %{INT:[zeek][files][overflow_bytes]:int}
33 | # :long - %{INT:[zeek][files][extracted_size]:int}
34 |
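35 | # Illustrative note (not part of the pattern set): these patterns target Zeek's
36 | # classic tab-separated log format; if Zeek is configured to emit JSON logs,
37 | # use a JSON parser instead of grok.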
--------------------------------------------------------------------------------
/patterns/legacy/aws:
--------------------------------------------------------------------------------
1 | S3_REQUEST_LINE (?:%{WORD:verb} %{NOTSPACE:request}(?: HTTP/%{NUMBER:httpversion})?|%{DATA:rawrequest})
2 |
3 | S3_ACCESS_LOG %{WORD:owner} %{NOTSPACE:bucket} \[%{HTTPDATE:timestamp}\] %{IP:clientip} %{NOTSPACE:requester} %{NOTSPACE:request_id} %{NOTSPACE:operation} %{NOTSPACE:key} (?:"%{S3_REQUEST_LINE}"|-) (?:%{INT:response:integer}|-) (?:-|%{NOTSPACE:error_code}) (?:%{INT:bytes:integer}|-) (?:%{INT:object_size:integer}|-) (?:%{INT:request_time_ms:integer}|-) (?:%{INT:turnaround_time_ms:integer}|-) (?:%{QS:referrer}|-) (?:"?%{QS:agent}"?|-) (?:-|%{NOTSPACE:version_id})
4 |
5 | ELB_URIPATHPARAM %{URIPATH:path}(?:%{URIPARAM:params})?
6 |
7 | ELB_URI %{URIPROTO:proto}://(?:%{USER}(?::[^@]*)?@)?(?:%{URIHOST:urihost})?(?:%{ELB_URIPATHPARAM})?
8 |
9 | ELB_REQUEST_LINE (?:%{WORD:verb} %{ELB_URI:request}(?: HTTP/%{NUMBER:httpversion})?|%{DATA:rawrequest})
10 |
11 | ELB_ACCESS_LOG %{TIMESTAMP_ISO8601:timestamp} %{NOTSPACE:elb} %{IP:clientip}:%{INT:clientport:integer} (?:(?:%{IP:backendip}:?:%{INT:backendport:integer})|-) %{NUMBER:request_processing_time:float} %{NUMBER:backend_processing_time:float} %{NUMBER:response_processing_time:float} %{INT:response:integer} (?:-|%{INT:backend_response:integer}) %{INT:received_bytes:integer} %{INT:bytes:integer} "%{ELB_REQUEST_LINE}"
12 |
13 | CLOUDFRONT_ACCESS_LOG (?<timestamp>%{YEAR}-%{MONTHNUM}-%{MONTHDAY}\t%{TIME})\t%{WORD:x_edge_location}\t(?:%{NUMBER:sc_bytes:integer}|-)\t%{IPORHOST:clientip}\t%{WORD:cs_method}\t%{HOSTNAME:cs_host}\t%{NOTSPACE:cs_uri_stem}\t%{NUMBER:sc_status:integer}\t%{GREEDYDATA:referrer}\t%{GREEDYDATA:agent}\t%{GREEDYDATA:cs_uri_query}\t%{GREEDYDATA:cookies}\t%{WORD:x_edge_result_type}\t%{NOTSPACE:x_edge_request_id}\t%{HOSTNAME:x_host_header}\t%{URIPROTO:cs_protocol}\t%{INT:cs_bytes:integer}\t%{GREEDYDATA:time_taken:float}\t%{GREEDYDATA:x_forwarded_for}\t%{GREEDYDATA:ssl_protocol}\t%{GREEDYDATA:ssl_cipher}\t%{GREEDYDATA:x_edge_response_result_type}
14 |
15 |
--------------------------------------------------------------------------------
/patterns/legacy/bacula:
--------------------------------------------------------------------------------
1 | BACULA_TIMESTAMP %{MONTHDAY}-%{MONTH} %{HOUR}:%{MINUTE}
2 | BACULA_HOST [a-zA-Z0-9-]+
3 | BACULA_VOLUME %{USER}
4 | BACULA_DEVICE %{USER}
5 | BACULA_DEVICEPATH %{UNIXPATH}
6 | BACULA_CAPACITY %{INT}{1,3}(,%{INT}{3})*
7 | BACULA_VERSION %{USER}
8 | BACULA_JOB %{USER}
9 |
10 | BACULA_LOG_MAX_CAPACITY User defined maximum volume capacity %{BACULA_CAPACITY} exceeded on device \"%{BACULA_DEVICE:device}\" \(%{BACULA_DEVICEPATH}\)
11 | BACULA_LOG_END_VOLUME End of medium on Volume \"%{BACULA_VOLUME:volume}\" Bytes=%{BACULA_CAPACITY} Blocks=%{BACULA_CAPACITY} at %{MONTHDAY}-%{MONTH}-%{YEAR} %{HOUR}:%{MINUTE}.
12 | BACULA_LOG_NEW_VOLUME Created new Volume \"%{BACULA_VOLUME:volume}\" in catalog.
13 | BACULA_LOG_NEW_LABEL Labeled new Volume \"%{BACULA_VOLUME:volume}\" on device \"%{BACULA_DEVICE:device}\" \(%{BACULA_DEVICEPATH}\).
14 | BACULA_LOG_WROTE_LABEL Wrote label to prelabeled Volume \"%{BACULA_VOLUME:volume}\" on device \"%{BACULA_DEVICE}\" \(%{BACULA_DEVICEPATH}\)
15 | BACULA_LOG_NEW_MOUNT New volume \"%{BACULA_VOLUME:volume}\" mounted on device \"%{BACULA_DEVICE:device}\" \(%{BACULA_DEVICEPATH}\) at %{MONTHDAY}-%{MONTH}-%{YEAR} %{HOUR}:%{MINUTE}.
16 | BACULA_LOG_NOOPEN \s*Cannot open %{DATA}: ERR=%{GREEDYDATA:berror}
17 | BACULA_LOG_NOOPENDIR \s*Could not open directory %{DATA}: ERR=%{GREEDYDATA:berror}
18 | BACULA_LOG_NOSTAT \s*Could not stat %{DATA}: ERR=%{GREEDYDATA:berror}
19 | BACULA_LOG_NOJOBS There are no more Jobs associated with Volume \"%{BACULA_VOLUME:volume}\". Marking it purged.
20 | BACULA_LOG_ALL_RECORDS_PRUNED All records pruned from Volume \"%{BACULA_VOLUME:volume}\"; marking it \"Purged\"
21 | BACULA_LOG_BEGIN_PRUNE_JOBS Begin pruning Jobs older than %{INT} month %{INT} days .
22 | BACULA_LOG_BEGIN_PRUNE_FILES Begin pruning Files.
23 | BACULA_LOG_PRUNED_JOBS Pruned %{INT} Jobs* for client %{BACULA_HOST:client} from catalog.
24 | BACULA_LOG_PRUNED_FILES Pruned Files from %{INT} Jobs* for client %{BACULA_HOST:client} from catalog.
25 | BACULA_LOG_ENDPRUNE End auto prune.
26 | BACULA_LOG_STARTJOB Start Backup JobId %{INT}, Job=%{BACULA_JOB:job}
27 | BACULA_LOG_STARTRESTORE Start Restore Job %{BACULA_JOB:job}
28 | BACULA_LOG_USEDEVICE Using Device \"%{BACULA_DEVICE:device}\"
29 | BACULA_LOG_DIFF_FS \s+%{UNIXPATH} is a different filesystem. Will not descend from %{UNIXPATH} into it.
30 | BACULA_LOG_JOBEND Job write elapsed time = %{DATA:elapsed}, Transfer rate = %{NUMBER} (K|M|G)? Bytes/second
31 | BACULA_LOG_NOPRUNE_JOBS No Jobs found to prune.
32 | BACULA_LOG_NOPRUNE_FILES No Files found to prune.
33 | BACULA_LOG_VOLUME_PREVWRITTEN Volume \"%{BACULA_VOLUME:volume}\" previously written, moving to end of data.
34 | BACULA_LOG_READYAPPEND Ready to append to end of Volume \"%{BACULA_VOLUME:volume}\" size=%{INT}
35 | BACULA_LOG_CANCELLING Cancelling duplicate JobId=%{INT}.
36 | BACULA_LOG_MARKCANCEL JobId %{INT}, Job %{BACULA_JOB:job} marked to be canceled.
37 | BACULA_LOG_CLIENT_RBJ shell command: run ClientRunBeforeJob \"%{GREEDYDATA:runjob}\"
38 | BACULA_LOG_VSS (Generate )?VSS (Writer)?
39 | BACULA_LOG_MAXSTART Fatal error: Job canceled because max start delay time exceeded.
40 | BACULA_LOG_DUPLICATE Fatal error: JobId %{INT:duplicate} already running. Duplicate job not allowed.
41 | BACULA_LOG_NOJOBSTAT Fatal error: No Job status returned from FD.
42 | BACULA_LOG_FATAL_CONN Fatal error: bsock.c:133 Unable to connect to (Client: %{BACULA_HOST:client}|Storage daemon) on %{HOSTNAME}:%{POSINT}. ERR=(?<berror>%{GREEDYDATA})
43 | BACULA_LOG_NO_CONNECT Warning: bsock.c:127 Could not connect to (Client: %{BACULA_HOST:client}|Storage daemon) on %{HOSTNAME}:%{POSINT}. ERR=(?<berror>%{GREEDYDATA})
44 | BACULA_LOG_NO_AUTH Fatal error: Unable to authenticate with File daemon at %{DATA}. Possible causes:
45 | BACULA_LOG_NOSUIT No prior or suitable Full backup found in catalog. Doing FULL backup.
46 | BACULA_LOG_NOPRIOR No prior Full backup Job record found.
47 |
48 | BACULA_LOG_JOB (Error: )?Bacula %{BACULA_HOST} %{BACULA_VERSION} \(%{BACULA_VERSION}\):
49 |
50 | BACULA_LOGLINE %{BACULA_TIMESTAMP:bts} %{BACULA_HOST:hostname}(?: JobId %{INT:jobid})?:? (%{BACULA_LOG_MAX_CAPACITY}|%{BACULA_LOG_END_VOLUME}|%{BACULA_LOG_NEW_VOLUME}|%{BACULA_LOG_NEW_LABEL}|%{BACULA_LOG_WROTE_LABEL}|%{BACULA_LOG_NEW_MOUNT}|%{BACULA_LOG_NOOPEN}|%{BACULA_LOG_NOOPENDIR}|%{BACULA_LOG_NOSTAT}|%{BACULA_LOG_NOJOBS}|%{BACULA_LOG_ALL_RECORDS_PRUNED}|%{BACULA_LOG_BEGIN_PRUNE_JOBS}|%{BACULA_LOG_BEGIN_PRUNE_FILES}|%{BACULA_LOG_PRUNED_JOBS}|%{BACULA_LOG_PRUNED_FILES}|%{BACULA_LOG_ENDPRUNE}|%{BACULA_LOG_STARTJOB}|%{BACULA_LOG_STARTRESTORE}|%{BACULA_LOG_USEDEVICE}|%{BACULA_LOG_DIFF_FS}|%{BACULA_LOG_JOBEND}|%{BACULA_LOG_NOPRUNE_JOBS}|%{BACULA_LOG_NOPRUNE_FILES}|%{BACULA_LOG_VOLUME_PREVWRITTEN}|%{BACULA_LOG_READYAPPEND}|%{BACULA_LOG_CANCELLING}|%{BACULA_LOG_MARKCANCEL}|%{BACULA_LOG_CLIENT_RBJ}|%{BACULA_LOG_VSS}|%{BACULA_LOG_MAXSTART}|%{BACULA_LOG_DUPLICATE}|%{BACULA_LOG_NOJOBSTAT}|%{BACULA_LOG_FATAL_CONN}|%{BACULA_LOG_NO_CONNECT}|%{BACULA_LOG_NO_AUTH}|%{BACULA_LOG_NOSUIT}|%{BACULA_LOG_JOB}|%{BACULA_LOG_NOPRIOR})
51 |
--------------------------------------------------------------------------------
/patterns/legacy/bind:
--------------------------------------------------------------------------------
1 | BIND9_TIMESTAMP %{MONTHDAY}[-]%{MONTH}[-]%{YEAR} %{TIME}
2 |
3 | BIND9 %{BIND9_TIMESTAMP:timestamp} queries: %{LOGLEVEL:loglevel}: client(:? @0x(?:[0-9A-Fa-f]+))? %{IP:clientip}#%{POSINT:clientport} \(%{GREEDYDATA:query}\): query: %{GREEDYDATA:query} IN %{GREEDYDATA:querytype} \(%{IP:dns}\)
4 |
--------------------------------------------------------------------------------
/patterns/legacy/bro:
--------------------------------------------------------------------------------
1 | # https://www.bro.org/sphinx/script-reference/log-files.html
2 |
3 | # http.log
4 | BRO_HTTP %{NUMBER:ts}\t%{NOTSPACE:uid}\t%{IP:orig_h}\t%{INT:orig_p}\t%{IP:resp_h}\t%{INT:resp_p}\t%{INT:trans_depth}\t%{GREEDYDATA:method}\t%{GREEDYDATA:domain}\t%{GREEDYDATA:uri}\t%{GREEDYDATA:referrer}\t%{GREEDYDATA:user_agent}\t%{NUMBER:request_body_len}\t%{NUMBER:response_body_len}\t%{GREEDYDATA:status_code}\t%{GREEDYDATA:status_msg}\t%{GREEDYDATA:info_code}\t%{GREEDYDATA:info_msg}\t%{GREEDYDATA:filename}\t%{GREEDYDATA:bro_tags}\t%{GREEDYDATA:username}\t%{GREEDYDATA:password}\t%{GREEDYDATA:proxied}\t%{GREEDYDATA:orig_fuids}\t%{GREEDYDATA:orig_mime_types}\t%{GREEDYDATA:resp_fuids}\t%{GREEDYDATA:resp_mime_types}
5 |
6 | # dns.log
7 | BRO_DNS %{NUMBER:ts}\t%{NOTSPACE:uid}\t%{IP:orig_h}\t%{INT:orig_p}\t%{IP:resp_h}\t%{INT:resp_p}\t%{WORD:proto}\t%{INT:trans_id}\t%{GREEDYDATA:query}\t%{GREEDYDATA:qclass}\t%{GREEDYDATA:qclass_name}\t%{GREEDYDATA:qtype}\t%{GREEDYDATA:qtype_name}\t%{GREEDYDATA:rcode}\t%{GREEDYDATA:rcode_name}\t%{GREEDYDATA:AA}\t%{GREEDYDATA:TC}\t%{GREEDYDATA:RD}\t%{GREEDYDATA:RA}\t%{GREEDYDATA:Z}\t%{GREEDYDATA:answers}\t%{GREEDYDATA:TTLs}\t%{GREEDYDATA:rejected}
8 |
9 | # conn.log
10 | BRO_CONN %{NUMBER:ts}\t%{NOTSPACE:uid}\t%{IP:orig_h}\t%{INT:orig_p}\t%{IP:resp_h}\t%{INT:resp_p}\t%{WORD:proto}\t%{GREEDYDATA:service}\t%{NUMBER:duration}\t%{NUMBER:orig_bytes}\t%{NUMBER:resp_bytes}\t%{GREEDYDATA:conn_state}\t%{GREEDYDATA:local_orig}\t%{GREEDYDATA:missed_bytes}\t%{GREEDYDATA:history}\t%{GREEDYDATA:orig_pkts}\t%{GREEDYDATA:orig_ip_bytes}\t%{GREEDYDATA:resp_pkts}\t%{GREEDYDATA:resp_ip_bytes}\t%{GREEDYDATA:tunnel_parents}
11 |
12 | # files.log
13 | BRO_FILES %{NUMBER:ts}\t%{NOTSPACE:fuid}\t%{IP:tx_hosts}\t%{IP:rx_hosts}\t%{NOTSPACE:conn_uids}\t%{GREEDYDATA:source}\t%{GREEDYDATA:depth}\t%{GREEDYDATA:analyzers}\t%{GREEDYDATA:mime_type}\t%{GREEDYDATA:filename}\t%{GREEDYDATA:duration}\t%{GREEDYDATA:local_orig}\t%{GREEDYDATA:is_orig}\t%{GREEDYDATA:seen_bytes}\t%{GREEDYDATA:total_bytes}\t%{GREEDYDATA:missing_bytes}\t%{GREEDYDATA:overflow_bytes}\t%{GREEDYDATA:timedout}\t%{GREEDYDATA:parent_fuid}\t%{GREEDYDATA:md5}\t%{GREEDYDATA:sha1}\t%{GREEDYDATA:sha256}\t%{GREEDYDATA:extracted}
14 |
--------------------------------------------------------------------------------
/patterns/legacy/exim:
--------------------------------------------------------------------------------
1 | EXIM_MSGID [0-9A-Za-z]{6}-[0-9A-Za-z]{6}-[0-9A-Za-z]{2}
2 | EXIM_FLAGS (<=|[-=>*]>|[*]{2}|==)
3 | EXIM_DATE %{YEAR:exim_year}-%{MONTHNUM:exim_month}-%{MONTHDAY:exim_day} %{TIME:exim_time}
4 | EXIM_PID \[%{POSINT:pid}\]
5 | EXIM_QT ((\d+y)?(\d+w)?(\d+d)?(\d+h)?(\d+m)?(\d+s)?)
6 | EXIM_EXCLUDE_TERMS (Message is frozen|(Start|End) queue run| Warning: | retry time not reached | no (IP address|host name) found for (IP address|host) | unexpected disconnection while reading SMTP command | no immediate delivery: |another process is handling this message)
7 | EXIM_REMOTE_HOST (H=(%{NOTSPACE:remote_hostname} )?(\(%{NOTSPACE:remote_heloname}\) )?\[%{IP:remote_host}\])(?::%{POSINT:remote_port})?
8 | EXIM_INTERFACE (I=\[%{IP:exim_interface}\](:%{NUMBER:exim_interface_port}))
9 | EXIM_PROTOCOL (P=%{NOTSPACE:protocol})
10 | EXIM_MSG_SIZE (S=%{NUMBER:exim_msg_size})
11 | EXIM_HEADER_ID (id=%{NOTSPACE:exim_header_id})
12 | EXIM_SUBJECT (T=%{QS:exim_subject})
13 |
14 | EXIM_UNKNOWN_FIELD (?:[A-Za-z0-9]{1,4}=%{NOTSPACE})
15 | EXIM_NAMED_FIELDS (?: (?:%{EXIM_REMOTE_HOST}|%{EXIM_INTERFACE}|%{EXIM_PROTOCOL}|%{EXIM_MSG_SIZE}|%{EXIM_HEADER_ID}|%{EXIM_SUBJECT}|%{EXIM_UNKNOWN_FIELD}))*
16 |
17 | EXIM_MESSAGE_ARRIVAL %{EXIM_DATE:timestamp} (?:%{EXIM_PID} )?%{EXIM_MSGID:exim_msgid} (?<=) (?[a-z:] )?%{EMAILADDRESS:exim_sender_email}%{EXIM_NAMED_FIELDS}(?: for %{EMAILADDRESS:exim_recipient_email})?
18 |
19 | EXIM %{EXIM_MESSAGE_ARRIVAL}
20 |
--------------------------------------------------------------------------------
/patterns/legacy/firewalls:
--------------------------------------------------------------------------------
1 | # NetScreen firewall logs
2 | NETSCREENSESSIONLOG %{SYSLOGTIMESTAMP:date} %{IPORHOST:device} %{IPORHOST}: NetScreen device_id=%{WORD:device_id}%{DATA}: start_time=%{QUOTEDSTRING:start_time} duration=%{INT:duration} policy_id=%{INT:policy_id} service=%{DATA:service} proto=%{INT:proto} src zone=%{WORD:src_zone} dst zone=%{WORD:dst_zone} action=%{WORD:action} sent=%{INT:sent} rcvd=%{INT:rcvd} src=%{IPORHOST:src_ip} dst=%{IPORHOST:dst_ip} src_port=%{INT:src_port} dst_port=%{INT:dst_port} src-xlated ip=%{IPORHOST:src_xlated_ip} port=%{INT:src_xlated_port} dst-xlated ip=%{IPORHOST:dst_xlated_ip} port=%{INT:dst_xlated_port} session_id=%{INT:session_id} reason=%{GREEDYDATA:reason}
3 |
4 | #== Cisco ASA ==
5 | CISCO_TAGGED_SYSLOG ^<%{POSINT:syslog_pri}>%{CISCOTIMESTAMP:timestamp}( %{SYSLOGHOST:sysloghost})? ?: %%{CISCOTAG:ciscotag}:
6 | CISCOTIMESTAMP %{MONTH} +%{MONTHDAY}(?: %{YEAR})? %{TIME}
7 | CISCOTAG [A-Z0-9]+-%{INT}-(?:[A-Z0-9_]+)
8 | # Common Particles
9 | CISCO_ACTION Built|Teardown|Deny|Denied|denied|requested|permitted|denied by ACL|discarded|est-allowed|Dropping|created|deleted
10 | CISCO_REASON Duplicate TCP SYN|Failed to locate egress interface|Invalid transport field|No matching connection|DNS Response|DNS Query|(?:%{WORD}\s*)*
11 | CISCO_DIRECTION Inbound|inbound|Outbound|outbound
12 | CISCO_INTERVAL first hit|%{INT}-second interval
13 | CISCO_XLATE_TYPE static|dynamic
14 | # ASA-1-104001
15 | CISCOFW104001 \((?:Primary|Secondary)\) Switching to ACTIVE - %{GREEDYDATA:switch_reason}
16 | # ASA-1-104002
17 | CISCOFW104002 \((?:Primary|Secondary)\) Switching to STANDBY - %{GREEDYDATA:switch_reason}
18 | # ASA-1-104003
19 | CISCOFW104003 \((?:Primary|Secondary)\) Switching to FAILED\.
20 | # ASA-1-104004
21 | CISCOFW104004 \((?:Primary|Secondary)\) Switching to OK\.
22 | # ASA-1-105003
23 | CISCOFW105003 \((?:Primary|Secondary)\) Monitoring on [Ii]nterface %{GREEDYDATA:interface_name} waiting
24 | # ASA-1-105004
25 | CISCOFW105004 \((?:Primary|Secondary)\) Monitoring on [Ii]nterface %{GREEDYDATA:interface_name} normal
26 | # ASA-1-105005
27 | CISCOFW105005 \((?:Primary|Secondary)\) Lost Failover communications with mate on [Ii]nterface %{GREEDYDATA:interface_name}
28 | # ASA-1-105008
29 | CISCOFW105008 \((?:Primary|Secondary)\) Testing [Ii]nterface %{GREEDYDATA:interface_name}
30 | # ASA-1-105009
31 | CISCOFW105009 \((?:Primary|Secondary)\) Testing on [Ii]nterface %{GREEDYDATA:interface_name} (?:Passed|Failed)
32 | # ASA-2-106001
33 | CISCOFW106001 %{CISCO_DIRECTION:direction} %{WORD:protocol} connection %{CISCO_ACTION:action} from %{IP:src_ip}/%{INT:src_port} to %{IP:dst_ip}/%{INT:dst_port} flags %{GREEDYDATA:tcp_flags} on interface %{GREEDYDATA:interface}
34 | # ASA-2-106006, ASA-2-106007, ASA-2-106010
35 | CISCOFW106006_106007_106010 %{CISCO_ACTION:action} %{CISCO_DIRECTION:direction} %{WORD:protocol} (?:from|src) %{IP:src_ip}/%{INT:src_port}(\(%{DATA:src_fwuser}\))? (?:to|dst) %{IP:dst_ip}/%{INT:dst_port}(\(%{DATA:dst_fwuser}\))? (?:on interface %{DATA:interface}|due to %{CISCO_REASON:reason})
36 | # ASA-3-106014
37 | CISCOFW106014 %{CISCO_ACTION:action} %{CISCO_DIRECTION:direction} %{WORD:protocol} src %{DATA:src_interface}:%{IP:src_ip}(\(%{DATA:src_fwuser}\))? dst %{DATA:dst_interface}:%{IP:dst_ip}(\(%{DATA:dst_fwuser}\))? \(type %{INT:icmp_type}, code %{INT:icmp_code}\)
38 | # ASA-6-106015
39 | CISCOFW106015 %{CISCO_ACTION:action} %{WORD:protocol} \(%{DATA:policy_id}\) from %{IP:src_ip}/%{INT:src_port} to %{IP:dst_ip}/%{INT:dst_port} flags %{DATA:tcp_flags} on interface %{GREEDYDATA:interface}
40 | # ASA-1-106021
41 | CISCOFW106021 %{CISCO_ACTION:action} %{WORD:protocol} reverse path check from %{IP:src_ip} to %{IP:dst_ip} on interface %{GREEDYDATA:interface}
42 | # ASA-4-106023
43 | CISCOFW106023 %{CISCO_ACTION:action}( protocol)? %{WORD:protocol} src %{DATA:src_interface}:%{DATA:src_ip}(/%{INT:src_port})?(\(%{DATA:src_fwuser}\))? dst %{DATA:dst_interface}:%{DATA:dst_ip}(/%{INT:dst_port})?(\(%{DATA:dst_fwuser}\))?( \(type %{INT:icmp_type}, code %{INT:icmp_code}\))? by access-group "?%{DATA:policy_id}"? \[%{DATA:hashcode1}, %{DATA:hashcode2}\]
44 | # ASA-4-106100, ASA-4-106102, ASA-4-106103
45 | CISCOFW106100_2_3 access-list %{NOTSPACE:policy_id} %{CISCO_ACTION:action} %{WORD:protocol} for user '%{DATA:src_fwuser}' %{DATA:src_interface}/%{IP:src_ip}\(%{INT:src_port}\) -> %{DATA:dst_interface}/%{IP:dst_ip}\(%{INT:dst_port}\) hit-cnt %{INT:hit_count} %{CISCO_INTERVAL:interval} \[%{DATA:hashcode1}, %{DATA:hashcode2}\]
46 | # ASA-5-106100
47 | CISCOFW106100 access-list %{NOTSPACE:policy_id} %{CISCO_ACTION:action} %{WORD:protocol} %{DATA:src_interface}/%{IP:src_ip}\(%{INT:src_port}\)(\(%{DATA:src_fwuser}\))? -> %{DATA:dst_interface}/%{IP:dst_ip}\(%{INT:dst_port}\)(\(%{DATA:src_fwuser}\))? hit-cnt %{INT:hit_count} %{CISCO_INTERVAL:interval} \[%{DATA:hashcode1}, %{DATA:hashcode2}\]
48 | # ASA-5-304001
49 | CISCOFW304001 %{IP:src_ip}(\(%{DATA:src_fwuser}\))? Accessed URL %{IP:dst_ip}:%{GREEDYDATA:dst_url}
50 | # ASA-6-110002
51 | CISCOFW110002 %{CISCO_REASON:reason} for %{WORD:protocol} from %{DATA:src_interface}:%{IP:src_ip}/%{INT:src_port} to %{IP:dst_ip}/%{INT:dst_port}
52 | # ASA-6-302010
53 | CISCOFW302010 %{INT:connection_count} in use, %{INT:connection_count_max} most used
54 | # ASA-6-302013, ASA-6-302014, ASA-6-302015, ASA-6-302016
55 | CISCOFW302013_302014_302015_302016 %{CISCO_ACTION:action}(?: %{CISCO_DIRECTION:direction})? %{WORD:protocol} connection %{INT:connection_id} for %{DATA:src_interface}:%{IP:src_ip}/%{INT:src_port}( \(%{IP:src_mapped_ip}/%{INT:src_mapped_port}\))?(\(%{DATA:src_fwuser}\))? to %{DATA:dst_interface}:%{IP:dst_ip}/%{INT:dst_port}( \(%{IP:dst_mapped_ip}/%{INT:dst_mapped_port}\))?(\(%{DATA:dst_fwuser}\))?( duration %{TIME:duration} bytes %{INT:bytes})?(?: %{CISCO_REASON:reason})?( \(%{DATA:user}\))?
56 | # ASA-6-302020, ASA-6-302021
57 | CISCOFW302020_302021 %{CISCO_ACTION:action}(?: %{CISCO_DIRECTION:direction})? %{WORD:protocol} connection for faddr %{IP:dst_ip}/%{INT:icmp_seq_num}(?:\(%{DATA:fwuser}\))? gaddr %{IP:src_xlated_ip}/%{INT:icmp_code_xlated} laddr %{IP:src_ip}/%{INT:icmp_code}( \(%{DATA:user}\))?
58 | # ASA-6-305011
59 | CISCOFW305011 %{CISCO_ACTION:action} %{CISCO_XLATE_TYPE:xlate_type} %{WORD:protocol} translation from %{DATA:src_interface}:%{IP:src_ip}(/%{INT:src_port})?(\(%{DATA:src_fwuser}\))? to %{DATA:src_xlated_interface}:%{IP:src_xlated_ip}/%{DATA:src_xlated_port}
60 | # ASA-3-313001, ASA-3-313004, ASA-3-313008
61 | CISCOFW313001_313004_313008 %{CISCO_ACTION:action} %{WORD:protocol} type=%{INT:icmp_type}, code=%{INT:icmp_code} from %{IP:src_ip} on interface %{DATA:interface}( to %{IP:dst_ip})?
62 | # ASA-4-313005
63 | CISCOFW313005 %{CISCO_REASON:reason} for %{WORD:protocol} error message: %{WORD:err_protocol} src %{DATA:err_src_interface}:%{IP:err_src_ip}(\(%{DATA:err_src_fwuser}\))? dst %{DATA:err_dst_interface}:%{IP:err_dst_ip}(\(%{DATA:err_dst_fwuser}\))? \(type %{INT:err_icmp_type}, code %{INT:err_icmp_code}\) on %{DATA:interface} interface\. Original IP payload: %{WORD:protocol} src %{IP:orig_src_ip}/%{INT:orig_src_port}(\(%{DATA:orig_src_fwuser}\))? dst %{IP:orig_dst_ip}/%{INT:orig_dst_port}(\(%{DATA:orig_dst_fwuser}\))?
64 | # ASA-5-321001
65 | CISCOFW321001 Resource '%{WORD:resource_name}' limit of %{POSINT:resource_limit} reached for system
66 | # ASA-4-402117
67 | CISCOFW402117 %{WORD:protocol}: Received a non-IPSec packet \(protocol= %{WORD:orig_protocol}\) from %{IP:src_ip} to %{IP:dst_ip}
68 | # ASA-4-402119
69 | CISCOFW402119 %{WORD:protocol}: Received an %{WORD:orig_protocol} packet \(SPI= %{DATA:spi}, sequence number= %{DATA:seq_num}\) from %{IP:src_ip} \(user= %{DATA:user}\) to %{IP:dst_ip} that failed anti-replay checking
70 | # ASA-4-419001
71 | CISCOFW419001 %{CISCO_ACTION:action} %{WORD:protocol} packet from %{DATA:src_interface}:%{IP:src_ip}/%{INT:src_port} to %{DATA:dst_interface}:%{IP:dst_ip}/%{INT:dst_port}, reason: %{GREEDYDATA:reason}
72 | # ASA-4-419002
73 | CISCOFW419002 %{CISCO_REASON:reason} from %{DATA:src_interface}:%{IP:src_ip}/%{INT:src_port} to %{DATA:dst_interface}:%{IP:dst_ip}/%{INT:dst_port} with different initial sequence number
74 | # ASA-4-500004
75 | CISCOFW500004 %{CISCO_REASON:reason} for protocol=%{WORD:protocol}, from %{IP:src_ip}/%{INT:src_port} to %{IP:dst_ip}/%{INT:dst_port}
76 | # ASA-6-602303, ASA-6-602304
77 | CISCOFW602303_602304 %{WORD:protocol}: An %{CISCO_DIRECTION:direction} %{GREEDYDATA:tunnel_type} SA \(SPI= %{DATA:spi}\) between %{IP:src_ip} and %{IP:dst_ip} \(user= %{DATA:user}\) has been %{CISCO_ACTION:action}
78 | # ASA-7-710001, ASA-7-710002, ASA-7-710003, ASA-7-710005, ASA-7-710006
79 | CISCOFW710001_710002_710003_710005_710006 %{WORD:protocol} (?:request|access) %{CISCO_ACTION:action} from %{IP:src_ip}/%{INT:src_port} to %{DATA:dst_interface}:%{IP:dst_ip}/%{INT:dst_port}
80 | # ASA-6-713172
81 | CISCOFW713172 Group = %{GREEDYDATA:group}, IP = %{IP:src_ip}, Automatic NAT Detection Status:\s+Remote end\s*%{DATA:is_remote_natted}\s*behind a NAT device\s+This\s+end\s*%{DATA:is_local_natted}\s*behind a NAT device
82 | # ASA-4-733100
83 | CISCOFW733100 \[\s*%{DATA:drop_type}\s*\] drop %{DATA:drop_rate_id} exceeded. Current burst rate is %{INT:drop_rate_current_burst} per second, max configured rate is %{INT:drop_rate_max_burst}; Current average rate is %{INT:drop_rate_current_avg} per second, max configured rate is %{INT:drop_rate_max_avg}; Cumulative total count is %{INT:drop_total_count}
84 | #== End Cisco ASA ==
85 |
86 | # Shorewall firewall logs
87 | SHOREWALL (%{SYSLOGTIMESTAMP:timestamp}) (%{WORD:nf_host}) .*Shorewall:(%{WORD:nf_action1})?:(%{WORD:nf_action2})?.*IN=(%{USERNAME:nf_in_interface})?.*(OUT= *MAC=(%{COMMONMAC:nf_dst_mac}):(%{COMMONMAC:nf_src_mac})?|OUT=%{USERNAME:nf_out_interface}).*SRC=(%{IPV4:nf_src_ip}).*DST=(%{IPV4:nf_dst_ip}).*LEN=(%{WORD:nf_len}).?*TOS=(%{WORD:nf_tos}).?*PREC=(%{WORD:nf_prec}).?*TTL=(%{INT:nf_ttl}).?*ID=(%{INT:nf_id}).?*PROTO=(%{WORD:nf_protocol}).?*SPT=(%{INT:nf_src_port}?.*DPT=%{INT:nf_dst_port}?.*)
88 | #== End Shorewall
89 | #== SuSE Firewall 2 ==
90 | SFW2 ((%{SYSLOGTIMESTAMP:timestamp})|(%{TIMESTAMP_ISO8601:timestamp}))\s*%{HOSTNAME}\s*kernel\S+\s*(?:%{NAGIOSTIME}\s*)?SFW2\-INext\-%{NOTSPACE:nf_action}\s*IN=%{USERNAME:nf_in_interface}.*OUT=(\s*%{USERNAME:nf_out_interface})?\s*MAC=((%{COMMONMAC:nf_dst_mac}:%{COMMONMAC:nf_src_mac})|(\s*)).*SRC=%{IP:nf_src_ip}\s*DST=%{IP:nf_dst_ip}.*PROTO=%{WORD:nf_protocol}((.*SPT=%{INT:nf_src_port}.*DPT=%{INT:nf_dst_port}.*)|())
91 | #== End SuSE ==
92 |
--------------------------------------------------------------------------------
/patterns/legacy/grok-patterns:
--------------------------------------------------------------------------------
1 | USERNAME [a-zA-Z0-9._-]+
2 | USER %{USERNAME}
3 | EMAILLOCALPART [a-zA-Z][a-zA-Z0-9_.+-=:]+
4 | EMAILADDRESS %{EMAILLOCALPART}@%{HOSTNAME}
5 | INT (?:[+-]?(?:[0-9]+))
6 | BASE10NUM (?<![0-9.+-])(?>[+-]?(?:(?:[0-9]+(?:\.[0-9]+)?)|(?:\.[0-9]+)))
7 | NUMBER (?:%{BASE10NUM})
8 | BASE16NUM (?<![0-9A-Fa-f])(?:[+-]?(?:0x)?(?:[0-9A-Fa-f]+))
9 | BASE16FLOAT \b(?<![0-9A-Fa-f.])(?:[+-]?(?:0x)?(?:(?:[0-9A-Fa-f]+(?:\.[0-9A-Fa-f]*)?)|(?:\.[0-9A-Fa-f]+)))\b
10 |
11 | POSINT \b(?:[1-9][0-9]*)\b
12 | NONNEGINT \b(?:[0-9]+)\b
13 | WORD \b\w+\b
14 | NOTSPACE \S+
15 | SPACE \s*
16 | DATA .*?
17 | GREEDYDATA .*
18 | QUOTEDSTRING (?>(?<!\\)(?>"(?>\\.|[^\\"]+)+"|""|(?>'(?>\\.|[^\\']+)+')|''|(?>`(?>\\.|[^\\`]+)+`)|``))
19 | UUID [A-Fa-f0-9]{8}-(?:[A-Fa-f0-9]{4}-){3}[A-Fa-f0-9]{12}
20 | # URN, allowing use of RFC 2141 section 2.3 reserved characters
21 | URN urn:[0-9A-Za-z][0-9A-Za-z-]{0,31}:(?:%[0-9a-fA-F]{2}|[0-9A-Za-z()+,.:=@;$_!*'/?#-])+
22 |
23 | # Networking
24 | MAC (?:%{CISCOMAC}|%{WINDOWSMAC}|%{COMMONMAC})
25 | CISCOMAC (?:(?:[A-Fa-f0-9]{4}\.){2}[A-Fa-f0-9]{4})
26 | WINDOWSMAC (?:(?:[A-Fa-f0-9]{2}-){5}[A-Fa-f0-9]{2})
27 | COMMONMAC (?:(?:[A-Fa-f0-9]{2}:){5}[A-Fa-f0-9]{2})
28 | IPV6 ((([0-9A-Fa-f]{1,4}:){7}([0-9A-Fa-f]{1,4}|:))|(([0-9A-Fa-f]{1,4}:){6}(:[0-9A-Fa-f]{1,4}|((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3})|:))|(([0-9A-Fa-f]{1,4}:){5}(((:[0-9A-Fa-f]{1,4}){1,2})|:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3})|:))|(([0-9A-Fa-f]{1,4}:){4}(((:[0-9A-Fa-f]{1,4}){1,3})|((:[0-9A-Fa-f]{1,4})?:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(([0-9A-Fa-f]{1,4}:){3}(((:[0-9A-Fa-f]{1,4}){1,4})|((:[0-9A-Fa-f]{1,4}){0,2}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(([0-9A-Fa-f]{1,4}:){2}(((:[0-9A-Fa-f]{1,4}){1,5})|((:[0-9A-Fa-f]{1,4}){0,3}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(([0-9A-Fa-f]{1,4}:){1}(((:[0-9A-Fa-f]{1,4}){1,6})|((:[0-9A-Fa-f]{1,4}){0,4}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(:(((:[0-9A-Fa-f]{1,4}){1,7})|((:[0-9A-Fa-f]{1,4}){0,5}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:)))(%.+)?
29 | IPV4 (?<![0-9])(?:(?:[0-1]?[0-9]{1,2}|2[0-4][0-9]|25[0-5])[.](?:[0-1]?[0-9]{1,2}|2[0-4][0-9]|25[0-5])[.](?:[0-1]?[0-9]{1,2}|2[0-4][0-9]|25[0-5])[.](?:[0-1]?[0-9]{1,2}|2[0-4][0-9]|25[0-5]))(?![0-9])
30 | IP (?:%{IPV6}|%{IPV4})
31 | HOSTNAME \b(?:[0-9A-Za-z][0-9A-Za-z-]{0,62})(?:\.(?:[0-9A-Za-z][0-9A-Za-z-]{0,62}))*(\.?|\b)
32 | IPORHOST (?:%{IP}|%{HOSTNAME})
33 | HOSTPORT %{IPORHOST}:%{POSINT}
34 |
35 | # paths
36 | PATH (?:%{UNIXPATH}|%{WINPATH})
37 | UNIXPATH (/([\w_%!$@:.,+~-]+|\\.)*)+
38 | TTY (?:/dev/(pts|tty([pq])?)(\w+)?/?(?:[0-9]+))
39 | WINPATH (?>[A-Za-z]+:|\\)(?:\\[^\\?*]*)+
40 | URIPROTO [A-Za-z]([A-Za-z0-9+\-.]+)+
41 | URIHOST %{IPORHOST}(?::%{POSINT:port})?
42 | # uripath comes loosely from RFC1738, but mostly from what Firefox
43 | # doesn't turn into %XX
44 | URIPATH (?:/[A-Za-z0-9$.+!*'(){},~:;=@#%&_\-]*)+
45 | #URIPARAM \?(?:[A-Za-z0-9]+(?:=(?:[^&]*))?(?:&(?:[A-Za-z0-9]+(?:=(?:[^&]*))?)?)*)?
46 | URIPARAM \?[A-Za-z0-9$.+!*'|(){},~@#%&/=:;_?\-\[\]<>]*
47 | URIPATHPARAM %{URIPATH}(?:%{URIPARAM})?
48 | URI %{URIPROTO}://(?:%{USER}(?::[^@]*)?@)?(?:%{URIHOST})?(?:%{URIPATHPARAM})?
49 |
50 | # Months: January, Feb, 3, 03, 12, December
51 | MONTH \b(?:[Jj]an(?:uary|uar)?|[Ff]eb(?:ruary|ruar)?|[Mm](?:a|ä)?r(?:ch|z)?|[Aa]pr(?:il)?|[Mm]a(?:y|i)?|[Jj]un(?:e|i)?|[Jj]ul(?:y|i)?|[Aa]ug(?:ust)?|[Ss]ep(?:tember)?|[Oo](?:c|k)?t(?:ober)?|[Nn]ov(?:ember)?|[Dd]e(?:c|z)(?:ember)?)\b
52 | MONTHNUM (?:0?[1-9]|1[0-2])
53 | MONTHNUM2 (?:0[1-9]|1[0-2])
54 | MONTHDAY (?:(?:0[1-9])|(?:[12][0-9])|(?:3[01])|[1-9])
55 |
56 | # Days: Monday, Tue, Thu, etc...
57 | DAY (?:Mon(?:day)?|Tue(?:sday)?|Wed(?:nesday)?|Thu(?:rsday)?|Fri(?:day)?|Sat(?:urday)?|Sun(?:day)?)
58 |
59 | # Years?
60 | YEAR (?>\d\d){1,2}
61 | HOUR (?:2[0123]|[01]?[0-9])
62 | MINUTE (?:[0-5][0-9])
63 | # '60' is a leap second in most time standards and thus is valid.
64 | SECOND (?:(?:[0-5]?[0-9]|60)(?:[:.,][0-9]+)?)
65 | TIME (?!<[0-9])%{HOUR}:%{MINUTE}(?::%{SECOND})(?![0-9])
66 | # datestamp is YYYY/MM/DD-HH:MM:SS.UUUU (or something like it)
67 | DATE_US %{MONTHNUM}[/-]%{MONTHDAY}[/-]%{YEAR}
68 | DATE_EU %{MONTHDAY}[./-]%{MONTHNUM}[./-]%{YEAR}
69 | ISO8601_TIMEZONE (?:Z|[+-]%{HOUR}(?::?%{MINUTE}))
70 | ISO8601_SECOND (?:%{SECOND}|60)
71 | TIMESTAMP_ISO8601 %{YEAR}-%{MONTHNUM}-%{MONTHDAY}[T ]%{HOUR}:?%{MINUTE}(?::?%{SECOND})?%{ISO8601_TIMEZONE}?
72 | DATE %{DATE_US}|%{DATE_EU}
73 | DATESTAMP %{DATE}[- ]%{TIME}
74 | TZ (?:[APMCE][SD]T|UTC)
75 | DATESTAMP_RFC822 %{DAY} %{MONTH} %{MONTHDAY} %{YEAR} %{TIME} %{TZ}
76 | DATESTAMP_RFC2822 %{DAY}, %{MONTHDAY} %{MONTH} %{YEAR} %{TIME} %{ISO8601_TIMEZONE}
77 | DATESTAMP_OTHER %{DAY} %{MONTH} %{MONTHDAY} %{TIME} %{TZ} %{YEAR}
78 | DATESTAMP_EVENTLOG %{YEAR}%{MONTHNUM2}%{MONTHDAY}%{HOUR}%{MINUTE}%{SECOND}
79 |
80 | # Syslog Dates: Month Day HH:MM:SS
81 | SYSLOGTIMESTAMP %{MONTH} +%{MONTHDAY} %{TIME}
82 | PROG [\x21-\x5a\x5c\x5e-\x7e]+
83 | SYSLOGPROG %{PROG:program}(?:\[%{POSINT:pid}\])?
84 | SYSLOGHOST %{IPORHOST}
85 | SYSLOGFACILITY <%{NONNEGINT:facility}.%{NONNEGINT:priority}>
86 | HTTPDATE %{MONTHDAY}/%{MONTH}/%{YEAR}:%{TIME} %{INT}
87 |
88 | # Shortcuts
89 | QS %{QUOTEDSTRING}
90 |
91 | # Log formats
92 | SYSLOGBASE %{SYSLOGTIMESTAMP:timestamp} (?:%{SYSLOGFACILITY} )?%{SYSLOGHOST:logsource} %{SYSLOGPROG}:
93 |
94 | # Log Levels
95 | LOGLEVEL ([Aa]lert|ALERT|[Tt]race|TRACE|[Dd]ebug|DEBUG|[Nn]otice|NOTICE|[Ii]nfo?(?:rmation)?|INFO?(?:RMATION)?|[Ww]arn?(?:ing)?|WARN?(?:ING)?|[Ee]rr?(?:or)?|ERR?(?:OR)?|[Cc]rit?(?:ical)?|CRIT?(?:ICAL)?|[Ff]atal|FATAL|[Ss]evere|SEVERE|EMERG(?:ENCY)?|[Ee]merg(?:ency)?)
96 |
--------------------------------------------------------------------------------
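The definitions above are the base vocabulary that the other pattern files in this directory build on, and they are what a grok_pattern option or a <grok> section refers to at parse time. A minimal sketch of such a reference (the field names here are hypothetical; grok_pattern and the :integer conversion are the options exercised in test/test_grok_parser.rb and test/test_grok_parser_in_tcp.rb):

    <parse>
      @type grok
      grok_pattern %{IPORHOST:host} %{INT:code:integer}
    </parse>
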
/patterns/legacy/haproxy:
--------------------------------------------------------------------------------
1 | ## These patterns were tested w/ haproxy-1.4.15
2 |
3 | ## Documentation of the haproxy log formats can be found at the following links:
4 | ## http://code.google.com/p/haproxy-docs/wiki/HTTPLogFormat
5 | ## http://code.google.com/p/haproxy-docs/wiki/TCPLogFormat
6 |
7 | HAPROXYTIME (?!<[0-9])%{HOUR:haproxy_hour}:%{MINUTE:haproxy_minute}(?::%{SECOND:haproxy_second})(?![0-9])
8 | HAPROXYDATE %{MONTHDAY:haproxy_monthday}/%{MONTH:haproxy_month}/%{YEAR:haproxy_year}:%{HAPROXYTIME:haproxy_time}.%{INT:haproxy_milliseconds}
9 |
10 | # Override these default patterns to parse out what is captured in your haproxy.cfg
11 | HAPROXYCAPTUREDREQUESTHEADERS %{DATA:captured_request_headers}
12 | HAPROXYCAPTUREDRESPONSEHEADERS %{DATA:captured_response_headers}
13 |
14 | # Example:
15 | # These haproxy config lines will add data to the logs that are captured
16 | # by the patterns below. Place them in your custom patterns directory to
17 | # override the defaults.
18 | #
19 | # capture request header Host len 40
20 | # capture request header X-Forwarded-For len 50
21 | # capture request header Accept-Language len 50
22 | # capture request header Referer len 200
23 | # capture request header User-Agent len 200
24 | #
25 | # capture response header Content-Type len 30
26 | # capture response header Content-Encoding len 10
27 | # capture response header Cache-Control len 200
28 | # capture response header Last-Modified len 200
29 | #
30 | # HAPROXYCAPTUREDREQUESTHEADERS %{DATA:request_header_host}\|%{DATA:request_header_x_forwarded_for}\|%{DATA:request_header_accept_language}\|%{DATA:request_header_referer}\|%{DATA:request_header_user_agent}
31 | # HAPROXYCAPTUREDRESPONSEHEADERS %{DATA:response_header_content_type}\|%{DATA:response_header_content_encoding}\|%{DATA:response_header_cache_control}\|%{DATA:response_header_last_modified}
32 |
33 | # parse a haproxy 'httplog' line
34 | HAPROXYHTTPBASE %{IP:client_ip}:%{INT:client_port} \[%{HAPROXYDATE:accept_date}\] %{NOTSPACE:frontend_name} %{NOTSPACE:backend_name}/%{NOTSPACE:server_name} %{INT:time_request}/%{INT:time_queue}/%{INT:time_backend_connect}/%{INT:time_backend_response}/%{NOTSPACE:time_duration} %{INT:http_status_code} %{NOTSPACE:bytes_read} %{DATA:captured_request_cookie} %{DATA:captured_response_cookie} %{NOTSPACE:termination_state} %{INT:actconn}/%{INT:feconn}/%{INT:beconn}/%{INT:srvconn}/%{NOTSPACE:retries} %{INT:srv_queue}/%{INT:backend_queue} (\{%{HAPROXYCAPTUREDREQUESTHEADERS}\})?( )?(\{%{HAPROXYCAPTUREDRESPONSEHEADERS}\})?( )?"(|(%{WORD:http_verb} (%{URIPROTO:http_proto}://)?(?:%{USER:http_user}(?::[^@]*)?@)?(?:%{URIHOST:http_host})?(?:%{URIPATHPARAM:http_request})?( HTTP/%{NUMBER:http_version})?))?"?
35 |
36 | HAPROXYHTTP (?:%{SYSLOGTIMESTAMP:syslog_timestamp}|%{TIMESTAMP_ISO8601:timestamp8601}) %{IPORHOST:syslog_server} %{SYSLOGPROG}: %{HAPROXYHTTPBASE}
37 |
38 | # parse a haproxy 'tcplog' line
39 | HAPROXYTCP (?:%{SYSLOGTIMESTAMP:syslog_timestamp}|%{TIMESTAMP_ISO8601:timestamp8601}) %{IPORHOST:syslog_server} %{SYSLOGPROG}: %{IP:client_ip}:%{INT:client_port} \[%{HAPROXYDATE:accept_date}\] %{NOTSPACE:frontend_name} %{NOTSPACE:backend_name}/%{NOTSPACE:server_name} %{INT:time_queue}/%{INT:time_backend_connect}/%{NOTSPACE:time_duration} %{NOTSPACE:bytes_read} %{NOTSPACE:termination_state} %{INT:actconn}/%{INT:feconn}/%{INT:beconn}/%{INT:srvconn}/%{NOTSPACE:retries} %{INT:srv_queue}/%{INT:backend_queue}
40 |
--------------------------------------------------------------------------------
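As the override note in the file above suggests, header-capture definitions tailored to a specific haproxy.cfg can be kept in a separate pattern file and loaded with custom_pattern_path, the option exercised by the "custom pattern" test in test/test_grok_parser.rb. A minimal sketch, assuming a hypothetical override file at /etc/fluent/grok/haproxy-captures that contains HAPROXYCAPTUREDREQUESTHEADERS and HAPROXYCAPTUREDRESPONSEHEADERS lines like the commented examples above:

    <parse>
      @type grok
      custom_pattern_path /etc/fluent/grok/haproxy-captures
      <grok>
        pattern %{HAPROXYHTTP}
      </grok>
    </parse>

Whether a same-named definition loaded this way takes precedence over the bundled one depends on the plugin's pattern-loading order, so treat this as a sketch rather than a guarantee.
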
/patterns/legacy/httpd:
--------------------------------------------------------------------------------
1 | HTTPDUSER %{EMAILADDRESS}|%{USER}
2 | HTTPDERROR_DATE %{DAY} %{MONTH} %{MONTHDAY} %{TIME} %{YEAR}
3 |
4 | # Log formats
5 | HTTPD_COMMONLOG %{IPORHOST:clientip} %{HTTPDUSER:ident} %{HTTPDUSER:auth} \[%{HTTPDATE:timestamp}\] "(?:%{WORD:verb} %{NOTSPACE:request}(?: HTTP/%{NUMBER:httpversion})?|%{DATA:rawrequest})" (?:-|%{NUMBER:response}) (?:-|%{NUMBER:bytes})
6 | HTTPD_COMBINEDLOG %{HTTPD_COMMONLOG} %{QS:referrer} %{QS:agent}
7 |
8 | # Error logs
9 | HTTPD20_ERRORLOG \[%{HTTPDERROR_DATE:timestamp}\] \[%{LOGLEVEL:loglevel}\] (?:\[client %{IPORHOST:clientip}\] ){0,1}%{GREEDYDATA:message}
10 | HTTPD24_ERRORLOG \[%{HTTPDERROR_DATE:timestamp}\] \[(?:%{WORD:module})?:%{LOGLEVEL:loglevel}\] \[pid %{POSINT:pid}(:tid %{NUMBER:tid})?\]( \(%{POSINT:proxy_errorcode}\)%{DATA:proxy_message}:)?( \[client %{IPORHOST:clientip}:%{POSINT:clientport}\])?( %{DATA:errorcode}:)? %{GREEDYDATA:message}
11 | HTTPD_ERRORLOG %{HTTPD20_ERRORLOG}|%{HTTPD24_ERRORLOG}
12 |
13 | # Deprecated
14 | COMMONAPACHELOG %{HTTPD_COMMONLOG}
15 | COMBINEDAPACHELOG %{HTTPD_COMBINEDLOG}
16 |
--------------------------------------------------------------------------------
/patterns/legacy/java:
--------------------------------------------------------------------------------
1 | JAVACLASS (?:[a-zA-Z$_][a-zA-Z$_0-9]*\.)*[a-zA-Z$_][a-zA-Z$_0-9]*
2 | #Space is an allowed character to match special cases like 'Native Method' or 'Unknown Source'
3 | JAVAFILE (?:[a-zA-Z$_0-9. -]+)
4 | #Allow special <init>, <clinit> methods
5 | JAVAMETHOD (?:(<(?:cl)?init>)|[a-zA-Z$_][a-zA-Z$_0-9]*)
6 | #Line number is optional in special cases 'Native method' or 'Unknown source'
7 | JAVASTACKTRACEPART %{SPACE}at %{JAVACLASS:class}\.%{JAVAMETHOD:method}\(%{JAVAFILE:file}(?::%{NUMBER:line})?\)
8 | # Java Logs
9 | JAVATHREAD (?:[A-Z]{2}-Processor[\d]+)
10 | JAVALOGMESSAGE (.*)
11 | # MMM dd, yyyy HH:mm:ss eg: Jan 9, 2014 7:13:13 AM
12 | CATALINA_DATESTAMP %{MONTH} %{MONTHDAY}, 20%{YEAR} %{HOUR}:?%{MINUTE}(?::?%{SECOND}) (?:AM|PM)
13 | # yyyy-MM-dd HH:mm:ss,SSS ZZZ eg: 2014-01-09 17:32:25,527 -0800
14 | TOMCAT_DATESTAMP 20%{YEAR}-%{MONTHNUM}-%{MONTHDAY} %{HOUR}:?%{MINUTE}(?::?%{SECOND}) %{ISO8601_TIMEZONE}
15 | CATALINALOG %{CATALINA_DATESTAMP:timestamp} %{JAVACLASS:class} %{JAVALOGMESSAGE:logmessage}
16 | # 2014-01-09 20:03:28,269 -0800 | ERROR | com.example.service.ExampleService - something completely unexpected happened...
17 | TOMCATLOG %{TOMCAT_DATESTAMP:timestamp} \| %{LOGLEVEL:level} \| %{JAVACLASS:class} - %{JAVALOGMESSAGE:logmessage}
18 |
--------------------------------------------------------------------------------
/patterns/legacy/junos:
--------------------------------------------------------------------------------
1 | # JUNOS 11.4 RT_FLOW patterns
2 | RT_FLOW_EVENT (RT_FLOW_SESSION_CREATE|RT_FLOW_SESSION_CLOSE|RT_FLOW_SESSION_DENY)
3 |
4 | RT_FLOW1 %{RT_FLOW_EVENT:event}: %{GREEDYDATA:close-reason}: %{IP:src-ip}/%{INT:src-port}->%{IP:dst-ip}/%{INT:dst-port} %{DATA:service} %{IP:nat-src-ip}/%{INT:nat-src-port}->%{IP:nat-dst-ip}/%{INT:nat-dst-port} %{DATA:src-nat-rule-name} %{DATA:dst-nat-rule-name} %{INT:protocol-id} %{DATA:policy-name} %{DATA:from-zone} %{DATA:to-zone} %{INT:session-id} \d+\(%{DATA:sent}\) \d+\(%{DATA:received}\) %{INT:elapsed-time} .*
5 |
6 | RT_FLOW2 %{RT_FLOW_EVENT:event}: session created %{IP:src-ip}/%{INT:src-port}->%{IP:dst-ip}/%{INT:dst-port} %{DATA:service} %{IP:nat-src-ip}/%{INT:nat-src-port}->%{IP:nat-dst-ip}/%{INT:nat-dst-port} %{DATA:src-nat-rule-name} %{DATA:dst-nat-rule-name} %{INT:protocol-id} %{DATA:policy-name} %{DATA:from-zone} %{DATA:to-zone} %{INT:session-id} .*
7 |
8 | RT_FLOW3 %{RT_FLOW_EVENT:event}: session denied %{IP:src-ip}/%{INT:src-port}->%{IP:dst-ip}/%{INT:dst-port} %{DATA:service} %{INT:protocol-id}\(\d\) %{DATA:policy-name} %{DATA:from-zone} %{DATA:to-zone} .*
9 |
10 |
--------------------------------------------------------------------------------
/patterns/legacy/linux-syslog:
--------------------------------------------------------------------------------
1 | SYSLOG5424PRINTASCII [!-~]+
2 |
3 | SYSLOGBASE2 (?:%{SYSLOGTIMESTAMP:timestamp}|%{TIMESTAMP_ISO8601:timestamp8601}) (?:%{SYSLOGFACILITY} )?%{SYSLOGHOST:logsource}+(?: %{SYSLOGPROG}:|)
4 | SYSLOGPAMSESSION %{SYSLOGBASE} (?=%{GREEDYDATA:message})%{WORD:pam_module}\(%{DATA:pam_caller}\): session %{WORD:pam_session_state} for user %{USERNAME:username}(?: by %{GREEDYDATA:pam_by})?
5 |
6 | CRON_ACTION [A-Z ]+
7 | CRONLOG %{SYSLOGBASE} \(%{USER:user}\) %{CRON_ACTION:action} \(%{DATA:message}\)
8 |
9 | SYSLOGLINE %{SYSLOGBASE2} %{GREEDYDATA:message}
10 |
11 | # IETF 5424 syslog(8) format (see http://www.rfc-editor.org/info/rfc5424)
12 | SYSLOG5424PRI <%{NONNEGINT:syslog5424_pri}>
13 | SYSLOG5424SD \[%{DATA}\]+
14 | SYSLOG5424BASE %{SYSLOG5424PRI}%{NONNEGINT:syslog5424_ver} +(?:%{TIMESTAMP_ISO8601:syslog5424_ts}|-) +(?:%{IPORHOST:syslog5424_host}|-) +(-|%{SYSLOG5424PRINTASCII:syslog5424_app}) +(-|%{SYSLOG5424PRINTASCII:syslog5424_proc}) +(-|%{SYSLOG5424PRINTASCII:syslog5424_msgid}) +(?:%{SYSLOG5424SD:syslog5424_sd}|-|)
15 |
16 | SYSLOG5424LINE %{SYSLOG5424BASE} +%{GREEDYDATA:syslog5424_msg}
17 |
--------------------------------------------------------------------------------
/patterns/legacy/maven:
--------------------------------------------------------------------------------
1 | MAVEN_VERSION (?:(\d+)\.)?(?:(\d+)\.)?(\*|\d+)(?:[.-](RELEASE|SNAPSHOT))?
2 |
--------------------------------------------------------------------------------
/patterns/legacy/mcollective:
--------------------------------------------------------------------------------
1 | MCOLLECTIVEAUDIT %{TIMESTAMP_ISO8601:timestamp}:
2 |
--------------------------------------------------------------------------------
/patterns/legacy/mcollective-patterns:
--------------------------------------------------------------------------------
1 | # Remember, these can be multi-line events.
2 | MCOLLECTIVE ., \[%{TIMESTAMP_ISO8601:timestamp} #%{POSINT:pid}\]%{SPACE}%{LOGLEVEL:event_level}
3 |
4 | MCOLLECTIVEAUDIT %{TIMESTAMP_ISO8601:timestamp}:
5 |
--------------------------------------------------------------------------------
/patterns/legacy/mongodb:
--------------------------------------------------------------------------------
1 | MONGO_LOG %{SYSLOGTIMESTAMP:timestamp} \[%{WORD:component}\] %{GREEDYDATA:message}
2 | MONGO_QUERY \{ (?<={ ).*(?= } ntoreturn:) \}
3 | MONGO_SLOWQUERY %{WORD} %{MONGO_WORDDASH:database}\.%{MONGO_WORDDASH:collection} %{WORD}: %{MONGO_QUERY:query} %{WORD}:%{NONNEGINT:ntoreturn} %{WORD}:%{NONNEGINT:ntoskip} %{WORD}:%{NONNEGINT:nscanned}.*nreturned:%{NONNEGINT:nreturned}..+ (?<duration>[0-9]+)ms
4 | MONGO_WORDDASH \b[\w-]+\b
5 | MONGO3_SEVERITY \w
6 | MONGO3_COMPONENT %{WORD}|-
7 | MONGO3_LOG %{TIMESTAMP_ISO8601:timestamp} %{MONGO3_SEVERITY:severity} %{MONGO3_COMPONENT:component}%{SPACE}(?:\[%{DATA:context}\])? %{GREEDYDATA:message}
8 |
--------------------------------------------------------------------------------
/patterns/legacy/nagios:
--------------------------------------------------------------------------------
1 | ##################################################################################
2 | ##################################################################################
3 | # Chop Nagios log files to smithereens!
4 | #
5 | # A set of GROK filters to process logfiles generated by Nagios.
6 | # It does not cover everything yet, but this set intends to cover all possible Nagios logs.
7 | #
8 | # Some more work needs to be done to cover all External Commands:
9 | # http://old.nagios.org/developerinfo/externalcommands/commandlist.php
10 | #
11 | # If you need some support on these rules please contact:
12 | # Jelle Smet http://smetj.net
13 | #
14 | #################################################################################
15 | #################################################################################
16 |
17 | NAGIOSTIME \[%{NUMBER:nagios_epoch}\]
18 |
19 | ###############################################
20 | ######## Begin nagios log types
21 | ###############################################
22 | NAGIOS_TYPE_CURRENT_SERVICE_STATE CURRENT SERVICE STATE
23 | NAGIOS_TYPE_CURRENT_HOST_STATE CURRENT HOST STATE
24 |
25 | NAGIOS_TYPE_SERVICE_NOTIFICATION SERVICE NOTIFICATION
26 | NAGIOS_TYPE_HOST_NOTIFICATION HOST NOTIFICATION
27 |
28 | NAGIOS_TYPE_SERVICE_ALERT SERVICE ALERT
29 | NAGIOS_TYPE_HOST_ALERT HOST ALERT
30 |
31 | NAGIOS_TYPE_SERVICE_FLAPPING_ALERT SERVICE FLAPPING ALERT
32 | NAGIOS_TYPE_HOST_FLAPPING_ALERT HOST FLAPPING ALERT
33 |
34 | NAGIOS_TYPE_SERVICE_DOWNTIME_ALERT SERVICE DOWNTIME ALERT
35 | NAGIOS_TYPE_HOST_DOWNTIME_ALERT HOST DOWNTIME ALERT
36 |
37 | NAGIOS_TYPE_PASSIVE_SERVICE_CHECK PASSIVE SERVICE CHECK
38 | NAGIOS_TYPE_PASSIVE_HOST_CHECK PASSIVE HOST CHECK
39 |
40 | NAGIOS_TYPE_SERVICE_EVENT_HANDLER SERVICE EVENT HANDLER
41 | NAGIOS_TYPE_HOST_EVENT_HANDLER HOST EVENT HANDLER
42 |
43 | NAGIOS_TYPE_EXTERNAL_COMMAND EXTERNAL COMMAND
44 | NAGIOS_TYPE_TIMEPERIOD_TRANSITION TIMEPERIOD TRANSITION
45 | ###############################################
46 | ######## End nagios log types
47 | ###############################################
48 |
49 | ###############################################
50 | ######## Begin external check types
51 | ###############################################
52 | NAGIOS_EC_DISABLE_SVC_CHECK DISABLE_SVC_CHECK
53 | NAGIOS_EC_ENABLE_SVC_CHECK ENABLE_SVC_CHECK
54 | NAGIOS_EC_DISABLE_HOST_CHECK DISABLE_HOST_CHECK
55 | NAGIOS_EC_ENABLE_HOST_CHECK ENABLE_HOST_CHECK
56 | NAGIOS_EC_PROCESS_SERVICE_CHECK_RESULT PROCESS_SERVICE_CHECK_RESULT
57 | NAGIOS_EC_PROCESS_HOST_CHECK_RESULT PROCESS_HOST_CHECK_RESULT
58 | NAGIOS_EC_SCHEDULE_SERVICE_DOWNTIME SCHEDULE_SERVICE_DOWNTIME
59 | NAGIOS_EC_SCHEDULE_HOST_DOWNTIME SCHEDULE_HOST_DOWNTIME
60 | NAGIOS_EC_DISABLE_HOST_SVC_NOTIFICATIONS DISABLE_HOST_SVC_NOTIFICATIONS
61 | NAGIOS_EC_ENABLE_HOST_SVC_NOTIFICATIONS ENABLE_HOST_SVC_NOTIFICATIONS
62 | NAGIOS_EC_DISABLE_HOST_NOTIFICATIONS DISABLE_HOST_NOTIFICATIONS
63 | NAGIOS_EC_ENABLE_HOST_NOTIFICATIONS ENABLE_HOST_NOTIFICATIONS
64 | NAGIOS_EC_DISABLE_SVC_NOTIFICATIONS DISABLE_SVC_NOTIFICATIONS
65 | NAGIOS_EC_ENABLE_SVC_NOTIFICATIONS ENABLE_SVC_NOTIFICATIONS
66 | ###############################################
67 | ######## End external check types
68 | ###############################################
69 | NAGIOS_WARNING Warning:%{SPACE}%{GREEDYDATA:nagios_message}
70 |
71 | NAGIOS_CURRENT_SERVICE_STATE %{NAGIOS_TYPE_CURRENT_SERVICE_STATE:nagios_type}: %{DATA:nagios_hostname};%{DATA:nagios_service};%{DATA:nagios_state};%{DATA:nagios_statetype};%{DATA:nagios_statecode};%{GREEDYDATA:nagios_message}
72 | NAGIOS_CURRENT_HOST_STATE %{NAGIOS_TYPE_CURRENT_HOST_STATE:nagios_type}: %{DATA:nagios_hostname};%{DATA:nagios_state};%{DATA:nagios_statetype};%{DATA:nagios_statecode};%{GREEDYDATA:nagios_message}
73 |
74 | NAGIOS_SERVICE_NOTIFICATION %{NAGIOS_TYPE_SERVICE_NOTIFICATION:nagios_type}: %{DATA:nagios_notifyname};%{DATA:nagios_hostname};%{DATA:nagios_service};%{DATA:nagios_state};%{DATA:nagios_contact};%{GREEDYDATA:nagios_message}
75 | NAGIOS_HOST_NOTIFICATION %{NAGIOS_TYPE_HOST_NOTIFICATION:nagios_type}: %{DATA:nagios_notifyname};%{DATA:nagios_hostname};%{DATA:nagios_state};%{DATA:nagios_contact};%{GREEDYDATA:nagios_message}
76 |
77 | NAGIOS_SERVICE_ALERT %{NAGIOS_TYPE_SERVICE_ALERT:nagios_type}: %{DATA:nagios_hostname};%{DATA:nagios_service};%{DATA:nagios_state};%{DATA:nagios_statelevel};%{NUMBER:nagios_attempt};%{GREEDYDATA:nagios_message}
78 | NAGIOS_HOST_ALERT %{NAGIOS_TYPE_HOST_ALERT:nagios_type}: %{DATA:nagios_hostname};%{DATA:nagios_state};%{DATA:nagios_statelevel};%{NUMBER:nagios_attempt};%{GREEDYDATA:nagios_message}
79 |
80 | NAGIOS_SERVICE_FLAPPING_ALERT %{NAGIOS_TYPE_SERVICE_FLAPPING_ALERT:nagios_type}: %{DATA:nagios_hostname};%{DATA:nagios_service};%{DATA:nagios_state};%{GREEDYDATA:nagios_message}
81 | NAGIOS_HOST_FLAPPING_ALERT %{NAGIOS_TYPE_HOST_FLAPPING_ALERT:nagios_type}: %{DATA:nagios_hostname};%{DATA:nagios_state};%{GREEDYDATA:nagios_message}
82 |
83 | NAGIOS_SERVICE_DOWNTIME_ALERT %{NAGIOS_TYPE_SERVICE_DOWNTIME_ALERT:nagios_type}: %{DATA:nagios_hostname};%{DATA:nagios_service};%{DATA:nagios_state};%{GREEDYDATA:nagios_comment}
84 | NAGIOS_HOST_DOWNTIME_ALERT %{NAGIOS_TYPE_HOST_DOWNTIME_ALERT:nagios_type}: %{DATA:nagios_hostname};%{DATA:nagios_state};%{GREEDYDATA:nagios_comment}
85 |
86 | NAGIOS_PASSIVE_SERVICE_CHECK %{NAGIOS_TYPE_PASSIVE_SERVICE_CHECK:nagios_type}: %{DATA:nagios_hostname};%{DATA:nagios_service};%{DATA:nagios_state};%{GREEDYDATA:nagios_comment}
87 | NAGIOS_PASSIVE_HOST_CHECK %{NAGIOS_TYPE_PASSIVE_HOST_CHECK:nagios_type}: %{DATA:nagios_hostname};%{DATA:nagios_state};%{GREEDYDATA:nagios_comment}
88 |
89 | NAGIOS_SERVICE_EVENT_HANDLER %{NAGIOS_TYPE_SERVICE_EVENT_HANDLER:nagios_type}: %{DATA:nagios_hostname};%{DATA:nagios_service};%{DATA:nagios_state};%{DATA:nagios_statelevel};%{DATA:nagios_event_handler_name}
90 | NAGIOS_HOST_EVENT_HANDLER %{NAGIOS_TYPE_HOST_EVENT_HANDLER:nagios_type}: %{DATA:nagios_hostname};%{DATA:nagios_state};%{DATA:nagios_statelevel};%{DATA:nagios_event_handler_name}
91 |
92 | NAGIOS_TIMEPERIOD_TRANSITION %{NAGIOS_TYPE_TIMEPERIOD_TRANSITION:nagios_type}: %{DATA:nagios_service};%{NUMBER:nagios_unknown1};%{NUMBER:nagios_unknown2}
93 |
94 | ####################
95 | #### External checks
96 | ####################
97 |
98 | #Disable host & service check
99 | NAGIOS_EC_LINE_DISABLE_SVC_CHECK %{NAGIOS_TYPE_EXTERNAL_COMMAND:nagios_type}: %{NAGIOS_EC_DISABLE_SVC_CHECK:nagios_command};%{DATA:nagios_hostname};%{DATA:nagios_service}
100 | NAGIOS_EC_LINE_DISABLE_HOST_CHECK %{NAGIOS_TYPE_EXTERNAL_COMMAND:nagios_type}: %{NAGIOS_EC_DISABLE_HOST_CHECK:nagios_command};%{DATA:nagios_hostname}
101 |
102 | #Enable host & service check
103 | NAGIOS_EC_LINE_ENABLE_SVC_CHECK %{NAGIOS_TYPE_EXTERNAL_COMMAND:nagios_type}: %{NAGIOS_EC_ENABLE_SVC_CHECK:nagios_command};%{DATA:nagios_hostname};%{DATA:nagios_service}
104 | NAGIOS_EC_LINE_ENABLE_HOST_CHECK %{NAGIOS_TYPE_EXTERNAL_COMMAND:nagios_type}: %{NAGIOS_EC_ENABLE_HOST_CHECK:nagios_command};%{DATA:nagios_hostname}
105 |
106 | #Process host & service check
107 | NAGIOS_EC_LINE_PROCESS_SERVICE_CHECK_RESULT %{NAGIOS_TYPE_EXTERNAL_COMMAND:nagios_type}: %{NAGIOS_EC_PROCESS_SERVICE_CHECK_RESULT:nagios_command};%{DATA:nagios_hostname};%{DATA:nagios_service};%{DATA:nagios_state};%{GREEDYDATA:nagios_check_result}
108 | NAGIOS_EC_LINE_PROCESS_HOST_CHECK_RESULT %{NAGIOS_TYPE_EXTERNAL_COMMAND:nagios_type}: %{NAGIOS_EC_PROCESS_HOST_CHECK_RESULT:nagios_command};%{DATA:nagios_hostname};%{DATA:nagios_state};%{GREEDYDATA:nagios_check_result}
109 |
110 | #Disable host & service notifications
111 | NAGIOS_EC_LINE_DISABLE_HOST_SVC_NOTIFICATIONS %{NAGIOS_TYPE_EXTERNAL_COMMAND:nagios_type}: %{NAGIOS_EC_DISABLE_HOST_SVC_NOTIFICATIONS:nagios_command};%{GREEDYDATA:nagios_hostname}
112 | NAGIOS_EC_LINE_DISABLE_HOST_NOTIFICATIONS %{NAGIOS_TYPE_EXTERNAL_COMMAND:nagios_type}: %{NAGIOS_EC_DISABLE_HOST_NOTIFICATIONS:nagios_command};%{GREEDYDATA:nagios_hostname}
113 | NAGIOS_EC_LINE_DISABLE_SVC_NOTIFICATIONS %{NAGIOS_TYPE_EXTERNAL_COMMAND:nagios_type}: %{NAGIOS_EC_DISABLE_SVC_NOTIFICATIONS:nagios_command};%{DATA:nagios_hostname};%{GREEDYDATA:nagios_service}
114 |
115 | #Enable host & service notifications
116 | NAGIOS_EC_LINE_ENABLE_HOST_SVC_NOTIFICATIONS %{NAGIOS_TYPE_EXTERNAL_COMMAND:nagios_type}: %{NAGIOS_EC_ENABLE_HOST_SVC_NOTIFICATIONS:nagios_command};%{GREEDYDATA:nagios_hostname}
117 | NAGIOS_EC_LINE_ENABLE_HOST_NOTIFICATIONS %{NAGIOS_TYPE_EXTERNAL_COMMAND:nagios_type}: %{NAGIOS_EC_ENABLE_HOST_NOTIFICATIONS:nagios_command};%{GREEDYDATA:nagios_hostname}
118 | NAGIOS_EC_LINE_ENABLE_SVC_NOTIFICATIONS %{NAGIOS_TYPE_EXTERNAL_COMMAND:nagios_type}: %{NAGIOS_EC_ENABLE_SVC_NOTIFICATIONS:nagios_command};%{DATA:nagios_hostname};%{GREEDYDATA:nagios_service}
119 |
120 | #Schedule host & service downtime
121 | NAGIOS_EC_LINE_SCHEDULE_HOST_DOWNTIME %{NAGIOS_TYPE_EXTERNAL_COMMAND:nagios_type}: %{NAGIOS_EC_SCHEDULE_HOST_DOWNTIME:nagios_command};%{DATA:nagios_hostname};%{NUMBER:nagios_start_time};%{NUMBER:nagios_end_time};%{NUMBER:nagios_fixed};%{NUMBER:nagios_trigger_id};%{NUMBER:nagios_duration};%{DATA:author};%{DATA:comment}
122 |
123 | #End matching line
124 | NAGIOSLOGLINE %{NAGIOSTIME} (?:%{NAGIOS_WARNING}|%{NAGIOS_CURRENT_SERVICE_STATE}|%{NAGIOS_CURRENT_HOST_STATE}|%{NAGIOS_SERVICE_NOTIFICATION}|%{NAGIOS_HOST_NOTIFICATION}|%{NAGIOS_SERVICE_ALERT}|%{NAGIOS_HOST_ALERT}|%{NAGIOS_SERVICE_FLAPPING_ALERT}|%{NAGIOS_HOST_FLAPPING_ALERT}|%{NAGIOS_SERVICE_DOWNTIME_ALERT}|%{NAGIOS_HOST_DOWNTIME_ALERT}|%{NAGIOS_PASSIVE_SERVICE_CHECK}|%{NAGIOS_PASSIVE_HOST_CHECK}|%{NAGIOS_SERVICE_EVENT_HANDLER}|%{NAGIOS_HOST_EVENT_HANDLER}|%{NAGIOS_TIMEPERIOD_TRANSITION}|%{NAGIOS_EC_LINE_DISABLE_SVC_CHECK}|%{NAGIOS_EC_LINE_ENABLE_SVC_CHECK}|%{NAGIOS_EC_LINE_DISABLE_HOST_CHECK}|%{NAGIOS_EC_LINE_ENABLE_HOST_CHECK}|%{NAGIOS_EC_LINE_PROCESS_HOST_CHECK_RESULT}|%{NAGIOS_EC_LINE_PROCESS_SERVICE_CHECK_RESULT}|%{NAGIOS_EC_LINE_SCHEDULE_HOST_DOWNTIME}|%{NAGIOS_EC_LINE_DISABLE_HOST_SVC_NOTIFICATIONS}|%{NAGIOS_EC_LINE_ENABLE_HOST_SVC_NOTIFICATIONS}|%{NAGIOS_EC_LINE_DISABLE_HOST_NOTIFICATIONS}|%{NAGIOS_EC_LINE_ENABLE_HOST_NOTIFICATIONS}|%{NAGIOS_EC_LINE_DISABLE_SVC_NOTIFICATIONS}|%{NAGIOS_EC_LINE_ENABLE_SVC_NOTIFICATIONS})
125 |
--------------------------------------------------------------------------------
/patterns/legacy/postgresql:
--------------------------------------------------------------------------------
1 | # Default postgresql pg_log format pattern
2 | POSTGRESQL %{DATESTAMP:timestamp} %{TZ} %{DATA:user_id} %{GREEDYDATA:connection_id} %{POSINT:pid}
3 |
4 |
--------------------------------------------------------------------------------
/patterns/legacy/rails:
--------------------------------------------------------------------------------
1 | RUUID \h{32}
2 | # rails controller with action
3 | RCONTROLLER (?<controller>[^#]+)#(?<action>\w+)
4 |
5 | # this will often be the only line:
6 | RAILS3HEAD (?m)Started %{WORD:verb} "%{URIPATHPARAM:request}" for %{IPORHOST:clientip} at (?<timestamp>%{YEAR}-%{MONTHNUM}-%{MONTHDAY} %{HOUR}:%{MINUTE}:%{SECOND} %{ISO8601_TIMEZONE})
7 | # for some strange reason, params are stripped of {} - not sure that's a good idea.
8 | RPROCESSING \W*Processing by %{RCONTROLLER} as (?<format>\S+)(?:\W*Parameters: {%{DATA:params}}\W*)?
9 | RAILS3FOOT Completed %{NUMBER:response}%{DATA} in %{NUMBER:totalms}ms %{RAILS3PROFILE}%{GREEDYDATA}
10 | RAILS3PROFILE (?:\(Views: %{NUMBER:viewms}ms \| ActiveRecord: %{NUMBER:activerecordms}ms|\(ActiveRecord: %{NUMBER:activerecordms}ms)?
11 |
12 | # putting it all together
13 | RAILS3 %{RAILS3HEAD}(?:%{RPROCESSING})?(?<context>(?:%{DATA}\n)*)(?:%{RAILS3FOOT})?
14 |
--------------------------------------------------------------------------------
/patterns/legacy/redis:
--------------------------------------------------------------------------------
1 | REDISTIMESTAMP %{MONTHDAY} %{MONTH} %{TIME}
2 | REDISLOG \[%{POSINT:pid}\] %{REDISTIMESTAMP:timestamp} \*
3 | REDISMONLOG %{NUMBER:timestamp} \[%{INT:database} %{IP:client}:%{NUMBER:port}\] "%{WORD:command}"\s?%{GREEDYDATA:params}
4 |
--------------------------------------------------------------------------------
/patterns/legacy/ruby:
--------------------------------------------------------------------------------
1 | RUBY_LOGLEVEL (?:DEBUG|FATAL|ERROR|WARN|INFO)
2 | RUBY_LOGGER [DFEWI], \[%{TIMESTAMP_ISO8601:timestamp} #%{POSINT:pid}\] *%{RUBY_LOGLEVEL:loglevel} -- +%{DATA:progname}: %{GREEDYDATA:message}
3 |
--------------------------------------------------------------------------------
/patterns/legacy/squid:
--------------------------------------------------------------------------------
1 | # Pattern squid3
2 | # Documentation of squid3 logs formats can be found at the following link:
3 | # http://wiki.squid-cache.org/Features/LogFormat
4 | SQUID3 %{NUMBER:timestamp}\s+%{NUMBER:duration}\s%{IP:client_address}\s%{WORD:cache_result}/%{NONNEGINT:status_code}\s%{NUMBER:bytes}\s%{WORD:request_method}\s%{NOTSPACE:url}\s(%{NOTSPACE:user}|-)\s%{WORD:hierarchy_code}/(%{IPORHOST:server}|-)\s%{NOTSPACE:content_type}
5 |
--------------------------------------------------------------------------------
/test/fixtures/my_pattern:
--------------------------------------------------------------------------------
1 | MY_AWESOME_PATTERN %{GREEDYDATA:message}
2 |
--------------------------------------------------------------------------------
/test/helper.rb:
--------------------------------------------------------------------------------
1 | require "fluent/test"
2 | require "fluent/test/helpers"
3 | require "fluent/test/driver/input"
4 | require "fluent/test/driver/parser"
5 | require "pathname"
6 |
7 | Test::Unit::TestCase.include(Fluent::Test::Helpers)
8 |
9 | def fixtures(name)
10 | Pathname(__dir__).expand_path + "fixtures" + name
11 | end
12 |
13 | def unused_port
14 | s = TCPServer.open(0)
15 | port = s.addr[1]
16 | s.close
17 | port
18 | end
19 |
20 | def ipv6_enabled?
21 | require "socket"
22 |
23 | begin
24 | TCPServer.open("::1", 0)
25 | true
26 | rescue
27 | false
28 | end
29 | end
30 |
--------------------------------------------------------------------------------
/test/test_grok_parser.rb:
--------------------------------------------------------------------------------
1 | require "helper"
2 | require "tempfile"
3 | require "fluent/plugin/parser_grok"
4 |
5 | def str2time(str_time, format = nil)
6 | if format
7 | Time.strptime(str_time, format).to_i
8 | else
9 | Time.parse(str_time).to_i
10 | end
11 | end
12 |
13 | class GrokParserTest < ::Test::Unit::TestCase
14 | setup do
15 | Fluent::Test.setup
16 | end
17 |
18 | sub_test_case "timestamp" do
19 | test "timestamp iso8601" do
20 | internal_test_grok_pattern("%{TIMESTAMP_ISO8601:time}", "Some stuff at 2014-01-01T00:00:00+0900",
21 | event_time("2014-01-01T00:00:00+0900"), {})
22 | end
23 |
24 | test "datestamp rfc822 with zone" do
25 | internal_test_grok_pattern("%{DATESTAMP_RFC822:time}", "Some stuff at Mon Aug 15 2005 15:52:01 UTC",
26 | event_time("Mon Aug 15 2005 15:52:01 UTC"), {})
27 | end
28 |
29 | test "datestamp rfc822 with numeric zone" do
30 | internal_test_grok_pattern("%{DATESTAMP_RFC2822:time}", "Some stuff at Mon, 15 Aug 2005 15:52:01 +0000",
31 | event_time("Mon, 15 Aug 2005 15:52:01 +0000"), {})
32 | end
33 |
34 | test "syslogtimestamp" do
35 | internal_test_grok_pattern("%{SYSLOGTIMESTAMP:time}", "Some stuff at Aug 01 00:00:00",
36 | event_time("Aug 01 00:00:00"), {})
37 | end
38 | end
39 |
40 | test "date" do
41 | internal_test_grok_pattern("\\[(?<date>%{DATE} %{TIME} (?:AM|PM))\\]", "[2/16/2018 10:19:34 AM]",
42 | nil, { "date" => "2/16/2018 10:19:34 AM" })
43 | end
44 |
45 | test "grok pattern not found" do
46 | assert_raise Fluent::Grok::GrokPatternNotFoundError do
47 | internal_test_grok_pattern("%{THIS_PATTERN_DOESNT_EXIST}", "Some stuff at somewhere", nil, {})
48 | end
49 | end
50 |
51 | test "multiple fields" do
52 | internal_test_grok_pattern("%{MAC:mac_address} %{IP:ip_address}", "this.wont.match DEAD.BEEF.1234 127.0.0.1", nil,
53 | {"mac_address" => "DEAD.BEEF.1234", "ip_address" => "127.0.0.1"})
54 | end
55 |
56 | sub_test_case "complex pattern w/ grok_pattern_series" do
57 | test "legacy" do
58 | internal_test_grok_pattern("%{HTTPD_COMBINEDLOG}", '127.0.0.1 192.168.0.1 - [28/Feb/2013:12:00:00 +0900] "GET / HTTP/1.1" 200 777 "-" "Opera/12.0"',
59 | str2time("28/Feb/2013:12:00:00 +0900", "%d/%b/%Y:%H:%M:%S %z"),
60 | {
61 | "clientip" => "127.0.0.1",
62 | "ident" => "192.168.0.1",
63 | "auth" => "-",
64 | "verb" => "GET",
65 | "request" => "/",
66 | "httpversion" => "1.1",
67 | "response" => "200",
68 | "bytes" => "777",
69 | "referrer" => "\"-\"",
70 | "agent" => "\"Opera/12.0\""
71 | },
72 | "time_key" => "timestamp",
73 | "time_format" => "%d/%b/%Y:%H:%M:%S %z",
74 | "grok_pattern_series" => "legacy"
75 | )
76 | end
77 |
78 | test "ecs-v1" do
79 | internal_test_grok_pattern("%{HTTPD_COMBINEDLOG}", '127.0.0.1 192.168.0.1 - [28/Feb/2013:12:00:00 +0900] "GET / HTTP/1.1" 200 777 "-" "Opera/12.0"',
80 | str2time("28/Feb/2013:12:00:00 +0900", "%d/%b/%Y:%H:%M:%S %z"),
81 | {
82 | "apache.access.user.identity" => "192.168.0.1",
83 | "http.request.method" => "GET",
84 | "http.response.body.bytes" => 777,
85 | "http.response.status_code" => 200,
86 | "http.version" => "1.1",
87 | "source.address" => "127.0.0.1",
88 | "url.original" => "/",
89 | "user_agent.original" => "Opera/12.0",
90 | },
91 | "time_key" => "timestamp",
92 | "time_format" => "%d/%b/%Y:%H:%M:%S %z",
93 | "grok_pattern_series" => "ecs-v1"
94 | )
95 | end
96 | end
97 |
98 | test "custom pattern" do
99 | internal_test_grok_pattern("%{MY_AWESOME_PATTERN:message}", "this is awesome",
100 | nil, {"message" => "this is awesome"},
101 | "custom_pattern_path" => fixtures("my_pattern").to_s)
102 | end
103 |
104 | sub_test_case "OptionalType" do
105 | test "simple" do
106 | internal_test_grok_pattern("%{INT:user_id:integer} paid %{NUMBER:paid_amount:float}",
107 | "12345 paid 6789.10", nil,
108 | {"user_id" => 12345, "paid_amount" => 6789.1 })
109 | end
110 |
111 | test "array" do
112 | internal_test_grok_pattern("%{GREEDYDATA:message:array}",
113 | "a,b,c,d", nil,
114 | {"message" => %w(a b c d)})
115 | end
116 |
117 | test "array with delimiter" do
118 | internal_test_grok_pattern("%{GREEDYDATA:message:array:|}",
119 | "a|b|c|d", nil,
120 | {"message" => %w(a b c d)})
121 | end
122 |
123 | test "timestamp iso8601" do
124 | internal_test_grok_pattern("%{TIMESTAMP_ISO8601:stamp:time}", "Some stuff at 2014-01-01T00:00:00+0900",
125 | nil, {"stamp" => event_time("2014-01-01T00:00:00+0900")})
126 | end
127 |
128 | test "datestamp rfc822 with zone" do
129 | internal_test_grok_pattern("%{DATESTAMP_RFC822:stamp:time}", "Some stuff at Mon Aug 15 2005 15:52:01 UTC",
130 | nil, {"stamp" => event_time("Mon Aug 15 2005 15:52:01 UTC")})
131 | end
132 |
133 | test "datestamp rfc822 with numeric zone" do
134 | internal_test_grok_pattern("%{DATESTAMP_RFC2822:stamp:time}", "Some stuff at Mon, 15 Aug 2005 15:52:01 +0000",
135 | nil, {"stamp" => event_time("Mon, 15 Aug 2005 15:52:01 +0000")})
136 | end
137 |
138 | test "syslogtimestamp" do
139 | internal_test_grok_pattern("%{SYSLOGTIMESTAMP:stamp:time}", "Some stuff at Aug 01 00:00:00",
140 | nil, {"stamp" => event_time("Aug 01 00:00:00")})
141 | end
142 |
143 | test "timestamp with format" do
144 | internal_test_grok_pattern("%{TIMESTAMP_ISO8601:stamp:time:%Y-%m-%d %H%M}", "Some stuff at 2014-01-01 1000",
145 | nil, {"stamp" => event_time("2014-01-01 10:00")})
146 | end
147 | end
148 |
149 | sub_test_case "NoGrokPatternMatched" do
150 | test "with grok_failure_key" do
151 | config = %[
152 | grok_failure_key grok_failure
153 | <grok>
154 | pattern %{PATH:path}
155 | </grok>
156 | ]
157 | expected = {
158 | "grok_failure" => "No grok pattern matched",
159 | "message" => "no such pattern"
160 | }
161 | d = create_driver(config)
162 | d.instance.parse("no such pattern") do |_time, record|
163 | assert_equal(expected, record)
164 | end
165 | end
166 |
167 | test "without grok_failure_key" do
168 | config = %[
169 | <grok>
170 | pattern %{PATH:path}
171 | </grok>
172 | ]
173 | expected = {
174 | "message" => "no such pattern"
175 | }
176 | d = create_driver(config)
177 | d.instance.parse("no such pattern") do |_time, record|
178 | assert_equal(expected, record)
179 | end
180 | end
181 | end
182 |
183 | sub_test_case "configure" do
184 | test "no grok patterns" do
185 | assert_raise Fluent::ConfigError do
186 | create_driver('')
187 | end
188 | end
189 |
190 | test "invalid config value type" do
191 | assert_raise Fluent::ConfigError do
192 | create_driver(%[
193 | <grok>
194 | pattern %{PATH:path:foo}
195 | </grok>
196 | ])
197 | end
198 | end
199 |
200 | test "invalid config value type and normal grok pattern" do
201 | d = create_driver(%[
202 | <grok>
203 | pattern %{PATH:path:foo}
204 | </grok>
205 | <grok>
206 | pattern %{IP:ip_address}
207 | </grok>
208 | ])
209 | assert_equal(1, d.instance.instance_variable_get(:@grok).parsers.size)
210 | logs = $log.instance_variable_get(:@logger).instance_variable_get(:@logdev).logs
211 | error_logs = logs.grep(/error_class/)
212 | assert_equal(1, error_logs.size)
213 | error_message = error_logs.first[/error="(.+)"/, 1]
214 | assert_equal("unknown value conversion for key:'path', type:'foo'", error_message)
215 | end
216 |
217 | test "keep original configuration" do
218 | config = %[
219 | <grok>
220 | pattern %{INT:user_id:integer} paid %{NUMBER:paid_amount:float}
221 | </grok>
222 | ]
223 | d = create_driver(config)
224 | assert_equal("%{INT:user_id:integer} paid %{NUMBER:paid_amount:float}", d.instance.config.elements("grok").first["pattern"])
225 | end
226 | end
227 |
228 | sub_test_case "grok_name_key" do
229 | test "one grok section with name" do
230 | d = create_driver(%[
231 | grok_name_key grok_name
232 | <grok>
233 | name path
234 | pattern %{PATH:path}
235 | </grok>
236 | ])
237 | expected = {
238 | "path" => "/",
239 | "grok_name" => "path"
240 | }
241 | d.instance.parse("/") do |time, record|
242 | assert_equal(expected, record)
243 | end
244 | end
245 |
246 | test "one grok section without name" do
247 | d = create_driver(%[
248 | grok_name_key grok_name
249 | <grok>
250 | pattern %{PATH:path}
251 | </grok>
252 | ])
253 | expected = {
254 | "path" => "/",
255 | "grok_name" => 0
256 | }
257 | d.instance.parse("/") do |time, record|
258 | assert_equal(expected, record)
259 | end
260 | end
261 |
262 | test "multiple grok sections with name" do
263 | d = create_driver(%[
264 | grok_name_key grok_name
265 | <grok>
266 | name path
267 | pattern %{PATH:path}
268 | </grok>
269 | <grok>
270 | name ip
271 | pattern %{IP:ip_address}
272 | </grok>
273 | ])
274 | expected = [
275 | { "path" => "/", "grok_name" => "path" },
276 | { "ip_address" => "127.0.0.1", "grok_name" => "ip" },
277 | ]
278 | records = []
279 | d.instance.parse("/") do |time, record|
280 | records << record
281 | end
282 | d.instance.parse("127.0.0.1") do |time, record|
283 | records << record
284 | end
285 | assert_equal(expected, records)
286 | end
287 |
288 | test "multiple grok sections without name" do
289 | d = create_driver(%[
290 | grok_name_key grok_name
291 | <grok>
292 | pattern %{PATH:path}
293 | </grok>
294 | <grok>
295 | pattern %{IP:ip_address}
296 | </grok>
297 | ])
298 | expected = [
299 | { "path" => "/", "grok_name" => 0 },
300 | { "ip_address" => "127.0.0.1", "grok_name" => 1 },
301 | ]
302 | records = []
303 | d.instance.parse("/") do |time, record|
304 | records << record
305 | end
306 | d.instance.parse("127.0.0.1") do |time, record|
307 | records << record
308 | end
309 | assert_equal(expected, records)
310 | end
311 |
312 | test "multiple grok sections with both name and index" do
313 | d = create_driver(%[
314 | grok_name_key grok_name
315 | <grok>
316 | name path
317 | pattern %{PATH:path}
318 | </grok>
319 | <grok>
320 | pattern %{IP:ip_address}
321 | </grok>
322 | ])
323 | expected = [
324 | { "path" => "/", "grok_name" => "path" },
325 | { "ip_address" => "127.0.0.1", "grok_name" => 1 },
326 | ]
327 | records = []
328 | d.instance.parse("/") do |time, record|
329 | records << record
330 | end
331 | d.instance.parse("127.0.0.1") do |time, record|
332 | records << record
333 | end
334 | assert_equal(expected, records)
335 | end
336 | end
337 |
338 | sub_test_case "keep_time_key" do
339 | test "true" do
340 | d = create_driver(%[
341 | keep_time_key true
342 | <grok>
343 | pattern "%{TIMESTAMP_ISO8601:time}"
344 | </grok>
345 | ])
346 | expected = [
347 | { "time" => "2014-01-01T00:00:00+0900" }
348 | ]
349 | records = []
350 | d.instance.parse("Some stuff at 2014-01-01T00:00:00+0900") do |time, record|
351 | assert_equal(event_time("2014-01-01T00:00:00+0900"), time)
352 | records << record
353 | end
354 | assert_equal(expected, records)
355 | end
356 | end
357 |
358 | sub_test_case "grok section" do
359 | sub_test_case "complex pattern w/ grok_pattern_series" do
360 | test "legacy" do
361 | d = create_driver(%[
362 | grok_pattern_series legacy
363 | <grok>
364 | pattern %{COMBINEDAPACHELOG}
365 | time_key timestamp
366 | time_format %d/%b/%Y:%H:%M:%S %z
367 | </grok>
368 | ])
369 | expected_record = {
370 | "clientip" => "127.0.0.1",
371 | "ident" => "192.168.0.1",
372 | "auth" => "-",
373 | "verb" => "GET",
374 | "request" => "/",
375 | "httpversion" => "1.1",
376 | "response" => "200",
377 | "bytes" => "777",
378 | "referrer" => "\"-\"",
379 | "agent" => "\"Opera/12.0\""
380 | }
381 | d.instance.parse('127.0.0.1 192.168.0.1 - [28/Feb/2013:12:00:00 +0900] "GET / HTTP/1.1" 200 777 "-" "Opera/12.0"') do |time, record|
382 | assert_equal(expected_record, record)
383 | assert_equal(event_time("28/Feb/2013:12:00:00 +0900", format: "%d/%b/%Y:%H:%M:%S %z"), time)
384 | end
385 | end
386 |
387 | test "ecs-v1" do
388 | d = create_driver(%[
389 | grok_pattern_series ecs-v1
390 | <grok>
391 | pattern %{HTTPD_COMBINEDLOG}
392 | time_key timestamp
393 | time_format %d/%b/%Y:%H:%M:%S %z
394 | </grok>
395 | ])
396 | expected_record = {
397 | "apache.access.user.identity" => "192.168.0.1",
398 | "http.request.method" => "GET",
399 | "http.response.body.bytes" => 777,
400 | "http.response.status_code" => 200,
401 | "http.version" => "1.1",
402 | "source.address" => "127.0.0.1",
403 | "url.original" => "/",
404 | "user_agent.original" => "Opera/12.0"
405 | }
406 | d.instance.parse('127.0.0.1 192.168.0.1 - [28/Feb/2013:12:00:00 +0900] "GET / HTTP/1.1" 200 777 "-" "Opera/12.0"') do |time, record|
407 | assert_equal(expected_record, record)
408 | assert_equal(event_time("28/Feb/2013:12:00:00 +0900", format: "%d/%b/%Y:%H:%M:%S %z"), time)
409 | end
410 | end
411 | end
412 |
413 | test "leading time type with following other type" do
414 | d = create_driver(%[
415 | <grok>
416 | pattern \\[%{HTTPDATE:log_timestamp:time:%d/%b/%Y:%H:%M:%S %z}\\] %{GREEDYDATA:message}
417 | </grok>
418 | ])
419 | expected_record = {
420 | "log_timestamp" => event_time("03/Feb/2019:06:47:21 +0530", format: "%d/%b/%Y:%H:%M:%S %z"),
421 | "message" => "Python-urllib/2.7"
422 | }
423 | d.instance.parse('[03/Feb/2019:06:47:21 +0530] Python-urllib/2.7') do |time, record|
424 | assert_equal(expected_record, record)
425 | end
426 | end
427 |
428 | test "timezone" do
429 | d = create_driver(%[
430 | <grok>
431 | pattern %{TIMESTAMP_ISO8601:time} %{GREEDYDATA:message}
432 | time_key time
433 | time_format %Y-%m-%d %H:%M:%S
434 | timezone Europe/Berlin
435 | </grok>
436 | ])
437 | d.instance.parse("2019-02-01 12:34:56 This is test") do |time, record|
438 | assert_equal(event_time("2019-02-01 12:34:56 +0100"), time)
439 | assert_equal({ "message" => "This is test" }, record)
440 | end
441 | end
442 |
443 | test "multiple timezone" do
444 | d = create_driver(%[
445 | <grok>
446 | pattern %{TIMESTAMP_ISO8601:time} 1 %{GREEDYDATA:message}
447 | time_key time
448 | time_format %Y-%m-%d %H:%M:%S
449 | timezone Europe/Berlin
450 | </grok>
451 | <grok>
452 | pattern %{TIMESTAMP_ISO8601:time} 2 %{GREEDYDATA:message}
453 | time_key time
454 | time_format %Y-%m-%d %H:%M:%S
455 | timezone Asia/Aden
456 | </grok>
457 | ])
458 | d.instance.parse("2019-02-01 12:34:56 1 This is test") do |time, record|
459 | assert_equal(event_time("2019-02-01 12:34:56 +0100"), time)
460 | assert_equal({ "message" => "This is test" }, record)
461 | end
462 | d.instance.parse("2019-02-01 12:34:56 2 This is test") do |time, record|
463 | assert_equal(event_time("2019-02-01 12:34:56 +0300"), time)
464 | assert_equal({ "message" => "This is test" }, record)
465 | end
466 | end
467 | end
468 |
469 | private
470 |
471 | def create_driver(conf)
472 | Fluent::Test::Driver::Parser.new(Fluent::Plugin::GrokParser).configure(conf)
473 | end
474 |
475 | def internal_test_grok_pattern(grok_pattern, text, expected_time, expected_record, options = {})
476 | d = create_driver({"grok_pattern" => grok_pattern}.merge(options))
477 |
478 | # for the new API
479 | d.instance.parse(text) {|time, record|
480 | assert_equal(expected_time, time) if expected_time
481 | assert_equal(expected_record, record)
482 | }
483 | end
484 | end
485 |
--------------------------------------------------------------------------------
/test/test_grok_parser_in_tcp.rb:
--------------------------------------------------------------------------------
1 | require "helper"
2 | require "fluent/plugin/in_tcp"
3 |
4 | class TcpInputWithGrokTest < Test::Unit::TestCase
5 | if defined?(ServerEngine)
6 | class << self
7 | def startup
8 | socket_manager_path = ServerEngine::SocketManager::Server.generate_path
9 | @server = ServerEngine::SocketManager::Server.open(socket_manager_path)
10 | ENV["SERVERENGINE_SOCKETMANAGER_PATH"] = socket_manager_path.to_s
11 | end
12 |
13 | def shutdown
14 | @server.close
15 | end
16 | end
17 | end
18 |
19 | setup do
20 | Fluent::Test.setup
21 | end
22 |
23 | PORT = unused_port
24 | BASE_CONFIG = %[
25 | port #{PORT}
26 | tag tcp
27 | ]
28 | CONFIG = BASE_CONFIG + %[
29 | bind 127.0.0.1
30 | <parse>
31 | @type grok
32 | <grok>
33 | pattern %{GREEDYDATA:message}
34 | </grok>
35 | </parse>
36 | ]
37 | IPv6_CONFIG = BASE_CONFIG + %[
38 | bind ::1
39 | <parse>
40 | @type grok
41 | <grok>
42 | pattern %{GREEDYDATA:message}
43 | </grok>
44 | </parse>
45 | ]
46 |
47 | def create_driver(conf)
48 | Fluent::Test::Driver::Input.new(Fluent::Plugin::TcpInput).configure(conf)
49 | end
50 |
51 | data do
52 | configs = {}
53 | configs[:ipv4] = ["127.0.0.1", CONFIG]
54 | configs[:ipv6] = ["::1", IPv6_CONFIG] if ipv6_enabled?
55 | configs
56 | end
57 | test "configure" do |(ip, config)|
58 | d = create_driver(config)
59 | assert_equal PORT, d.instance.port
60 | assert_equal ip, d.instance.bind
61 | assert_equal "\n", d.instance.delimiter
62 | end
63 |
64 | test "grok_pattern" do
65 | tests = [
66 | {"msg" => "tcptest1\n", "expected" => "tcptest1"},
67 | {"msg" => "tcptest2\n", "expected" => "tcptest2"},
68 | ]
69 | config = %[
70 | <parse>
71 | @type grok
72 | grok_pattern %{GREEDYDATA:message}
73 | </parse>
74 | ]
75 |
76 | internal_test_grok(config, tests)
77 | end
78 |
79 | test "grok_pattern_block_config" do
80 | tests = [
81 | {"msg" => "tcptest1\n", "expected" => "tcptest1"},
82 | {"msg" => "tcptest2\n", "expected" => "tcptest2"},
83 | ]
84 | block_config = %[
85 | <parse>
86 | @type grok
87 | <grok>
88 | pattern %{GREEDYDATA:message}
89 | </grok>
90 | </parse>
91 | ]
92 |
93 | internal_test_grok(block_config, tests)
94 | end
95 |
96 | test "grok_multi_patterns" do
97 | tests = [
98 | {"msg" => "Current time is 2014-01-01T00:00:00+0900\n", "expected" => "2014-01-01T00:00:00+0900"},
99 | {"msg" => "The first word matches\n", "expected" => "The"}
100 | ]
101 | block_config = %[
102 | <parse>
103 | @type grok
104 | <grok>
105 | pattern %{TIMESTAMP_ISO8601:message}
106 | </grok>
107 | <grok>
108 | pattern %{WORD:message}
109 | </grok>
110 | </parse>
111 | ]
112 | internal_test_grok(block_config, tests)
113 | end
114 |
115 | def internal_test_grok(conf, tests)
116 | d = create_driver(BASE_CONFIG + conf)
117 | d.run(expect_emits: tests.size) do
118 | tests.each {|test|
119 | TCPSocket.open("127.0.0.1", PORT) do |s|
120 | s.send(test["msg"], 0)
121 | end
122 | }
123 | end
124 |
125 | compare_test_result(d.events, tests)
126 | end
127 |
128 | def compare_test_result(events, tests)
129 | assert_equal(2, events.size)
130 | events.each_index {|i|
131 | assert_equal(tests[i]["expected"], events[i][2]["message"])
132 | }
133 | end
134 | end
135 |
--------------------------------------------------------------------------------
/test/test_multiline_grok_parser.rb:
--------------------------------------------------------------------------------
1 | require "helper"
2 | require "fluent/plugin/parser_multiline_grok"
3 | require "fluent/config/parser"
4 |
5 | require "stringio"
6 |
7 | class MultilineGrokParserTest < Test::Unit::TestCase
8 | def test_multiline
9 | text=<<TEXT.chomp
10 | host1 message1
11 |  message2
12 |  message3
13 | TEXT
14 |
15 | message = "message1\n message2\n message3"
16 |
17 | conf = %[
18 | grok_pattern %{HOSTNAME:hostname} %{GREEDYDATA:message}
19 | multiline_start_regexp /^\s/
20 | ]
21 | d = create_driver(conf)
22 |
23 | # The indented lines are continuations of the first line, so the whole
24 | # chunk should be parsed into a single record.
25 | d.instance.parse(text) do |_time, record|
26 | assert_equal({ "hostname" => "host1", "message" => message }, record)
27 | end
28 | end
29 |
30 | def test_without_multiline_start_regexp
31 | text = <<TEXT.chomp
32 | host1 message1 message2 message3
33 | TEXT
34 |
35 | conf = %[
36 | grok_pattern %{HOSTNAME:hostname} %{DATA:message1} %{DATA:message2} %{GREEDYDATA:message3}
37 | ]
38 | d = create_driver(conf)
39 |
40 | # Each field of the single line should be captured into its own key of
41 | # the parsed record.
42 | expected = {
43 | "hostname" => "host1",
44 | "message1" => "message1",
45 | "message2" => "message2",
46 | "message3" => "message3"
47 | }
48 | d.instance.parse(text) do |_time, record|
49 | assert_equal(expected, record)
50 | end
51 | end
52 |
53 | test "empty_range_text_in_text" do
54 | text = " [b-a]"
55 | conf = %[
56 | grok_pattern %{HOSTNAME:hostname} %{GREEDYDATA:message}
57 | multiline_start_regexp /^\s/
58 | ]
59 | d = create_driver(conf)
60 |
61 | assert(d.instance.firstline?(text))
62 | end
63 |
64 | sub_test_case "NoGrokPatternMatched" do
65 | test "with grok_failure_key" do
66 | config = %[
67 | grok_failure_key grok_failure
68 | <grok>
69 | pattern %{PATH:path}
70 | </grok>
71 | ]
72 | expected = {
73 | "grok_failure" => "No grok pattern matched",
74 | "message" => "no such pattern\nno such pattern\n"
75 | }
76 | d = create_driver(config)
77 | d.instance.parse("no such pattern\nno such pattern\n") do |_time, record|
78 | assert_equal(expected, record)
79 | end
80 | end
81 |
82 | test "without grok_failure" do
83 | config = %[
84 | <grok>
85 | pattern %{PATH:path}
86 | </grok>
87 | ]
88 | expected = {
89 | "message" => "no such pattern\nno such pattern\n"
90 | }
91 | d = create_driver(config)
92 | d.instance.parse("no such pattern\nno such pattern\n") do |_time, record|
93 | assert_equal(expected, record)
94 | end
95 | end
96 | end
97 |
98 | test "no_grok_patterns" do
99 | assert_raise Fluent::ConfigError do
100 | create_driver('')
101 | end
102 | end
103 |
104 | private
105 |
106 | def create_driver(conf)
107 | Fluent::Test::Driver::Parser.new(Fluent::Plugin::MultilineGrokParser).configure(conf)
108 | end
109 | end
110 |
--------------------------------------------------------------------------------