├── .gitignore ├── logstash ├── conf.d │ ├── 90-output-dots.conf │ ├── 90-output-stdout.conf │ ├── 89-cleanup.conf │ ├── patterns │ │ └── elasticsearch │ ├── 80-output-grokparsefailurelog.conf │ ├── 10-input-stdin.conf │ ├── 50-filter-discovery.conf │ ├── 50-filter-transport.conf │ ├── 41-filter-slowlog.conf │ ├── 50-filter-indices.conf │ ├── 45-filter-exceptions.conf │ ├── 50-filter-monitor.conf │ ├── 40-filter-base.conf │ ├── 50-filter-shield.conf │ ├── 50-filter-index.conf │ ├── 50-filter-action.conf │ └── 50-filter-cluster.conf ├── logstash.yml ├── log4j2.properties └── jvm.options ├── Dashboard-Screenshot.png ├── kibana ├── search.json ├── dashboard.json └── visualisations.json ├── elasticsearch ├── elasticsearch-template-5x.json └── elasticsearch-template-2x.json ├── run.sh ├── README.md └── LICENSE /.gitignore: -------------------------------------------------------------------------------- 1 | logstash/conf.d/90-output-elasticsearch.conf 2 | logstash/conf.d/10-input-file.conf 3 | -------------------------------------------------------------------------------- /logstash/conf.d/90-output-dots.conf: -------------------------------------------------------------------------------- 1 | output { 2 | stdout { 3 | codec => "dots" 4 | } 5 | } 6 | -------------------------------------------------------------------------------- /Dashboard-Screenshot.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/joshuar/elasticsearch-log-processor/HEAD/Dashboard-Screenshot.png -------------------------------------------------------------------------------- /logstash/conf.d/90-output-stdout.conf: -------------------------------------------------------------------------------- 1 | # output { 2 | # stdout { 3 | # codec => "rubydebug" 4 | # } 5 | # } 6 | -------------------------------------------------------------------------------- /logstash/conf.d/89-cleanup.conf: -------------------------------------------------------------------------------- 1 | filter { 2 | mutate { 3 | gsub => [ 4 | "node_name", "\s+", "" 5 | ] 6 | } 7 | } 8 | -------------------------------------------------------------------------------- /logstash/logstash.yml: -------------------------------------------------------------------------------- 1 | path.queue: /tmp/elasticsearch-log-processor 2 | path.data: /tmp/elasticsearch-log-processor 3 | path.logs: /tmp/elasticsearch-log-processor 4 | -------------------------------------------------------------------------------- /logstash/conf.d/patterns/elasticsearch: -------------------------------------------------------------------------------- 1 | SHARD_TYPE (?<shard_type>P|R) 2 | NODE_DETAILS \{%{DATA:node_name}\}\{%{DATA:node_id}\}\{%{IP:node_ipaddr}\}\{%{DATA:node_addr}\}\{%{DATA:node_features}\} 3 | -------------------------------------------------------------------------------- /logstash/conf.d/80-output-grokparsefailurelog.conf: -------------------------------------------------------------------------------- 1 | output { 2 | if "_grokparsefailure" in [tags] { 3 | file { 4 | path => "/tmp/parsefailure.log" 5 | } 6 | } 7 | } 8 | -------------------------------------------------------------------------------- /logstash/conf.d/10-input-stdin.conf: -------------------------------------------------------------------------------- 1 | input { 2 | stdin { 3 | codec => multiline { 4 | pattern => "^(\[?%{TIMESTAMP_ISO8601}\]|%{TIMESTAMP_ISO8601})" 5 | negate => true 6 | what => "previous" 7 | } 8 | } 9 | } 10 | 
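# How the multiline codec groups events, shown on an assumed (illustrative) log fragment:
#
#   [2017-01-01T00:00:00,000][WARN ][index.engine   ] [node-1] failed to merge  <- matches the pattern, starts a new event
#   java.lang.OutOfMemoryError: Java heap space                                 <- no leading timestamp, appended to the event above
#       at org.apache.lucene.index.IndexWriter.merge(IndexWriter.java:1)        <- likewise appended
#
# Any line that does not begin with an (optionally bracketed) ISO8601 timestamp is folded
# into the previous event, so multi-line stack traces arrive downstream as one message.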
-------------------------------------------------------------------------------- /logstash/conf.d/50-filter-discovery.conf: -------------------------------------------------------------------------------- 1 | filter { 2 | if [component] == "discovery.zen.publish" { 3 | grok { 4 | match => { "message" => "(published|cluster) state (version )?\[?%{POSINT:cluster_state_version}\]?" } 5 | tag_on_failure => [ "_grokparsefailure", "_grok-50-filter-discovery-1" ] 6 | } 7 | } 8 | } 9 | -------------------------------------------------------------------------------- /logstash/conf.d/50-filter-transport.conf: -------------------------------------------------------------------------------- 1 | filter { 2 | if [component] =~ /transport\.netty$/ { 3 | grok { 4 | match => { "message" => "Caught exception while handling client http traffic, closing connection \[%{DATA:channel}\]" } 5 | tag_on_failure => [ "_grokparsefailure", "_grok-50-filter-transport-1" ] 6 | } 7 | } 8 | } 9 | -------------------------------------------------------------------------------- /logstash/conf.d/41-filter-slowlog.conf: -------------------------------------------------------------------------------- 1 | filter { 2 | if [component] =~ "slowlog" { 3 | grok { 4 | match => { "message" => "\[%{DATA:index_name}\]\[%{DATA:shard_id}\] took\[%{DATA:took}\], took_millis\[%{NUMBER:took_millis:int}\], types\[%{DATA:types}\], stats\[%{DATA:stats}\], search_type\[%{DATA:search_type}\], total_shards\[%{NUMBER:total_shards:int}\], source\[%{DATA:source_query}\], extra_source\[%{DATA:extra_source}\]," } 5 | remove_field => [ "message" ] 6 | tag_on_failure => [ "_grokparsefailure", "_grok-41-filter-slowlog" ] 7 | } 8 | } 9 | } 10 | -------------------------------------------------------------------------------- /logstash/conf.d/50-filter-indices.conf: -------------------------------------------------------------------------------- 1 | filter { 2 | if [component] == "indices.cluster" { 3 | grok { 4 | match => { "message" => [ 5 | "\[\[%{USER:index_name}\]\[%{INT:index_shard_id}\]\] marking and sending shard failed due to \[%{GREEDYDATA:details}\]", 6 | "\[\[%{USER:index_name}\]\[%{INT:index_shard_id}\]\] engine failed, but can't find index shard. 
failure reason: \[%{GREEDYDATA:details}\]" ] } 7 | add_tag => [ "shard_failed" ] 8 | tag_on_failure => [ "_grokparsefailure", "_grok-50-filter-indices-1" ] 9 | } 10 | } 11 | } 12 | -------------------------------------------------------------------------------- /logstash/conf.d/45-filter-exceptions.conf: -------------------------------------------------------------------------------- 1 | filter { 2 | if "multiline" in [tags] and "_grokparsefailure" not in [tags] { 3 | grok { 4 | match => { 5 | "message" => [ "%{DATA:message}\n(\t)?%{GREEDYDATA:exception}", 6 | "%{DATA:message}\n\[%{POSINT}\]\:%{GREEDYDATA:exception}" ] 7 | } 8 | overwrite => [ "message" ] 9 | tag_on_failure => [ "_grokparsefailure", "_grok-45-filter-exceptions-1" ] 10 | } 11 | mutate { 12 | gsub => [ "exception", "\n\t?", "|" ] 13 | split => { "exception" => "|" } 14 | } 15 | } 16 | } 17 | -------------------------------------------------------------------------------- /logstash/conf.d/50-filter-monitor.conf: -------------------------------------------------------------------------------- 1 | filter { 2 | if [component] == "monitor.jvm" { 3 | grok { 4 | match => { "message" => "\[gc\]\[%{WORD:[gc][generation]}\]\[%{POSINT:[gc][sequence]}\]\[%{POSINT:[gc][total_count]}\] duration \[%{DATA:[gc][collection][current_duration]}\], collections \[%{POSINT:[gc][collection][count_since_last_cycle]}\]\/\[%{DATA:[gc][collection][time_since_last_cycle]}\], total \[%{DATA}\]\/\[%{DATA:[gc][collection][total_duration]}\], memory \[%{DATA:[gc][old_heap_use]}\]->\[%{DATA:[gc][new_heap_use]}\]\/\[%{DATA:[gc][max_heap]}\], all_pools %{GREEDYDATA:pool_info}" } 5 | add_tag => [ "gc" ] 6 | tag_on_failure => [ "_grokparsefailure", "_grok-50-filter-monitor-1" ] 7 | } 8 | } 9 | } 10 | -------------------------------------------------------------------------------- /kibana/search.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "_id": "Search:-Elasticsearch-Logs", 4 | "_type": "search", 5 | "_source": { 6 | "title": "Search: Elasticsearch Logs", 7 | "description": "", 8 | "hits": 0, 9 | "columns": [ 10 | "message" 11 | ], 12 | "sort": [ 13 | "@timestamp", 14 | "desc" 15 | ], 16 | "version": 1, 17 | "kibanaSavedObjectMeta": { 18 | "searchSourceJSON": "{\"index\":\"elasticsearch-logs-*\",\"query\":{\"query_string\":{\"analyze_wildcard\":true,\"query\":\"*\"}},\"filter\":[],\"highlight\":{\"pre_tags\":[\"@kibana-highlighted-field@\"],\"post_tags\":[\"@/kibana-highlighted-field@\"],\"fields\":{\"*\":{}},\"require_field_match\":false,\"fragment_size\":2147483647}}" 19 | } 20 | } 21 | } 22 | ] -------------------------------------------------------------------------------- /logstash/conf.d/40-filter-base.conf: -------------------------------------------------------------------------------- 1 | filter { 2 | # mutate { 3 | # gsub => [ "message", "^(\S+)\n(\S+)\n$", "\1\2" ] 4 | # } 5 | grok { 6 | match => { 7 | "message" => [ 8 | "(%{NOTSPACE})?(%{SPACE})?\[%{TIMESTAMP_ISO8601:[@metadata][timestamp]}\]\[%{LOGLEVEL:level}%{SPACE}?\]\[%{USERNAME:component}%{SPACE}*\] (\[%{USERNAME:node_name}\])?%{GREEDYDATA:message}", 9 | "%{TIMESTAMP_ISO8601:[@metadata][timestamp]} %{GREEDYDATA:message}", 10 | "%{POSINT}:%{GREEDYDATA:message}" 11 | ] 12 | } 13 | overwrite => [ "message" ] 14 | tag_on_failure => [ "_grokparsefailure", "_grok-40-filter-base-1" ] 15 | } 16 | date { 17 | match => [ "[@metadata][timestamp]", "YYYY-MM-dd HH:mm:ss,SSS", "ISO8601" ] 18 | timezone => "UTC" 19 | } 20 | } 21 | 
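# Worked example on an assumed (illustrative) log line:
#
#   [2017-01-01T00:00:00,123][INFO ][cluster.service          ] [node-1] new_master ...
#
# The first pattern above extracts:
#   [@metadata][timestamp] => "2017-01-01T00:00:00,123"  (consumed by the date filter in this file)
#   level                  => "INFO"
#   component              => "cluster.service"
#   node_name              => "node-1"
#   message                => "new_master ..."  (overwriting the original message)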
-------------------------------------------------------------------------------- /logstash/conf.d/50-filter-shield.conf: -------------------------------------------------------------------------------- 1 | filter { 2 | if [component] == "shield.transport.netty" { 3 | grok { 4 | match => { "message" => "\[\[%{DATA:connection_channel}\]\]" } 5 | tag_on_failure => [ "_grokparsefailure", "_grok-50-filter-shield-1" ] 6 | } 7 | } 8 | if [component] == "shield.audit.logfile" { 9 | grok { 10 | match => { "message" => [ 11 | "\[%{USER:[shield][origin_type]}\]\s+\[%{USER:[shield][result]}\]\s+origin_type=\[.*?\],\s+origin_address=\[%{IP:[shield][origin_address]}\],\s+principal=\[%{USER:[shield][principal]}\],\s+action=\[%{DATA:[shield][action]}\],\s+indices=\[%{DATA:[shield][indices]}\],\s+request=\[%{DATA:[shield][request]}\]", 12 | "\[%{USER:[shield][origin_type]}\]\s+\[%{USER:[shield][result]}\]\s+origin_type=\[.*?\],\s+origin_address=\[%{IP:[shield][origin_address]}\],\s+principal=\[%{USER:[shield][principal]}\], action=\[%{DATA:[shield][action]}\], request=\[%{DATA:[shield][request]}\]" 13 | ] 14 | } 15 | tag_on_failure => [ "_grokparsefailure", "_grok-50-filter-shield-2" ] 16 | } 17 | if [shield][indices] { 18 | mutate { 19 | split => { "[shield][indices]" => "," } 20 | } 21 | } 22 | } 23 | } 24 | -------------------------------------------------------------------------------- /kibana/dashboard.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "_id": "Elasticsearch-Logs", 4 | "_type": "dashboard", 5 | "_source": { 6 | "title": "Elasticsearch Logs", 7 | "hits": 0, 8 | "description": "", 9 | "panelsJSON": "[{\"col\":1,\"id\":\"Elasticsearch-Logs:-Histogram\",\"panelIndex\":2,\"row\":1,\"size_x\":12,\"size_y\":4,\"type\":\"visualization\"},{\"col\":1,\"id\":\"Elasticsearch-Logs:-Components\",\"panelIndex\":3,\"row\":5,\"size_x\":3,\"size_y\":2,\"type\":\"visualization\"},{\"col\":1,\"columns\":[\"message\"],\"id\":\"Search:-Elasticsearch-Logs\",\"panelIndex\":1,\"row\":7,\"size_x\":12,\"size_y\":5,\"sort\":[\"@timestamp\",\"desc\"],\"type\":\"search\"},{\"id\":\"Elasticsearch-Logs:-Nodes\",\"type\":\"visualization\",\"panelIndex\":4,\"size_x\":3,\"size_y\":2,\"col\":4,\"row\":5},{\"id\":\"Elasticsearch-Logs:-Event-Tags\",\"type\":\"visualization\",\"panelIndex\":5,\"size_x\":3,\"size_y\":2,\"col\":7,\"row\":5}]", 10 | "optionsJSON": "{\"darkTheme\":false}", 11 | "uiStateJSON": "{}", 12 | "version": 1, 13 | "timeRestore": true, 14 | "timeTo": "now", 15 | "timeFrom": "now-30d", 16 | "refreshInterval": { 17 | "display": "Off", 18 | "pause": false, 19 | "value": 0 20 | }, 21 | "kibanaSavedObjectMeta": { 22 | "searchSourceJSON": "{\"filter\":[{\"query\":{\"query_string\":{\"analyze_wildcard\":true,\"query\":\"*\"}}}]}" 23 | } 24 | } 25 | } 26 | ] -------------------------------------------------------------------------------- /logstash/log4j2.properties: -------------------------------------------------------------------------------- 1 | status = error 2 | name = LogstashPropertiesConfig 3 | 4 | appender.rolling.type = RollingFile 5 | appender.rolling.name = plain_rolling 6 | appender.rolling.fileName = ${sys:ls.logs}/logstash-${sys:ls.log.format}.log 7 | appender.rolling.filePattern = ${sys:ls.logs}/logstash-${sys:ls.log.format}-%d{yyyy-MM-dd}.log 8 | appender.rolling.policies.type = Policies 9 | appender.rolling.policies.time.type = TimeBasedTriggeringPolicy 10 | appender.rolling.policies.time.interval = 1 11 | 
appender.rolling.policies.time.modulate = true 12 | appender.rolling.layout.type = PatternLayout 13 | appender.rolling.layout.pattern = [%d{ISO8601}][%-5p][%-25c] %.10000m%n 14 | 15 | appender.json_rolling.type = RollingFile 16 | appender.json_rolling.name = json_rolling 17 | appender.json_rolling.fileName = ${sys:ls.logs}/logstash-${sys:ls.log.format}.log 18 | appender.json_rolling.filePattern = ${sys:ls.logs}/logstash-${sys:ls.log.format}-%d{yyyy-MM-dd}.log 19 | appender.json_rolling.policies.type = Policies 20 | appender.json_rolling.policies.time.type = TimeBasedTriggeringPolicy 21 | appender.json_rolling.policies.time.interval = 1 22 | appender.json_rolling.policies.time.modulate = true 23 | appender.json_rolling.layout.type = JSONLayout 24 | appender.json_rolling.layout.compact = true 25 | appender.json_rolling.layout.eventEol = true 26 | 27 | rootLogger.level = ${sys:ls.log.level} 28 | rootLogger.appenderRef.rolling.ref = ${sys:ls.log.format}_rolling 29 | -------------------------------------------------------------------------------- /logstash/conf.d/50-filter-index.conf: -------------------------------------------------------------------------------- 1 | filter { 2 | if [component] == "index.engine" { 3 | if [message] =~ "stop throttling indexing" { 4 | grok { 5 | match => { "message" => [ 6 | "\[%{USER:index_name}\]\[%{INT:index_shard_id}\] stop throttling indexing: numMergesInFlight=%{POSINT:inflight_merges}, maxNumMerges=%{POSINT:max_merges}" 7 | ] 8 | } 9 | add_tag => [ "merge_throttling" ] 10 | tag_on_failure => [ "_grokparsefailure", "_grok-50-filter-index-1" ] 11 | } 12 | } else if [message] =~ "merge segment" { 13 | grok { 14 | match => { "message" => [ 15 | "\[%{USER:index_name}\]\[%{INT:index_shard_id}\] merge segment \[%{USER:segment_id}\] done: took \[%{DATA:[merge][took]}\], \[%{DATA:[merge][size]}\], \[%{DATA:[merge][docs]} docs\], \[%{DATA:[merge][time_stopped]} stopped\], \[%{DATA:[merge][time_throttled]} throttled\], \[%{DATA:[merge][written]} written\], \[%{DATA:[merge][rate_throttled]} throttle\]" 16 | ] 17 | } 18 | tag_on_failure => [ "_grokparsefailure", "_grok-50-filter-index-2" ] 19 | } 20 | } else { 21 | grok { 22 | match => { "message" => [ 23 | "\[%{USER:index_name}\]\s+%{GREEDYDATA:message}" 24 | ] 25 | } 26 | overwrite => [ "message" ] 27 | tag_on_failure => [ "_grokparsefailure", "_grok-50-filter-index-3" ] 28 | } 29 | } 30 | } 31 | } 32 | 
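# Assumed (illustrative) example of a line the first grok above matches:
#
#   [my-index][2] stop throttling indexing: numMergesInFlight=3, maxNumMerges=4
#
# yielding index_name => "my-index", index_shard_id => "2", inflight_merges => "3",
# max_merges => "4", with the event tagged "merge_throttling".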
-------------------------------------------------------------------------------- /logstash/conf.d/50-filter-action.conf: -------------------------------------------------------------------------------- 1 | filter { 2 | # I have not been able to find examples where this matches, but without the component conditional 3 | # it was causing conflicts with other documents which matched the conditional but failed the 4 | # grok. 5 | # TODO Review with applicable log lines 6 | if [component] =~ /^action/ and [message] =~ /^\[/ { 7 | grok { 8 | match => { "message" => "\[%{USER:index_name}\]\[%{INT:shard_id}\], node\[%{USER:node_id}\], \[(?<shard_type>P|R)\], s\[%{WORD:shard_state}\]\: %{GREEDYDATA:message}" } 9 | overwrite => "message" 10 | add_field => { "shard" => "%{index_name}{%{shard_id}}{%{shard_type}}" } 11 | tag_on_failure => [ "_grokparsefailure", "_grok-50-filter-action-1" ] 12 | 13 | } 14 | } 15 | if [component] == "action.bulk" { 16 | grok { 17 | match => {"message" => "failed to execute bulk item \(%{WORD:bulk_action}\) index \{\[%{USER:index_name}\]\[%{DATA:document_type}\]\[%{USER:document_id}\], source\[%{GREEDYDATA:bulk_request}\]\}" } 18 | tag_on_failure => [ "_grokparsefailure", "_grok-50-filter-action-2" ] 19 | } 20 | } 21 | if [component] == "action.index" { 22 | grok { 23 | match => { 24 | "message" => "%{DATA:message} \[%{DATA:action}\], request \[%{NOTSPACE:request_type} \{\[%{USER:index_name}\]\[%{DATA:document_type}\]\[%{DATA:request_id}\], source\[%{GREEDYDATA:source_document}\]\}\]" } 25 | tag_on_failure => [ "_grokparsefailure", "_grok-50-filter-action-3" ] 26 | } 27 | } 28 | if [component] == "action.admin.indices.stats" { 29 | grok { 30 | match => { "message" => "(f|F)ailed to execute \[%{DATA:request_context}\]" } 31 | tag_on_failure => [ "_grokparsefailure", "_grok-50-filter-action-4" ] 32 | } 33 | } 34 | } 35 | -------------------------------------------------------------------------------- /logstash/jvm.options: -------------------------------------------------------------------------------- 1 | ## JVM configuration 2 | 3 | # Xms represents the initial size of total heap space 4 | # Xmx represents the maximum size of total heap space 5 | 6 | -Xms256m 7 | -Xmx1g 8 | 9 | ################################################################ 10 | ## Expert settings 11 | ################################################################ 12 | ## 13 | ## All settings below this section are considered 14 | ## expert settings. Don't tamper with them unless 15 | ## you understand what you are doing 16 | ## 17 | ################################################################ 18 | 19 | ## GC configuration 20 | -XX:+UseParNewGC 21 | -XX:+UseConcMarkSweepGC 22 | -XX:CMSInitiatingOccupancyFraction=75 23 | -XX:+UseCMSInitiatingOccupancyOnly 24 | 25 | ## optimizations 26 | 27 | # disable calls to System#gc 28 | -XX:+DisableExplicitGC 29 | 30 | ## locale 31 | # Set the locale language 32 | #-Duser.language=en 33 | 34 | # Set the locale country 35 | #-Duser.country=US 36 | 37 | # Set the locale variant, if any 38 | #-Duser.variant= 39 | 40 | ## basic 41 | 42 | # set the I/O temp directory 43 | #-Djava.io.tmpdir=$HOME 44 | 45 | # set to headless, just in case 46 | -Djava.awt.headless=true 47 | 48 | # ensure UTF-8 encoding by default (e.g. 
filenames) 49 | -Dfile.encoding=UTF-8 50 | 51 | # use our provided JNA always versus the system one 52 | #-Djna.nosys=true 53 | 54 | ## heap dumps 55 | 56 | # generate a heap dump when an allocation from the Java heap fails 57 | # heap dumps are created in the working directory of the JVM 58 | -XX:+HeapDumpOnOutOfMemoryError 59 | 60 | # specify an alternative path for heap dumps 61 | # ensure the directory exists and has sufficient space 62 | #-XX:HeapDumpPath=${LOGSTASH_HOME}/heapdump.hprof 63 | 64 | ## GC logging 65 | #-XX:+PrintGCDetails 66 | #-XX:+PrintGCTimeStamps 67 | #-XX:+PrintGCDateStamps 68 | #-XX:+PrintClassHistogram 69 | #-XX:+PrintTenuringDistribution 70 | #-XX:+PrintGCApplicationStoppedTime 71 | 72 | # log GC status to a file with time stamps 73 | # ensure the directory exists 74 | #-Xloggc:${LS_GC_LOG_FILE} 75 | -------------------------------------------------------------------------------- /elasticsearch/elasticsearch-template-5x.json: -------------------------------------------------------------------------------- 1 | { 2 | "template" : "elasticsearch-logs-5-*", 3 | "settings" : { 4 | "index.refresh_interval" : "5s", 5 | "analysis": { 6 | "filter": { 7 | "trace_filter": { 8 | "type": "pattern_capture", 9 | "preserve_original": 1, 10 | "patterns": [ 11 | "(\\p{Ll}+|\\p{Lu}\\p{Ll}+|\\p{Lu}+)", 12 | "(\\d+)" 13 | ] 14 | } 15 | }, 16 | "analyzer": { 17 | "component_analyzer": { 18 | "tokenizer": "pattern" 19 | }, 20 | "trace_analyzer": { 21 | "tokenizer": "pattern", 22 | "filter": [ 23 | "trace_filter", 24 | "lowercase" 25 | ] 26 | } 27 | } 28 | } 29 | }, 30 | "mappings" : { 31 | "_default_" : { 33 | "_all" : {"enabled" : true, "norms" : false}, 34 | "dynamic_templates" : [ { 35 | "message_field" : { 36 | "path_match" : "message", 37 | "match_mapping_type" : "string", 38 | "mapping" : { 39 | "type" : "text", 40 | "norms" : false 41 | } 42 | } 43 | }, { 44 | "string_fields" : { 45 | "match" : "*", 46 | "match_mapping_type" : "string", 47 | "mapping" : { 48 | "type" : "text", "norms" : false, 49 | "fields" : { 50 | "keyword" : { "type": "keyword" } 51 | } 52 | } 53 | } 54 | }, { 55 | "component" : { 56 | "match" : "component", 57 | "mapping" : { 58 | "type" : "text", "analyzer": "component_analyzer", "norms" : false, 59 | "fields" : { 60 | "keyword" : { "type": "keyword", "ignore_above" : 256} 61 | } 62 | } 63 | } 64 | }, { 65 | "exception" : { 66 | "match" : "exception", 67 | "mapping" : { 68 | "type" : "text", "analyzer": "trace_analyzer", "norms": false, 69 | "fields" : { 70 | "keyword" : { "type": "keyword", "ignore_above" : 2048 } 71 | } 72 | } 73 | } 74 | } ], 75 | "properties" : { 76 | "@timestamp": { "type": "date" }, 77 | "@version": { "type": "keyword", "include_in_all": false }, 78 | "level": { "type": "keyword" }, 79 | "shard_state": { "type": "keyword" }, 80 | "shard_type": { "type": "keyword" }, 81 | "search_type": { "type": "keyword" }, 82 | "index_id": { "type": "keyword" }, 83 | "document_id": { "type": "keyword" }, 84 | "node_id": { "type": "keyword" }, 85 | "shard_id": { "type": "integer" }, 86 | "total_shards": { "type": "integer" }, 87 | "bulk_action": { "type": "keyword" }, 88 | "source_query": { "type": "text" }, 89 | "extra_source": { "type": "text" }, 90 | "bulk_request": { "type": "text", "index": false }, 91 | "index_request": { "type": "text", "index": false } 92 | } 93 | } 94 | } 95 | } 97 | -------------------------------------------------------------------------------- 
/logstash/conf.d/50-filter-cluster.conf: -------------------------------------------------------------------------------- 1 | filter { 2 | if [component] == "cluster.metadata" { 3 | grok { 4 | match => { "message" => "\[%{USER:index_name}\] (?:update|create)_mapping \[%{DATA:mapping}\] (\(%{WORD:mapping_type}\))?" } 5 | add_tag => [ "mapping" ] 6 | tag_on_failure => [ "_grokparsefailure", "_grok-50-filter-cluster-1" ] 7 | } 8 | grok { 9 | match => { "message" => "\[%{USER:index_name}\] creating index, cause \[%{DATA:reason}\], templates \[%{DATA:index_template}\], shards \[%{POSINT}\]/\[%{POSINT}\], mappings \[%{DATA:mapping}\]" } 10 | add_tag => "create_index" 11 | tag_on_failure => [ "_grokparsefailure", "_grok-50-filter-cluster-2" ] 12 | } 13 | } 14 | if [component] == "cluster.action.shard" { 15 | grok { 16 | match => { 17 | "message" => [ 18 | "\[%{USER:index_name}\]\[%{INT:shard_id}\] %{DATA} \[%{DATA}\]\[%{DATA}\], node\[%{USER:node_id}\], \[(?<shard_type>P|R)\], s\[%{WORD:shard_state}\], (indexUUID \[%{USER:index_id}\], )?%{GREEDYDATA:details}", 19 | "\[%{USER:index_name}\]\[%{INT:shard_id}\] %{DATA} \[\[%{DATA}\]\[%{DATA}\], node\[%{USER:node_id}\], \[(?<shard_type>P|R)\], v\[%{INT:shard_version:int}\], s\[%{WORD:shard_state}\]%{GREEDYDATA}" 20 | ] 21 | } 22 | add_field => { "shard" => "%{index_name}{%{shard_id}}{%{shard_type}}" } 23 | tag_on_failure => [ "_grokparsefailure", "_grok-50-filter-cluster-3" ] 24 | } 25 | if [message] =~ /received shard failed for/ { 26 | mutate { 27 | add_tag => [ "shard_failed" ] 28 | } 29 | } 30 | } 31 | if [component] == "cluster.service" { 32 | grok { 33 | match => { 34 | "message" => [ 35 | "processing \[%{WORD} \(\[\[%{USER:index_name}\]\[%{INT:shard_id}\]%{GREEDYDATA}?", 36 | "processing \[%{WORD}\(from master \[\{%{USER:[service][node_name]}\}\{%{DATA}\}\{%{IP:[service][address]}\}%{DATA}\(version: %{INT:[service][cluster_state_version]}%{GREEDYDATA}" 37 | ] 38 | } 39 | tag_on_failure => [ "_grokparsefailure", "_grok-50-filter-cluster-4" ] 40 | } 41 | if [message] =~ /^new_master/ { 42 | mutate { 43 | add_tag => ["master_elected"] 44 | } 45 | } 46 | if [message] =~ /^detected_master/ { 47 | mutate { 48 | add_tag => ["master_found"] 49 | } 50 | } 51 | if [message] =~ /^added/ { 52 | mutate { 53 | add_tag => ["node_added"] 54 | } 55 | } 56 | if [message] =~ /^removed/ { 57 | mutate { 58 | add_tag => ["node_removed"] 59 | } 60 | } 61 | } 62 | mutate { 63 | split => { 64 | "mapping" => "," 65 | "index_template" => "," 66 | } 67 | strip => ["mapping","index_template"] 68 | } 69 | } 70 | 
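# Assumed (illustrative) example of a cluster.action.shard line the first pattern matches:
#
#   [my-index][0] received shard failed for [my-index][0], node[AbC123dEf], [P], s[STARTED], indexUUID [xYz456], reason [engine failure]
#
# yielding index_name, shard_id, node_id, shard_type ("P" for primary, "R" for replica),
# shard_state and details, plus the derived field shard => "my-index{0}{P}".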
-------------------------------------------------------------------------------- /run.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | trap clean_up EXIT 4 | 5 | clean_up() { 6 | test -e ${logs_pipe} && rm -f ${logs_pipe} 7 | test -e ${ls_output_conf} && rm -f ${ls_output_conf} 8 | } 9 | 10 | manage_index_template() { 11 | template_url_path="_template/template-es-logs" 12 | template_file_path="@${scriptdir}/elasticsearch/elasticsearch-template-5x.json" 13 | response=$(curl -XGET ${auth} -s -w "\n%{http_code}\n" ${es_endpoint}/${template_url_path} | tail -1) 14 | if [[ ${response} -ne "200" ]] || [[ ${force_template_load} ]]; then 15 | echo -n "Loading Elasticsearch template from ${template_file_path}..." 16 | response=$(curl -XPUT ${auth} -s -w "\n%{http_code}\n" ${es_endpoint}/${template_url_path} -d ${template_file_path} | tail -1) 17 | if [[ ${response} -ne "200" ]]; then 18 | echo -e "\nFailed to load Elasticsearch template! Response: ${response}" 19 | exit -1 20 | else 21 | echo "done!" 22 | fi 23 | fi 24 | } 25 | 26 | scriptdir=$(readlink -f $0) 27 | scriptdir=$(dirname ${scriptdir}) 28 | 29 | # Check for xargs binary, die if we can't find it 30 | if ! type -P xargs >/dev/null; then 31 | echo "Could not find xargs!" 32 | exit -1 33 | fi 34 | 35 | # Check for curl binary, die if we can't find it 36 | if ! type -P curl >/dev/null; then 37 | echo "Could not find curl!" 38 | exit -1 39 | fi 40 | 41 | # Check we have a Logstash binary we can run 42 | if type -P logstash >/dev/null; then 43 | LOGSTASH_CMD="$(type -P logstash)" 44 | else 45 | echo "Could not find logstash!" 46 | exit -1 47 | fi 48 | 49 | # Check for ionice/nice binaries 50 | if type -P ionice 1> /dev/null; then 51 | LOGSTASH_CMD="$(type -P ionice) -c 3 ${LOGSTASH_CMD}" 52 | fi 53 | if type -P nice 1> /dev/null; then 54 | LOGSTASH_CMD="$(type -P nice) -n 19 ${LOGSTASH_CMD}" 55 | fi 56 | 57 | LOGSTASH_OPTS="--path.config=${scriptdir}/logstash/conf.d --path.settings=${scriptdir}/logstash" 58 | logs_pipe="/tmp/logstash-$$" 59 | 60 | while getopts ":h:u:p:t" opt; do 61 | case $opt in 62 | h) 63 | es_endpoint=$OPTARG 64 | ;; 65 | u) 66 | es_username=$OPTARG 67 | ;; 68 | p) 69 | es_password=$OPTARG 70 | ;; 71 | t) 72 | force_template_load=1 73 | ;; 74 | \?) 75 | echo "Invalid option: -$OPTARG" >&2 76 | echo "Ex. usage: $0 -h myes.com:9200 -u user -p password path/to/logs" 77 | exit -1 78 | ;; 79 | :) 80 | echo "Option -$OPTARG requires an argument." >&2 81 | echo "Ex. usage: $0 -h 'https://es:9200' -u user -p password /path/to/some.log" 82 | exit -1 83 | ;; 84 | esac 85 | done 86 | 87 | if [[ $es_username ]] && [[ $es_password ]]; then 88 | auth="-u ${es_username}:${es_password}" 89 | else 90 | auth="" 91 | fi 92 | 93 | if ! curl -XGET ${auth} -s "$es_endpoint" | grep -q tagline; then 94 | echo "$es_endpoint does not seem to be a valid Elasticsearch endpoint." 95 | exit -1 96 | fi 97 | 98 | # Set-up Elasticsearch template 99 | manage_index_template 100 | 101 | # Generate output plugin config 102 | echo -n "Creating Logstash output file..." 103 | ls_output_conf=${scriptdir}/logstash/conf.d/90-output-elasticsearch.conf 104 | cat > ${ls_output_conf}<<EOF 105 | output { 106 |   elasticsearch { 107 |     hosts => [ "${es_endpoint}" ] 108 | EOF 109 | if [[ $es_username ]] && [[ $es_password ]]; then 110 | cat >> ${ls_output_conf}<<EOF 111 |     user => "${es_username}" 112 |     password => "${es_password}" 113 | EOF 114 | fi 115 | cat >> ${ls_output_conf}<<EOF 116 |     index => "elasticsearch-logs-5-%{+YYYY.MM.dd}" 117 |   } 118 | } 119 | EOF 120 | echo "done!" 121 | 122 | # Create a pipe for read/write 123 | 124 | mkfifo ${logs_pipe} 125 | 126 | # Feed each log file into the pipe 127 | shift $(($OPTIND-1)) 128 | for f in "$@"; do 129 | echo "Sending file ${f} to pipe ${logs_pipe}..." 130 | cat "${f}" > ${logs_pipe} & 131 | done 132 | 133 | ${LOGSTASH_CMD} ${LOGSTASH_OPTS} < ${logs_pipe} & 134 | 135 | wait 136 | 137 | exit 0 138 | 
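# Usage note (mirrors the usage strings above):
#
#   ./run.sh -h https://myes.com:9200 -u user -p password /var/log/elasticsearch/*.log
#
# Each log file is streamed into the FIFO at ${logs_pipe}, which Logstash reads on stdin
# via 10-input-stdin.conf. The generated logstash/conf.d/90-output-elasticsearch.conf is
# removed again by clean_up() on exit.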
-------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | ## What is this? 2 | 3 | This is a way to visualise Elasticsearch logs (not slow logs) in 4 | Kibana. See the following screenshot for what it looks like: 5 | 6 | ![Dashboard Example](Dashboard-Screenshot.png) 7 | 8 | ## Installation 9 | 10 | ### Requirements 11 | 12 | * Elasticsearch 5.x or higher 13 | * Kibana 5.x or higher 14 | * Logstash 5.x or higher 15 | 16 | ### Setup 17 | 18 | #### 1. Load Elasticsearch mapping template 19 | 20 | Load the Elasticsearch mapping template in 21 | `elasticsearch/elasticsearch-template-5x.json`: 22 | 23 | ``` 24 | curl -XPUT <host>:<port>/_template/es-logs -d @elasticsearch/elasticsearch-template-5x.json 25 | ``` 26 | 27 | Replace `<host>` and `<port>` with your Elasticsearch host/IP and 28 | port combo. 29 | 30 | #### 2. Import Kibana dashboard/visualisations 31 | 32 | Import the Kibana dashboard/visualisations from the JSON files in the [kibana](kibana) directory (`search.json`, `visualisations.json` and `dashboard.json`). 33 | 34 | ## Usage 35 | 36 | ### Helper Script 37 | 38 | There is a helper script called `run.sh` in the root of this repo that 39 | can assist with performing one-off indexing of logs into 40 | Elasticsearch via Logstash. 41 | 42 | Usage is simple: 43 | 44 | ```sh 45 | ./run.sh -h https://myes.com:9200 -u user -p password path/to/log 46 | ``` 47 | 48 | Where: 49 | 50 | * `-h` is the URL (including the port) for your Elasticsearch endpoint 51 | * `-u` and `-p` are the username and password to access your 52 | Elasticsearch endpoint (optional). 53 | 54 | The remaining command-line arguments are treated as log files. 55 | Standard shell wildcard/globbing applies. Need to process a bunch of 56 | logs in different directories? Try: 57 | 58 | ``` sh 59 | find . -type f -name \*.log | xargs ~/git/elasticsearch-log-processor/run.sh -h https://myes.com:9200 -u user -p password 60 | ``` 61 | 62 | If you add a `-t` option the script will load the Elasticsearch 63 | template shipped in this repo into Elasticsearch for you before 64 | performing any indexing with Logstash. 65 | 66 | ### Manual Indexing 67 | 68 | Create an *output* configuration in the `logstash/conf.d` directory 69 | appropriate for your Elasticsearch server (a sketch of such a file is shown below). Ensure the index pattern is 70 | `elasticsearch-logs-5-%{+YYYY.MM.dd}`. Then simply pipe your log files to Logstash using this configuration: 71 | 72 | ``` 73 | /path/to/bin/logstash --path.config logstash/conf.d < /path/to/elasticsearch.log 74 | # or 75 | cat /path/to/logs/*.log | /path/to/bin/logstash --path.config logstash/conf.d 76 | ``` 77 | 
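For reference, here is a minimal sketch of the kind of output file to create. It mirrors what `run.sh` generates; the endpoint and credentials are placeholders to adapt:

```
output {
  elasticsearch {
    hosts => [ "https://myes.com:9200" ]
    user => "user"
    password => "password"
    index => "elasticsearch-logs-5-%{+YYYY.MM.dd}"
  }
}
```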
78 | ## Contributing 79 | 80 | Most of the time you'll probably want to add new Logstash filters to 81 | parse various components not already handled here. You may also need 82 | to update the dynamic template mapping used by the index if you add 83 | new fields, or import/export a new dashboard/visualisation from/to Kibana. 84 | 85 | ### Elasticsearch 86 | 87 | The mapping templates are located in the [elasticsearch](elasticsearch) directory (`elasticsearch-template-5x.json` and `elasticsearch-template-2x.json`). After editing, you'll 88 | need to update the mapping in your Elasticsearch cluster with the curl 89 | command above and reindex any log files. 90 | 91 | ### Logstash 92 | 93 | All of the filters for a base component of logging in Elasticsearch 94 | go into a separate `logstash/conf.d/50-filter-<component>.conf` Logstash configuration file. So, 95 | for example, filters for **index.shard** and **index.fielddata** log 96 | messages go into a `50-filter-index.conf` Logstash configuration file. 97 | 98 | The following standard input, filter and output configuration files 99 | also exist: 100 | 101 | * `10-input-stdin.conf`: for reading stdin. 102 | * `40-filter-base.conf`: base filter configuration to parse timestamp, 103 | log level and component. 104 | * `45-filter-exceptions.conf`: parses multiline Java stack traces. 105 | * `90-output-dots.conf`: outputs a `.` to the console for each log 106 | line processed. 107 | 108 | ### Kibana 109 | 110 | The dashboards and visualisations shown in the screenshot are in the 111 | [kibana](kibana) directory, split across `search.json`, `visualisations.json` and `dashboard.json`. 112 | -------------------------------------------------------------------------------- /elasticsearch/elasticsearch-template-2x.json: -------------------------------------------------------------------------------- 1 | { 2 | "template" : "elasticsearch-logs-*", 3 | "settings" : { 4 | "index.refresh_interval" : "5s", 5 | "analysis": { 6 | "filter": { 7 | "trace_filter": { 8 | "type": "pattern_capture", 9 | "preserve_original": 1, 10 | "patterns": [ 11 | "(\\p{Ll}+|\\p{Lu}\\p{Ll}+|\\p{Lu}+)", 12 | "(\\d+)" 13 | ] 14 | } 15 | }, 16 | "analyzer": { 17 | "component_analyzer": { 18 | "tokenizer": "pattern" 19 | }, 20 | "trace_analyzer": { 21 | "tokenizer": "pattern", 22 | "filter": [ 23 | "trace_filter", 24 | "lowercase" 25 | ] 26 | } 27 | } 28 | } 29 | }, 30 | "mappings" : { 31 | "_default_" : { 32 | "_all" : {"enabled" : true, "omit_norms" : true}, 33 | "dynamic_templates" : [ { 34 | "message_field" : { 35 | "match" : "message", 36 | "match_mapping_type" : "string", 37 | "mapping" : { 38 | "type" : "string", "index" : "analyzed", "omit_norms" : true, 39 | "fielddata" : { "format" : "disabled" } 40 | } 41 | } 42 | }, { 43 | "string_fields" : { 44 | "match" : "*", 45 | "match_mapping_type" : "string", 46 | "mapping" : { 47 | "type" : "string", "index" : "analyzed", "omit_norms" : true, 48 | "fielddata" : { "format" : "disabled" }, 49 | "fields" : { 50 | "raw" : {"type": "string", "index" : "not_analyzed", "doc_values" : true, "ignore_above" : 256} 51 | } 52 | } 53 | } 54 | }, { 55 | "component" : { 56 | "match" : "component", 57 | "mapping" : { 58 | "type" : "string", "index" : "analyzed", "analyzer": "component_analyzer", "omit_norms" : true, 59 | "fielddata" : { "format" : "disabled" }, 60 | "fields" : { 61 | "raw" : {"type": "string", "index" : "not_analyzed", "doc_values" : true, "ignore_above" : 256} 62 | } 63 | } 64 | } 65 | }, { 66 | "exception" : { 67 | "match" : "exception", 68 | "mapping" : { 69 | "type" : "string", "index": "analyzed", "analyzer": "trace_analyzer", "omit_norms": true, "ignore_above" : 2048, 70 | "fielddata" : { "format" : "disabled" }, 71 | "fields" : { 72 | "raw" : {"type": "string", "index" : "not_analyzed", "doc_values" : true, "ignore_above" : 2048 } 73 | } 74 | } 75 | } 76 | }, { 77 | "float_fields" : { 78 | "match" : "*", 79 | "match_mapping_type" : "float", 80 | "mapping" : { "type" : "float", "doc_values" : true } 81 | } 82 | }, { 83 | "double_fields" : { 84 | "match" : "*", 85 | "match_mapping_type" : "double", 86 | "mapping" : { "type" : "double", "doc_values" : true } 87 | } 88 | }, { 89 | "byte_fields" : { 90 | "match" : "*", 91 | "match_mapping_type" : "byte", 92 | "mapping" : { "type" : "byte", "doc_values" : true } 93 | } 94 | }, { 95 | "short_fields" : { 96 | "match" : "*", 97 | "match_mapping_type" : "short", 98 | "mapping" : { "type" : "short", "doc_values" : true } 99 | } 100 | }, { 101 | "integer_fields" : { 102 | "match" : "*", 103 | "match_mapping_type" : "integer", 104 | "mapping" : { "type" : "integer", "doc_values" : true } 105 | } 106 | }, { 107 | "long_fields" : { 108 | "match" : "*", 109 | "match_mapping_type" : "long", 110 | "mapping" : { "type" : "long", "doc_values" : true } 111 | } 112 | }, { 113 | 
"date_fields" : { 114 | "match" : "*", 115 | "match_mapping_type" : "date", 116 | "mapping" : { "type" : "date", "doc_values" : true } 117 | } 118 | } ], 119 | "properties" : { 120 | "@timestamp": { "type": "date", "doc_values" : true }, 121 | "@version": { "type": "string", "index": "not_analyzed", "doc_values" : true }, 122 | "level": { "type": "string", "index": "not_analyzed", "doc_values" : true }, 123 | "shard_state": { "type": "string", "index": "not_analyzed", "doc_values" : true }, 124 | "shard_type": { "type": "string", "index": "not_analyzed", "doc_values" : true }, 125 | "index_id": { "type": "string", "index": "not_analyzed", "doc_values" : true }, 126 | "document_id": { "type": "string", "index": "not_analyzed", "doc_values" : true }, 127 | "node_id": { "type": "string", "index": "not_analyzed", "doc_values" : true }, 128 | "shard_id": { "type": "integer", "doc_values" : true }, 129 | "bulk_action": { "type": "string", "index": "not_analyzed", "doc_values" : true }, 130 | "bulk_request": { "type": "string", "index": "no" }, 131 | "index_request": { "type": "string", "index": "no" } 132 | } 133 | } 134 | } 135 | } 136 | -------------------------------------------------------------------------------- /kibana/visualisations.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "_id": "Elasticsearch-Logs:-Components", 4 | "_type": "visualization", 5 | "_source": { 6 | "title": "Elasticsearch Logs: Components", 7 | "visState": "{\"title\":\"Elasticsearch Logs: Components\",\"type\":\"pie\",\"params\":{\"shareYAxis\":true,\"addTooltip\":true,\"addLegend\":true,\"legendPosition\":\"right\",\"isDonut\":false},\"aggs\":[{\"id\":\"1\",\"enabled\":true,\"type\":\"count\",\"schema\":\"metric\",\"params\":{}},{\"id\":\"2\",\"enabled\":true,\"type\":\"terms\",\"schema\":\"segment\",\"params\":{\"field\":\"component.keyword\",\"size\":10,\"order\":\"desc\",\"orderBy\":\"1\"}},{\"id\":\"3\",\"enabled\":true,\"type\":\"terms\",\"schema\":\"segment\",\"params\":{\"field\":\"level\",\"size\":6,\"order\":\"desc\",\"orderBy\":\"1\"}}],\"listeners\":{}}", 8 | "uiStateJSON": "{}", 9 | "description": "", 10 | "savedSearchId": "Search:-Elasticsearch-Logs", 11 | "version": 1, 12 | "kibanaSavedObjectMeta": { 13 | "searchSourceJSON": "{\"filter\":[]}" 14 | } 15 | } 16 | }, 17 | { 18 | "_id": "Elasticsearch-Logs:-Event-Tags", 19 | "_type": "visualization", 20 | "_source": { 21 | "title": "Elasticsearch Logs: Event Tags", 22 | "visState": "{\"title\":\"Elasticsearch Logs: Event Tags\",\"type\":\"pie\",\"params\":{\"shareYAxis\":true,\"addTooltip\":true,\"addLegend\":true,\"legendPosition\":\"right\",\"isDonut\":false},\"aggs\":[{\"id\":\"1\",\"enabled\":true,\"type\":\"count\",\"schema\":\"metric\",\"params\":{\"customLabel\":\"# of Events\"}},{\"id\":\"2\",\"enabled\":true,\"type\":\"terms\",\"schema\":\"segment\",\"params\":{\"field\":\"tags.keyword\",\"size\":10,\"order\":\"desc\",\"orderBy\":\"1\",\"customLabel\":\"Tags\"}}],\"listeners\":{}}", 23 | "uiStateJSON": "{}", 24 | "description": "", 25 | "savedSearchId": "Search:-Elasticsearch-Logs", 26 | "version": 1, 27 | "kibanaSavedObjectMeta": { 28 | "searchSourceJSON": "{\"filter\":[]}" 29 | } 30 | } 31 | }, 32 | { 33 | "_id": "Elasticsearch-Logs:-Histogram", 34 | "_type": "visualization", 35 | "_source": { 36 | "title": "Elasticsearch Logs: Histogram", 37 | "visState": "{\"title\":\"Elasticsearch Logs: 
Histogram\",\"type\":\"histogram\",\"params\":{\"shareYAxis\":true,\"addTooltip\":true,\"addLegend\":true,\"legendPosition\":\"right\",\"scale\":\"linear\",\"mode\":\"stacked\",\"times\":[],\"addTimeMarker\":false,\"defaultYExtents\":false,\"setYExtents\":false,\"yAxis\":{}},\"aggs\":[{\"id\":\"1\",\"enabled\":true,\"type\":\"count\",\"schema\":\"metric\",\"params\":{\"customLabel\":\"# of Events\"}},{\"id\":\"2\",\"enabled\":true,\"type\":\"date_histogram\",\"schema\":\"segment\",\"params\":{\"field\":\"@timestamp\",\"interval\":\"auto\",\"customInterval\":\"2h\",\"min_doc_count\":1,\"extended_bounds\":{},\"customLabel\":\"Time\"}},{\"id\":\"3\",\"enabled\":true,\"type\":\"terms\",\"schema\":\"group\",\"params\":{\"field\":\"component.keyword\",\"size\":5,\"order\":\"desc\",\"orderBy\":\"1\",\"customLabel\":\"Component\"}}],\"listeners\":{}}", 38 | "uiStateJSON": "{}", 39 | "description": "", 40 | "savedSearchId": "Search:-Elasticsearch-Logs", 41 | "version": 1, 42 | "kibanaSavedObjectMeta": { 43 | "searchSourceJSON": "{\"filter\":[]}" 44 | } 45 | } 46 | }, 47 | { 48 | "_id": "Elasticsearch-Logs:-Index-Details", 49 | "_type": "visualization", 50 | "_source": { 51 | "title": "Elasticsearch Logs: Index Details", 52 | "visState": "{\"title\":\"Elasticsearch Logs: Index Details\",\"type\":\"pie\",\"params\":{\"shareYAxis\":true,\"addTooltip\":true,\"addLegend\":true,\"legendPosition\":\"right\",\"isDonut\":false},\"aggs\":[{\"id\":\"1\",\"enabled\":true,\"type\":\"count\",\"schema\":\"metric\",\"params\":{\"customLabel\":\"# of Events\"}},{\"id\":\"2\",\"enabled\":true,\"type\":\"terms\",\"schema\":\"segment\",\"params\":{\"field\":\"index_id\",\"size\":10,\"order\":\"desc\",\"orderBy\":\"1\",\"customLabel\":\"Index\"}},{\"id\":\"3\",\"enabled\":true,\"type\":\"terms\",\"schema\":\"segment\",\"params\":{\"field\":\"shard_id\",\"size\":10,\"order\":\"desc\",\"orderBy\":\"1\",\"customLabel\":\"Shard ID\"}},{\"id\":\"4\",\"enabled\":true,\"type\":\"terms\",\"schema\":\"segment\",\"params\":{\"field\":\"shard_type\",\"size\":2,\"order\":\"desc\",\"orderBy\":\"1\",\"customLabel\":\"Shard Type\"}}],\"listeners\":{}}", 53 | "uiStateJSON": "{}", 54 | "description": "", 55 | "savedSearchId": "Search:-Elasticsearch-Logs", 56 | "version": 1, 57 | "kibanaSavedObjectMeta": { 58 | "searchSourceJSON": "{\"filter\":[]}" 59 | } 60 | } 61 | }, 62 | { 63 | "_id": "Elasticsearch-Logs:-Nodes", 64 | "_type": "visualization", 65 | "_source": { 66 | "title": "Elasticsearch Logs: Nodes", 67 | "visState": "{\"title\":\"Elasticsearch Logs: Nodes\",\"type\":\"pie\",\"params\":{\"shareYAxis\":true,\"addTooltip\":true,\"addLegend\":true,\"legendPosition\":\"right\",\"isDonut\":false},\"aggs\":[{\"id\":\"1\",\"enabled\":true,\"type\":\"count\",\"schema\":\"metric\",\"params\":{\"customLabel\":\"# of Events\"}},{\"id\":\"2\",\"enabled\":true,\"type\":\"terms\",\"schema\":\"segment\",\"params\":{\"field\":\"node_name.keyword\",\"size\":50,\"order\":\"desc\",\"orderBy\":\"1\",\"customLabel\":\"Node\"}}],\"listeners\":{}}", 68 | "uiStateJSON": "{}", 69 | "description": "", 70 | "savedSearchId": "Search:-Elasticsearch-Logs", 71 | "version": 1, 72 | "kibanaSavedObjectMeta": { 73 | "searchSourceJSON": "{\"filter\":[]}" 74 | } 75 | } 76 | } 77 | ] 78 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND 
CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. 
Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 
122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 
179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "{}" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright 2015 Joshua Rich 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | --------------------------------------------------------------------------------