├── Dockerfile
├── fluent.conf
├── plugins
│   ├── dir.info
│   └── json_in_string.rb
├── readme.md
└── scripts
    ├── run.sh
    └── setup_plugins.sh
/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM fluent/fluentd:latest
2 |
3 | MAINTAINER Osama Sidat
4 |
5 | USER root
6 | RUN apk add --update bash && rm -rf /var/cache/apk/*
7 |
8 | ADD scripts/run.sh /run.sh
9 | ADD scripts/setup_plugins.sh /setup_plugins.sh
10 | RUN chmod 755 /*.sh
11 |
12 | WORKDIR /home/fluent
13 | ENV PATH /home/fluent/.gem/ruby/2.2.0/bin:$PATH
14 |
15 | RUN gem install fluent-plugin-parser
16 | RUN gem install fluent-plugin-elasticsearch
17 | RUN gem install fluent-plugin-aws-elasticsearch-service
18 |
19 | EXPOSE 24224
20 | EXPOSE 8888
21 |
22 | ENTRYPOINT ["/run.sh"]
23 |
--------------------------------------------------------------------------------
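The Dockerfile above adds bash plus the parser and Elasticsearch output plugins on top of the stock fluent/fluentd image. A minimal sketch of building and running it locally (the tag mirrors the published image name from the readme; any tag works):

```
docker build -t oosidat/fluentd-elasticsearch-container .
docker run -d --name some_cool_name -p 24224:24224 oosidat/fluentd-elasticsearch-container
```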
/fluent.conf:
--------------------------------------------------------------------------------
1 | <source>
2 |   type forward
3 | </source>
4 |
--------------------------------------------------------------------------------
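fluent.conf only declares the forward input listening on 24224; the filter and match sections are appended by setup_plugins.sh on first start. A quick smoke test of the forward listener, assuming port 24224 is published and the fluentd gem (which ships the fluent-cat utility) is installed on the host:

```
# Send one test event, tagged so it matches the default docker.** pattern
echo '{"log":"hello from fluent-cat"}' | fluent-cat docker.test
```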
/plugins/dir.info:
--------------------------------------------------------------------------------
1 | This file is just here to commit this folder. Git doesn't track empty folders, and Docker won't build this image without one. Sad, I know.
2 |
--------------------------------------------------------------------------------
/plugins/json_in_string.rb:
--------------------------------------------------------------------------------
1 | require 'json'
2 | require 'time'
3 |
4 | module Fluent
5 |   class TextParser
6 |     class JsonInStringParser < Parser
7 |       Plugin.register_parser("json_in_string", self)
8 |
9 |       config_param :time_format, :string, :default => nil # time_format is configurable
10 |
11 |       def configure(conf)
12 |         super
13 |         @time_parser = TimeParser.new(@time_format)
14 |       end
15 |
16 |       # This is the main method. The input is the unit of data to be parsed.
17 |       # If this is the in_tail plugin, it would be a line. If this is for in_syslog,
18 |       # it is a single syslog message.
19 |       def parse(input)
20 |         begin
21 |           output = input.is_a?(Hash) ? input : JSON.parse(input)
22 |           t = output['time']
23 |           t = Time.now.strftime(@time_format) if t.nil?
24 |           time = @time_parser.parse(t)
25 |           yield time, output
26 |         rescue # fall back to the raw input if JSON or time parsing fails
27 |           yield input
28 |         end
29 |       end
30 |     end
31 |   end
32 | end
33 |
--------------------------------------------------------------------------------
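json_in_string.rb exists because the Docker fluentd log driver hands fluentd each log line as a plain string in the record's `log` field, so applications that log JSON arrive double-encoded; the generated config wires this parser in via `format json_in_string`. A rough illustration of the underlying transformation using plain Ruby from the shell, not the plugin itself (the payload is made up):

```
# The inner JSON string, as it would appear in a record's "log" field
echo '{"level":"info","msg":"started"}' | ruby -rjson -e 'p JSON.parse(STDIN.read)'
# => {"level"=>"info", "msg"=>"started"}
```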
/readme.md:
--------------------------------------------------------------------------------
1 | # fluentd-elasticsearch-container
2 |
3 | A Fluentd container that forwards other containers' logs to an Elasticsearch container.
4 |
5 | ## Running
6 |
7 | ###### Simple run:
8 |
9 | ```
10 | docker run -d --name some_cool_name oosidat/fluentd-elasticsearch-container
11 | ```
12 |
13 | ###### Run in vagrant box alongside other docker containers
14 |
15 | * Ensure that there's an Elasticsearch container running on port 9200
16 | * Run fluentd container:
17 | ```
18 | docker run --net=host -d --name some_cool_name oosidat/fluentd-elasticsearch-container
19 | ```
20 | * Ensure that other containers (which need to send logs) have their [log driver set to fluentd](https://docs.docker.com/engine/reference/logging/fluentd/), as shown in the example below
21 |
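For example, an application container could be started like this (image name, tag, and address are placeholders):

```
docker run -d \
  --log-driver=fluentd \
  --log-opt fluentd-address=localhost:24224 \
  --log-opt tag=docker.my_app \
  my_app_image
```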
22 | ###### Supported Environment Variables
23 |
24 | *Pass these with `-e` in `docker run`, or with the `environment` key in `docker-compose` (see the example below).*
25 |
26 | | Name | Description | Default |
27 | | --- | --- | --- |
28 | | ES_HOST | Elasticsearch host | localhost |
29 | | ES_PORT | Elasticsearch port | 9200 |
30 | | ES_INDEX | Elasticsearch index name | fluentd |
31 | | ES_TYPE | Elasticsearch index type | fluentd |
32 | | MATCH_PATTERN | fluentd match pattern used in the generated `<filter>` & `<match>` blocks | `docker.**` |
33 | | FLUENTD_OPT | extra command-line options passed to fluentd | none (empty) |
34 |
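For instance, to point the container at an Elasticsearch host other than localhost (the values are placeholders):

```
docker run -d --name some_cool_name \
  -e ES_HOST=elasticsearch.internal \
  -e ES_PORT=9200 \
  -e ES_INDEX=myapp \
  oosidat/fluentd-elasticsearch-container
```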
35 | ##### Credits:
36 | * [openfirmware/docker-fluentd-elasticsearch](https://github.com/openfirmware/docker-fluentd-elasticsearch)
37 | * [Eric Fortin's custom fluent-plugin-parser plugin](https://github.com/docker/docker/issues/17830#issuecomment-176145149)
38 | * [Fluentd docker image](https://github.com/fluent/fluentd-docker-image)
39 |
--------------------------------------------------------------------------------
/scripts/run.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | if [ ! -f /.plugin_setup ]; then
4 |   /setup_plugins.sh
5 | fi
6 |
7 | fluentd -c /fluentd/etc/$FLUENTD_CONF -p /fluentd/plugins $FLUENTD_OPT
8 |
--------------------------------------------------------------------------------
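run.sh appends the generated configuration once (guarded by the /.plugin_setup marker) and then starts fluentd; FLUENTD_CONF is expected to come from the fluent/fluentd base image (where it defaults to fluent.conf), and FLUENTD_OPT is passed through as extra command-line flags. For example, verbose logging could be turned on like this (a sketch, not the only way):

```
docker run -d --name some_cool_name \
  -e FLUENTD_OPT="-v" \
  oosidat/fluentd-elasticsearch-container
```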
/scripts/setup_plugins.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | if [ -f /.plugin_setup ]; then
4 |   echo "Plugins already set up, /.plugin_setup file exists"
5 |   exit 0
6 | fi
7 |
8 | echo "Initializing plugin setup"
9 |
10 | FLUENT_CONF_FILE=/fluentd/etc/fluent.conf
11 |
12 | DEFAULT_HOST=${ELASTICSEARCH_PORT_9200_TCP_ADDR:-localhost}
13 | DEFAULT_PORT=${ELASTICSEARCH_PORT_9200_TCP_PORT:-9200}
14 |
15 | ES_HOST=${ES_HOST:-$DEFAULT_HOST}
16 | ES_PORT=${ES_PORT:-$DEFAULT_PORT}
17 | ES_INDEX=${ES_INDEX:-fluentd}
18 | ES_TYPE=${ES_TYPE:-fluentd}
19 |
20 | AWS_ES=${AWS_ES:-false}
21 |
22 | MATCH_PATTERN=${MATCH_PATTERN:-docker.**}
23 |
24 | STORES=""
25 |
26 | ELASTIC_SEARCH_STORE="
27 | <store>
28 |   type elasticsearch
29 |   logstash_format true
30 |   host $ES_HOST
31 |   port $ES_PORT
32 |   index_name $ES_INDEX
33 |   type_name $ES_TYPE
34 |   include_tag_key true
35 | </store>"
36 |
37 | AWS_ELASTIC_SEARCH_STORE="
38 | <store>
39 |   type aws-elasticsearch-service
40 |   index_name $ES_INDEX
41 |   flush_interval 5s
42 |   logstash_format true
43 |   buffer_type memory
44 |   buffer_queue_limit 64
45 |   buffer_chunk_limit 8m
46 |   include_tag_key true
47 |   <endpoint>
48 |     region $AWS_REGION
49 |     url $AWS_URL
50 |     access_key_id $AWS_ACCESS_KEY_ID
51 |     secret_access_key $AWS_SECRET_ACCESS_KEY
52 |   </endpoint>
53 | </store>"
54 |
55 | if ! $AWS_ES; then
56 |   STORES=$ELASTIC_SEARCH_STORE
57 | else
58 |   STORES=$AWS_ELASTIC_SEARCH_STORE
59 | fi
60 |
61 | echo "
62 | <filter $MATCH_PATTERN>
63 |   type parser
64 |   format json_in_string
65 |   time_format %Y-%m-%dT%H:%M:%S.%L%Z
66 |   key_name log
67 |   hash_value_field log
68 | </filter>" >> $FLUENT_CONF_FILE
69 |
70 | echo "
71 | <match $MATCH_PATTERN>
72 |   type copy
73 | $STORES
74 | </match>" >> $FLUENT_CONF_FILE
75 |
76 | echo "
77 | <match **>
78 |   type stdout
79 | </match>" >> $FLUENT_CONF_FILE
80 |
81 | touch /.plugin_setup
82 |
83 | echo "Finished setting up plugins; configuration appended to $FLUENT_CONF_FILE"
84 |
--------------------------------------------------------------------------------
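setup_plugins.sh also honours a set of AWS-related variables (AWS_ES, AWS_REGION, AWS_URL, AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY) that the readme table does not list. A sketch of running against an Amazon Elasticsearch Service domain with placeholder values, followed by a quick way to inspect the config the script appended:

```
docker run -d --name some_cool_name \
  -e AWS_ES=true \
  -e AWS_REGION=us-east-1 \
  -e AWS_URL=https://search-example.us-east-1.es.amazonaws.com \
  -e AWS_ACCESS_KEY_ID=changeme \
  -e AWS_SECRET_ACCESS_KEY=changeme \
  oosidat/fluentd-elasticsearch-container

# Inspect the appended configuration inside the running container
docker exec some_cool_name cat /fluentd/etc/fluent.conf
```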