├── .env
├── .gitattributes
├── kibana
│   ├── Dockerfile
│   └── config
│       └── kibana.yml
├── logstash
│   ├── Dockerfile
│   ├── pipeline
│   │   └── logstash.conf
│   └── config
│       └── logstash.yml
├── elasticsearch
│   ├── Dockerfile
│   └── config
│       └── elasticsearch.yml
├── docker-stack.yml
├── docker-compose.yml
└── README.md

/.env:
--------------------------------------------------------------------------------
ELK_VERSION=7.6.2
--------------------------------------------------------------------------------
/.gitattributes:
--------------------------------------------------------------------------------
# Declare files that will always have LF line endings on checkout.
*.sh text eol=lf
--------------------------------------------------------------------------------
/kibana/Dockerfile:
--------------------------------------------------------------------------------
ARG ELK_VERSION

# https://www.docker.elastic.co/
FROM docker.elastic.co/kibana/kibana:${ELK_VERSION}

# Add your kibana plugins setup here
# Example: RUN kibana-plugin install
--------------------------------------------------------------------------------
/logstash/Dockerfile:
--------------------------------------------------------------------------------
ARG ELK_VERSION

# https://www.docker.elastic.co/
FROM docker.elastic.co/logstash/logstash:${ELK_VERSION}

# Add your logstash plugins setup here
# Example: RUN logstash-plugin install logstash-filter-json
--------------------------------------------------------------------------------
/elasticsearch/Dockerfile:
--------------------------------------------------------------------------------
ARG ELK_VERSION

# https://www.docker.elastic.co/
FROM docker.elastic.co/elasticsearch/elasticsearch:${ELK_VERSION}

# Add your elasticsearch plugins setup here
# Example: RUN elasticsearch-plugin install analysis-icu
--------------------------------------------------------------------------------
/logstash/pipeline/logstash.conf:
--------------------------------------------------------------------------------
input {
  tcp {
    port => 5000
  }
}

## Add your filters / logstash plugins configuration here

output {
  elasticsearch {
    hosts => "elasticsearch:9200"
    user => "elastic"
    password => "changeme"
  }
}
--------------------------------------------------------------------------------
/logstash/config/logstash.yml:
--------------------------------------------------------------------------------
---
## Default Logstash configuration from Logstash base image.
## https://github.com/elastic/logstash/blob/master/docker/data/logstash/config/logstash-full.yml
#
http.host: "0.0.0.0"
xpack.monitoring.elasticsearch.hosts: [ "http://elasticsearch:9200" ]

## X-Pack security credentials
#
xpack.monitoring.enabled: true
xpack.monitoring.elasticsearch.username: elastic
xpack.monitoring.elasticsearch.password: changeme
--------------------------------------------------------------------------------
/kibana/config/kibana.yml:
--------------------------------------------------------------------------------
---
## Default Kibana configuration from Kibana base image.
## https://github.com/elastic/kibana/blob/master/src/dev/build/tasks/os_packages/docker_generator/templates/kibana_yml.template.js
#
server.name: kibana
server.host: "0"
elasticsearch.hosts: [ "http://elasticsearch:9200" ]
xpack.monitoring.ui.container.elasticsearch.enabled: true

## X-Pack security credentials
#
elasticsearch.username: elastic
elasticsearch.password: changeme
--------------------------------------------------------------------------------
/elasticsearch/config/elasticsearch.yml:
--------------------------------------------------------------------------------
---
## Default Elasticsearch configuration from Elasticsearch base image.
## https://github.com/elastic/elasticsearch/blob/master/distribution/docker/src/docker/config/elasticsearch.yml
#
cluster.name: "docker-cluster"
network.host: 0.0.0.0

## X-Pack settings
## see https://www.elastic.co/guide/en/elasticsearch/reference/current/setup-xpack.html
#
# xpack.license.self_generated.type: trial
xpack.security.enabled: true
xpack.monitoring.collection.enabled: true
--------------------------------------------------------------------------------
/docker-stack.yml:
--------------------------------------------------------------------------------
version: '3.7'

services:

  elasticsearch:
    image: docker.elastic.co/elasticsearch/elasticsearch:7.6.2
    ports:
      - "9200:9200"
      - "9300:9300"
    configs:
      - source: elastic_config
        target: /usr/share/elasticsearch/config/elasticsearch.yml
    environment:
      ES_JAVA_OPTS: "-Xmx256m -Xms256m"
      ELASTIC_PASSWORD: changeme
      # Use single node discovery in order to disable production mode and avoid bootstrap checks
      # see https://www.elastic.co/guide/en/elasticsearch/reference/current/bootstrap-checks.html
      discovery.type: single-node
    networks:
      - elk
    deploy:
      mode: replicated
      replicas: 1

  logstash:
    image: docker.elastic.co/logstash/logstash:7.6.2
    ports:
      - "5000:5000"
      - "9600:9600"
    configs:
      - source: logstash_config
        target: /usr/share/logstash/config/logstash.yml
      - source: logstash_pipeline
        target: /usr/share/logstash/pipeline/logstash.conf
    environment:
      LS_JAVA_OPTS: "-Xmx256m -Xms256m"
    networks:
      - elk
    deploy:
      mode: replicated
      replicas: 1

  kibana:
    image: docker.elastic.co/kibana/kibana:7.6.2
    ports:
      - "5601:5601"
    configs:
      - source: kibana_config
        target: /usr/share/kibana/config/kibana.yml
    networks:
      - elk
    deploy:
      mode: replicated
      replicas: 1

configs:

  elastic_config:
    file: ./elasticsearch/config/elasticsearch.yml
  logstash_config:
    file: ./logstash/config/logstash.yml
  logstash_pipeline:
    file: ./logstash/pipeline/logstash.conf
  kibana_config:
    file: ./kibana/config/kibana.yml

networks:
  elk:
    driver: overlay
--------------------------------------------------------------------------------
/docker-compose.yml:
--------------------------------------------------------------------------------
version: '3.7'

services:
  elasticsearch:
    build:
      context: elasticsearch/
      args:
        ELK_VERSION: $ELK_VERSION
    volumes:
      - type: bind
        source: ./elasticsearch/config/elasticsearch.yml
        target: /usr/share/elasticsearch/config/elasticsearch.yml
        read_only: true
      - type: volume
        source: elasticsearch
        target: /usr/share/elasticsearch/data
    ports:
      - "9200:9200"
      - "9300:9300"
    environment:
      ES_JAVA_OPTS: "-Xmx256m -Xms256m"
      ELASTIC_PASSWORD: changeme
      # Use single node discovery in order to disable production mode and avoid bootstrap checks
      # see https://www.elastic.co/guide/en/elasticsearch/reference/current/bootstrap-checks.html
      discovery.type: single-node
    networks:
      - elk

  logstash:
    build:
      context: logstash/
      args:
        ELK_VERSION: $ELK_VERSION
    volumes:
      - type: bind
        source: ./logstash/config/logstash.yml
        target: /usr/share/logstash/config/logstash.yml
        read_only: true
      - type: bind
        source: ./logstash/pipeline
        target: /usr/share/logstash/pipeline
        read_only: true
    ports:
      - "5000:5000/tcp"
      - "5000:5000/udp"
      - "9600:9600"
    environment:
      LS_JAVA_OPTS: "-Xmx256m -Xms256m"
    networks:
      - elk
    depends_on:
      - elasticsearch

  kibana:
    build:
      context: kibana/
      args:
        ELK_VERSION: $ELK_VERSION
    volumes:
      - type: bind
        source: ./kibana/config/kibana.yml
        target: /usr/share/kibana/config/kibana.yml
        read_only: true
    ports:
      - "5601:5601"
    networks:
      - elk
    depends_on:
      - elasticsearch

networks:
  elk:
    driver: bridge

volumes:
  elasticsearch:
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
# Elasticsearch stack (ELK) with docker-compose

Run the latest version of the [Elastic stack][elk-stack] with Docker and Docker Compose.

Additional information is available from [Docker - ELK 7.6 : Elastic Stack with Docker Compose](https://www.bogotobogo.com/DevOps/Docker/Docker_ELK_7_6_Elastic_Stack_Docker_Compose.php).

It gives us the ability to analyze any data set by using the searching/aggregation capabilities of Elasticsearch and
the visualization power of Kibana.

Based on the official Docker images from Elastic:

* [Elasticsearch](https://github.com/elastic/elasticsearch/tree/master/distribution/docker)
* [Logstash](https://github.com/elastic/logstash/tree/master/docker)
* [Kibana](https://github.com/elastic/kibana/tree/master/src/dev/build/tasks/os_packages/docker_generator)

## Contents

1. [Requirements](#requirements)
   * [Host setup](#host-setup)
2. [Usage](#usage)
   * [Version selection](#version-selection)
   * [Bringing up the stack](#bringing-up-the-stack)
   * [Cleanup](#cleanup)
   * [Initial setup](#initial-setup)
     * [Setting up user authentication](#setting-up-user-authentication)
     * [Injecting data](#injecting-data)
     * [Default Kibana index pattern creation](#default-kibana-index-pattern-creation)
3. [Configuration](#configuration)
   * [How to configure Elasticsearch](#how-to-configure-elasticsearch)
   * [How to configure Kibana](#how-to-configure-kibana)
   * [How to configure Logstash](#how-to-configure-logstash)
   * [How to enable paid features](#how-to-enable-paid-features)
   * [How to scale out the Elasticsearch cluster](#how-to-scale-out-the-elasticsearch-cluster)
4. [Extensibility](#extensibility)
   * [How to add plugins](#how-to-add-plugins)
5. [JVM tuning](#jvm-tuning)
   * [How to specify the amount of memory used by a service](#how-to-specify-the-amount-of-memory-used-by-a-service)
   * [How to enable a remote JMX connection to a service](#how-to-enable-a-remote-jmx-connection-to-a-service)
6. [Going further](#going-further)
   * [Plugins and integrations](#plugins-and-integrations)
   * [Swarm mode](#swarm-mode)
7. [Note](#note)

## Requirements

### Host setup

* [Docker Engine](https://docs.docker.com/install/) version **17.05** or newer
* [Docker Compose](https://docs.docker.com/compose/install/) version **1.20.0** or newer
* 1.5 GB of RAM

By default, the stack exposes the following ports:

* 5000: Logstash TCP input
* 9200: Elasticsearch HTTP
* 9300: Elasticsearch TCP transport
* 5601: Kibana

## Usage

### Version selection

This repository tries to stay aligned with the latest version of the Elastic stack. The `master` branch tracks the
current major version (7.x).

To use a different version of the core Elastic components, simply change the version number inside the `.env` file. If
we are upgrading an existing stack, we should carefully read the note in the next section.

### Bringing up the stack

Clone this repository onto the Docker host that will run the stack, then start services locally using Docker Compose:

```console
$ docker-compose up
```

We can also run all services in the background (detached mode) by adding the `-d` flag to the above command.

> :warning: We must rebuild the stack images with `docker-compose build` whenever we switch branches or update the
> version of an already existing stack.

To start the stack for the very first time, please read the section below attentively.

### Cleanup

Elasticsearch data is persisted inside a volume by default.

In order to entirely shut down the stack and remove all persisted data, use the following Docker Compose command:

```console
$ docker-compose down -v
```

## Initial setup

### Setting up user authentication

> :information_source: Refer to [How to enable paid features](#how-to-enable-paid-features) to enable authentication.

The stack is pre-configured with the following **privileged** bootstrap user:

* user: *elastic*
* password: *changeme*

Although all stack components work out-of-the-box with this user, we strongly recommend using the unprivileged [built-in
users][builtin-users] instead for increased security.

1. Initialize passwords for built-in users

    ```console
    $ docker-compose exec -T elasticsearch bin/elasticsearch-setup-passwords auto --batch
    ```

    Passwords for all 6 built-in users will be randomly generated. Take note of them.

2. Unset the bootstrap password (_optional_)

    Remove the `ELASTIC_PASSWORD` environment variable from the `elasticsearch` service inside the Compose file
    (`docker-compose.yml`). It is only used to initialize the keystore during the initial startup of Elasticsearch.

3. Replace usernames and passwords in configuration files
    Use the `kibana` user inside the Kibana configuration file (`kibana/config/kibana.yml`) and the `logstash_system` user
    inside the Logstash configuration file (`logstash/config/logstash.yml`) in place of the existing `elastic` user.

    Replace the password for the `elastic` user inside the Logstash pipeline file (`logstash/pipeline/logstash.conf`).

    > :information_source: Do not use the `logstash_system` user inside the Logstash *pipeline* file, it does not have
    > sufficient permissions to create indices. Follow the instructions at [Configuring Security in Logstash][ls-security]
    > to create a user with suitable roles.

    See also the [Configuration](#configuration) section below.

4. Restart Kibana and Logstash to apply changes

    ```console
    $ docker-compose restart kibana logstash
    ```

> :information_source: Learn more about the security of the Elastic stack at [Tutorial: Getting started with
> security][sec-tutorial].

### Injecting data

Give Kibana about a minute to initialize, then access the Kibana web UI by hitting
[http://localhost:5601](http://localhost:5601) with a web browser and use the following default credentials to log in:

* user: *elastic*
* password: *<your generated elastic password>*

Now that the stack is running, we can go ahead and inject some log entries. The shipped Logstash configuration allows
us to send content via TCP:

```console
# Using BSD netcat (Debian, Ubuntu, macOS system, ...)
$ cat /path/to/logfile.log | nc -q0 localhost 5000
```

```console
# Using GNU netcat (CentOS, Fedora, macOS Homebrew, ...)
$ cat /path/to/logfile.log | nc -c localhost 5000
```

We can also load the sample data provided by our Kibana installation.

### Default Kibana index pattern creation

When Kibana launches for the first time, it is not configured with any index pattern.

#### Via the Kibana web UI

> :information_source: We need to inject data into Logstash before being able to configure a Logstash index pattern via
> the Kibana web UI.

Navigate to the _Discover_ view of Kibana from the left sidebar. We will be prompted to create an index pattern. Enter
`logstash-*` to match Logstash indices then, on the next page, select `@timestamp` as the time filter field. Finally,
click _Create index pattern_ and return to the _Discover_ view to inspect our log entries.

Refer to [Connect Kibana with Elasticsearch][connect-kibana] and [Creating an index pattern][index-pattern] for detailed
instructions about the index pattern configuration.

#### On the command line

Create an index pattern via the Kibana API:

```console
$ curl -XPOST -D- 'http://localhost:5601/api/saved_objects/index-pattern' \
    -H 'Content-Type: application/json' \
    -H 'kbn-version: 7.6.2' \
    -u elastic:<your generated elastic password> \
    -d '{"attributes":{"title":"logstash-*","timeFieldName":"@timestamp"}}'
```

The created pattern will automatically be marked as the default index pattern as soon as the Kibana UI is opened for the first time.
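
As a quick sanity check, we can also confirm that the injected log entries actually produced `logstash-*` indices in
Elasticsearch before creating the pattern. This is a minimal sketch, assuming the default index naming and the `elastic`
credentials from the steps above:

```console
# List the Logstash-created indices; an empty response means no data has been injected yet.
$ curl -u elastic:<your generated elastic password> 'http://localhost:9200/_cat/indices/logstash-*?v'
```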

## Configuration

> :information_source: Configuration is not dynamically reloaded; we will need to restart individual components after
> any configuration change.

### How to configure Elasticsearch

The Elasticsearch configuration is stored in [`elasticsearch/config/elasticsearch.yml`][config-es].

We can also specify the options we want to override by setting environment variables inside the Compose file:

```yml
elasticsearch:

  environment:
    network.host: _non_loopback_
    cluster.name: my-cluster
```

Please refer to the following documentation page for more details about how to configure Elasticsearch inside Docker
containers: [Install Elasticsearch with Docker][es-docker].

### How to configure Kibana

The Kibana default configuration is stored in [`kibana/config/kibana.yml`][config-kbn].

It is also possible to map the entire `config` directory instead of a single file.

Please refer to the following documentation page for more details about how to configure Kibana inside Docker
containers: [Running Kibana on Docker][kbn-docker].

### How to configure Logstash

The Logstash configuration is stored in [`logstash/config/logstash.yml`][config-ls].

It is also possible to map the entire `config` directory instead of a single file; however, we must be aware that
Logstash will be expecting a [`log4j2.properties`][log4j-props] file for its own logging.

Please refer to the following documentation page for more details about how to configure Logstash inside Docker
containers: [Configuring Logstash for Docker][ls-docker].

### How to enable paid features

Switch the value of Elasticsearch's `xpack.license.self_generated.type` option from `basic` to `trial` (see [License
settings][trial-license]).

### How to scale out the Elasticsearch cluster

Follow the instructions from the Wiki: [Scaling out Elasticsearch](https://github.com/deviantony/docker-elk/wiki/Elasticsearch-cluster)

## Extensibility

### How to add plugins

To add plugins to any ELK component we have to:

1. Add a `RUN` statement to the corresponding `Dockerfile` (e.g. `RUN logstash-plugin install logstash-filter-json`)
2. Add the associated plugin code configuration to the service configuration (e.g. Logstash input/output)
3. Rebuild the images using the `docker-compose build` command

## JVM tuning

### How to specify the amount of memory used by a service

By default, both Elasticsearch and Logstash start with [1/4 of the total host
memory](https://docs.oracle.com/javase/8/docs/technotes/guides/vm/gctuning/parallel.html#default_heap_size) allocated to
the JVM Heap Size.
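
To check what that actually resolves to inside a running container, we can query the Elasticsearch node stats API. This
is a small sketch, assuming the bootstrap `elastic`/`changeme` credentials are still in place:

```console
# Report the configured maximum heap per node (the value is in bytes).
$ curl -u elastic:changeme 'http://localhost:9200/_nodes/stats/jvm?filter_path=nodes.*.jvm.mem.heap_max_in_bytes&pretty'
```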

The startup scripts for Elasticsearch and Logstash can append extra JVM options from the value of an environment
variable, allowing the user to adjust the amount of memory that can be used by each component:

| Service       | Environment variable |
|---------------|----------------------|
| Elasticsearch | ES_JAVA_OPTS         |
| Logstash      | LS_JAVA_OPTS         |

To accommodate environments where memory is scarce (Docker for Mac has only 2 GB available by default), the Heap Size
allocation is capped by default to 256 MB per service in the `docker-compose.yml` file. If we want to override the
default JVM configuration, edit the matching environment variable(s) in the `docker-compose.yml` file.

For example, to increase the maximum JVM Heap Size for Logstash:

```yml
logstash:

  environment:
    LS_JAVA_OPTS: -Xmx1g -Xms1g
```

### How to enable a remote JMX connection to a service

As for the Java Heap memory (see above), we can specify JVM options to enable JMX and map the JMX port on the Docker
host.

Update the `{ES,LS}_JAVA_OPTS` environment variable with the following content (here the JMX service is mapped to port
18080; we can change that). Do not forget to update the `-Djava.rmi.server.hostname` option with the IP address of our
Docker host (replace **DOCKER_HOST_IP**):

```yml
logstash:

  environment:
    LS_JAVA_OPTS: -Dcom.sun.management.jmxremote -Dcom.sun.management.jmxremote.ssl=false -Dcom.sun.management.jmxremote.authenticate=false -Dcom.sun.management.jmxremote.port=18080 -Dcom.sun.management.jmxremote.rmi.port=18080 -Djava.rmi.server.hostname=DOCKER_HOST_IP -Dcom.sun.management.jmxremote.local.only=false
```

## Going further

### Plugins and integrations

See the following Wiki pages:

* [External applications](https://github.com/deviantony/docker-elk/wiki/External-applications)
* [Popular integrations](https://github.com/deviantony/docker-elk/wiki/Popular-integrations)

### Swarm mode

Experimental support for Docker [Swarm mode][swarm-mode] is provided in the form of a `docker-stack.yml` file, which can
be deployed in an existing Swarm cluster using the following command:

```console
$ docker stack deploy -c docker-stack.yml elk
```

If all components get deployed without any error, the following command will show 3 running services:

```console
$ docker stack services elk
```

> :information_source: To scale Elasticsearch in Swarm mode, configure *zen* to use the DNS name `tasks.elasticsearch`
> instead of `elasticsearch`.
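
For reference, a hypothetical override of `elasticsearch/config/elasticsearch.yml` along those lines is sketched below.
It is untested here; in 7.x the legacy *zen* unicast hosts setting is usually expressed as `discovery.seed_hosts`, and
the single-node `discovery.type` entry would also have to be removed from `docker-stack.yml`:

```yml
cluster.name: "docker-cluster"
network.host: 0.0.0.0
# Resolve every replica of the Swarm service instead of a single container:
discovery.seed_hosts: tasks.elasticsearch
```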

[elk-stack]: https://www.elastic.co/elk-stack
[stack-features]: https://www.elastic.co/products/stack
[paid-features]: https://www.elastic.co/subscriptions
[trial-license]: https://www.elastic.co/guide/en/elasticsearch/reference/current/license-settings.html

[linux-postinstall]: https://docs.docker.com/install/linux/linux-postinstall/

[booststap-checks]: https://www.elastic.co/guide/en/elasticsearch/reference/current/bootstrap-checks.html
[es-sys-config]: https://www.elastic.co/guide/en/elasticsearch/reference/current/system-config.html

[win-shareddrives]: https://docs.docker.com/docker-for-windows/#shared-drives
[mac-mounts]: https://docs.docker.com/docker-for-mac/osxfs/

[builtin-users]: https://www.elastic.co/guide/en/elasticsearch/reference/current/built-in-users.html
[ls-security]: https://www.elastic.co/guide/en/logstash/current/ls-security.html
[sec-tutorial]: https://www.elastic.co/guide/en/elasticsearch/reference/current/security-getting-started.html

[connect-kibana]: https://www.elastic.co/guide/en/kibana/current/connect-to-elasticsearch.html
[index-pattern]: https://www.elastic.co/guide/en/kibana/current/index-patterns.html

[config-es]: ./elasticsearch/config/elasticsearch.yml
[config-kbn]: ./kibana/config/kibana.yml
[config-ls]: ./logstash/config/logstash.yml

[es-docker]: https://www.elastic.co/guide/en/elasticsearch/reference/current/docker.html
[kbn-docker]: https://www.elastic.co/guide/en/kibana/current/docker.html
[ls-docker]: https://www.elastic.co/guide/en/logstash/current/docker-config.html

[log4j-props]: https://github.com/elastic/logstash/tree/7.6/docker/data/logstash/config
[esuser]: https://github.com/elastic/elasticsearch/blob/7.6/distribution/docker/src/docker/Dockerfile#L23-L24

[upgrade]: https://www.elastic.co/guide/en/elasticsearch/reference/current/setup-upgrade.html

[swarm-mode]: https://docs.docker.com/engine/swarm/

## Note

This repository is a forked/trimmed version of [docker-elk](https://github.com/deviantony/docker-elk).
--------------------------------------------------------------------------------