├── .dockerignore
├── .flake8
├── .github
│   ├── dependabot.yml
│   └── workflows
│       ├── automerge.yml
│       └── ci.yml
├── .gitignore
├── Caddyfile
├── Dockerfile
├── LICENSE
├── README.md
├── docker-compose.yml
├── epilog
│   ├── __init__.py
│   └── emitter.py
├── filebeat.yml
├── makefile
├── pyproject.toml
├── requirements-dev.in
├── requirements-dev.txt
├── requirements.in
├── requirements.txt
├── scripts
│   ├── health_check.sh
│   ├── purge_logs.sh
│   └── wait_for.sh
└── tests
    ├── __init__.py
    └── test_emitter.py
/.dockerignore:
--------------------------------------------------------------------------------
1 | __pycache__
2 | *.pyc
3 | *.pyo
4 | *.pyd
5 | .Python
6 | pip-log.txt
7 | pip-delete-this-directory.txt
8 | .tox
9 | .coverage
10 | .coverage.*
11 | .cache
12 | nosetests.xml
13 | coverage.xml
14 | *.cover
15 | *.log
16 | .git
17 | .mypy_cache
18 | .pytest_cache
19 | .hypothesis
20 |
--------------------------------------------------------------------------------
/.flake8:
--------------------------------------------------------------------------------
1 | [flake8]
2 | extend-exclude =
3 | .git,
4 | __pycache__,
5 | docs/source/conf.py,
6 | old,
7 | build,
8 | dist,
9 | .venv,
10 | venv
11 |
12 | extend-ignore = E203, E266, E501, W605
13 |
14 | # Black's default line length.
15 | max-line-length = 88
16 |
17 | max-complexity = 18
18 |
19 | # Specify the list of error codes you wish Flake8 to report.
20 | select = B,C,E,F,W,T4,B9
21 |
22 | # Parallelism
23 | jobs = 4
24 |
--------------------------------------------------------------------------------
/.github/dependabot.yml:
--------------------------------------------------------------------------------
1 | version: 2
2 | updates:
3 |   - package-ecosystem: "pip" # See documentation for possible values
4 |     directory: "/" # Location of package manifests
5 |     schedule:
6 |       interval: "daily"
7 |
8 |   - package-ecosystem: "docker" # See documentation for possible values
9 |     directory: "/" # Location of package manifests
10 |     schedule:
11 |       interval: "daily"
12 |
--------------------------------------------------------------------------------
/.github/workflows/automerge.yml:
--------------------------------------------------------------------------------
1 | # .github/workflows/automerge.yml
2 |
3 | name: Dependabot auto-merge
4 |
5 | on: pull_request
6 |
7 | permissions:
8 |   contents: write
9 |
10 | jobs:
11 |   dependabot:
12 |     runs-on: ubuntu-latest
13 |     if: ${{ github.actor == 'dependabot[bot]' }}
14 |     steps:
15 |       - name: Enable auto-merge for Dependabot PRs
16 |         run: gh pr merge --auto --merge "$PR_URL"
17 |         env:
18 |           PR_URL: ${{ github.event.pull_request.html_url }}
19 |           # GitHub provides this variable in the CI env. You don't
20 |           # need to add anything to the secrets vault.
21 |           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
22 |
--------------------------------------------------------------------------------
/.github/workflows/ci.yml:
--------------------------------------------------------------------------------
1 | name: CI
2 |
3 | on:
4 |   push:
5 |     branches:
6 |       - main
7 |   pull_request:
8 |     branches:
9 |       - main
10 |
11 | jobs:
12 |   build:
13 |     runs-on: ${{ matrix.os }}
14 |     strategy:
15 |       matrix:
16 |         os: [ubuntu-latest, macos-latest]
17 |         python-version: ["3.8", "3.9", "3.10"]
18 |
19 |     steps:
20 |       - uses: actions/checkout@v2
21 |
22 |       - name: Set up Python ${{ matrix.python-version }}
23 |         uses: actions/setup-python@v2
24 |         with:
25 |           python-version: ${{ matrix.python-version }}
26 |
27 |       - name: Install the test dependencies
28 |         run: |
29 |           echo "Upgrading pip..."
30 |           python -m pip install --upgrade pip
31 |           python -m pip install -r requirements-dev.txt
32 |
33 |       - name: Check linter
34 |         run: |
35 |           echo "Checking black formatting..."
36 |           python3 -m black --check .
37 |           echo "Checking isort formatting..."
38 |           python3 -m isort --check .
39 |           echo "Checking flake8 violations..."
40 |           python3 -m flake8 .
41 |
42 |       - name: Run the tests
43 |         run: |
44 |           pytest -s -v
45 |
46 |       # - name: Build containers
47 |
48 |       #   run: |
49 |       #     sudo sysctl -w vm.max_map_count=262144
50 |       #     sudo docker compose up -d
51 |
52 |       #     apt install jq
53 |       #     until docker inspect --format "{{json .State.Health }}" caddy | \
54 |       #       jq '.Status' | grep 'healthy'; do
55 |
56 |       #       >&2 echo "ELK stack is unhealthy - waiting..."
57 |
58 |       #       sleep 1
59 |       #     done
60 |
61 |       #     >&2 echo "ELK stack is healthy - proceeding..."
62 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | data
2 |
3 | # Byte-compiled / optimized / DLL files
4 | __pycache__/
5 | *.py[cod]
6 | *$py.class
7 |
8 | # C extensions
9 | *.so
10 |
11 | # Distribution / packaging
12 | .Python
13 | build/
14 | develop-eggs/
15 | dist/
16 | downloads/
17 | eggs/
18 | .eggs/
19 | lib/
20 | lib64/
21 | parts/
22 | sdist/
23 | var/
24 | wheels/
25 | share/python-wheels/
26 | *.egg-info/
27 | .installed.cfg
28 | *.egg
29 | MANIFEST
30 |
31 | # PyInstaller
32 | # Usually these files are written by a python script from a template
33 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
34 | *.manifest
35 | *.spec
36 |
37 | # Installer logs
38 | pip-log.txt
39 | pip-delete-this-directory.txt
40 |
41 | # Unit test / coverage reports
42 | htmlcov/
43 | .tox/
44 | .nox/
45 | .coverage
46 | .coverage.*
47 | .cache
48 | nosetests.xml
49 | coverage.xml
50 | *.cover
51 | *.py,cover
52 | .hypothesis/
53 | .pytest_cache/
54 | cover/
55 |
56 | # Translations
57 | *.mo
58 | *.pot
59 |
60 | # Django stuff:
61 | *.log
62 | local_settings.py
63 | db.sqlite3
64 | db.sqlite3-journal
65 |
66 | # Flask stuff:
67 | instance/
68 | .webassets-cache
69 |
70 | # Scrapy stuff:
71 | .scrapy
72 |
73 | # Sphinx documentation
74 | docs/_build/
75 |
76 | # PyBuilder
77 | .pybuilder/
78 | target/
79 |
80 | # Jupyter Notebook
81 | .ipynb_checkpoints
82 |
83 | # IPython
84 | profile_default/
85 | ipython_config.py
86 |
87 | # pyenv
88 | # For a library or package, you might want to ignore these files since the code is
89 | # intended to run in multiple environments; otherwise, check them in:
90 | # .python-version
91 |
92 | # pipenv
93 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
94 | # However, in case of collaboration, if having platform-specific dependencies or dependencies
95 | # having no cross-platform support, pipenv may install dependencies that don't work, or not
96 | # install all needed dependencies.
97 | #Pipfile.lock
98 |
99 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow
100 | __pypackages__/
101 |
102 | # Celery stuff
103 | celerybeat-schedule
104 | celerybeat.pid
105 |
106 | # SageMath parsed files
107 | *.sage.py
108 |
109 | # Environments
110 | .env
111 | .venv
112 | env/
113 | venv/
114 | ENV/
115 | env.bak/
116 | venv.bak/
117 |
118 | # Spyder project settings
119 | .spyderproject
120 | .spyproject
121 |
122 | # Rope project settings
123 | .ropeproject
124 |
125 | # mkdocs documentation
126 | /site
127 |
128 | # mypy
129 | .mypy_cache/
130 | .dmypy.json
131 | dmypy.json
132 |
133 | # Pyre type checker
134 | .pyre/
135 |
136 | # pytype static type analyzer
137 | .pytype/
138 |
139 | # Cython debug symbols
140 | cython_debug/
141 |
--------------------------------------------------------------------------------
/Caddyfile:
--------------------------------------------------------------------------------
1 | elasticsearch.localhost {
2 |     reverse_proxy es01:9200
3 |     encode zstd gzip
4 | }
5 |
6 | kibana.localhost {
7 |     reverse_proxy kib01:5601
8 |     encode zstd gzip
9 | }
12 |
--------------------------------------------------------------------------------
/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM python:3.14.0b2-alpine
2 | ENV PYTHONUNBUFFERED=1
3 |
4 | RUN apk --no-cache add curl # Needed by the shell scripts in ./scripts.
5 |
6 | WORKDIR /code
7 | COPY /epilog /code/epilog
8 | COPY /scripts /code/scripts
9 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | Apache License
2 | Version 2.0, January 2004
3 | http://www.apache.org/licenses/
4 |
5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
6 |
7 | 1. Definitions.
8 |
9 | "License" shall mean the terms and conditions for use, reproduction,
10 | and distribution as defined by Sections 1 through 9 of this document.
11 |
12 | "Licensor" shall mean the copyright owner or entity authorized by
13 | the copyright owner that is granting the License.
14 |
15 | "Legal Entity" shall mean the union of the acting entity and all
16 | other entities that control, are controlled by, or are under common
17 | control with that entity. For the purposes of this definition,
18 | "control" means (i) the power, direct or indirect, to cause the
19 | direction or management of such entity, whether by contract or
20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the
21 | outstanding shares, or (iii) beneficial ownership of such entity.
22 |
23 | "You" (or "Your") shall mean an individual or Legal Entity
24 | exercising permissions granted by this License.
25 |
26 | "Source" form shall mean the preferred form for making modifications,
27 | including but not limited to software source code, documentation
28 | source, and configuration files.
29 |
30 | "Object" form shall mean any form resulting from mechanical
31 | transformation or translation of a Source form, including but
32 | not limited to compiled object code, generated documentation,
33 | and conversions to other media types.
34 |
35 | "Work" shall mean the work of authorship, whether in Source or
36 | Object form, made available under the License, as indicated by a
37 | copyright notice that is included in or attached to the work
38 | (an example is provided in the Appendix below).
39 |
40 | "Derivative Works" shall mean any work, whether in Source or Object
41 | form, that is based on (or derived from) the Work and for which the
42 | editorial revisions, annotations, elaborations, or other modifications
43 | represent, as a whole, an original work of authorship. For the purposes
44 | of this License, Derivative Works shall not include works that remain
45 | separable from, or merely link (or bind by name) to the interfaces of,
46 | the Work and Derivative Works thereof.
47 |
48 | "Contribution" shall mean any work of authorship, including
49 | the original version of the Work and any modifications or additions
50 | to that Work or Derivative Works thereof, that is intentionally
51 | submitted to Licensor for inclusion in the Work by the copyright owner
52 | or by an individual or Legal Entity authorized to submit on behalf of
53 | the copyright owner. For the purposes of this definition, "submitted"
54 | means any form of electronic, verbal, or written communication sent
55 | to the Licensor or its representatives, including but not limited to
56 | communication on electronic mailing lists, source code control systems,
57 | and issue tracking systems that are managed by, or on behalf of, the
58 | Licensor for the purpose of discussing and improving the Work, but
59 | excluding communication that is conspicuously marked or otherwise
60 | designated in writing by the copyright owner as "Not a Contribution."
61 |
62 | "Contributor" shall mean Licensor and any individual or Legal Entity
63 | on behalf of whom a Contribution has been received by Licensor and
64 | subsequently incorporated within the Work.
65 |
66 | 2. Grant of Copyright License. Subject to the terms and conditions of
67 | this License, each Contributor hereby grants to You a perpetual,
68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
69 | copyright license to reproduce, prepare Derivative Works of,
70 | publicly display, publicly perform, sublicense, and distribute the
71 | Work and such Derivative Works in Source or Object form.
72 |
73 | 3. Grant of Patent License. Subject to the terms and conditions of
74 | this License, each Contributor hereby grants to You a perpetual,
75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
76 | (except as stated in this section) patent license to make, have made,
77 | use, offer to sell, sell, import, and otherwise transfer the Work,
78 | where such license applies only to those patent claims licensable
79 | by such Contributor that are necessarily infringed by their
80 | Contribution(s) alone or by combination of their Contribution(s)
81 | with the Work to which such Contribution(s) was submitted. If You
82 | institute patent litigation against any entity (including a
83 | cross-claim or counterclaim in a lawsuit) alleging that the Work
84 | or a Contribution incorporated within the Work constitutes direct
85 | or contributory patent infringement, then any patent licenses
86 | granted to You under this License for that Work shall terminate
87 | as of the date such litigation is filed.
88 |
89 | 4. Redistribution. You may reproduce and distribute copies of the
90 | Work or Derivative Works thereof in any medium, with or without
91 | modifications, and in Source or Object form, provided that You
92 | meet the following conditions:
93 |
94 | (a) You must give any other recipients of the Work or
95 | Derivative Works a copy of this License; and
96 |
97 | (b) You must cause any modified files to carry prominent notices
98 | stating that You changed the files; and
99 |
100 | (c) You must retain, in the Source form of any Derivative Works
101 | that You distribute, all copyright, patent, trademark, and
102 | attribution notices from the Source form of the Work,
103 | excluding those notices that do not pertain to any part of
104 | the Derivative Works; and
105 |
106 | (d) If the Work includes a "NOTICE" text file as part of its
107 | distribution, then any Derivative Works that You distribute must
108 | include a readable copy of the attribution notices contained
109 | within such NOTICE file, excluding those notices that do not
110 | pertain to any part of the Derivative Works, in at least one
111 | of the following places: within a NOTICE text file distributed
112 | as part of the Derivative Works; within the Source form or
113 | documentation, if provided along with the Derivative Works; or,
114 | within a display generated by the Derivative Works, if and
115 | wherever such third-party notices normally appear. The contents
116 | of the NOTICE file are for informational purposes only and
117 | do not modify the License. You may add Your own attribution
118 | notices within Derivative Works that You distribute, alongside
119 | or as an addendum to the NOTICE text from the Work, provided
120 | that such additional attribution notices cannot be construed
121 | as modifying the License.
122 |
123 | You may add Your own copyright statement to Your modifications and
124 | may provide additional or different license terms and conditions
125 | for use, reproduction, or distribution of Your modifications, or
126 | for any such Derivative Works as a whole, provided Your use,
127 | reproduction, and distribution of the Work otherwise complies with
128 | the conditions stated in this License.
129 |
130 | 5. Submission of Contributions. Unless You explicitly state otherwise,
131 | any Contribution intentionally submitted for inclusion in the Work
132 | by You to the Licensor shall be under the terms and conditions of
133 | this License, without any additional terms or conditions.
134 | Notwithstanding the above, nothing herein shall supersede or modify
135 | the terms of any separate license agreement you may have executed
136 | with Licensor regarding such Contributions.
137 |
138 | 6. Trademarks. This License does not grant permission to use the trade
139 | names, trademarks, service marks, or product names of the Licensor,
140 | except as required for reasonable and customary use in describing the
141 | origin of the Work and reproducing the content of the NOTICE file.
142 |
143 | 7. Disclaimer of Warranty. Unless required by applicable law or
144 | agreed to in writing, Licensor provides the Work (and each
145 | Contributor provides its Contributions) on an "AS IS" BASIS,
146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
147 | implied, including, without limitation, any warranties or conditions
148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
149 | PARTICULAR PURPOSE. You are solely responsible for determining the
150 | appropriateness of using or redistributing the Work and assume any
151 | risks associated with Your exercise of permissions under this License.
152 |
153 | 8. Limitation of Liability. In no event and under no legal theory,
154 | whether in tort (including negligence), contract, or otherwise,
155 | unless required by applicable law (such as deliberate and grossly
156 | negligent acts) or agreed to in writing, shall any Contributor be
157 | liable to You for damages, including any direct, indirect, special,
158 | incidental, or consequential damages of any character arising as a
159 | result of this License or out of the use or inability to use the
160 | Work (including but not limited to damages for loss of goodwill,
161 | work stoppage, computer failure or malfunction, or any and all
162 | other commercial damages or losses), even if such Contributor
163 | has been advised of the possibility of such damages.
164 |
165 | 9. Accepting Warranty or Additional Liability. While redistributing
166 | the Work or Derivative Works thereof, You may choose to offer,
167 | and charge a fee for, acceptance of support, warranty, indemnity,
168 | or other liability obligations and/or rights consistent with this
169 | License. However, in accepting such obligations, You may act only
170 | on Your own behalf and on Your sole responsibility, not on behalf
171 | of any other Contributor, and only if You agree to indemnify,
172 | defend, and hold each Contributor harmless for any liability
173 | incurred by, or claims asserted against, such Contributor by reason
174 | of your accepting any such warranty or additional liability.
175 |
176 | END OF TERMS AND CONDITIONS
177 |
178 | APPENDIX: How to apply the Apache License to your work.
179 |
180 | To apply the Apache License to your work, attach the following
181 | boilerplate notice, with the fields enclosed by brackets "[]"
182 | replaced with your own identifying information. (Don't include
183 | the brackets!) The text should be enclosed in the appropriate
184 | comment syntax for the file format. We also recommend that a
185 | file or class name and description of purpose be included on the
186 | same "printed page" as the copyright notice for easier
187 | identification within third-party archives.
188 |
189 | Copyright [yyyy] [name of copyright owner]
190 |
191 | Licensed under the Apache License, Version 2.0 (the "License");
192 | you may not use this file except in compliance with the License.
193 | You may obtain a copy of the License at
194 |
195 | http://www.apache.org/licenses/LICENSE-2.0
196 |
197 | Unless required by applicable law or agreed to in writing, software
198 | distributed under the License is distributed on an "AS IS" BASIS,
199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
200 | See the License for the specific language governing permissions and
201 | limitations under the License.
202 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Epilog
2 |
3 | >> Dead simple container log aggregation with ELK stack <<
4 |
5 |
16 | ## Preface
17 |
18 | Epilog aims to demonstrate a language-agnostic, non-invasive, and straightforward way to add centralized logging to your stack. Centralized logging can be difficult depending on how much control you need over the log messages, how robust you need the logging system to be, and how you want to display the data to the consumer.
19 |
20 | ## Why?
21 |
22 | Invasive logging usually entails building a logging pipeline and integrating it into your application. Adding an extensive logging workflow directly to your application is non-trivial for a few reasons:
23 |
24 | * The workflow becomes language-specific and hard to scale as your application gets decentralized over time and starts to take advantage of multiple languages.
25 |
26 | * The logging pipeline gets tightly coupled with the application code.
27 |
28 | * Extensive logging in a blocking manner can significantly hurt the performance of the application.
29 |
30 | * Logging in a non-blocking fashion is difficult, and it usually requires a non-trivial amount of application code changes whenever the logging requirements change.
31 |
32 | This repository lays out a dead-simple but extensible centralized logging workflow that collects logs from Docker containers in a non-invasive manner. To achieve this, we've used the reliable ELK stack, which is at this point an industry standard.
33 |
34 |
35 | ## Features
36 |
37 | * Asynchronous log-aggregation pipeline that's completely decoupled from the app instances generating the logs.
38 |
39 | * Zero effect on performance if the app instances aren't doing expensive synchronous logging operations internally.
40 |
41 | * Horizontal scaling is achievable by adding more nodes to the Elasticsearch cluster.
42 |
43 | * To keep the storage requirements at bay, log messages are automatically deleted after 7 days. This is configurable (see the sketch after this list).
44 |
45 | * Synchronization during container startup to reduce the number of missing logs.
46 |
47 | * All log messages can be filtered and queried interactively from a centralized location via the Kibana dashboard.
48 |
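For reference, the knobs used across the stack are driven by environment variables in the root `.env` file. Here's a minimal sketch of what it might contain, assuming the defaults mentioned in this README; the actual file shipped with the repo is the source of truth:

```
# Hypothetical .env sketch; values are illustrative.
ELASTICSEARCH_USERNAME=elastic
ELASTICSEARCH_PASSWORD=debian
# Consumed by scripts/purge_logs.sh to set the ILM delete phase.
PURGE_AFTER=7d
# Prefix of the indices that the cleanup policy applies to.
INDEX_PREFIX=filebeat
```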
49 |
50 | ## Architecture
51 |
52 | This workflow leverages [Filebeat](https://www.elastic.co/beats/filebeat) to collect the logs, [Elasticsearch](https://www.elastic.co/elasticsearch/) to store and query the log messages, and [Kibana](https://www.elastic.co/kibana/) to visualize the data interactively. The following diagram explains how logs flow from your application containers and become queryable in the Kibana dashboards:
53 |
54 | 
55 |
56 | Here, the **Application** is a dockerized Python module that continuously sends log messages to the standard output.
57 |
58 | On a Unix machine, Docker saves these log messages as files matching the `/var/lib/docker/containers/*/*.log` pattern. **Filebeat** watches this location for new log messages and sends them to **Elasticsearch** in batches. This keeps the entire logging workflow asynchronous, as Filebeat isn't coupled with the application and is lightweight enough to be deployed with every instance of your application.
59 |
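To make this concrete, here's a representative entry from one of those `*.log` files. It's illustrative rather than copied from a real container, but it follows the shape of Docker's default `json-file` logging driver, which writes one JSON object per log line:

```
{"log":"batch 1: This is a debug log message.\n","stream":"stderr","time":"2021-10-05T12:00:00.000000000Z"}
```

Filebeat's `container` input understands this format natively, and the `decode_json_fields` processor in `filebeat.yml` handles log messages that are themselves JSON.
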
60 | The log consumer can make query requests via the **Kibana** dashboards and interactively search and filter the relevant log messages. The **Caddy** reverse proxy server is helpful during local development as you won't have to memorize the ports to access Elasticsearch and Kibana. You can also choose to use Caddy instead of Nginx as a reverse proxy and load balancer in your production orchestration.
61 |
62 |
63 | ## Installation
64 |
65 | * Make sure you have [Docker](https://www.docker.com/) and [Docker Compose v2](https://docs.docker.com/compose/cli-command/) installed on your system.
66 |
67 | * Clone the repo.
68 |
69 | * Go to the root directory and run:
70 |
71 | ```
72 | make up
73 | ```
74 | This will spin up 2 Elasticsearch nodes, 1 Kibana instance, 1 Filebeat instance, 1 log-emitting app instance, and the Caddy reverse proxy server.
75 |
76 | * To shut down everything gracefully, run:
77 |
78 | ```
79 | make down
80 | ```
81 |
82 | * To kill the container processes and clean up all the volumes, run:
83 |
84 | ```
85 | make kill && make clean
86 | ```
87 |
88 | ## Exploration
89 |
90 | Once you've run the `make up` command:
91 |
92 | * To access the Kibana dashboard, go to `https://kibana.localhost`. Since the reverse proxy serves a locally generated TLS certificate, your browser will complain about the site being unsafe. You can safely ignore the warning and proceed.
93 |
94 | * When prompted for credentials, use `elastic` as username and `debian` as password. You can configure this in the `.env` file.
95 |
96 | * Once you're inside the Kibana dashboard, head over to the Logs panel under the Observability section on the left panel.
97 |
98 | 
99 |
100 | * You can filter the logs by container name. Once you start typing `container.name`, Kibana will offer suggestions based on the names of the containers running on your machine.
101 |
102 |
103 | 
104 |
105 |
106 | * Another filter you might want to explore is filtering by hostname. To do so, type `host.name` and Kibana will show the available host identifiers in a dropdown. In this case, all the containers live on the same host, so there's only one host to filter by. These fields are added by the `processors` segment of the `filebeat.yml` file. You can find a comprehensive list of `processors` [here](https://www.elastic.co/guide/en/beats/filebeat/current/defining-processors.html).
107 |
108 | 
109 |
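Both filters can also be combined directly in the search bar using Kibana's query language. For example, with hypothetical container and host names:

```
container.name : "app" and host.name : "docker-desktop"
```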
110 |
111 | ## Maintenance & Extensibility
112 |
113 | * If you need log transformation, adding Logstash to this stack is quite easy. All you'll have to do is add a Logstash instance to the docker-compose.yml file and point Filebeat to send the logs to Logstash instead of Elasticsearch (see the sketch after this list). Logstash will then transform the logs and save them in the Elasticsearch cluster.
114 |
115 | * To scale up the Elasticsearch cluster, you can follow the configuration of the `es02` node in the docker-compose file. More nodes can be added in the same way to achieve horizontal scaling.
116 |
117 | * In a production setup, your app will most likely live on separate hosts from the Elasticsearch cluster. In that case, a Filebeat instance should live with every instance of the log-generating app, and each will ship the logs to a centralized location (directly to Elasticsearch, or first to Logstash and then to Elasticsearch) depending on your needs.
118 |
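As a rough sketch of the Logstash route described above (the service name, image tag, and pipeline path below are assumptions for illustration, not part of this repo), the new docker-compose.yml service could look like:

```
# Hypothetical service; nest under the top-level `services:` key.
logstash:
  image: 'docker.elastic.co/logstash/logstash:7.15.0'
  volumes:
    - './logstash/pipeline:/usr/share/logstash/pipeline:ro'
  networks:
    - elastic
  depends_on:
    - es01
    - es02
```

and the `output.elasticsearch` section of `filebeat.yml` would be swapped for a Logstash output pointing at the conventional Beats port:

```
# Replaces output.elasticsearch in filebeat.yml (sketch).
output.logstash:
  hosts: ["logstash:5044"]
```
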
119 | ## Disclaimer
120 |
121 | * This pipeline was tested on Unix-like systems, mainly Ubuntu and macOS. Also, the shell scripts might not work out of the box on Windows.
122 |
123 | * This setup only employs a rudimentary password-based authentication system. You should add TLS encryption to your production ELK stack. [Here's](https://www.elastic.co/guide/en/elasticsearch/reference/current/configuring-tls-docker.html) an example of how you might be able to do so.
124 |
125 | * For demonstration purposes, this repository has a `.env` file in the root directory. In your production application, you should never add `.env` files to your version control system.
126 |
127 | ## Resources
128 |
129 | * [Elasticsearch: What It Is, How It Works, And What It’s Used For](https://www.knowi.com/blog/what-is-elastic-search/)
130 |
131 | * [ELK: Delete old logging data using the Index Lifecycle Management](http://blog.ehrnhoefer.com/2019-05-04-elasticsearch-index-lifecycle-management/)
132 |
133 |
134 |
135 | ✨ 🍰 ✨
136 |
137 |
--------------------------------------------------------------------------------
/docker-compose.yml:
--------------------------------------------------------------------------------
1 | version: '3.9'
2 | services:
3 |   es01:
4 |     image: 'docker.elastic.co/elasticsearch/elasticsearch:7.15.0'
5 |     container_name: es01
6 |     environment:
7 |       - node.name=es01
8 |       - cluster.name=es-docker-cluster
9 |       - discovery.seed_hosts=es02
10 |       - 'cluster.initial_master_nodes=es01,es02'
11 |       - bootstrap.memory_lock=true
12 |       - ES_JAVA_OPTS=-Xms1024m -Xmx1024m
13 |       - xpack.security.enabled=true
14 |       - 'ELASTIC_PASSWORD=${ELASTICSEARCH_PASSWORD}'
15 |     ulimits:
16 |       memlock:
17 |         soft: -1
18 |         hard: -1
19 |     volumes:
20 |       - 'data_es01:/usr/share/elasticsearch/data'
21 |     ports:
22 |       - '9200:9200'
23 |     networks:
24 |       - elastic
25 |
26 |
27 |   es02:
28 |     image: 'docker.elastic.co/elasticsearch/elasticsearch:7.15.0'
29 |     container_name: es02
30 |     environment:
31 |       - node.name=es02
32 |       - cluster.name=es-docker-cluster
33 |       - discovery.seed_hosts=es01
34 |       - 'cluster.initial_master_nodes=es01,es02'
35 |       - bootstrap.memory_lock=true
36 |       - ES_JAVA_OPTS=-Xms1024m -Xmx1024m
37 |       - xpack.security.enabled=true
38 |       - 'ELASTIC_PASSWORD=${ELASTICSEARCH_PASSWORD}'
39 |     ulimits:
40 |       memlock:
41 |         soft: -1
42 |         hard: -1
43 |     volumes:
44 |       - 'data_es02:/usr/share/elasticsearch/data'
45 |     networks:
46 |       - elastic
47 |
48 |
49 |   kib01:
50 |     image: 'docker.elastic.co/kibana/kibana:7.15.0'
51 |     container_name: kib01
52 |     ports:
53 |       - '5601:5601'
54 |     environment:
55 |       - 'ELASTICSEARCH_URL=http://es01:9200'
56 |       - 'ELASTICSEARCH_USERNAME=${ELASTICSEARCH_USERNAME}'
57 |       - 'ELASTICSEARCH_PASSWORD=${ELASTICSEARCH_PASSWORD}'
58 |       - 'ELASTICSEARCH_HOSTS=["http://es01:9200","http://es02:9200"]'
59 |     networks:
60 |       - elastic
61 |     depends_on:
62 |       - es01
63 |       - es02
64 |
65 |
66 |   filebeat:
67 |     image: 'docker.elastic.co/beats/filebeat:7.15.0'
68 |     command:
69 |       - '--strict.perms=false'
70 |     user: root
71 |     volumes:
72 |       - './filebeat.yml:/usr/share/filebeat/filebeat.yml:ro'
73 |       - '/var/lib/docker:/var/lib/docker:ro'
74 |       - '/var/run/docker.sock:/var/run/docker.sock'
75 |     env_file:
76 |       - .env
77 |     networks:
78 |       - elastic
79 |     depends_on: # These must match the service names defined in this file.
80 |       - es01
81 |       - kib01
82 |
83 |
84 |   app:
85 |     build: .
86 |     entrypoint:
87 |       - /bin/sh
88 |       - '-c'
89 |     command:
90 |       - |
91 |         ./scripts/wait_for.sh
92 |         ./scripts/purge_logs.sh
93 |         python -m epilog.emitter
94 |     ports:
95 |       - '8001:8001'
96 |     networks:
97 |       - elastic
98 |     env_file:
99 |       - .env
100 |     depends_on:
101 |       - es01
102 |       - es02
103 |       - filebeat
104 |       - kib01
105 |
106 |
107 |   caddy:
108 |     image: 'caddy:2-alpine'
109 |     container_name: caddy
110 |     env_file:
111 |       - .env
112 |     ports:
113 |       - '80:80'
114 |       - '443:443'
115 |     volumes:
116 |       - './Caddyfile:/etc/caddy/Caddyfile'
117 |       - './data/caddy_data:/data'
118 |       - './data/caddy_config:/config'
119 |       - './scripts:/scripts'
120 |     networks:
121 |       - elastic
122 |     healthcheck:
123 |       test: /scripts/health_check.sh # The scripts dir is mounted at /scripts.
124 |       interval: 20s
125 |       timeout: 10s
126 |       start_period: 10s
127 |       retries: 3
128 |     depends_on:
129 |       - es01
130 |       - es02
131 |       - filebeat
132 |       - kib01
133 |       - app
134 |
135 |
136 | volumes:
137 |   data_es01: null
138 |   data_es02: null
139 |
140 |
141 | networks:
142 |   elastic:
143 |     driver: bridge
144 |
--------------------------------------------------------------------------------
/epilog/__init__.py:
--------------------------------------------------------------------------------
1 | import logging
2 |
3 | logger = logging.getLogger("primary")
4 | logger.setLevel(logging.DEBUG)
5 | stream_handler = logging.StreamHandler()
6 | stream_handler.setLevel(logging.DEBUG)
7 | logger.addHandler(stream_handler)
8 | logger.propagate = True  # Let records propagate so pytest's caplog can capture them.
9 |
--------------------------------------------------------------------------------
/epilog/emitter.py:
--------------------------------------------------------------------------------
1 | """Emits logs."""
2 |
3 | from __future__ import annotations
4 |
5 | import argparse
6 | import itertools
7 | import logging
8 | import time
9 | from collections.abc import Sequence
10 |
11 | # Behavior of this logger has been modified in the `epilog/__init__.py` file.
12 | logger = logging.getLogger("primary")
13 |
14 |
15 | def emit_log(limit: int | None = None) -> None:
16 | """Emits 4 levels of log messages."""
17 |
18 | batch_cnts = itertools.count(start=1)
19 |
20 | for batch_cnt in batch_cnts:
21 | logger.debug(f"batch {batch_cnt}: This is a debug log message.")
22 | time.sleep(1)
23 |
24 | logger.info(f"batch {batch_cnt}: This is an info log message.")
25 | time.sleep(1)
26 |
27 | logger.warning(f"batch {batch_cnt}: This is a warning log message.")
28 | time.sleep(1)
29 |
30 | logger.error(f"batch {batch_cnt}: This is an error log message.")
31 | time.sleep(1)
32 |
33 | if limit and limit == batch_cnt:
34 | break
35 |
36 |
37 | def cli(argv: Sequence[str] | None = None) -> argparse.Namespace:
38 | """
39 | The 'emit_log' function runs indefinitely. This CLI allows us to
40 | limit the run.
41 |
42 | """
43 |     parser = argparse.ArgumentParser(description="Simple log emitter.")
44 | parser.add_argument(
45 | "-l",
46 | "--limit",
47 | default=None,
48 | nargs=1,
49 | help="Limit log after n iterations.",
50 | type=int,
51 | )
52 | args = parser.parse_args(args=argv) # Adding argv increases testability.
53 | return args
54 |
55 |
56 | if __name__ == "__main__":
57 | args = cli()
58 | emit_log(limit=args.limit.pop() if args.limit else None)
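# Usage note: `python -m epilog.emitter --limit 3` emits three batches of
# debug/info/warning/error messages and then exits; without --limit, the
# emitter runs indefinitely.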
59 |
--------------------------------------------------------------------------------
/filebeat.yml:
--------------------------------------------------------------------------------
1 | filebeat.inputs:
2 |   - type: container
3 |     paths:
4 |       - '/var/lib/docker/containers/*/*.log'
5 |
6 | processors:
7 |   - add_docker_metadata:
8 |       host: "unix:///var/run/docker.sock" # Filebeat communicates via unix socket.
9 |       match_fields: ["system.process.cgroup.id"]
10 |       match_pids: ["process.pid", "process.ppid"]
11 |       match_source: true
12 |       match_source_index: 4
13 |       match_short_id: true
14 |       cleanup_timeout: 60
15 |       labels.dedot: false
16 |
17 |   - add_host_metadata:
18 |       cache.ttl: 5m
19 |
20 |   - decode_json_fields:
21 |       fields: ["message"]
22 |       target: "json"
23 |       overwrite_keys: true
24 |
25 | output.elasticsearch:
26 |   hosts: ["es01:9200"] # Remote host can be: "https://myEShost:9200".
27 |   username: ${ELASTICSEARCH_USERNAME}
28 |   password: ${ELASTICSEARCH_PASSWORD}
29 |
30 |   indices:
31 |     - index: "filebeat-%{[agent.version]}-%{+yyyy.MM.dd}"
32 |
33 | logging.json: true
34 | logging.metrics.enabled: false
35 |
--------------------------------------------------------------------------------
/makefile:
--------------------------------------------------------------------------------
1 | path := .
2 |
3 | define Comment
4 | 	- Run `make help` to see all the available options.
5 | 	- Run `make lint` to run the linter.
6 | 	- Run `make lint-check` to check linter conformity.
7 | 	- Run `make dep-lock` to lock the deps in 'requirements.txt' and 'requirements-dev.txt'.
8 | 	- Run `make dep-sync` to sync the current environment with the locked deps.
9 | endef
10 |
11 |
12 | .PHONY: lint
13 | lint: black isort flake mypy ## Apply all the linters.
14 |
15 |
16 | .PHONY: lint-check
17 | lint-check: ## Check whether the codebase satisfies the linter rules.
18 | 	@echo
19 | 	@echo "Checking linter rules..."
20 | 	@echo "========================"
21 | 	@echo
22 | 	@black --check $(path)
23 | 	@isort --check $(path)
24 | 	@flake8 $(path)
25 | 	@mypy $(path)
26 |
27 |
28 | .PHONY: black
29 | black: ## Apply black.
30 | 	@echo
31 | 	@echo "Applying black..."
32 | 	@echo "================="
33 | 	@echo
34 | 	@black --fast $(path)
35 | 	@echo
36 |
37 |
38 | .PHONY: isort
39 | isort: ## Apply isort.
40 | 	@echo "Applying isort..."
41 | 	@echo "================="
42 | 	@echo
43 | 	@isort $(path)
44 |
45 |
46 | .PHONY: flake
47 | flake: ## Apply flake8.
48 | 	@echo
49 | 	@echo "Applying flake8..."
50 | 	@echo "=================="
51 | 	@echo
52 | 	@flake8 $(path)
53 |
54 |
55 | .PHONY: mypy
56 | mypy: ## Apply mypy.
57 | 	@echo
58 | 	@echo "Applying mypy..."
59 | 	@echo "================"
60 | 	@echo
61 | 	@mypy $(path)
62 |
63 |
64 | .PHONY: help
65 | help: ## Show this help message.
66 | 	@grep -E '^[a-zA-Z_-]+:.*?## .*$$' $(MAKEFILE_LIST) | sort | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[36m%-20s\033[0m %s\n", $$1, $$2}'
67 |
68 |
69 | .PHONY: test
70 | test: ## Run the tests against the current version of Python.
71 | 	pytest -v -s
72 |
73 |
74 | .PHONY: dep-lock
75 | dep-lock: ## Freeze deps in 'requirements.txt' file.
76 | 	@pip-compile requirements.in -o requirements.txt
77 | 	@pip-compile requirements-dev.in -o requirements-dev.txt
78 |
79 |
80 | .PHONY: dep-sync
81 | dep-sync: ## Sync venv installation with 'requirements.txt' file.
82 | 	@pip-sync
83 |
84 |
85 | .PHONY: up
86 | up: ## Start all the containers.
87 | 	@sudo sysctl -w vm.max_map_count=262144 && docker compose up -d
88 |
89 | .PHONY: down
90 | down: ## Stop all the containers gracefully.
91 | 	@docker compose down
92 |
93 | .PHONY: kill
94 | kill: ## Kill all the containers.
95 | 	@docker compose kill
96 |
97 | .PHONY: clean
98 | clean: ## Clean up all the container artifacts.
99 | 	@docker system prune -f && docker volume prune -f && sudo rm -rf ./data
100 |
--------------------------------------------------------------------------------
/pyproject.toml:
--------------------------------------------------------------------------------
1 | # Linter configuration.
2 | [tool.isort]
3 | profile = "black"
4 | atomic = true
5 | extend_skip_glob = "migrations,scripts"
6 | line_length = 88
7 |
8 |
9 | [tool.black]
10 | extend-exclude = "migrations,scripts"
11 |
12 |
13 | [tool.mypy]
14 | follow_imports = "skip"
15 | ignore_missing_imports = true
16 | warn_no_return = false
17 | warn_unused_ignores = true
18 | allow_untyped_globals = true
19 | allow_redefinition = true
20 | pretty = true
21 |
22 |
23 | [[tool.mypy.overrides]]
24 | module = "tests.*"
25 | ignore_errors = true
26 |
--------------------------------------------------------------------------------
/requirements-dev.in:
--------------------------------------------------------------------------------
1 | black
2 | isort
3 | flake8
4 | mypy
5 | pip-tools
6 | pytest
7 |
--------------------------------------------------------------------------------
/requirements-dev.txt:
--------------------------------------------------------------------------------
1 | #
2 | # This file is autogenerated by pip-compile with python 3.10
3 | # To update, run:
4 | #
5 | # pip-compile --output-file=requirements-dev.txt requirements-dev.in
6 | #
7 | black==25.1.0
8 |     # via -r requirements-dev.in
9 | build==1.0.3
10 |     # via pip-tools
11 | click==8.0.1
12 |     # via
13 |     #   black
14 |     #   pip-tools
15 | exceptiongroup==1.1.3
16 |     # via pytest
17 | flake8==7.2.0
18 |     # via -r requirements-dev.in
19 | iniconfig==1.1.1
20 |     # via pytest
21 | isort==6.0.1
22 |     # via -r requirements-dev.in
23 | mccabe==0.7.0
24 |     # via flake8
25 | mypy==1.16.0
26 |     # via -r requirements-dev.in
27 | mypy-extensions==1.0.0
28 |     # via
29 |     #   black
30 |     #   mypy
31 | packaging==23.0
32 |     # via
33 |     #   black
34 |     #   build
35 |     #   pytest
36 | pathspec==0.9.0
37 |     # via
38 |     #   black
39 |     #   mypy
40 | pip-tools==7.4.1
41 |     # via -r requirements-dev.in
42 | platformdirs==2.3.0
43 |     # via black
44 | pluggy==1.5.0
45 |     # via pytest
46 | pycodestyle==2.13.0
47 |     # via flake8
48 | pyflakes==3.3.1
49 |     # via flake8
50 | pygments==2.19.1
51 |     # via pytest
52 | pyproject-hooks==1.0.0
53 |     # via
54 |     #   build
55 |     #   pip-tools
56 | pytest==8.4.0
57 |     # via -r requirements-dev.in
58 | tomli==1.2.1
59 |     # via
60 |     #   black
61 |     #   build
62 |     #   mypy
63 |     #   pip-tools
64 |     #   pyproject-hooks
65 |     #   pytest
66 | typing-extensions==4.6.3
67 |     # via
68 |     #   black
69 |     #   mypy
70 | wheel==0.37.0
71 |     # via pip-tools
72 |
73 | # The following packages are considered to be unsafe in a requirements file:
74 | # pip
75 | # setuptools
76 |
--------------------------------------------------------------------------------
/requirements.in:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/rednafi/epilog/0322b1b536e43baa99dcef555fd350677f2b4d47/requirements.in
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | #
2 | # This file is autogenerated by pip-compile with python 3.10
3 | # To update, run:
4 | #
5 | # pip-compile --output-file=requirements.txt requirements.in
6 | #
7 |
--------------------------------------------------------------------------------
/scripts/health_check.sh:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 |
3 | set -e
4 |
5 | # Load environment variables from .env file.
6 | if [ -f .env ]
7 | then
8 | export $(cat .env | sed 's/#.*//g' | xargs)
9 | fi
10 |
11 | apk add --no-cache curl
12 | curl --fail http://es01:9200 \
13 |     -u ${ELASTICSEARCH_USERNAME}:${ELASTICSEARCH_PASSWORD} || exit 1
14 |
15 | curl --fail http://kib01:5601/api/features \
16 |     -u ${ELASTICSEARCH_USERNAME}:${ELASTICSEARCH_PASSWORD} || exit 1
17 |
--------------------------------------------------------------------------------
/scripts/purge_logs.sh:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 |
3 | set -e
4 |
5 | if [ -f .env ]
6 | then
7 | export $(cat .env | sed 's/#.*//g' | xargs)
8 | fi
9 |
10 |
11 | # Create an ILM policy that deletes indices once they are ${PURGE_AFTER} old.
12 | curl -X PUT "http://es01:9200/_ilm/policy/cleanup_policy?pretty" \
13 |     -H 'Content-Type: application/json' \
14 |     -u ${ELASTICSEARCH_USERNAME}:${ELASTICSEARCH_PASSWORD} \
15 |     -d '{
16 |         "policy": {
17 |             "phases": {
18 |                 "hot": {
19 |                     "actions": {}
20 |                 },
21 |                 "delete": {
22 |                     "min_age": "'"${PURGE_AFTER}"'",
23 |                     "actions": { "delete": {} }
24 |                 }
25 |             }
26 |         }
27 |     }'
28 |
29 | # Attach the policy to the existing indices.
30 | curl -X PUT "http://es01:9200/${INDEX_PREFIX}-*/_settings?pretty" \
31 |     -H 'Content-Type: application/json' \
32 |     -u ${ELASTICSEARCH_USERNAME}:${ELASTICSEARCH_PASSWORD} \
33 |     -d '{ "lifecycle.name": "cleanup_policy" }'
34 |
35 | # Attach the policy to the indices created in the future.
36 | curl -X PUT "http://es01:9200/_template/logging_policy_template?pretty" \
37 |     -H 'Content-Type: application/json' \
38 |     -u ${ELASTICSEARCH_USERNAME}:${ELASTICSEARCH_PASSWORD} \
39 |     -d '{
40 |         "index_patterns": ["'"${INDEX_PREFIX}"'-*"],
41 |         "settings": { "index.lifecycle.name": "cleanup_policy" }
42 |     }'
43 |
44 | # Reset the policy on the current indices: detach, then re-attach.
45 | curl -X POST "http://es01:9200/${INDEX_PREFIX}-*/_ilm/remove?pretty" \
46 |     -u ${ELASTICSEARCH_USERNAME}:${ELASTICSEARCH_PASSWORD}
47 |
48 | curl -X PUT "http://es01:9200/${INDEX_PREFIX}-*/_settings?pretty" \
49 |     -H 'Content-Type: application/json' \
50 |     -u ${ELASTICSEARCH_USERNAME}:${ELASTICSEARCH_PASSWORD} \
51 |     -d '{ "lifecycle.name": "cleanup_policy" }'
--------------------------------------------------------------------------------
/scripts/wait_for.sh:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 |
3 | set -e
4 |
5 | # Load environment variables from .env file.
6 | if [ -f .env ]
7 | then
8 | export $(cat .env | sed 's/#.*//g' | xargs)
9 | fi
10 |
11 |
12 | # Poll the URL in $1 until it returns HTTP 200; $2 is a display name.
13 | wait_for () {
14 |     until \
15 |         curl -X GET "$1" -u "${ELASTICSEARCH_USERNAME}:${ELASTICSEARCH_PASSWORD}" \
16 |             -s -o /dev/null --head -w "%{http_code}" | grep 200 > /dev/null; do
17 |
18 |         >&2 echo "$2 is unavailable - waiting..."
19 |         sleep 1
20 |     done
21 |
22 |     >&2 echo "$2 is up - proceeding..."
23 | }
25 |
26 | wait_for http://es01:9200 Elasticsearch
27 |
28 | wait_for http://kib01:5601/api/features Kibana
29 |
--------------------------------------------------------------------------------
/tests/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/rednafi/epilog/0322b1b536e43baa99dcef555fd350677f2b4d47/tests/__init__.py
--------------------------------------------------------------------------------
/tests/test_emitter.py:
--------------------------------------------------------------------------------
1 | import logging
2 | from unittest.mock import patch
3 |
4 | from epilog import emitter
5 |
6 |
7 | def test_logger(caplog):
8 | with caplog.at_level(logging.DEBUG):
9 | emitter.logger.debug("Test debug message.")
10 | emitter.logger.info("Test info message.")
11 | emitter.logger.warning("Test warning message.")
12 | emitter.logger.error("Test error message.")
13 |
14 | assert "debug" in caplog.text
15 | assert "info" in caplog.text
16 | assert "warning" in caplog.text
17 | assert "error" in caplog.text
18 |
19 |
20 | @patch("epilog.emitter.time.sleep", autospec=True)
21 | def test_emitter(mock_sleep, caplog):
22 | mock_sleep.return_value = None
23 |
24 | with caplog.at_level(logging.DEBUG):
25 | emitter.emit_log(limit=1)
26 |
27 | assert "debug" in caplog.text
28 | assert "info" in caplog.text
29 | assert "warning" in caplog.text
30 | assert "error" in caplog.text
31 |
32 |
33 | def test_cli():
34 |     args = emitter.cli(["-l", "1"])
35 | assert args.limit.pop() == 1
36 |
--------------------------------------------------------------------------------