├── .gitignore
├── elastic
│   ├── elasticsearch.yaml
│   ├── kibana.yaml
│   ├── logstash.yaml
│   └── logging.conf
├── requests
│   ├── api.http
│   └── http-client.env.json
├── views
│   ├── index.jade
│   ├── error.jade
│   └── layout.jade
├── public
│   └── stylesheets
│       └── style.css
├── .idea
│   ├── vcs.xml
│   ├── .gitignore
│   ├── modules.xml
│   └── loggingExample.iml
├── models
│   └── User.js
├── data
│   ├── db.js
│   ├── seeds
│   │   └── 1_users.js
│   └── migrations
│       └── 20210705221247_users.js
├── services
│   └── users.js
├── routes
│   ├── index.js
│   └── users.js
├── Dockerfile
├── knexfile.js
├── package.json
├── Makefile
├── app.js
├── README.md
├── docker-compose.yaml
├── bin
│   └── www
└── helpers
    └── logging.js
/.gitignore:
--------------------------------------------------------------------------------
1 | **/node_modules
2 | **/.DS_Store
3 |
--------------------------------------------------------------------------------
/elastic/elasticsearch.yaml:
--------------------------------------------------------------------------------
1 | cluster.name: "docker-cluster"
2 | network.host: 0.0.0.0
3 |
--------------------------------------------------------------------------------
/requests/api.http:
--------------------------------------------------------------------------------
1 | ## Users endpoint
2 | GET {{host}}/users
3 | Content-Type: application/json
4 |
--------------------------------------------------------------------------------
/requests/http-client.env.json:
--------------------------------------------------------------------------------
1 | {
2 | "dev": {
3 |     "host": "http://localhost:3000"
4 | }
5 | }
--------------------------------------------------------------------------------
/views/index.jade:
--------------------------------------------------------------------------------
1 | extends layout
2 |
3 | block content
4 | h1= title
5 | p Welcome to #{title}
6 |
--------------------------------------------------------------------------------
/elastic/kibana.yaml:
--------------------------------------------------------------------------------
1 | elasticsearch:
2 | hosts:
3 | - http://eslogs:9200
4 | server:
5 | host: 0.0.0.0
6 |
--------------------------------------------------------------------------------
/views/error.jade:
--------------------------------------------------------------------------------
1 | extends layout
2 |
3 | block content
4 | h1= message
5 | h2= error.status
6 | pre #{error.stack}
7 |
--------------------------------------------------------------------------------
/public/stylesheets/style.css:
--------------------------------------------------------------------------------
1 | body {
2 | padding: 50px;
3 | font: 14px "Lucida Grande", Helvetica, Arial, sans-serif;
4 | }
5 |
6 | a {
7 | color: #00B7FF;
8 | }
9 |
--------------------------------------------------------------------------------
/views/layout.jade:
--------------------------------------------------------------------------------
1 | doctype html
2 | html
3 | head
4 | title= title
5 | link(rel='stylesheet', href='/stylesheets/style.css')
6 | body
7 | block content
8 |
--------------------------------------------------------------------------------
/elastic/logstash.yaml:
--------------------------------------------------------------------------------
1 | http:
2 | host: "0.0.0.0"
3 | xpack:
4 | monitoring:
5 | enabled: true
6 | elasticsearch:
7 | hosts:
8 | - http://eslogs:9200
9 |
--------------------------------------------------------------------------------
/models/User.js:
--------------------------------------------------------------------------------
1 | const {Model} = require('objection')
2 |
3 | class User extends Model {
4 | id
5 | name
6 | email
7 |
8 | static tableName = 'users'
9 | }
10 |
11 | module.exports = User
12 |
--------------------------------------------------------------------------------
/.idea/.gitignore:
--------------------------------------------------------------------------------
1 | # Default ignored files
2 | /shelf/
3 | /workspace.xml
4 | # Datasource local storage ignored files
5 | /dataSources/
6 | /dataSources.local.xml
7 | # Editor-based HTTP Client requests
8 | /httpRequests/
9 |
--------------------------------------------------------------------------------
/data/db.js:
--------------------------------------------------------------------------------
1 | const knex = require('knex')
2 | const knexfile = require('../knexfile')
3 |
4 | const env = process.env.NODE_ENV || 'dev' // default to the 'dev' config defined in knexfile.js
5 | const configOptions = knexfile[env]
6 |
7 | module.exports = knex(configOptions)
8 |
--------------------------------------------------------------------------------
/data/seeds/1_users.js:
--------------------------------------------------------------------------------
1 | exports.seed = async function seed(knex) {
2 |   // clear any existing rows so the seed can be re-run without duplicating users
3 |   await knex('users').del()
4 |   await knex('users').insert([
5 |     {name: 'Johnny Testalot', email: 'jtestalot@test.com'},
6 |     {name: 'Harold J. Droptable', email: 'hdroptable@sql.org'},
7 |     {name: 'Sheldon Dinkleberg', email: 'sdinkleberg@aol.com'},
8 |   ])
9 | }
--------------------------------------------------------------------------------
/services/users.js:
--------------------------------------------------------------------------------
1 | const logger = require('../helpers/logging')
2 | const Users = require('../models/User')
3 |
4 | async function getUsers() {
5 | logger.info('Fetching users from database')
6 | const users = await Users.query()
7 | return users
8 | }
9 |
10 | module.exports = {getUsers}
11 |
--------------------------------------------------------------------------------
/routes/index.js:
--------------------------------------------------------------------------------
1 | var express = require('express');
2 | var router = express.Router();
3 | const logger = require('../helpers/logging')
4 |
5 | /* GET home page. */
6 | router.get('/', function(req, res, next) {
7 | logger.info('log from root route')
8 | res.render('index', { title: 'Express' });
9 | });
10 |
11 | module.exports = router;
12 |
--------------------------------------------------------------------------------
/data/migrations/20210705221247_users.js:
--------------------------------------------------------------------------------
1 | exports.up = knex => {
2 | return knex.schema
3 | .createTable('users', table => {
4 | table.increments('id').primary()
5 | table.string('name')
6 | table.string('email')
7 | })
8 | }
9 |
10 | exports.down = knex => {
11 | return knex.schema
12 | .dropTable('users')
13 | }
14 |
--------------------------------------------------------------------------------
/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM node:16-alpine
2 |
3 | RUN apk update && apk upgrade && apk add bash make git python3
4 |
5 | # Set working directory
6 | WORKDIR /app
7 |
8 | # Add `/app/node_modules/.bin` to $PATH
9 | ENV PATH /app/node_modules/.bin:$PATH
10 |
11 | # Install app dependencies
12 | COPY package*.json ./
13 | RUN npm install
14 |
15 | EXPOSE 9229
16 | EXPOSE 3000
17 |
18 | # Fire it up!
19 | CMD ["npm", "run", "start"]
20 |
--------------------------------------------------------------------------------
/routes/users.js:
--------------------------------------------------------------------------------
1 | var express = require('express');
2 | var router = express.Router();
3 | const logger = require('../helpers/logging')
4 | const {getUsers} = require('../services/users')
5 |
6 | /* GET users listing. */
7 | router.get('/', async function(req, res, next) {
8 |   logger.info('log from user endpoint', {with: 'some', extra: 'data'})
9 |   // forward async errors to the Express error handler instead of leaving the request hanging
10 |   try {
11 |     const users = await getUsers()
12 |     res.send(users)
13 |   } catch (err) {
14 |     next(err)
15 |   }
16 | });
17 | 
18 | module.exports = router;
19 | 
--------------------------------------------------------------------------------
/knexfile.js:
--------------------------------------------------------------------------------
1 | // knex db connection info: create additional configs (test, staging, prod) here
2 | module.exports = {
3 | dev: {
4 | client: 'pg',
5 | connection: {
6 | host: 'db',
7 | database: 'users',
8 | user: 'postgres',
9 | password: 'password',
10 | },
11 | pool: {
12 | min: 2,
13 | max: 10
14 | },
15 | migrations: {
16 | directory: './data/migrations',
17 | },
18 | seeds: {
19 | directory: './data/seeds',
20 | }
21 | }
22 | }
--------------------------------------------------------------------------------
/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "loggingexample",
3 | "version": "0.1.0",
4 | "private": true,
5 | "scripts": {
6 | "start": "node ./bin/www",
7 | "dev": "./node_modules/.bin/nodemon --inspect=0.0.0.0:9229 ./bin/www"
8 | },
9 | "dependencies": {
10 | "cookie-parser": "~1.4.4",
11 | "debug": "~2.6.9",
12 | "express": "~4.16.1",
13 | "http-errors": "~1.6.3",
14 | "jade": "^1.9.2",
15 | "knex": "^0.21.19",
16 | "morgan": "~1.9.1",
17 | "objection": "^2.2.15",
18 | "pg": "^8.6.0",
19 | "winston": "^3.3.3",
20 | "winston-logstash-transport": "^2.0.0"
21 | },
22 | "devDependencies": {
23 | "nodemon": "^2.0.9"
24 | }
25 | }
26 |
--------------------------------------------------------------------------------
/elastic/logging.conf:
--------------------------------------------------------------------------------
1 | input {
2 | udp {
3 | id => "express_api"
4 | port => 1514
5 | codec => "json"
6 | type => "express_api"
7 | }
8 | }
9 |
10 | filter {
11 | if [level] == "http" {
12 | grok {
13 | match => { "message" => "%{COMBINEDAPACHELOG}" }
14 | }
15 | date {
16 | match => [ "timestamp", "dd/MMM/YYYY:H:m:s Z" ]
17 | remove_field => "timestamp"
18 | }
19 | useragent {
20 | source => "agent"
21 | target => "user_agent"
22 | remove_field => "agent"
23 | }
24 | }
25 | }
26 |
27 | output {
28 | elasticsearch {
29 | hosts => ["eslogs:9200"]
30 | }
31 | stdout {
32 | codec => rubydebug
33 | }
34 | }
--------------------------------------------------------------------------------
/Makefile:
--------------------------------------------------------------------------------
1 | ## up: bring the Express server and logging environment online using docker compose
2 | .PHONY: up
3 | up:
4 | @echo '🔥 bringing environment up'
5 | docker-compose up -d --build
6 |
7 | ## down: shutdown the environment but persist data for next time
8 | .PHONY: down
9 | down:
10 | @echo '🛑 shutting down environment, data is preserved'
11 | docker-compose down
12 |
13 | ## destroy: shutdown the environment and remove all saved data
14 | .PHONY: destroy
15 | destroy:
16 | @echo '💣 shutting down environment and destroying data'
17 | docker-compose down -v
18 |
19 | ## migrate: migrate the database to the latest schema
20 | .PHONY: migrate
21 | migrate:
22 | @echo '🛠 migrating database schema'
23 | docker exec expressjs ./node_modules/.bin/knex migrate:latest
24 |
25 | ## rollback_schema: rollback the database schema
26 | .PHONY: rollback_schema
27 | rollback_schema:
28 | @echo '🛠 rolling back db schema'
29 | docker exec expressjs ./node_modules/.bin/knex migrate:rollback
30 |
31 | ## seed: seed the database with data for development
32 | .PHONY: seed
33 | seed:
34 | @echo '🌱 seeding database with development data'
35 | docker exec expressjs ./node_modules/.bin/knex seed:run
36 |
--------------------------------------------------------------------------------
/app.js:
--------------------------------------------------------------------------------
1 | var createError = require('http-errors');
2 | var express = require('express');
3 | var path = require('path');
4 | var cookieParser = require('cookie-parser');
5 | const logger = require('./helpers/logging')
6 | const knex = require('./data/db')
7 | const {Model} = require('objection')
8 | var indexRouter = require('./routes/index');
9 | var usersRouter = require('./routes/users');
10 |
11 | logger.info('Connecting to database...')
12 | Model.knex(knex)
13 | var app = express();
14 |
15 | // view engine setup
16 | app.set('views', path.join(__dirname, 'views'));
17 | app.set('view engine', 'jade');
18 |
19 | app.use(require('morgan')('combined', {'stream': logger.stream}))
20 | app.use(express.json());
21 | app.use(express.urlencoded({ extended: false }));
22 | app.use(cookieParser());
23 | app.use(express.static(path.join(__dirname, 'public')));
24 |
25 | app.use('/', indexRouter);
26 | app.use('/users', usersRouter);
27 |
28 | // catch 404 and forward to error handler
29 | app.use(function(req, res, next) {
30 | next(createError(404));
31 | });
32 |
33 | // error handler
34 | app.use(function(err, req, res, next) {
35 | // set locals, only providing error in development
36 | res.locals.message = err.message;
37 | res.locals.error = req.app.get('env') === 'development' ? err : {};
38 |
39 | // render the error page
40 | res.status(err.status || 500);
41 | res.render('error');
42 | });
43 |
44 | module.exports = app;
45 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Logging NodeJS with the ELK Stack
2 |
3 | This repo serves as a functioning proof-of-concept for the video [Logging Nodejs apps with ELK: Elasticsearch, Logstash, and Kibana](https://www.youtube.com/watch?v=nnpcTyHZvS8).
4 |
5 | # Requirements
6 |
7 | You'll need Docker Desktop (or another Docker setup with `docker-compose`) and `make` to run the example.
8 |
9 | # Usage
10 |
11 | ## Launch the environment
12 |
13 | Type `make up` to bring the docker-compose environment up. This will launch the ExpressJS application server, Postgres,
14 | Elasticsearch, Logstash, and Kibana.
15 |
16 | ## Seeding the database environment
17 |
18 | Use `make migrate` to create the database schema (required for the `/users` endpoint) followed by `make seed` to populate
19 | the database with test data.
20 |
21 | ## Generate some logs
22 |
23 | Once the environment is running, hit `http://localhost:3000` and `http://localhost:3000/users` to generate some logs.
24 |
25 | The logs are sent to Logstash using the `winston-logstash-transport` package in `./helpers/logging.js`. Logstash is configured to forward
26 | the logs to Elasticsearch using the pipeline config found in `elastic/logging.conf`.
27 |
28 | Launch [Kibana](http://localhost:5601) and follow the instructions to set up the logstash index pattern. You can then view the logs
29 | in Kibana's Discover view.
30 |
31 | The ExpressJS `/` endpoint simply logs an info message. The `/users` endpoint also logs an info message, along with some key/value
32 | pairs that can be visualized in Kibana (and optionally reported on, monitored, or added to a dashboard), as sketched below.
33 |
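34 | Below is a minimal sketch of that kind of structured logging call. It assumes the logger exported from `./helpers/logging.js`;
35 | the message text and field names are illustrative, not taken from the app code:
36 | 
37 | ```js
38 | const logger = require('./helpers/logging')
39 | 
40 | // extra key/value pairs are serialized into the JSON event sent to Logstash,
41 | // so they become searchable fields in Elasticsearch/Kibana
42 | logger.info('user lookup complete', {route: '/users', resultCount: 3})
43 | ```
44 | 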
45 | Happy coding!
46 | 
--------------------------------------------------------------------------------
/docker-compose.yaml:
--------------------------------------------------------------------------------
1 | version: '3'
2 | services:
3 | # NodeJS/ExpressJS server
4 | express_server:
5 | container_name: expressjs
6 | build:
7 | context: ./
8 | dockerfile: Dockerfile
9 | volumes:
10 | - './:/app'
11 | - '/app/node_modules'
12 | ports:
13 | - 3000:3000
14 | - 9229:9229
15 | environment:
16 | - NODE_ENV=dev
17 | entrypoint: ["npm", "run", "dev"]
18 |
19 | # Elasticsearch
20 | eslogs:
21 | image: elasticsearch:7.8.0
22 | container_name: eslogs
23 | volumes:
24 | - elasticsearch_data:/usr/share/elasticsearch/data
25 | - ./elastic/elasticsearch.yaml:/usr/share/elasticsearch/config/elasticsearch.yml
26 | ports:
27 | - 9200:9200
28 | - 9300:9300
29 | environment:
30 | discovery.type: single-node
31 |
32 | # Kibana. Gotta have a pretty UI, amiright?
33 | kibana_demo:
34 | image: docker.elastic.co/kibana/kibana:7.8.0
35 | container_name: kibana_demo
36 | volumes:
37 | - ./elastic/kibana.yaml:/usr/share/kibana/config/kibana.yml
38 | ports:
39 | - 5601:5601
40 |
41 | # All hail the mighty, mighty LOOOOOOOGSTAAAAAAAAASH:
42 | logstash_demo:
43 | image: logstash:7.8.1
44 | container_name: logstash_demo
45 | volumes:
46 | - ./elastic/logging.conf:/usr/share/logstash/pipeline/logging.conf
47 | - ./elastic/logstash.yaml:/usr/share/logstash/config/logstash.yml
48 | ports:
49 | - 1514:1514/udp
50 | - 1515:1515
51 | - 1515:1515/udp
52 |
53 | # Postgres DB for saving stuff
54 | db:
55 | image: postgres:latest
56 | environment:
57 | POSTGRES_USER: postgres
58 | POSTGRES_PASSWORD: password
59 | POSTGRES_DB: users
60 | volumes:
61 | - api_db_data:/var/lib/postgresql/data
62 | ports:
63 | - 5432:5432
64 |
65 | volumes:
66 | elasticsearch_data:
67 | driver: local
68 | api_db_data:
69 | driver: local
70 |
--------------------------------------------------------------------------------
/bin/www:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env node
2 |
3 | /**
4 | * Module dependencies.
5 | */
6 |
7 | var app = require('../app');
8 | var debug = require('debug')('loggingexample:server');
9 | var http = require('http');
10 |
11 | /**
12 | * Get port from environment and store in Express.
13 | */
14 |
15 | var port = normalizePort(process.env.PORT || '3000');
16 | app.set('port', port);
17 |
18 | /**
19 | * Create HTTP server.
20 | */
21 |
22 | var server = http.createServer(app);
23 |
24 | /**
25 | * Listen on provided port, on all network interfaces.
26 | */
27 |
28 | server.listen(port);
29 | server.on('error', onError);
30 | server.on('listening', onListening);
31 |
32 | /**
33 | * Normalize a port into a number, string, or false.
34 | */
35 |
36 | function normalizePort(val) {
37 | var port = parseInt(val, 10);
38 |
39 | if (isNaN(port)) {
40 | // named pipe
41 | return val;
42 | }
43 |
44 | if (port >= 0) {
45 | // port number
46 | return port;
47 | }
48 |
49 | return false;
50 | }
51 |
52 | /**
53 | * Event listener for HTTP server "error" event.
54 | */
55 |
56 | function onError(error) {
57 | if (error.syscall !== 'listen') {
58 | throw error;
59 | }
60 |
61 | var bind = typeof port === 'string'
62 | ? 'Pipe ' + port
63 | : 'Port ' + port;
64 |
65 | // handle specific listen errors with friendly messages
66 | switch (error.code) {
67 | case 'EACCES':
68 | console.error(bind + ' requires elevated privileges');
69 | process.exit(1);
70 | break;
71 | case 'EADDRINUSE':
72 | console.error(bind + ' is already in use');
73 | process.exit(1);
74 | break;
75 | default:
76 | throw error;
77 | }
78 | }
79 |
80 | /**
81 | * Event listener for HTTP server "listening" event.
82 | */
83 |
84 | function onListening() {
85 | var addr = server.address();
86 | var bind = typeof addr === 'string'
87 | ? 'pipe ' + addr
88 | : 'port ' + addr.port;
89 | debug('Listening on ' + bind);
90 | }
91 |
--------------------------------------------------------------------------------
/helpers/logging.js:
--------------------------------------------------------------------------------
1 | const winston = require('winston')
2 | const logstash = require('winston-logstash-transport')
3 |
4 | const MESSAGE = Symbol.for('message')
5 | const LEVEL = Symbol.for('level')
6 |
7 | const errorToLog = log => {
8 | // convert an instance of the Error class to a formatted log
9 | const formatted = {
10 | message: null,
11 | level: 'error',
12 | }
13 | formatted[LEVEL] = 'error'
14 | if (log.message) {
15 | formatted.message = `${log.message}: ${log.stack}`
16 | } else {
17 | formatted.message = log.stack
18 | }
19 | return formatted
20 | }
21 |
22 | const errorFormatter = logEntry => {
23 | if (logEntry instanceof Error) {
24 | // an error object was passed in
25 | return errorToLog(logEntry)
26 | }
27 | if (logEntry.stack) {
28 | // an error object was passed in addition to an error message
29 | logEntry.message = `${logEntry.message}: ${logEntry.stack}`
30 | }
31 | if (logEntry.message && (typeof(logEntry.message)) === 'object') {
32 | if (logEntry.message?.err instanceof Error) {
33 | // Ugh. So here we are with a log message that is an instance of the Error class
34 | return errorToLog(logEntry.message.err)
35 | } else {
36 | // here we have an object as the log message but it's not an Error object
37 | logEntry.message = JSON.stringify(logEntry.message)
38 | }
39 | }
40 | return logEntry
41 | }
42 |
43 | const consoleTransport = new winston.transports.Console({
44 | format: winston.format.combine(
45 | winston.format.colorize(),
46 | winston.format.cli({
47 | colors: {
48 | error: 'red',
49 | warn: 'yellow',
50 | info: 'blue',
51 | http: 'green',
52 | verbose: 'cyan',
53 | debug: 'white'
54 | }
55 | }),
56 | ),
57 | handleExceptions: true
58 | })
59 | const logstashTransport = new logstash.LogstashTransport({
60 | host: 'logstash_demo',
61 | port: 1514
62 | })
63 |
64 | const envTag = (logEntry) => {
65 | const tag = {
66 | env: process.env.APPLICATION_ENV || 'local'
67 | }
68 | const taggedLog = Object.assign(tag, logEntry)
69 | logEntry[MESSAGE] = JSON.stringify(taggedLog)
70 | return logEntry
71 | }
72 |
73 | const transports = []
74 |
75 | // configure transports (defined above)
76 | transports.push(consoleTransport)
77 | transports.push(logstashTransport)
78 |
79 | const logger = winston.createLogger({
80 | level: process.env.LOG_LEVEL || 'info',
81 | format: winston.format.combine(
82 | winston.format(errorFormatter)(),
83 | winston.format(envTag)(),
84 | ),
85 | transports
86 | })
87 |
88 | logger.stream = {
89 | write: function(message, _encoding) {
90 | logger.http(message)
91 | }
92 | }
93 |
94 | module.exports = logger
95 |
--------------------------------------------------------------------------------