├── .github └── workflows │ └── cd.yml ├── .gitignore ├── FUNDING.yml ├── README.md ├── book.src ├── SUMMARY.md ├── author.md ├── elasticsearch.md ├── end.md ├── filebeat.md ├── imgs │ ├── AskCeelog.png │ ├── Kafka-Intro.png │ ├── ceelog.jpg │ ├── docker-compose.jpg │ ├── docker-containers.png │ ├── docker-desktop-mac.png │ ├── docker-desktop-resource.png │ ├── docker-desktop-start.png │ ├── elasticsearch.jpg │ ├── elk-stack.gliffy │ ├── elk-stack.jpg │ ├── filebeat.png │ ├── grok-debugger.png │ ├── kafka-overview.png │ ├── kibana-create-dashbord.jpg │ ├── kibana-create-index-pattern.jpg │ ├── kibana-create-visualize.png │ ├── kibana-dashbord.png │ ├── kibana-index-pattern.png │ ├── kibana-query-demo.png │ ├── kibana-tsvb.jpg │ ├── kibana-visualization-area.png │ ├── kibana-visualization-pie.png │ ├── kibana-visualization.png │ ├── kibana-welcome.jpg │ ├── kibana.jpg │ ├── logstash.jpg │ └── nginx-web.jpg ├── kafka.md ├── kibana.md ├── kibana │ ├── dashbord.md │ ├── query.md │ └── visualize.md ├── logstash.md ├── nginx.md ├── preface.md ├── setup.md └── what-is-elk.md ├── book.toml └── docker-compose ├── .env ├── docker-compose.yml ├── elasticsearch ├── Dockerfile └── config │ └── elasticsearch.yml ├── filebeat ├── Dockerfile └── filebeat.yml ├── kibana ├── Dockerfile └── config │ └── kibana.yml ├── logstash ├── Dockerfile ├── config │ └── logstash.yml └── pipeline │ ├── logstash.conf │ └── nginx.conf └── nginx ├── html └── index.html └── nginx.conf /.github/workflows/cd.yml: -------------------------------------------------------------------------------- 1 | name: github pages 2 | 3 | on: 4 | push: 5 | branches: 6 | - master 7 | 8 | jobs: 9 | deploy: 10 | runs-on: ubuntu-18.04 11 | steps: 12 | - uses: actions/checkout@v2 13 | 14 | - name: Setup mdBook 15 | uses: peaceiris/actions-mdbook@v1 16 | with: 17 | # mdbook-version: '0.4.1' 18 | mdbook-version: 'latest' 19 | 20 | - run: mdbook build 21 | 22 | - name: Deploy 23 | uses: peaceiris/actions-gh-pages@v3 24 | with: 25 | github_token: ${{ secrets.GITHUB_TOKEN }} 26 | publish_dir: ./book -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | /book 2 | /TODO 3 | /elk -------------------------------------------------------------------------------- /FUNDING.yml: -------------------------------------------------------------------------------- 1 | # These are supported funding model platforms 2 | 3 | github: #[Ceelog] 4 | patreon: # Replace with a single Patreon username 5 | open_collective: # Replace with a single Open Collective username 6 | ko_fi: # Replace with a single Ko-fi username 7 | tidelift: # Replace with a single Tidelift platform-name/package-name e.g., npm/babel 8 | community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry 9 | liberapay: # Replace with a single Liberapay username 10 | issuehunt: # Replace with a single IssueHunt username 11 | otechie: # Replace with a single Otechie username 12 | custom: ['https://pic2.zhimg.com/80/v2-6b84f07b309b6e1ad1b3141c3e9d34dc_1440w.png'] 13 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # ELK 实践小册 2 | 3 | 本小册全面介绍 ELK Stack 包含的各个组件及其使用方法,让零基础的读者也能轻松搭建整个套件,并运用到实际的生产环境中。 4 | 5 | 在示例中,我们将打造一个大规模日志实时分析系统,实现每天数十亿条日志或者 PB 级数据的分析处理。 6 | 7 | ## 在线阅读 8 | 9 | 
[https://ceelog.github.io/learn-elk/](https://ceelog.github.io/learn-elk/) 10 | 11 | ## 目录 12 | 13 | - [前言](./book.src/preface.md) 14 | - [什么是 ELK Stack](./book.src/what-is-elk.md) 15 | - [演示环境安装与配置](./book.src/setup.md) 16 | - [Nginx 访问日志](./book.src/nginx.md) 17 | - [Filebeat 日志收割](./book.src/filebeat.md) 18 | - [Kafka 消息队列](./book.src/kafka.md) 19 | - [Logstash 处理日志](./book.src/logstash.md) 20 | - [Elasticsearch 索引日志](./book.src/elasticsearch.md) 21 | - [Kibana 分析日志](./book.src/kibana.md) 22 | - [查询日志](./book.src/kibana/query.md) 23 | - [可视化报表](./book.src/kibana/visualize.md) 24 | - [Dashbord](./book.src/kibana/dashbord.md) 25 | - [结语](./book.src/end.md) 26 | - [关于作者](./book.src/author.md) -------------------------------------------------------------------------------- /book.src/SUMMARY.md: -------------------------------------------------------------------------------- 1 | # 基于 ELK 的大规模日志实时分析系统 2 | 3 | - [前言](./preface.md) 4 | - [什么是 ELK Stack](./what-is-elk.md) 5 | - [演示环境安装与配置](./setup.md) 6 | - [Nginx 访问日志](./nginx.md) 7 | - [Filebeat 日志收集](./filebeat.md) 8 | - [Kafka 消息队列](./kafka.md) 9 | - [Logstash 处理日志](./logstash.md) 10 | - [Elasticsearch 索引日志](./elasticsearch.md) 11 | - [Kibana 分析日志](./kibana.md) 12 | - [查询日志](./kibana/query.md) 13 | - [可视化报表](./kibana/visualize.md) 14 | - [Dashbord](./kibana/dashbord.md) 15 | - [结语](./end.md) 16 | - [关于作者](./author.md) -------------------------------------------------------------------------------- /book.src/author.md: -------------------------------------------------------------------------------- 1 | # 关于作者 2 | 3 | - 关注作者微信: 4 | 5 |  6 | -------------------------------------------------------------------------------- /book.src/elasticsearch.md: -------------------------------------------------------------------------------- 1 | # Elasticsearch 索引日志 2 | 3 | ## Elasticsearch 服务配置 4 | 5 | 在 docker-compose.yaml 对于 Elasticsearch 服务的配置中: 6 | 7 | ```yaml 8 | elasticsearch: 9 | build: 10 | context: elasticsearch/ 11 | args: 12 | ELK_VERSION: $ELK_VERSION 13 | volumes: 14 | - type: bind 15 | source: ./elasticsearch/config/elasticsearch.yml 16 | target: /usr/share/elasticsearch/config/elasticsearch.yml 17 | read_only: true 18 | - type: volume 19 | source: elasticsearch 20 | target: /usr/share/elasticsearch/data 21 | ports: 22 | - "9200:9200" 23 | - "9300:9300" 24 | environment: 25 | ES_JAVA_OPTS: "-Xmx512m -Xms512m" 26 | ELASTIC_PASSWORD: changeme 27 | # Use single node discovery in order to disable production mode and avoid bootstrap checks 28 | # see https://www.elastic.co/guide/en/elasticsearch/reference/current/bootstrap-checks.html 29 | discovery.type: single-node 30 | networks: 31 | - elk 32 | ``` 33 | 34 | 35 | - 使用 elasticsearch/ 目录下的 Dockerfile 构建镜像 36 | - 数据卷映射: 37 | - 将宿主机目录下的配置文件映射到容器内 38 | - 使用宿主机上名为 elasticsearch 的数据卷,并映射到容器内的 `/usr/share/elasticsearch/data` 目录,这样可以长久保存日志数据 39 | - 使用名为 elk 的子网络,这样便可以访问同样使用这个子网络的其他容器 40 | -------------------------------------------------------------------------------- /book.src/end.md: -------------------------------------------------------------------------------- 1 | # 结语 2 | 3 | 恭喜你!完成了整个 Elastic Stack 的学习。 4 | 5 | 现在,你拥有了搭建大规模日志实时分析系统的能力,马上试着应用到实际的工作中去吧! 

6 | -------------------------------------------------------------------------------- /book.src/filebeat.md: -------------------------------------------------------------------------------- 1 | # Filebeat 日志收割 2 | 3 | ## Filebeat 服务配置 4 | 5 | 在 docker-compose.yaml 对于 Filebeat 服务的配置中: 6 | 7 | ```yaml 8 | filebeat: 9 | build: 10 | context: filebeat/ 11 | args: 12 | ELK_VERSION: $ELK_VERSION 13 | volumes: 14 | - "./filebeat/filebeat.yml:/usr/share/filebeat/filebeat.yml:ro" 15 | - type: volume 16 | source: nginx-log 17 | target: /var/log 18 | depends_on: 19 | - nginx 20 | networks: 21 | - elk 22 | ``` 23 | 24 | - 使用 filebeat/ 目录下的 Dockerfile 构建镜像 25 | - 数据卷映射: 26 | - 将宿主机目录下的配置文件映射到容器内 27 | - 使用宿主机上名为 nginx-log 的数据卷,并映射到容器内的 `/var/log` 目录,这和 Nginx 容器使用的是同一个目录 28 | - Filebeat 服务依赖于 Nginx 服务,所以 Docker Compose 会先启动 Nginx ,然后再启动 Filebeat 29 | - 使用名为 elk 的子网络,这样便可以访问同样使用这个子网络的其他容器 30 | 31 | ## Filebeat 日志收割配置 32 | 33 | 对于 `filebeat/filebeat.yml` 配置: 34 | 35 | ```yaml 36 | {{#include ../docker-compose/filebeat/filebeat.yml}} 37 | ``` 38 | 39 | Filebeat 通过配置 input / processors / output 模块,实现日志收集、预处理、投递等工作 40 | 41 | 对于 input 模块: 42 | 43 | - `type` 配置数据来源的类型,例如 文本日志 log 或者 控制台输出 stdout 44 | - `paths` 指定日志的路径,可以使用模糊匹配,例如 `/var/log/*access.log` 45 | - `encoding` 日志的编码,例如 utf-8 46 | - `fields` 自定义添加的字段,例如 增加一个主题字段 `topic: nginx-log` 47 | - `scan_frequency: 1s` 扫描日志文件的间隔 48 | - `harvester_buffer_size: 16384` 日志收割缓存大小 49 | - `tail_files: false` 启动 filebeat 后,是否从目标日志文件的结尾开始收集 50 | - `close_eof: true` 采集日志到文件结尾的时候,是否关闭采集进程 51 | - `clean_removed: true` 采集目标日志文件被移除后,是否从 filebeat registry 清楚 52 | 53 | 更全面的配置信息,请参考: 54 | - [https://www.elastic.co/guide/en/beats/filebeat/current/filebeat-input-log.html](https://www.elastic.co/guide/en/beats/filebeat/current/filebeat-input-log.html) 55 | 56 | 对于 processors 模块: 57 | 58 | - `drop_fields` 可以舍弃部分多余字段 59 | 60 | 对于 output 模块: 61 | 62 | - `output.kafka` 将收集的日志投递到 Kafka 消息队列 63 | - `hosts: ["kafka:9092"]` 指定 Kafka 服务的地址 64 | - `topic: elk-%{[fields.topic]}` 指定投递到 Kafka 消息队列的频道 65 | - `keep_alive: 60` Filebeat 和 Kafka 之间保持连接的时间 66 | - `required_acks: 1` 日志消息投递后,是否需要等待 Kafka 的确认反馈 67 | 68 | 更全面的配置信息,请参考: 69 | - [https://www.elastic.co/guide/en/beats/filebeat/current/kafka-output.html](https://www.elastic.co/guide/en/beats/filebeat/current/kafka-output.html) 70 | 71 | ## 小结 72 | 73 | Filebeat 启动后,开始监听目标文件,当发现新日志后就收集转发出去。 -------------------------------------------------------------------------------- /book.src/imgs/AskCeelog.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Ceelog/learn-elk/9115022b21f214a6badc4d429b2ac2b52d146dde/book.src/imgs/AskCeelog.png -------------------------------------------------------------------------------- /book.src/imgs/Kafka-Intro.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Ceelog/learn-elk/9115022b21f214a6badc4d429b2ac2b52d146dde/book.src/imgs/Kafka-Intro.png -------------------------------------------------------------------------------- /book.src/imgs/ceelog.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Ceelog/learn-elk/9115022b21f214a6badc4d429b2ac2b52d146dde/book.src/imgs/ceelog.jpg -------------------------------------------------------------------------------- /book.src/imgs/docker-compose.jpg: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/Ceelog/learn-elk/9115022b21f214a6badc4d429b2ac2b52d146dde/book.src/imgs/docker-compose.jpg -------------------------------------------------------------------------------- /book.src/imgs/docker-containers.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Ceelog/learn-elk/9115022b21f214a6badc4d429b2ac2b52d146dde/book.src/imgs/docker-containers.png -------------------------------------------------------------------------------- /book.src/imgs/docker-desktop-mac.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Ceelog/learn-elk/9115022b21f214a6badc4d429b2ac2b52d146dde/book.src/imgs/docker-desktop-mac.png -------------------------------------------------------------------------------- /book.src/imgs/docker-desktop-resource.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Ceelog/learn-elk/9115022b21f214a6badc4d429b2ac2b52d146dde/book.src/imgs/docker-desktop-resource.png -------------------------------------------------------------------------------- /book.src/imgs/docker-desktop-start.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Ceelog/learn-elk/9115022b21f214a6badc4d429b2ac2b52d146dde/book.src/imgs/docker-desktop-start.png -------------------------------------------------------------------------------- /book.src/imgs/elasticsearch.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Ceelog/learn-elk/9115022b21f214a6badc4d429b2ac2b52d146dde/book.src/imgs/elasticsearch.jpg -------------------------------------------------------------------------------- /book.src/imgs/elk-stack.gliffy: -------------------------------------------------------------------------------- 1 | {"contentType":"application/gliffy+json","version":"1.1","metadata":{"title":"untitled","revision":0,"exportBorder":false},"embeddedResources":{"index":0,"resources":[]},"stage":{"objects":[{"x":665.5555555555558,"y":295.0000000000001,"rotation":0,"id":34,"uid":"com.gliffy.shape.basic.basic_v1.default.rectangle","width":184.44444444444443,"height":39.28571428571429,"lockAspectRatio":false,"lockShape":false,"order":26,"graphic":{"type":"Shape","Shape":{"tid":"com.gliffy.stencil.rectangle.basic_v1","strokeWidth":1,"strokeColor":"#333333","fillColor":"none","gradient":false,"dropShadow":false,"state":0,"shadowX":0,"shadowY":0,"opacity":1}},"children":[{"x":4.098765432098766,"y":0,"rotation":0,"id":35,"uid":null,"width":176.24691358024697,"height":18,"lockAspectRatio":false,"lockShape":false,"order":"auto","graphic":{"type":"Text","Text":{"tid":null,"valign":"middle","overflow":"none","vposition":"none","hposition":"none","html":"
Kafdrop
","paddingLeft":2,"paddingRight":2,"paddingBottom":2,"paddingTop":2}},"children":null}],"linkMap":[]},{"x":665.5555555555558,"y":255.71428571428575,"rotation":0,"id":31,"uid":"com.gliffy.shape.basic.basic_v1.default.rectangle","width":184.44444444444443,"height":39.28571428571429,"lockAspectRatio":false,"lockShape":false,"order":23,"graphic":{"type":"Shape","Shape":{"tid":"com.gliffy.stencil.rectangle.basic_v1","strokeWidth":1,"strokeColor":"#333333","fillColor":"none","gradient":false,"dropShadow":false,"state":0,"shadowX":0,"shadowY":0,"opacity":1}},"children":[{"x":4.098765432098766,"y":0,"rotation":0,"id":33,"uid":null,"width":176.24691358024697,"height":18,"lockAspectRatio":false,"lockShape":false,"order":"auto","graphic":{"type":"Text","Text":{"tid":null,"valign":"middle","overflow":"none","vposition":"none","hposition":"none","html":"ZooKeeper
","paddingLeft":2,"paddingRight":2,"paddingBottom":2,"paddingTop":2}},"children":null}],"linkMap":[]},{"x":434.99999999999994,"y":387.7142857142858,"rotation":0,"id":29,"uid":"com.gliffy.shape.basic.basic_v1.default.line","width":184.44444444444443,"height":157.14285714285717,"lockAspectRatio":false,"lockShape":false,"order":22,"graphic":{"type":"Line","Line":{"strokeWidth":1,"strokeColor":"#000000","fillColor":"none","dashStyle":null,"startArrow":0,"endArrow":1,"startArrowRotation":"auto","endArrowRotation":"auto","ortho":true,"interpolationType":"linear","cornerRadius":10,"controlPath":[[1.1368683772161603e-13,-6.266314418188983],[1.1368683772161603e-13,-21.98706675498312],[1.1368683772161603e-13,-37.70781909177731],[1.1368683772161603e-13,-53.428571428571445]],"lockSegments":{}}},"children":[],"constraints":{"constraints":[],"startConstraint":{"type":"StartPositionConstraint","StartPositionConstraint":{"nodeId":17,"px":0.5,"py":0}},"endConstraint":{"type":"EndPositionConstraint","EndPositionConstraint":{"nodeId":7,"px":0.5,"py":1}}},"linkMap":[]},{"x":739.3333333333334,"y":383.0000000000001,"rotation":0,"id":28,"uid":"com.gliffy.shape.basic.basic_v1.default.line","width":184.44444444444443,"height":157.14285714285717,"lockAspectRatio":false,"lockShape":false,"order":21,"graphic":{"type":"Line","Line":{"strokeWidth":1,"strokeColor":"#000000","fillColor":"none","dashStyle":null,"startArrow":0,"endArrow":1,"startArrowRotation":"auto","endArrowRotation":"auto","ortho":true,"interpolationType":"linear","cornerRadius":10,"controlPath":[[-2.7051851851853144,-1.5714285714286689],[-2.7051851851853144,-25.142857142857224],[-132.43470494849885,-25.142857142857224],[-132.43470494849885,-48.71428571428578]],"lockSegments":{}}},"children":null,"constraints":{"constraints":[],"startConstraint":{"type":"StartPositionConstraint","StartPositionConstraint":{"nodeId":23,"px":0.5,"py":0}},"endConstraint":{"type":"EndPositionConstraint","EndPositionConstraint":{"nodeId":7,"px":0.7071067811865476,"py":1}}},"linkMap":[]},{"x":132.51111111111123,"y":384.57142857142867,"rotation":0,"id":27,"uid":"com.gliffy.shape.basic.basic_v1.default.line","width":184.44444444444443,"height":157.14285714285717,"lockAspectRatio":false,"lockShape":false,"order":20,"graphic":{"type":"Line","Line":{"strokeWidth":1,"strokeColor":"#000000","fillColor":"none","dashStyle":null,"startArrow":0,"endArrow":1,"startArrowRotation":"auto","endArrowRotation":"auto","ortho":true,"interpolationType":"linear","cornerRadius":10,"controlPath":[[-1.8444444444444628,-3.142857142857224],[-1.8444444444444628,-26.71428571428578],[130.5902605040544,-26.71428571428578],[130.5902605040544,-50.28571428571439]],"lockSegments":{}}},"children":null,"constraints":{"constraints":[],"startConstraint":{"type":"StartPositionConstraint","StartPositionConstraint":{"nodeId":9,"px":0.5,"py":0}},"endConstraint":{"type":"EndPositionConstraint","EndPositionConstraint":{"nodeId":7,"px":0.29289321881345254,"py":0.9999999999999998}}},"linkMap":[]},{"x":625.9614814814814,"y":420.71428571428584,"rotation":0,"id":21,"uid":"com.gliffy.shape.basic.basic_v1.default.rectangle","width":221.33333333333334,"height":39.28571428571429,"lockAspectRatio":false,"lockShape":false,"order":18,"graphic":{"type":"Shape","Shape":{"tid":"com.gliffy.stencil.rectangle.basic_v1","strokeWidth":1,"strokeColor":"#333333","fillColor":"#FFFFFF","gradient":false,"dropShadow":false,"state":0,"shadowX":0,"shadowY":0,"opacity":1}},"children":[{"x":0.983703703703704,"y":0,"rotation":0,"id":22,"uid":null,"widt
h":219.36592592592604,"height":18,"lockAspectRatio":false,"lockShape":false,"order":"auto","graphic":{"type":"Text","Text":{"tid":null,"valign":"middle","overflow":"none","vposition":"none","hposition":"none","html":"Log
","paddingLeft":2,"paddingRight":2,"paddingBottom":2,"paddingTop":2}},"children":null}],"linkMap":[]},{"x":625.9614814814814,"y":381.42857142857144,"rotation":0,"id":23,"uid":"com.gliffy.shape.basic.basic_v1.default.rectangle","width":221.33333333333334,"height":39.28571428571429,"lockAspectRatio":false,"lockShape":false,"order":16,"graphic":{"type":"Shape","Shape":{"tid":"com.gliffy.stencil.rectangle.basic_v1","strokeWidth":1,"strokeColor":"#333333","fillColor":"#d9d2e9","gradient":false,"dropShadow":false,"state":0,"shadowX":0,"shadowY":0,"opacity":1}},"children":[{"x":0.983703703703704,"y":0,"rotation":0,"id":24,"uid":null,"width":219.36592592592604,"height":18,"lockAspectRatio":false,"lockShape":false,"order":"auto","graphic":{"type":"Text","Text":{"tid":null,"valign":"middle","overflow":"none","vposition":"none","hposition":"none","html":"Filebeat
","paddingLeft":2,"paddingRight":2,"paddingBottom":2,"paddingTop":2}},"children":null}],"linkMap":[]},{"x":322.98074074074077,"y":420.71428571428584,"rotation":0,"id":15,"uid":"com.gliffy.shape.basic.basic_v1.default.rectangle","width":221.33333333333334,"height":39.28571428571429,"lockAspectRatio":false,"lockShape":false,"order":14,"graphic":{"type":"Shape","Shape":{"tid":"com.gliffy.stencil.rectangle.basic_v1","strokeWidth":1,"strokeColor":"#333333","fillColor":"#FFFFFF","gradient":false,"dropShadow":false,"state":0,"shadowX":0,"shadowY":0,"opacity":1}},"children":[{"x":0.983703703703704,"y":0,"rotation":0,"id":16,"uid":null,"width":219.36592592592604,"height":18,"lockAspectRatio":false,"lockShape":false,"order":"auto","graphic":{"type":"Text","Text":{"tid":null,"valign":"middle","overflow":"none","vposition":"none","hposition":"none","html":"Log
","paddingLeft":2,"paddingRight":2,"paddingBottom":2,"paddingTop":2}},"children":null}],"linkMap":[]},{"x":322.98074074074077,"y":381.42857142857144,"rotation":0,"id":17,"uid":"com.gliffy.shape.basic.basic_v1.default.rectangle","width":221.33333333333334,"height":39.28571428571429,"lockAspectRatio":false,"lockShape":false,"order":12,"graphic":{"type":"Shape","Shape":{"tid":"com.gliffy.stencil.rectangle.basic_v1","strokeWidth":1,"strokeColor":"#333333","fillColor":"#d9d2e9","gradient":false,"dropShadow":false,"state":0,"shadowX":0,"shadowY":0,"opacity":1}},"children":[{"x":0.983703703703704,"y":0,"rotation":0,"id":18,"uid":null,"width":219.36592592592604,"height":18,"lockAspectRatio":false,"lockShape":false,"order":"auto","graphic":{"type":"Text","Text":{"tid":null,"valign":"middle","overflow":"none","vposition":"none","hposition":"none","html":"Filebeat
","paddingLeft":2,"paddingRight":2,"paddingBottom":2,"paddingTop":2}},"children":null}],"linkMap":[]},{"x":20.0000000000001,"y":420.71428571428584,"rotation":0,"id":11,"uid":"com.gliffy.shape.basic.basic_v1.default.rectangle","width":221.33333333333334,"height":39.28571428571429,"lockAspectRatio":false,"lockShape":false,"order":10,"graphic":{"type":"Shape","Shape":{"tid":"com.gliffy.stencil.rectangle.basic_v1","strokeWidth":1,"strokeColor":"#333333","fillColor":"#FFFFFF","gradient":false,"dropShadow":false,"state":0,"shadowX":0,"shadowY":0,"opacity":1}},"children":[{"x":0.9837037037037036,"y":0,"rotation":0,"id":12,"uid":null,"width":219.365925925926,"height":18,"lockAspectRatio":false,"lockShape":false,"order":"auto","graphic":{"type":"Text","Text":{"tid":null,"valign":"middle","overflow":"none","vposition":"none","hposition":"none","html":"Log
","paddingLeft":2,"paddingRight":2,"paddingBottom":2,"paddingTop":2}},"children":null}],"linkMap":[]},{"x":20.0000000000001,"y":381.42857142857144,"rotation":0,"id":9,"uid":"com.gliffy.shape.basic.basic_v1.default.rectangle","width":221.33333333333334,"height":39.28571428571429,"lockAspectRatio":false,"lockShape":false,"order":8,"graphic":{"type":"Shape","Shape":{"tid":"com.gliffy.stencil.rectangle.basic_v1","strokeWidth":1,"strokeColor":"#333333","fillColor":"#d9d2e9","gradient":false,"dropShadow":false,"state":0,"shadowX":0,"shadowY":0,"opacity":1}},"children":[{"x":0.983703703703704,"y":0,"rotation":0,"id":10,"uid":null,"width":219.36592592592604,"height":18,"lockAspectRatio":false,"lockShape":false,"order":"auto","graphic":{"type":"Text","Text":{"tid":null,"valign":"middle","overflow":"none","vposition":"none","hposition":"none","html":"Filebeat
","paddingLeft":2,"paddingRight":2,"paddingBottom":2,"paddingTop":2}},"children":null}],"linkMap":[]},{"x":20,"y":255.71428571428572,"rotation":0,"id":7,"uid":"com.gliffy.shape.basic.basic_v1.default.rectangle","width":830.0000000000001,"height":78.57142857142858,"lockAspectRatio":false,"lockShape":false,"order":6,"graphic":{"type":"Shape","Shape":{"tid":"com.gliffy.stencil.rectangle.basic_v1","strokeWidth":1,"strokeColor":"#333333","fillColor":"#f9cb9c","gradient":false,"dropShadow":false,"state":0,"shadowX":0,"shadowY":0,"opacity":1}},"children":[{"x":3.6888888888888887,"y":0,"rotation":0,"id":8,"uid":null,"width":822.6222222222223,"height":22,"lockAspectRatio":false,"lockShape":false,"order":"auto","graphic":{"type":"Text","Text":{"tid":null,"valign":"middle","overflow":"none","vposition":"none","hposition":"none","html":"Kafka
","paddingLeft":2,"paddingRight":2,"paddingBottom":2,"paddingTop":2}},"children":null}],"linkMap":[]},{"x":20,"y":177.14285714285717,"rotation":0,"id":5,"uid":"com.gliffy.shape.basic.basic_v1.default.rectangle","width":830.0000000000001,"height":78.57142857142858,"lockAspectRatio":false,"lockShape":false,"order":4,"graphic":{"type":"Shape","Shape":{"tid":"com.gliffy.stencil.rectangle.basic_v1","strokeWidth":1,"strokeColor":"#333333","fillColor":"#a4c2f4","gradient":false,"dropShadow":false,"state":0,"shadowX":0,"shadowY":0,"opacity":1}},"children":[{"x":3.6888888888888887,"y":0,"rotation":0,"id":6,"uid":null,"width":822.6222222222223,"height":22,"lockAspectRatio":false,"lockShape":false,"order":"auto","graphic":{"type":"Text","Text":{"tid":null,"valign":"middle","overflow":"none","vposition":"none","hposition":"none","html":"Logstash
","paddingLeft":2,"paddingRight":2,"paddingBottom":2,"paddingTop":2}},"children":null}],"linkMap":[]},{"x":20,"y":98.5714285714286,"rotation":0,"id":3,"uid":"com.gliffy.shape.basic.basic_v1.default.rectangle","width":830.0000000000001,"height":78.57142857142858,"lockAspectRatio":false,"lockShape":false,"order":2,"graphic":{"type":"Shape","Shape":{"tid":"com.gliffy.stencil.rectangle.basic_v1","strokeWidth":1,"strokeColor":"#333333","fillColor":"#ea9999","gradient":false,"dropShadow":false,"state":0,"shadowX":0,"shadowY":0,"opacity":1}},"children":[{"x":3.6888888888888887,"y":0,"rotation":0,"id":4,"uid":null,"width":822.6222222222223,"height":22,"lockAspectRatio":false,"lockShape":false,"order":"auto","graphic":{"type":"Text","Text":{"tid":null,"valign":"middle","overflow":"none","vposition":"none","hposition":"none","html":"Elasticsearch
","paddingLeft":2,"paddingRight":2,"paddingBottom":2,"paddingTop":2}},"children":null}],"linkMap":[]},{"x":20,"y":20,"rotation":0,"id":0,"uid":"com.gliffy.shape.basic.basic_v1.default.rectangle","width":830.0000000000001,"height":78.57142857142858,"lockAspectRatio":false,"lockShape":false,"order":0,"graphic":{"type":"Shape","Shape":{"tid":"com.gliffy.stencil.rectangle.basic_v1","strokeWidth":1,"strokeColor":"#333333","fillColor":"#b6d7a8","gradient":false,"dropShadow":false,"state":0,"shadowX":0,"shadowY":0,"opacity":1}},"children":[{"x":3.6888888888888887,"y":0,"rotation":0,"id":2,"uid":null,"width":822.6222222222223,"height":22,"lockAspectRatio":false,"lockShape":false,"order":"auto","graphic":{"type":"Text","Text":{"tid":null,"valign":"middle","overflow":"none","vposition":"none","hposition":"none","html":"Kibana
","paddingLeft":2,"paddingRight":2,"paddingBottom":2,"paddingTop":2}},"children":null}],"linkMap":[]}],"background":"#FFFFFF","width":851,"height":461,"maxWidth":5000,"maxHeight":5000,"nodeIndex":41,"autoFit":true,"exportBorder":false,"gridOn":true,"snapToGrid":true,"drawingGuidesOn":true,"pageBreaksOn":false,"printGridOn":false,"printPaper":"LETTER","printShrinkToFit":false,"printPortrait":true,"shapeStyles":{"com.gliffy.shape.basic.basic_v1.default":{"fill":"none","stroke":"#333333","strokeWidth":1}},"lineStyles":{"global":{"strokeWidth":1,"endArrow":1}},"textStyles":{},"themeData":null}} -------------------------------------------------------------------------------- /book.src/imgs/elk-stack.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Ceelog/learn-elk/9115022b21f214a6badc4d429b2ac2b52d146dde/book.src/imgs/elk-stack.jpg -------------------------------------------------------------------------------- /book.src/imgs/filebeat.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Ceelog/learn-elk/9115022b21f214a6badc4d429b2ac2b52d146dde/book.src/imgs/filebeat.png -------------------------------------------------------------------------------- /book.src/imgs/grok-debugger.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Ceelog/learn-elk/9115022b21f214a6badc4d429b2ac2b52d146dde/book.src/imgs/grok-debugger.png -------------------------------------------------------------------------------- /book.src/imgs/kafka-overview.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Ceelog/learn-elk/9115022b21f214a6badc4d429b2ac2b52d146dde/book.src/imgs/kafka-overview.png -------------------------------------------------------------------------------- /book.src/imgs/kibana-create-dashbord.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Ceelog/learn-elk/9115022b21f214a6badc4d429b2ac2b52d146dde/book.src/imgs/kibana-create-dashbord.jpg -------------------------------------------------------------------------------- /book.src/imgs/kibana-create-index-pattern.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Ceelog/learn-elk/9115022b21f214a6badc4d429b2ac2b52d146dde/book.src/imgs/kibana-create-index-pattern.jpg -------------------------------------------------------------------------------- /book.src/imgs/kibana-create-visualize.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Ceelog/learn-elk/9115022b21f214a6badc4d429b2ac2b52d146dde/book.src/imgs/kibana-create-visualize.png -------------------------------------------------------------------------------- /book.src/imgs/kibana-dashbord.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Ceelog/learn-elk/9115022b21f214a6badc4d429b2ac2b52d146dde/book.src/imgs/kibana-dashbord.png -------------------------------------------------------------------------------- /book.src/imgs/kibana-index-pattern.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Ceelog/learn-elk/9115022b21f214a6badc4d429b2ac2b52d146dde/book.src/imgs/kibana-index-pattern.png 
-------------------------------------------------------------------------------- /book.src/imgs/kibana-query-demo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Ceelog/learn-elk/9115022b21f214a6badc4d429b2ac2b52d146dde/book.src/imgs/kibana-query-demo.png -------------------------------------------------------------------------------- /book.src/imgs/kibana-tsvb.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Ceelog/learn-elk/9115022b21f214a6badc4d429b2ac2b52d146dde/book.src/imgs/kibana-tsvb.jpg -------------------------------------------------------------------------------- /book.src/imgs/kibana-visualization-area.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Ceelog/learn-elk/9115022b21f214a6badc4d429b2ac2b52d146dde/book.src/imgs/kibana-visualization-area.png -------------------------------------------------------------------------------- /book.src/imgs/kibana-visualization-pie.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Ceelog/learn-elk/9115022b21f214a6badc4d429b2ac2b52d146dde/book.src/imgs/kibana-visualization-pie.png -------------------------------------------------------------------------------- /book.src/imgs/kibana-visualization.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Ceelog/learn-elk/9115022b21f214a6badc4d429b2ac2b52d146dde/book.src/imgs/kibana-visualization.png -------------------------------------------------------------------------------- /book.src/imgs/kibana-welcome.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Ceelog/learn-elk/9115022b21f214a6badc4d429b2ac2b52d146dde/book.src/imgs/kibana-welcome.jpg -------------------------------------------------------------------------------- /book.src/imgs/kibana.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Ceelog/learn-elk/9115022b21f214a6badc4d429b2ac2b52d146dde/book.src/imgs/kibana.jpg -------------------------------------------------------------------------------- /book.src/imgs/logstash.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Ceelog/learn-elk/9115022b21f214a6badc4d429b2ac2b52d146dde/book.src/imgs/logstash.jpg -------------------------------------------------------------------------------- /book.src/imgs/nginx-web.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Ceelog/learn-elk/9115022b21f214a6badc4d429b2ac2b52d146dde/book.src/imgs/nginx-web.jpg -------------------------------------------------------------------------------- /book.src/kafka.md: -------------------------------------------------------------------------------- 1 | # Kafka 消息队列 2 | 3 | ## Kafka 服务配置 4 | 5 | 在 docker-compose.yaml 对于 Kafka 服务的配置中: 6 | 7 | ```yaml 8 | zookeeper: 9 | image: wurstmeister/zookeeper 10 | ports: 11 | - "2181:2181" 12 | networks: 13 | - elk 14 | 15 | kafka: 16 | image: wurstmeister/kafka:2.12-2.1.0 17 | ports: 18 | - "9092:9092" 19 | environment: 20 | KAFKA_ADVERTISED_HOST_NAME: $HOST_IP 21 | KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181 22 | KAFKA_HEAP_OPTS: "-Xmx512M -Xms512M" 23 | volumes: 
24 | - /var/run/docker.sock:/var/run/docker.sock 25 | depends_on: 26 | - zookeeper 27 | networks: 28 | - elk 29 | 30 | kafdrop: 31 | image: obsidiandynamics/kafdrop 32 | ports: 33 | - "9000:9000" 34 | environment: 35 | KAFKA_BROKERCONNECT: kafka:9092 36 | JVM_OPTS: "-Xms32M -Xmx64M" 37 | SERVER_SERVLET_CONTEXTPATH: "/" 38 | depends_on: 39 | - kafka 40 | networks: 41 | - elk 42 | ``` 43 | 44 | - Kafka 依赖 zookeeper 提供分布式状态存储服务 45 | - 注意选择 Kafka 的版本和 ELK 兼容,这里选的是 `wurstmeister/kafka:2.12-2.1.0` 46 | - 使用名为 elk 的子网络,这样便可以访问同样使用这个子网络的其他容器 47 | - 使用 Kafka 可视化管理工具 Kafdrop 可以查看 Kafka 当前运行状态 48 | 49 | ## Kafdrop 可视化管理后台 50 | 51 | 52 | [Kafdrop](https://github.com/obsidiandynamics/kafdrop) 是一款 Kafka web ui 软件,支持丰富的管理功能,包括: 53 | 54 | - 查看 Kafka 节点 55 | - 查看/创建 Topics 56 | - 浏览消息 57 | - 查看 consumer groups 58 | - ... 59 | 60 |  61 | 62 | 63 | ## 小结 64 | 65 | Kafka 作为消息队列服务,具有高吞吐、低延迟、高并发、高可用等优点,应用到 Elastic Stack 中可以显著提升整个系统的稳定性。 66 | 67 | 成百上千的 Filebeat 实例可以即时地将大量日志转发到 Kafka,再由 Logstash 按照阈值能力慢慢处理,达到「削峰填谷」的效果。 -------------------------------------------------------------------------------- /book.src/kibana.md: -------------------------------------------------------------------------------- 1 | # Kibana 分析日志 2 | 3 | ## Kibana 服务配置 4 | 5 | 在 docker-compose.yaml 对于 Kibana 服务的配置中: 6 | 7 | ```yaml 8 | kibana: 9 | build: 10 | context: kibana/ 11 | args: 12 | ELK_VERSION: $ELK_VERSION 13 | volumes: 14 | - type: bind 15 | source: ./kibana/config/kibana.yml 16 | target: /usr/share/kibana/config/kibana.yml 17 | read_only: true 18 | ports: 19 | - "5601:5601" 20 | networks: 21 | - elk 22 | depends_on: 23 | - elasticsearch 24 | ``` 25 | 26 | - 使用 kibana/ 目录下的 Dockerfile 构建镜像 27 | - 数据卷映射: 28 | - 将宿主机目录下的配置文件映射到容器内 29 | - 使用名为 elk 的子网络,这样便可以访问同样使用这个子网络的其他容器 30 | 31 | ## 运行效果 32 | 33 |  -------------------------------------------------------------------------------- /book.src/kibana/dashbord.md: -------------------------------------------------------------------------------- 1 | # Dashbord 2 | 3 | 4 | ## 创建 Dashbord 5 | 6 | 将上一节创建的可视化图表聚合到一个页面上就是 Dashbord,有了实时大盘便可以全面感知业务运行状态。 7 | 8 | 首先需要新建一个 Dashbord,然后加入可视化图表: 9 | 10 |  11 | 12 | ## Dashbord 示例 13 | 14 |  -------------------------------------------------------------------------------- /book.src/kibana/query.md: -------------------------------------------------------------------------------- 1 | # 查询日志 2 | 3 | ## 创建 Index Pattern 4 | 5 | Nginx 访问日志经过收集、过滤、索引后就可以通过 Kibana 查询,但是查询之前需要指定索引 index pattern: 6 | 7 |  8 | 9 | 如上述截图示例,在 Kibana 管理 > Index Patterns > Creata Index Pattern 进入创建页面: 10 | 11 |  12 | 13 | Index Pattern 的名称是由 Logstash 配置的时候确定的: 14 | ```yaml 15 | #logstash/pipeline/nginx.conf 16 | 17 | output { 18 | 19 | elasticsearch { 20 | hosts => "elasticsearch:9200" 21 | index => "logstash-%{[fields][topic]}-%{+YYYY.MM.dd.HH}" 22 | } 23 | } 24 | 25 | ``` 26 | 27 | ## 查询日志 28 | 29 | 在搜索框输入要查询的字段的值,同时在右侧选择时间区间,就可以开始查询索引的日志: 30 | 31 |  32 | 33 | ## 查询语法 34 | 35 | - 精准查询: 36 | - `ip:"192.168.42.10"` 37 | - `path:"/api/user/login"` 38 | - `status:404` 39 | 40 | - 模糊查询: 41 | - `p_key:"关键词*"` 42 | 43 | - AND 与查询: 44 | - `ip:"192.168.42.10" AND status:404` 45 | 46 | - OR 或查询: 47 | - `ip:"192.168.42.10" OR status:404` 48 | - `(ip:"192.168.42.10" AND status:404) OR (ip:"192.168.42.11" AND status:503)` 49 | 50 | - NOT 非查询: 51 | - `NOT ip:"192.168.42.10"` 52 | 53 | - 比较查询: 54 | - `status >= 500` 55 | 56 | 更多查询语法,参考 [https://www.elastic.co/guide/en/kibana/7.7/kuery-query.html](https://www.elastic.co/guide/en/kibana/7.7/kuery-query.html) 
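
除了在 Kibana 界面查询,也可以直接调用 Elasticsearch 的 `_search` REST API 验证同样的查询条件。下面是一个最小示例(基于前文的假设:Elasticsearch 已按 docker-compose 配置映射到宿主机 9200 端口,索引名以 `logstash-nginx-log-` 开头;`status`、`ip` 等字段名沿用上文示例,实际字段以 Logstash 解析结果为准;`query_string` 使用 Lucene 语法,与 Kibana 默认的 KQL 略有差异):

```shell
# 查询最近 1 小时内,状态码为 404 且不来自 192.168.42.10 的最新 5 条日志
# 索引名 logstash-nginx-log-* 由上文 Logstash output 的 index 配置推断而来(fields.topic 为 nginx-log)
curl -s -X GET "http://localhost:9200/logstash-nginx-log-*/_search?pretty" \
  -H 'Content-Type: application/json' \
  -d '{
    "size": 5,
    "sort": [ { "@timestamp": "desc" } ],
    "query": {
      "bool": {
        "must": [
          { "query_string": { "query": "status:404 AND NOT ip:\"192.168.42.10\"" } },
          { "range": { "@timestamp": { "gte": "now-1h" } } }
        ]
      }
    }
  }'
```

如果这里返回的结果与 Kibana 中相同条件的查询一致,说明日志的索引和字段解析都工作正常。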
-------------------------------------------------------------------------------- /book.src/kibana/visualize.md: -------------------------------------------------------------------------------- 1 | # 可视化 2 | 3 | Kibana 可以创建 面积图、饼图、折线图、表格、直方图、热力图、地理分布图等各种样式的可视化图表: 4 | 5 |  6 | 7 | 8 | ## 面积图示例 9 | 10 |  11 | 12 | ## 饼图示例 13 | 14 |  15 | 16 | ## TSVB示例 17 | 18 |  19 | 20 | -------------------------------------------------------------------------------- /book.src/logstash.md: -------------------------------------------------------------------------------- 1 | # Logstash 处理日志 2 | 3 | 4 | ## Logstash 服务配置 5 | 6 | 在 docker-compose.yaml 对于 Logstash 服务的配置中: 7 | 8 | ```yaml 9 | logstash: 10 | build: 11 | context: logstash/ 12 | args: 13 | ELK_VERSION: $ELK_VERSION 14 | volumes: 15 | - type: bind 16 | source: ./logstash/config/logstash.yml 17 | target: /usr/share/logstash/config/logstash.yml 18 | read_only: true 19 | - type: bind 20 | source: ./logstash/pipeline 21 | target: /usr/share/logstash/pipeline 22 | read_only: true 23 | ports: 24 | - "9600:9600" 25 | environment: 26 | LS_JAVA_OPTS: "-Xmx256m -Xms256m" 27 | networks: 28 | - elk 29 | depends_on: 30 | - elasticsearch 31 | ``` 32 | 33 | - 使用 logstash/ 目录下的 Dockerfile 构建镜像 34 | - 数据卷映射: 35 | - 将宿主机目录下的配置文件映射到容器内 36 | - 使用名为 elk 的子网络,这样便可以访问同样使用这个子网络的其他容器 37 | 38 | 39 | ## Logstash Pipeline 配置 40 | 41 | 在 `logstash/pipeline/nginx.conf` 配置: 42 | 43 | ```yaml 44 | {{#include ../docker-compose/logstash/pipeline/nginx.conf}} 45 | ``` 46 | 47 | Logstash pipeline 配置主要分为 3 个部分: `input`/`filter`/`output` 48 | 49 | 对于 `input` 模块: 50 | 51 | - 配置数据来源,这里是从 Kafka 获取日志 52 | - 更多来源配置,参见 [https://www.elastic.co/guide/en/logstash/current/input-plugins.html](https://www.elastic.co/guide/en/logstash/current/input-plugins.html) 53 | 54 | 对于 `filter` 模块: 55 | 56 | - 配置数据过滤或格式化规则,这里主要使用 `grok` 和 `kv` 匹配日志字段和解析请求参数 57 | - 更多过滤器配置,参见 [https://www.elastic.co/guide/en/logstash/current/filter-plugins.html](https://www.elastic.co/guide/en/logstash/current/filter-plugins.html) 58 | 59 | 对于 `output` 模块: 60 | 61 | - 配置格式化后的数据输出目标,这里将日志输出到 Elasticsearch 62 | - 更多输出配置,参见 [https://www.elastic.co/guide/en/logstash/current/output-plugins.html](https://www.elastic.co/guide/en/logstash/current/output-plugins.html) 63 | 64 | 65 | ### 关于 Grok 66 | 67 | Grok 是正则匹配工具,更多正则模式请参考 [https://grokdebug.herokuapp.com/patterns#](https://grokdebug.herokuapp.com/patterns#) 68 | 69 |  70 | -------------------------------------------------------------------------------- /book.src/nginx.md: -------------------------------------------------------------------------------- 1 | # Nginx 访问日志 2 | 3 | ## Nginx 服务配置 4 | 5 | 在 docker-compose.yaml 对于 Nginx 服务的配置中: 6 | 7 | ```yaml 8 | nginx: 9 | image: nginx:1.18 10 | ports: 11 | - "8000:80" 12 | volumes: 13 | - "./nginx/nginx.conf:/etc/nginx/nginx.conf:ro" 14 | - "./nginx/html:/usr/share/nginx/html:ro" 15 | - type: volume 16 | source: nginx-log 17 | target: /var/log/nginx 18 | ``` 19 | 20 | - 使用 Nginx 1.18 版本的官方镜像,默认使用的镜像仓库地址为: [https://hub.docker.com/_/nginx](https://hub.docker.com/_/nginx) 21 | - 将宿主机的 8000 端口映射到到 Nginx 容器内的 80 端口,这样就可以通过宿主机地址访问容器内服务 22 | - 数据卷映射: 23 | - 将宿主机目录下的配置文件 以及 `html` 目录映射到容器内相应的目录,这样容器内应用就可以访问宿主机上的文件或目录 24 | - 使用宿主机上名为 `nginx-log` 的数据卷,并映射到容器内的 `/var/log/nginx` 目录,目的是让运行在同一宿主机上的容器之间共享目录 25 | - Filebeat 容器也会用到 `nginx-log` 数据卷,在这个目录收割 Nginx 访问日志 26 | 27 | ## Nginx 配置 28 | 29 | 对于 `nginx/nginx.conf` 配置: 30 | 31 | ```nginx 32 | {{#include ../docker-compose/nginx/nginx.conf}} 33 | ``` 34 | 35 | 通过配置 Nginx 日志格式,可以将每个用户的访问信息记录到日志中,主要包括以下字段: 
36 | 37 | - `$remote_addr` 用户 ip 地址 38 | - `$remote_user` 用户标识 39 | - `$time_local` 服务器时间 40 | - `$request` http 请求行信息,包括 http method / uri / http 版本 41 | - `$status` 服务器响应状态码,例如 200/302/404/504 42 | - `$body_bytes_sent` 服务器响应内容大小(单位 bytes) 43 | - `$request_time` 请求处理时间 44 | - `$http_referer` 请求来源 referer 45 | - `$http_user_agent` 用户系统/浏览器等信息 46 | - `$http_x_forwarded_for` 网络代理 ip 列表(如果使用代理) 47 | 48 | 49 | 日志示例: 50 | 51 | ```shell 52 | #tail -f /var/log/nginx/elk-access.log 53 | 192.168.0.1 - - [26/May/2020:12:31:31 +0000] "GET /api/news/view HTTP/1.0" 404 555 0.000 "-" "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/81.0.4044.138 Safari/537.36" "-" 54 | 192.168.0.1 - - [26/May/2020:12:31:32 +0000] "GET /api/app/update HTTP/1.0" 404 555 0.000 "-" "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/81.0.4044.138 Safari/537.36" "-" 55 | 192.168.0.1 - - [26/May/2020:12:31:32 +0000] "GET /api/user/login HTTP/1.0" 404 555 0.000 "-" "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/81.0.4044.138 Safari/537.36" "-" 56 | ``` 57 | 58 | 这些日志将被 Filebeat 采集后投递到 Kafka 消息队列,再经过 Logstash 的格式化处理后,索引到 Elasticsearch 中,最后通过 Kibana 可视化查询。 59 | 60 | ## 运行效果 61 | 62 |  -------------------------------------------------------------------------------- /book.src/preface.md: -------------------------------------------------------------------------------- 1 | # 前言 2 | 3 | 大数据时代,业务无时无刻不在产生大量数据。如何挖掘这些数据的价值,已经成为每个公司的必修课。 4 | 5 | 工欲善其事,必先利其器,一款方便高效的工具,可以辅助我们实时洞察源源不断产生的数据,发现其中的价值。 6 | 7 | ELK Stack 正是这样一款工具。 8 | 9 | 准确来说 ELK Stack 是一组工具集合,可以完成数据采集、清洗过滤、运输存储、索引查询、交互式探索、可视化报表等一系列工作。 10 | 11 | 本小册全面介绍 ELK Stack 包含的各个组件及其使用方法,让零基础的读者也能轻松搭建整个套件,并运用到实际的生产环境中。 12 | 13 | 在示例中,我们将打造一个大规模日志实时分析系统,实现每天数十亿条日志或者 PB 级数据的分析处理。 14 | 15 | 事不宜迟,马上开始! 

-------------------------------------------------------------------------------- /book.src/setup.md: -------------------------------------------------------------------------------- 1 | # 演示环境安装与配置 2 | 3 | ## 本地开发环境 4 | 5 | 为了深入理解各个组件的使用,建议读者在本地搭建一个开发环境。 6 | 7 | 为了解决系统环境和依赖的问题,本小册使用 Docker 容器运行各个组件,并使用 Docker Compose 进行服务编排,实现可「一键启动」的开发环境。 8 | 9 | ### Docker 下载与安装 10 | 11 | Docker 是一种容器化技术,可以将软件及其依赖的运行环境打包进一个容器中,和虚拟机不同的是,Docker 容器非常轻量,可以实现秒级的启动或关闭。 12 | 13 | - Docker 支持 Linux / Windows / MacOS 平台。 14 | 15 | - 在官网下载与安装 Docker [https://docs.docker.com/get-docker/](https://docs.docker.com/get-docker/) 16 | 17 | - 系统配置要求: 18 | - 4GB 以上内存 19 | - 2 核以上 CPU 20 | 21 | #### Docker Desktop for Windows/Mac 22 | 23 | 为了支持 Windows 和 Mac 操作系统,Docker 开发了 Docker Desktop for Windows/Mac 24 | 25 | 以下是 Docker Desktop 运行起来的界面: 26 | 27 |  28 | 29 | 可以通过设置页面,配置足够的计算资源: 30 | 31 |  32 | 33 | #### Docker for Linux 34 | 35 | 由于 Docker 源于 Linux 虚拟化技术,所以对 Linux 操作系统支持的很好。 36 | 37 | 安装参见 [https://docs.docker.com/engine/install/](https://docs.docker.com/engine/install/) 38 | 39 | #### 启动 Docker 40 | 41 | Docker Desktop for Windows/Mac 安装完成后,直接双击图标就可以启动: 42 | 43 |  44 | 45 | Docker for Linux 安装完成后,需要先启动 Docker 服务: 46 | ``` 47 | service docker start 48 | ``` 49 | 50 | 启动成功后,可以在命令行查看 Docker 相关信息: 51 | ```shell 52 | # docker version 53 | Client: Docker Engine - Community 54 | Version: 19.03.7 55 | API version: 1.40 56 | Go version: go1.12.17 57 | Git commit: 7141c199a2 58 | Built: Wed Mar 4 01:24:10 2020 59 | OS/Arch: linux/amd64 60 | Experimental: false 61 | 62 | Server: Docker Engine - Community 63 | Engine: 64 | Version: 19.03.7 65 | API version: 1.40 (minimum version 1.12) 66 | Go version: go1.12.17 67 | Git commit: 7141c199a2 68 | Built: Wed Mar 4 01:22:45 2020 69 | OS/Arch: linux/amd64 70 | Experimental: false 71 | containerd: 72 | Version: 1.2.13 73 | GitCommit: 7ad184331fa3e55e52b890ea95e65ba581ae3429 74 | runc: 75 | Version: 1.0.0-rc10 76 | GitCommit: dc9208a3303feef5b3839f4323d9beb36df0a9dd 77 | docker-init: 78 | Version: 0.18.0 79 | ``` 80 | 81 | 如果没有启动成功,则会报错: 82 | ```shell 83 | # docker ps 84 | Cannot connect to the Docker daemon at unix:///var/run/docker.sock. Is the docker daemon running? 85 | 86 | ``` 87 | #### Docker Compose 88 | 89 | 有了 Docker 后我们可以轻松运行各种软件,但是对于一个大型系统,涉及很多服务,以及服务之间的依赖关系。 90 | 91 | 如果手动处理各个服务、网络配置等计算资源,效率低下且容易出错。 92 | 93 | 这个时候可以使用 [Docker Compose](https://docs.docker.com/compose/) 编排这些服务,实现大型系统的「一键启动」 94 | 95 |  96 | 97 | ### Docker 配置 98 | 99 | 本小册演示环境的所有配置都已经开源并托管在 Github ,下载地址: 100 | - [https://github.com/Ceelog/learn-elk](https://github.com/Ceelog/learn-elk) 101 | 102 | 系统配置目录结构如下所示: 103 | 104 | 105 | ``` 106 | . 
107 | ├── docker-compose.yml 108 | ├── elasticsearch 109 | │ ├── config 110 | │ │ └── elasticsearch.yml 111 | │ └── Dockerfile 112 | ├── .env 113 | ├── filebeat 114 | │ ├── Dockerfile 115 | │ └── filebeat.yml 116 | ├── kibana 117 | │ ├── config 118 | │ │ └── kibana.yml 119 | │ └── Dockerfile 120 | ├── logstash 121 | │ ├── config 122 | │ │ └── logstash.yml 123 | │ ├── Dockerfile 124 | │ └── pipeline 125 | │ ├── logstash.conf 126 | │ └── nginx.conf 127 | └── nginx 128 | ├── html 129 | │ └── index.html 130 | └── nginx.conf 131 | ``` 132 | 133 | 其中 `.env` 文件声明使用 7.6.2 版本的 Elastic Stack: 134 | ```shell 135 | {{#include ../docker-compose/.env}} 136 | ``` 137 | 138 | 如果你想测试其他版本的 Elastic Stack,只需修改`ELK_VERSION`参数即可。 139 | 140 | 141 | 142 | ### 一键启动 143 | 144 | 上述配置文件中,`docker-compose.yml` 声明了我们需要的服务以及如果构建这些服务。 145 | 146 | Docker 安装并启动后,执行以下命令即可「一键启动」整个开发环境: 147 | 148 | ```shell 149 | cd learn-elk.git/docker-compose/ 150 | 151 | docker-compose -f "docker-compose.yml" up -d --build 152 | ``` 153 | ```shell 154 | # 运行结果 155 | Creating network "learn-elkgit_elk" with driver "bridge" 156 | Creating network "learn-elkgit_default" with the default driver 157 | 158 | Building elasticsearch 159 | Step 1/2 : ARG ELK_VERSION 160 | Step 2/2 : FROM docker.elastic.co/elasticsearch/elasticsearch:${ELK_VERSION} 161 | ---> f29a1ee41030 162 | Successfully built f29a1ee41030 163 | Successfully tagged learn-elkgit_elasticsearch:latest 164 | Building logstash 165 | Step 1/2 : ARG ELK_VERSION 166 | Step 2/2 : FROM docker.elastic.co/logstash/logstash:${ELK_VERSION} 167 | ---> fa5b3b1e9757 168 | Successfully built fa5b3b1e9757 169 | Successfully tagged learn-elkgit_logstash:latest 170 | Building kibana 171 | Step 1/2 : ARG ELK_VERSION 172 | Step 2/2 : FROM docker.elastic.co/kibana/kibana:${ELK_VERSION} 173 | ---> f70986bc5191 174 | Successfully built f70986bc5191 175 | Successfully tagged learn-elkgit_kibana:latest 176 | Building filebeat 177 | Step 1/2 : ARG ELK_VERSION 178 | Step 2/2 : FROM docker.elastic.co/beats/filebeat:${ELK_VERSION} 179 | ---> 0272ee9a7757 180 | Successfully built 0272ee9a7757 181 | Successfully tagged learn-elkgit_filebeat:latest 182 | Creating learn-elkgit_nginx_1 ... done 183 | Creating learn-elkgit_zookeeper_1 ... done 184 | Creating learn-elkgit_elasticsearch_1 ... done 185 | Creating learn-elkgit_kafka_1 ... done 186 | Creating learn-elkgit_kibana_1 ... done 187 | Creating learn-elkgit_logstash_1 ... done 188 | Creating learn-elkgit_filebeat_1 ... done 189 | Creating learn-elkgit_kafdrop_1 ... done 190 | Creating learn-elkgit_metricbeat_1 ... 
done 191 | 192 | ``` 193 | 194 | > 注意:首次启动的时候,需要下载镜像文件,可能需要数十分钟,请耐心等候一下 195 | 196 | 如果你使用 VSCode 编辑器 并且 安装了 Docker 插件,那么你可以看到运行中的容器状态: 197 | 198 |  199 | 200 | ## 小结 201 | 202 | 本小节介绍了在线演示环境使用、本地开发环境搭建,在这个过程中读者也对整个系统有了大概的认识。 203 | 204 | 接下来,我们深入每个模块,了解其配置和运行环境。 205 | 206 | ## 常见问题 207 | 208 | - filebeat 启动报错: 209 | 210 | ```shell 211 | Exiting: error loading config file: 212 | config file ("filebeat.yml") can only be writable by the owner but the permissions are "-rw-rw-r--" 213 | (to fix the permissions use: 'chmod go-w /usr/share/filebeat/filebeat.yml') 214 | ``` 215 | 216 | 原因: 217 | 218 | 如果在 Linux 系统下启动,需要保证 filebeat 配置文件的读写权限正确,否则 Filebaet 容器无法启动并报错 219 | 220 | 解决方法: 221 | 222 | 修改 filebeat 配置文件权限 `chmod 644 filebeat.yml` 223 | -------------------------------------------------------------------------------- /book.src/what-is-elk.md: -------------------------------------------------------------------------------- 1 | # 什么是 ELK Stack 2 | 3 |  4 | 5 | ELK 是 3 款开源软件 [Elasticsearch](https://github.com/elastic/elasticsearch) / [Logstash](https://github.com/elastic/Logstash) / [Kibana](https://github.com/elastic/kibana) 的简称,基于这 3 款软件及其相关组件组成的数据处理系统称之为 ELK Stack。 6 | 7 | 从上面的架构图可以清晰地看到各个组件以栈[Stack]的形式构成整个系统。 8 | 9 | 由于系统涉及的组件越来越多,比如 Filebeat、Metricbeat、Kafka、Zookeeper 等,而不仅仅是 ELK,所以 ELK Stack 也逐渐被称为 Elastic Stack。 10 | 11 | 下面,简单介绍 Elastic Stack 涉及的各个组件: 12 | 13 | ## Elasticsearch 14 | 15 | > [Elasticsearch](https://www.elastic.co/cn/elasticsearch) 是一个搜索和分析引擎,主要负责将日志索引并存储起来,方便业务方检索查询 16 | 17 |  18 | 19 | ## Logstash 20 | 21 | > [Logstash](https://www.elastic.co/cn/logstash) 是服务器端数据处理管道,能够同时从多个来源采集数据,转换数据,然后将数据发送到诸如 Elasticsearch 等“存储库”中 22 | 23 |  24 | 25 | 26 | ## Kibana 27 | 28 | > [Kibana](https://www.elastic.co/cn/kibana) 则可以让用户在 Elasticsearch 中使用图形和图表对数据进行可视化 29 | 30 |  31 | 32 | 33 | ## Kafka 34 | 35 | > 在生产环境中,为了提升系统的稳定性和可扩展性,会增加一层消息队列中间件,用来「削峰填谷」,从容应对各类突发流量,保障后端日志索引服务的平稳性。 36 | > 37 | > [Kafka](https://kafka.apache.org/) 正是这样一款分布式消息队列服务中间件。 38 | 39 |  40 | 41 | 42 | ## Filebeat 43 | 44 | > [Filebeat](https://www.elastic.co/cn/beats/filebeat) 是轻量型日志采集器,用于转发和汇总日志与文件,让简单的事情不再繁杂。 45 | 46 |  47 | 48 | ## 小结 49 | 50 | 基于 Elastic Stack 可以轻松打造大规模日志实时分析处理系统。 51 | 52 | 所谓「大规模」,指的是 Elastic Stack 支持每天收集、处理、索引数十甚至上百亿规模的各类日志,这主要得益于 Filebeat、Kafka、Logstash、Elasticsearch 都支持分布式部署,可以无限水平扩展。 53 | 54 | 各类文本形式的日志都在处理范围,本小册以最常见的 Nginx 访问日志为例,演示如何搭建大规模日志实时分析处理系统。对访问日志的实时分析,可以帮助我们随时掌握业务的运行状况、统计 PV/UV、发现异常流量、分析用户行为、查看热门站内搜索关键词等。 55 | -------------------------------------------------------------------------------- /book.toml: -------------------------------------------------------------------------------- 1 | [book] 2 | authors = ["Ceelog"] 3 | language = "zh" 4 | multilingual = false 5 | src = "book.src" 6 | 7 | [output.html] 8 | git-repository-url = "https://github.com/Ceelog/learn-elk" 9 | git-repository-icon = "fa-github" -------------------------------------------------------------------------------- /docker-compose/.env: -------------------------------------------------------------------------------- 1 | ELK_VERSION=7.6.2 -------------------------------------------------------------------------------- /docker-compose/docker-compose.yml: -------------------------------------------------------------------------------- 1 | version: '3.2' 2 | 3 | services: 4 | elasticsearch: 5 | build: 6 | context: elasticsearch/ 7 | args: 8 | ELK_VERSION: $ELK_VERSION 9 | volumes: 10 | - type: bind 11 | source: ./elasticsearch/config/elasticsearch.yml 12 | target: /usr/share/elasticsearch/config/elasticsearch.yml 13 | 
read_only: true 14 | - type: volume 15 | source: elasticsearch 16 | target: /usr/share/elasticsearch/data 17 | ports: 18 | - "9200:9200" 19 | - "9300:9300" 20 | environment: 21 | ES_JAVA_OPTS: "-Xmx512m -Xms512m" 22 | ELASTIC_PASSWORD: changeme 23 | # Use single node discovery in order to disable production mode and avoid bootstrap checks 24 | # see https://www.elastic.co/guide/en/elasticsearch/reference/current/bootstrap-checks.html 25 | discovery.type: single-node 26 | networks: 27 | - elk 28 | 29 | logstash: 30 | build: 31 | context: logstash/ 32 | args: 33 | ELK_VERSION: $ELK_VERSION 34 | volumes: 35 | - type: bind 36 | source: ./logstash/config/logstash.yml 37 | target: /usr/share/logstash/config/logstash.yml 38 | read_only: true 39 | - type: bind 40 | source: ./logstash/pipeline 41 | target: /usr/share/logstash/pipeline 42 | read_only: true 43 | ports: 44 | - "9600:9600" 45 | environment: 46 | LS_JAVA_OPTS: "-Xmx256m -Xms256m" 47 | networks: 48 | - elk 49 | depends_on: 50 | - elasticsearch 51 | 52 | kibana: 53 | build: 54 | context: kibana/ 55 | args: 56 | ELK_VERSION: $ELK_VERSION 57 | volumes: 58 | - type: bind 59 | source: ./kibana/config/kibana.yml 60 | target: /usr/share/kibana/config/kibana.yml 61 | read_only: true 62 | ports: 63 | - "5601:5601" 64 | networks: 65 | - elk 66 | depends_on: 67 | - elasticsearch 68 | 69 | zookeeper: 70 | image: wurstmeister/zookeeper 71 | ports: 72 | - "2181:2181" 73 | networks: 74 | - elk 75 | 76 | kafka: 77 | image: wurstmeister/kafka:2.12-2.1.0 78 | ports: 79 | - "9092:9092" 80 | environment: 81 | KAFKA_ADVERTISED_HOST_NAME: kafka 82 | KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181 83 | KAFKA_HEAP_OPTS: "-Xmx256M -Xms256M" 84 | volumes: 85 | - /var/run/docker.sock:/var/run/docker.sock 86 | depends_on: 87 | - zookeeper 88 | networks: 89 | - elk 90 | 91 | kafdrop: 92 | image: obsidiandynamics/kafdrop 93 | ports: 94 | - "9090:9000" 95 | environment: 96 | KAFKA_BROKERCONNECT: kafka:9092 97 | JVM_OPTS: "-Xms64M -Xmx64M" 98 | SERVER_SERVLET_CONTEXTPATH: "/" 99 | depends_on: 100 | - kafka 101 | networks: 102 | - elk 103 | 104 | nginx: 105 | image: nginx:1.18 106 | ports: 107 | - "8000:80" 108 | volumes: 109 | - "./nginx/nginx.conf:/etc/nginx/nginx.conf:ro" 110 | - "./nginx/html:/usr/share/nginx/html:ro" 111 | - type: volume 112 | source: nginx-log 113 | target: /var/log/nginx 114 | 115 | filebeat: 116 | build: 117 | context: filebeat/ 118 | args: 119 | ELK_VERSION: $ELK_VERSION 120 | volumes: 121 | - "./filebeat/filebeat.yml:/usr/share/filebeat/filebeat.yml:ro" 122 | - type: volume 123 | source: nginx-log 124 | target: /var/log 125 | depends_on: 126 | - nginx 127 | networks: 128 | - elk 129 | 130 | networks: 131 | elk: 132 | driver: bridge 133 | 134 | volumes: 135 | elasticsearch: 136 | nginx-log: 137 | -------------------------------------------------------------------------------- /docker-compose/elasticsearch/Dockerfile: -------------------------------------------------------------------------------- 1 | ARG ELK_VERSION 2 | 3 | # https://www.docker.elastic.co/ 4 | FROM docker.elastic.co/elasticsearch/elasticsearch:${ELK_VERSION} 5 | 6 | # Add your elasticsearch plugins setup here 7 | # Example: RUN elasticsearch-plugin install analysis-icu 8 | -------------------------------------------------------------------------------- /docker-compose/elasticsearch/config/elasticsearch.yml: -------------------------------------------------------------------------------- 1 | --- 2 | ## Default Elasticsearch configuration from Elasticsearch base image. 
3 | ## https://github.com/elastic/elasticsearch/blob/master/distribution/docker/src/docker/config/elasticsearch.yml 4 | # 5 | cluster.name: "docker-cluster" 6 | network.host: 0.0.0.0 7 | 8 | ## X-Pack settings 9 | ## see https://www.elastic.co/guide/en/elasticsearch/reference/current/setup-xpack.html 10 | # 11 | xpack.license.self_generated.type: trial 12 | xpack.security.enabled: false 13 | xpack.monitoring.collection.enabled: true 14 | -------------------------------------------------------------------------------- /docker-compose/filebeat/Dockerfile: -------------------------------------------------------------------------------- 1 | ARG ELK_VERSION 2 | 3 | # https://www.docker.elastic.co/ 4 | FROM docker.elastic.co/beats/filebeat:${ELK_VERSION} 5 | -------------------------------------------------------------------------------- /docker-compose/filebeat/filebeat.yml: -------------------------------------------------------------------------------- 1 | filebeat.inputs: 2 | - type: log 3 | paths: 4 | - /var/log/*access.log 5 | encoding: utf-8 6 | fields: 7 | topic: nginx-log 8 | scan_frequency: 1s 9 | harvester_buffer_size: 16384 10 | tail_files: false 11 | close_eof: true 12 | clean_removed: true 13 | 14 | processors: 15 | - drop_fields: 16 | fields: ["ecs", "input", "host", "agent", "log.offset"] 17 | ignore_missing: true 18 | 19 | #------------------------------- Kafka output --------------------------------- 20 | output.kafka: 21 | enabled: true 22 | hosts: ["kafka:9092"] 23 | topic: elk-%{[fields.topic]} 24 | worker: 2 25 | keep_alive: 60 26 | required_acks: 1 27 | -------------------------------------------------------------------------------- /docker-compose/kibana/Dockerfile: -------------------------------------------------------------------------------- 1 | ARG ELK_VERSION 2 | 3 | # https://www.docker.elastic.co/ 4 | FROM docker.elastic.co/kibana/kibana:${ELK_VERSION} 5 | 6 | # Add your kibana plugins setup here 7 | # Example: RUN kibana-plugin install