├── .gitignore
├── FUNDING.yml
├── LICENSE
├── PULLREQUESTS.md
├── README.md
├── ansible
│   ├── local_build.yml
│   ├── roles
│   │   ├── aws-cli
│   │   │   ├── defaults
│   │   │   │   └── main.yml
│   │   │   └── tasks
│   │   │       └── main.yml
│   │   ├── azure-cli
│   │   │   ├── defaults
│   │   │   │   └── main.yml
│   │   │   ├── meta
│   │   │   │   └── main.yml
│   │   │   ├── tasks
│   │   │   │   └── main.yml
│   │   │   └── templates
│   │   │       └── azure-cli.sources.j2
│   │   ├── cerebro
│   │   │   ├── defaults
│   │   │   │   └── main.yml
│   │   │   ├── handlers
│   │   │   │   └── main.yml
│   │   │   └── tasks
│   │   │       └── main.yml
│   │   ├── domain_stats
│   │   │   ├── files
│   │   │   │   ├── domain_stats.yaml
│   │   │   │   └── gunicorn_config.py
│   │   │   └── tasks
│   │   │       └── main.yml
│   │   ├── elastalert
│   │   │   ├── defaults
│   │   │   │   └── main.yml
│   │   │   ├── files
│   │   │   │   └── elastalert_config.yml
│   │   │   └── tasks
│   │   │       └── main.yml
│   │   ├── elastic-apt-source
│   │   │   ├── defaults
│   │   │   │   └── main.yml
│   │   │   ├── tasks
│   │   │   │   └── main.yml
│   │   │   └── templates
│   │   │       └── elastic-8.x.list.j2
│   │   ├── elasticsearch
│   │   │   ├── files
│   │   │   │   ├── elasticsearch.yml
│   │   │   │   └── systemd_override.conf
│   │   │   ├── meta
│   │   │   │   └── main.yml
│   │   │   └── tasks
│   │   │       └── main.yml
│   │   ├── filebeat
│   │   │   ├── meta
│   │   │   │   └── main.yml
│   │   │   └── tasks
│   │   │       └── main.yml
│   │   ├── gcp-cli
│   │   │   ├── defaults
│   │   │   │   └── main.yml
│   │   │   ├── meta
│   │   │   │   └── main.yml
│   │   │   └── tasks
│   │   │       └── main.yml
│   │   ├── google-cloud-apt-source
│   │   │   ├── defaults
│   │   │   │   └── main.yml
│   │   │   ├── tasks
│   │   │   │   └── main.yml
│   │   │   └── templates
│   │   │       └── google-cloud-sdk.list.j2
│   │   ├── kibana
│   │   │   ├── files
│   │   │   │   └── kibana.yml
│   │   │   ├── meta
│   │   │   │   └── main.yml
│   │   │   └── tasks
│   │   │       └── main.yml
│   │   ├── logstash
│   │   │   ├── defaults
│   │   │   │   └── main.yaml
│   │   │   ├── files
│   │   │   │   └── logstash.yml
│   │   │   ├── meta
│   │   │   │   └── main.yml
│   │   │   └── tasks
│   │   │       └── main.yml
│   │   ├── microsoft-apt-source
│   │   │   ├── defaults
│   │   │   │   └── main.yml
│   │   │   └── tasks
│   │   │       └── main.yml
│   │   ├── powershell
│   │   │   ├── defaults
│   │   │   │   └── main.yml
│   │   │   ├── meta
│   │   │   │   └── main.yml
│   │   │   └── tasks
│   │   │       ├── amd64.yml
│   │   │       ├── arm64.yml
│   │   │       └── main.yml
│   │   ├── sof-elk_base
│   │   │   ├── defaults
│   │   │   │   └── main.yml
│   │   │   ├── files
│   │   │   │   ├── GeoIP.conf.default
│   │   │   │   ├── no-cache.conf
│   │   │   │   ├── sof-elk_post-merge.sh
│   │   │   │   ├── sof-elk_sysctl.conf
│   │   │   │   ├── sshd-keygen.service
│   │   │   │   └── workbook-redirect.conf
│   │   │   ├── tasks
│   │   │   │   ├── for509.yml
│   │   │   │   └── main.yml
│   │   │   ├── templates
│   │   │   │   ├── GeoIP.conf.j2
│   │   │   │   ├── issue.prep.j2
│   │   │   │   ├── user_ssh_config.j2
│   │   │   │   └── user_ssh_known_hosts.j2
│   │   │   └── vars
│   │   │       └── for509.yml
│   │   └── sof-elk_finalize
│   │       └── tasks
│   │           └── main.yml
│   └── sof-elk_single_vm.yml
├── configfiles-UNSUPPORTED
│   ├── 0003-input-windows_json.conf
│   ├── 0005-input-suricata.conf
│   ├── 0035-input-alexa.conf
│   ├── 0300-input-httpdlog.conf
│   ├── 1026-preprocess-dhcp.conf
│   ├── 1029-preprocess-esxi.conf
│   ├── 1030-preprocess-greensql.conf
│   ├── 1031-preprocess-iis.conf
│   ├── 1032-preprocess-mcafee.conf
│   ├── 1033-preprocess-snort.conf
│   ├── 1035-preprocess-alexa.conf
│   ├── 2060-sflow.conf
│   ├── 6020-custom_wordpress.conf
│   ├── 6021-dovecot.conf
│   ├── 6022-courier.conf
│   ├── 6023-sendmail.conf
│   ├── 6024-xinetd.conf
│   ├── 6025-crond.conf
│   ├── 6026-milter-greylist.conf
│   ├── 6027-spamassassin.conf
│   ├── 6028-fail2ban.conf
│   ├── 6029-sudo.conf
│   ├── 6030-ansible.conf
│   ├── 6031-yum.conf
│   ├── 6101-switch_brocade.conf
│   ├── 6200-firewall_fortinet.conf
│   ├── 6300-windows.conf
│   ├── 6301-dns_windows.conf
│   ├── 6302-android.conf
│   ├── 6400-suricata.conf
│   ├── 6502-kape_filefolderaccess.conf
│   ├── 8002-postprocess-tags.conf
│   ├── 8007-postprocess-dns_alexa_tagging.conf
│   ├── 8502-postprocess-freq_analysis_zeek_dns.conf
│   ├── 8503-postprocess-freq_analysis_zeek_http.conf
│   ├── 8504-postprocess-freq_analysis_zeek_ssl.conf
│   ├── 8505-postprocess-freq_analysis_zeek_x509.conf
│   ├── 8900-postprocess-ip_tagging.conf
│   ├── 9031-output-iis.conf
│   ├── 9901-output-alexa.conf
│   ├── 9999-output-alerts.conf
│   ├── alexa_readme.txt
│   ├── broker
│   │   ├── 0997-input-broker_kafka.conf
│   │   ├── 0998-input-broker_rabbitmq.conf
│   │   ├── 9997-output-broker_kafka.conf
│   │   └── 9998-output-broker_rabbitmq.conf
│   ├── prereq_readme.txt
│   └── sflow
│       ├── 0004-input-pipe_sflow.conf
│       ├── sflowtool_init_script.txt
│       └── sflowtool_readme.txt
├── configfiles-templates
│   ├── 04xx-input-google_pubsub.conf.sample
│   └── 6xxx-parsing_template.conf.sample
├── configfiles
│   ├── 0000-input-beats.conf
│   ├── 0000-input-stdin.conf
│   ├── 0004-input-syslog.conf
│   ├── 1000-preprocess-all.conf
│   ├── 1001-preprocess-json.conf
│   ├── 1002-preprocess-xml.conf
│   ├── 1010-preprocess-snare.conf
│   ├── 1100-preprocess-syslog.conf
│   ├── 1200-preprocess-zeek.conf
│   ├── 1601-preprocess-plaso.conf
│   ├── 1701-preprocess-microsoft365.conf
│   ├── 1801-preprocess-azure.conf
│   ├── 1950-preprocess-gcp.conf
│   ├── 1952-preprocess-kubernetes.conf
│   ├── 2050-netflow.conf
│   ├── 2051-zeek_conn-netflow.conf
│   ├── 6010-snare.conf
│   ├── 6012-dhcpd.conf
│   ├── 6013-bindquery.conf
│   ├── 6014-passivedns.conf
│   ├── 6015-sshd.conf
│   ├── 6016-pam.conf
│   ├── 6017-iptables.conf
│   ├── 6018-cisco_asa.conf
│   ├── 6050-sysmon.conf
│   ├── 6100-httpd.conf
│   ├── 6200-zeek_dns.conf
│   ├── 6201-zeek_http.conf
│   ├── 6202-zeek_files.conf
│   ├── 6203-zeek_ssl.conf
│   ├── 6204-zeek_x509.conf
│   ├── 6205-zeek_ftp.conf
│   ├── 6275-zeek_notice.conf
│   ├── 6276-zeek_weird.conf
│   ├── 6501-kape_mftecmd.conf
│   ├── 6503-kape_lecmd.conf
│   ├── 6504-kape_evtxecmd.conf
│   ├── 6601-plaso.conf
│   ├── 6701-microsoft365.conf
│   ├── 6801-azure.conf
│   ├── 6901-aws.conf
│   ├── 6950-gcp.conf
│   ├── 6951-gws.conf
│   ├── 6952-kubernetes.conf
│   ├── 8000-postprocess-zeek.conf
│   ├── 8006-postprocess-dns.conf
│   ├── 8050-postprocess-ip_addresses.conf
│   ├── 8051-postprocess-ports.conf
│   ├── 8052-postprocess-community-id.conf
│   ├── 8053-postprocess-mac.conf
│   ├── 8054-postprocess-netflow.conf
│   ├── 8060-postprocess-useragent.conf
│   ├── 8100-postprocess-syslog.conf
│   ├── 8110-postprocess-httpd.conf
│   ├── 8599-postprocess-kape.conf
│   ├── 8701-postprocess-microsoft365.conf
│   ├── 8951-postprocess-gws.conf
│   ├── 8952-postprocess-kubernetes.conf
│   ├── 8999-postprocess-all.conf
│   ├── 9000-output-elastic-consolidated.conf
│   └── 9999-output-stdout.conf
├── doc
│   └── ecs_fields.csv
├── grok-patterns
│   ├── archive-netflow
│   ├── cisco_asa
│   └── sof-elk_custom
├── kibana
│   ├── dashboard
│   │   ├── 11f462e0-8d11-11e9-a03c-33ad7fc2c4d0.json
│   │   ├── 14142080-d1c4-11ea-8c51-e9420681eaa9.json
│   │   ├── 52e0ec80-7a77-11e9-92d0-dfa499d9fb15.json
│   │   ├── 6d272a20-7319-11e8-9f32-cf7527ac183d.json
│   │   ├── 8e8ec8e0-d27c-11e8-b129-a1f6f7111a75.json
│   │   ├── 99d1b510-72b3-11e8-9159-894bd7d62352.json
│   │   ├── ac6e6490-3021-11e8-9faf-f77fbc81a50d.json
│   │   ├── ba531340-2f17-11e8-9faf-f77fbc81a50d.json
│   │   ├── db653a10-286e-11eb-bf98-fb32763e0d19.json
│   │   └── f8c3cbb0-51c1-11e9-8b54-fde301eff14e.json
│   ├── data_views
│   │   ├── all-records.json
│   │   ├── aws.json
│   │   ├── azure.json
│   │   ├── evtxlogs.json
│   │   ├── filesystem.json
│   │   ├── gcp.json
│   │   ├── gws.json
│   │   ├── httpdlog.json
│   │   ├── kape.json
│   │   ├── kubernetes.json
│   │   ├── lnkfiles.json
│   │   ├── logstash.json
│   │   ├── microsoft365.json
│   │   ├── netflow.json
│   │   ├── plaso.json
│   │   └── zeek.json
│   ├── lens
│   │   ├── 02a030a4-0331-41c1-af27-670ffc546b86.json
│   │   ├── 0a8caf55-ebaa-4f11-8814-c374826fbc16.json
│   │   ├── 0f655fb0-a04a-11ee-a0ee-f991cb8cedce.json
│   │   ├── 138278b4-4534-4aa2-a1db-41e37631a092.json
│   │   ├── 13b187af-33b2-48c5-bf0b-9a73926a9e32.json
│   │   ├── 1609e040-f2c5-4034-8d2f-306559dff160.json
│   │   ├── 171a55d0-a04a-11ee-a0ee-f991cb8cedce.json
│   │   ├── 1eb0c770-a04a-11ee-a0ee-f991cb8cedce.json
│   │   ├── 21a9bd2b-bb80-43aa-be16-04731080a925.json
│   │   ├── 2692cab2-0827-4109-87fc-b818de74d383.json
│   │   ├── 26b7f5ab-1a85-40e3-bfd3-c3997797276a.json
│   │   ├── 2cc7b8a8-248b-44d5-9840-1d3558eec280.json
│   │   ├── 32f2efb0-a04a-11ee-a0ee-f991cb8cedce.json
│   │   ├── 43ff4c20-4e61-48c3-a21c-5bcbd83cbcfe.json
│   │   ├── 45a5fedc-2123-4ea6-90fd-7901f8bae75c.json
│   │   ├── 4b691cc6-ba10-4bbb-9f4e-dc432d194337.json
│   │   ├── 4cd59887-d159-45a4-8bf9-8ddc64e16ddb.json
│   │   ├── 4e9d18c6-5cba-4c0e-ade6-175c225e3db9.json
│   │   ├── 519fd14a-7f5c-4b8c-85e0-3c83940f7caa.json
│   │   ├── 5666fb7c-3000-43d7-81c5-fad5d982410f.json
│   │   ├── 59f54d4e-c7c3-4f50-94a1-9539dbce8f60.json
│   │   ├── 5fe6a5c5-9ab4-4b06-8363-7fe6890d4b79.json
│   │   ├── 612ef7a6-e5ed-4b2d-a5db-d9f8cfaeffeb.json
│   │   ├── 6331b5ef-b00b-4273-8def-be5e35b004d1.json
│   │   ├── 688eaaa8-4fd4-429c-8dc5-dfc41244e68c.json
│   │   ├── 6fd03332-b9a9-4974-928c-6ed8d0b20e1f.json
│   │   ├── 76a5fff1-bf5c-4b58-9c17-69ba58a989c7.json
│   │   ├── 7e0fa019-0905-46a4-9c8d-a3958f0a3d5c.json
│   │   ├── 7fdb47ea-c342-4b34-89c3-8ee38eb314e0.json
│   │   ├── 837e0ef9-e427-46e3-b0e9-04fcf2e0a46e.json
│   │   ├── 87f42dd5-44ab-47b0-8815-9cfe1d9cdffd.json
│   │   ├── 8b0f3dc0-a04a-11ee-a0ee-f991cb8cedce.json
│   │   ├── 933f2712-8ef9-4b6c-9e31-eb5a1234f225.json
│   │   ├── 9910b561-204c-4501-a47d-c84d1171084b.json
│   │   ├── 9a153810-a04a-11ee-a0ee-f991cb8cedce.json
│   │   ├── a4c03cb0-a04a-11ee-a0ee-f991cb8cedce.json
│   │   ├── a6659177-7b81-4b45-9139-aeaa5ecfe2bf.json
│   │   ├── a7906280-a0e0-11ee-a0ee-f991cb8cedce.json
│   │   ├── ad7b4a77-3c3d-46d9-be87-3c0f2db68f38.json
│   │   ├── ae15c370-a04a-11ee-a0ee-f991cb8cedce.json
│   │   ├── b7a67e70-a04a-11ee-a0ee-f991cb8cedce.json
│   │   ├── b9fc4420-a0e0-11ee-a0ee-f991cb8cedce.json
│   │   ├── c385bd50-a04a-11ee-a0ee-f991cb8cedce.json
│   │   ├── c59132eb-c58f-4a91-b394-f62b7ec92b40.json
│   │   ├── d79fda00-a0e0-11ee-a0ee-f991cb8cedce.json
│   │   ├── dc934d6c-c235-4e41-a73e-0325e2f169f0.json
│   │   ├── e1683eea-191a-4b6a-bf27-cd96595ffae0.json
│   │   ├── e3e7b3e3-ac26-4a8c-be6b-6a3bdf97b1f5.json
│   │   ├── f1f74ee3-49d6-4bab-b5df-72e60576ef39.json
│   │   ├── fe2e44b4-a5c4-41e8-97c0-c64c716d8d67.json
│   │   └── fee39b8e-903d-400f-bdb4-9ec91dc86814.json
│   ├── map
│   │   ├── 222d6430-76f4-11eb-9d54-294249e0048f.json
│   │   ├── 4c447f20-76f3-11eb-9d54-294249e0048f.json
│   │   ├── 5ee98b15-398b-4ae4-af73-7b74cc8c2c17.json
│   │   ├── 6d844e90-76f3-11eb-9d54-294249e0048f.json
│   │   ├── 7d14c280-76db-11eb-9d54-294249e0048f.json
│   │   ├── bf84499d-9bdf-4823-a416-bcc3661d5c9b.json
│   │   ├── ec7313e0-76f2-11eb-9d54-294249e0048f.json
│   │   └── f2425180-76db-11eb-9d54-294249e0048f.json
│   ├── search
│   │   ├── 80bd16f0-51bf-11e9-8b54-fde301eff14e.json
│   │   ├── 8d421a80-7a76-11e9-92d0-dfa499d9fb15.json
│   │   ├── 9744c560-2f17-11e8-9faf-f77fbc81a50d.json
│   │   ├── c80386c0-286e-11eb-bf98-fb32763e0d19.json
│   │   ├── e45686d0-3021-11e8-9faf-f77fbc81a50d.json
│   │   ├── eb2ab450-8d08-11e9-a03c-33ad7fc2c4d0.json
│   │   └── fa3332b0-72b0-11e8-9159-894bd7d62352.json
│   ├── sof-elk_config.json
│   └── visualization
│       ├── 029f7a20-e6f6-11e9-a3ea-d7d01b053a51.json
│       ├── 18bf98f0-2870-11eb-bf98-fb32763e0d19.json
│       ├── 4061ca00-2874-11eb-bf98-fb32763e0d19.json
│       ├── 98309f20-8873-11e8-b4c2-17db52b8990d.json
│       ├── afda9520-2873-11eb-bf98-fb32763e0d19.json
│       └── bdecdc70-731e-11e8-9f32-cf7527ac183d.json
├── lib
│   ├── configfiles
│   │   └── filebeat.yml
│   ├── dictionaries
│   │   ├── dns_type_code2name.yaml
│   │   ├── eventid2desc.yaml
│   │   ├── ip_proto_int2name.yaml
│   │   ├── ip_proto_name2int.yaml
│   │   ├── ip_proto_zeek2int.yaml
│   │   ├── makeyaml.py
│   │   ├── port_proto_int2iana.yaml
│   │   ├── service_int2iana.yaml
│   │   ├── service_svc2listtype.yaml
│   │   ├── syslog_facility2int.yaml
│   │   ├── syslog_severity2int.yaml
│   │   ├── zeek_connection_state2desc.yaml
│   │   └── zeek_dns_class2classabbrev.yaml
│   ├── elastalert_rules
│   │   └── README.txt
│   ├── elasticsearch_templates
│   │   ├── component_templates
│   │   │   ├── component-mappings-corecommon.json
│   │   │   ├── component-mappings-final.json
│   │   │   ├── component-mappings-geohash.json
│   │   │   ├── component-mappings-guid.json
│   │   │   ├── component-mappings-ip_address.json
│   │   │   ├── component-mappings-port.json
│   │   │   ├── component-mappings-string.json
│   │   │   ├── component-mappings-zeekuid.json
│   │   │   └── component-settings-common.json
│   │   └── index_templates
│   │       ├── 000_index-example.json.sample
│   │       ├── index-.kibana.json
│   │       ├── index-aws.json
│   │       ├── index-azure.json
│   │       ├── index-evtxlogs.json
│   │       ├── index-filesystem.json
│   │       ├── index-gcp.json
│   │       ├── index-gws.json
│   │       ├── index-httpdlog.json
│   │       ├── index-kubernetes.json
│   │       ├── index-lnkfiles.json
│   │       ├── index-logstash.json
│   │       ├── index-microsoft365.json
│   │       ├── index-netflow.json
│   │       ├── index-plaso.json
│   │       └── index-zeek.json
│   ├── filebeat_inputs
│   │   ├── aws.yml
│   │   ├── azure.yml
│   │   ├── filebeat_template.yml.sample
│   │   ├── gcp.yml
│   │   ├── gws.yml
│   │   ├── httpdlog.yml
│   │   ├── kape.yml
│   │   ├── kubernetes.yml
│   │   ├── microsoft365.yml
│   │   ├── netflow.yml
│   │   ├── passivedns.yml
│   │   ├── plaso.yml
│   │   ├── syslog.yml
│   │   └── zeek.yml
│   ├── sof-elk.svg
│   ├── sof-elk_logo_sm.png
│   └── systemd
│       ├── domain-stats.service
│       ├── domain_stats.service
│       └── elastalert.service
└── supporting-scripts
    ├── admin
    │   ├── cloudtrail-reduce.sh
    │   ├── dump_all_indexes.sh
    │   ├── dump_dashboards.sh
    │   └── dump_index_template.sh
    ├── aws-cloudtrail2sof-elk.py
    ├── aws-vpcflow2sof-elk.sh
    ├── azure-flow2sof-elk.py
    ├── change_keyboard.sh
    ├── community-id.rb
    ├── cronjobs
    │   └── git-remote-update.cron
    ├── csv2json.py
    ├── data_flags_to_array.rb
    ├── dhcp-renew-login-screen.sh
    ├── distro_prep.sh
    ├── elk_user_login.sh
    ├── es_ls_commands.txt
    ├── es_plugin_update.sh
    ├── extra_blocks_to_array.rb
    ├── fw_modify.sh
    ├── geoip_bootstrap
    │   └── geoip_bootstrap.sh
    ├── geoip_update_logstash.sh
    ├── git-check-pull-needed.sh
    ├── git-remote-update.sh
    ├── hex_to_integer.rb
    ├── kick-aws-logs.sh
    ├── load_all_dashboards.sh
    ├── ls_plugin_update.sh
    ├── nfdump2sof-elk.sh
    ├── ntfs_flags_to_array.rb
    ├── post_merge.sh
    ├── sof-elk_clear.py
    ├── sof-elk_freeze.py
    ├── sof-elk_update.sh
    ├── split_gcp_authinfo_fields.rb
    ├── split_kv_multi_to_fields.rb
    ├── split_kv_to_fields.rb
    ├── tcp_flags_to_array.rb
    ├── vm-update-check.sh
    └── wait_for_es.sh
/.gitignore:
--------------------------------------------------------------------------------
1 | samples/
2 |
--------------------------------------------------------------------------------
/FUNDING.yml:
--------------------------------------------------------------------------------
1 | github: [philhagen]
2 |
--------------------------------------------------------------------------------
/ansible/local_build.yml:
--------------------------------------------------------------------------------
1 | ---
2 | # This inventory file is used to create a single-instance SOF-ELK(R) system
3 | # (C)2025 Lewes Technology Consulting, LLC
4 |
5 | all:
6 | children:
7 | ungrouped:
8 | hosts:
9 | 127.0.0.1:
10 | ansible_connection: local
11 |
--------------------------------------------------------------------------------
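As context for how this inventory is consumed: a minimal sketch of a top-level playbook in the spirit of ansible/sof-elk_single_vm.yml (whose contents are not included in this excerpt), with an illustrative role list drawn from the roles/ directory above — the actual role set and ordering are assumptions:

---
# Hypothetical playbook; run as:
#   ansible-playbook -i ansible/local_build.yml ansible/sof-elk_single_vm.yml
- hosts: all
  become: true
  roles:
    - sof-elk_base
    - elasticsearch
    - logstash
    - kibana
    - filebeat
    - sof-elk_finalize
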
/ansible/roles/aws-cli/defaults/main.yml:
--------------------------------------------------------------------------------
1 | ---
2 | # Ansible file supporting the build of a SOF-ELK(R) standalone system
3 | # (C)2024 Lewes Technology Consulting, LLC
4 |
5 | aws_cli_dependencies:
6 | - python3-boto3
7 |
--------------------------------------------------------------------------------
/ansible/roles/aws-cli/tasks/main.yml:
--------------------------------------------------------------------------------
1 | ---
2 | # Ansible file supporting the build of a SOF-ELK(R) standalone system
3 | # (C)2024 Lewes Technology Consulting, LLC
4 |
5 | # adapted from https://github.com/xchangebit/ansible-aws-cli
6 | - name: Install AWS CLI dependencies
7 | ansible.builtin.apt:
8 | name: '{{ aws_cli_dependencies }}'
9 | state: present
10 | tags: aws-cli
11 |
12 | - name: Check to see if aws binary exists
13 | ansible.builtin.stat:
14 | path: /usr/local/bin/aws
15 | register: aws_binary
16 | tags: aws-cli
17 |
18 | - name: Download the awscli bundle
19 | ansible.builtin.get_url:
20 | url: https://awscli.amazonaws.com/awscli-exe-linux-x86_64.zip
21 | dest: /tmp/awscli-bundle.zip
22 | force: yes
23 | when: not aws_binary.stat.exists
24 | register: aws_cli_download_bundle
25 | tags: aws-cli
26 |
27 | - name: Unarchive the installer
28 | ansible.builtin.unarchive:
29 | src: /tmp/awscli-bundle.zip
30 | dest: /tmp
31 | copy: false
32 | creates: /tmp/aws
33 | when: aws_cli_download_bundle.changed
34 | register: aws_cli_unarchive_installer
35 | tags: aws-cli
36 |
37 | - name: Install awscli package
38 | ansible.builtin.shell: /tmp/aws/install
39 | args:
40 | creates: /usr/local/bin/aws
41 | when: aws_cli_unarchive_installer.changed
42 | tags: aws-cli
43 |
44 | - name: Remove the installer
45 | ansible.builtin.file:
46 | path: '{{ item }}'
47 | state: absent
48 | with_items:
49 | - /tmp/aws
50 | - /tmp/awscli-bundle.zip
51 | when: aws_cli_download_bundle.changed or aws_cli_unarchive_installer.changed
52 | tags: aws-cli
53 |
--------------------------------------------------------------------------------
/ansible/roles/azure-cli/defaults/main.yml:
--------------------------------------------------------------------------------
1 | ---
2 | # Ansible file supporting the build of a SOF-ELK(R) standalone system
3 | # (C)2024 Lewes Technology Consulting, LLC
4 |
5 | azure_cli_dependencies:
6 | - python3-azure-storage
7 |
--------------------------------------------------------------------------------
/ansible/roles/azure-cli/meta/main.yml:
--------------------------------------------------------------------------------
1 | ---
2 | # Ansible file supporting the build of a SOF-ELK(R) standalone system
3 | # (C)2024 Lewes Technology Consulting, LLC
4 |
5 | dependencies:
6 | - role: microsoft-apt-source
7 |
--------------------------------------------------------------------------------
/ansible/roles/azure-cli/tasks/main.yml:
--------------------------------------------------------------------------------
1 | ---
2 | # Ansible file supporting the build of a SOF-ELK(R) standalone system
3 | # (C)2024 Lewes Technology Consulting, LLC
4 |
5 | - name: Install Azure CLI dependencies
6 | ansible.builtin.apt:
7 | name: '{{ azure_cli_dependencies }}'
8 | state: present
9 | tags: azure-cli
10 |
11 | - name: Install Microsoft Azure source definition file
12 | ansible.builtin.template:
13 | src: azure-cli.sources.j2
14 | dest: /etc/apt/sources.list.d/azure-cli.sources
15 | mode: 0644
16 | register: azure_source
17 | tags: azure-cli
18 |
19 | - name: Update apt sources
20 | ansible.builtin.command: apt-get update
21 | when: azure_source.changed
22 | tags: azure-cli
23 |
24 | - name: Install Azure packages
25 | ansible.builtin.apt:
26 | name: azure-cli
27 | state: present
28 | tags: azure-cli
29 |
--------------------------------------------------------------------------------
/ansible/roles/azure-cli/templates/azure-cli.sources.j2:
--------------------------------------------------------------------------------
1 | Types: deb
2 | URIs: https://packages.microsoft.com/repos/azure-cli/
3 | Suites: noble
4 | Components: main
5 | Architectures: {{ ubuntu_architecture }}
6 | Signed-by: /usr/share/keyrings/microsoft-prod.gpg
7 |
--------------------------------------------------------------------------------
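For reference, assuming ubuntu_architecture resolves to amd64 (the variable is defined outside this template, so that value is an assumption), the template above renders to a deb822-style stanza such as:

Types: deb
URIs: https://packages.microsoft.com/repos/azure-cli/
Suites: noble
Components: main
Architectures: amd64
Signed-by: /usr/share/keyrings/microsoft-prod.gpg
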
/ansible/roles/cerebro/defaults/main.yml:
--------------------------------------------------------------------------------
1 | ---
2 | # Ansible file supporting the build of a SOF-ELK(R) standalone system
3 | # (C)2024 Lewes Technology Consulting, LLC
4 |
5 | cerebro_rpm_url: 'https://github.com/lmenezes/cerebro/releases/download/v0.8.3/cerebro-0.8.3-1.noarch.rpm'
6 |
--------------------------------------------------------------------------------
/ansible/roles/cerebro/handlers/main.yml:
--------------------------------------------------------------------------------
1 | ---
2 | # Ansible file supporting the build of a SOF-ELK(R) standalone system
3 | # (C)2024 Lewes Technology Consulting, LLC
4 |
5 | - name: restart cerebro
6 | ansible.builtin.systemd_service:
7 | name: cerebro
8 | state: restarted
9 |
10 | - name: reload firewalld
11 | ansible.builtin.systemd_service:
12 | name: firewalld
13 | state: reloaded
14 |
--------------------------------------------------------------------------------
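Handlers like these run only at the end of a play, and only when a task reports a change and notifies them by name. The tasks file below notifies reload firewalld; nothing shown here notifies restart cerebro, so as a hedged illustration only, a configuration task that would exercise it might look like this (the application.conf source file is hypothetical):

- name: Configure Cerebro
  copy:
    src: application.conf
    dest: /usr/share/cerebro/conf/application.conf
  notify: restart cerebro
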
/ansible/roles/cerebro/tasks/main.yml:
--------------------------------------------------------------------------------
1 | ---
2 | # Ansible file supporting the build of a SOF-ELK(R) standalone system
3 | # (C)2024 Lewes Technology Consulting, LLC
4 |
5 | - name: Install Cerebro RPM
6 | yum:
7 | name: '{{ cerebro_rpm_url }}'
8 | state: present
9 | tags: sof-elk_cerebro
10 |
11 | - name: Set Cerebro directory permissions
12 | file:
13 | dest: '/usr/share/cerebro'
14 | recurse: true
15 | owner: cerebro
16 | group: cerebro
17 | tags: sof-elk_cerebro
18 |
19 | - name: Open firewall ports
20 | firewalld:
21 | port: 9000/tcp
22 | permanent: true
23 | state: enabled
24 | notify: reload firewalld
25 | tags: sof-elk_cerebro
26 |
27 | - name: Enable and start Cerebro service
28 | ansible.builtin.systemd_service:
29 | name: cerebro
30 | enabled: true
31 | state: started
32 | tags: sof-elk_cerebro
33 |
--------------------------------------------------------------------------------
/ansible/roles/domain_stats/files/domain_stats.yaml:
--------------------------------------------------------------------------------
1 | ---
2 | # Ansible file supporting the build of a SOF-ELK(R) standalone system
3 | # (C)2024 Lewes Technology Consulting, LLC
4 |
5 | cache_browse_limit: 100
6 | count_rdap_errors: false
7 | enable_freq_scores: true
8 | established_days_age: 730
9 | freq_avg_alert: 5.0
10 | freq_table: freqtable2018.freq
11 | freq_word_alert: 4.0
12 | ip_address: 127.0.0.1
13 | local_port: 5730
14 | log_detail: 0
15 | mode: rdap
16 | rdap_error_ttl_days: 7
17 | threads_per_worker: 12
18 | timezone_offset: 0
19 | workers: 9
--------------------------------------------------------------------------------
/ansible/roles/domain_stats/files/gunicorn_config.py:
--------------------------------------------------------------------------------
1 | import gunicorn
2 | import multiprocessing
3 | import os
4 |
5 | os.environ["SERVER_SOFTWARE"] = "domain_stats"
6 | bind = "127.0.0.1:5730"
7 | workers = 9
8 | threads = 12
9 | gunicorn.SERVER_SOFTWARE = "domain_stats"
10 |
--------------------------------------------------------------------------------
/ansible/roles/domain_stats/tasks/main.yml:
--------------------------------------------------------------------------------
1 | ---
2 | # Ansible file supporting the build of a SOF-ELK(R) standalone system
3 | # (C)2024 Lewes Technology Consulting, LLC
4 |
5 | - name: Install domain-stats pip dependencies
6 | pip:
7 | name: python-whois
8 | executable: pip3
9 | state: present
10 | tags: domainstats
11 |
12 | - name: Install domain-stats
13 | pip:
14 | name: domain-stats
15 | executable: pip3
16 | state: present
17 | tags: domainstats
18 |
19 | - name: Install domain-stats configuration
20 | copy:
21 | src: '{{ item }}'
22 | dest: /usr/local/share/domain-stats/
23 | owner: root
24 | group: root
25 | mode: 0640
26 | with_items:
27 | - 'domain_stats.yaml'
28 | - 'gunicorn_config.py'
29 | tags: domainstats
30 |
31 | - name: Copy freq table to operational location
32 | copy:
33 | remote_src: true
34 | src: /usr/local/lib/python3.6/site-packages/domain_stats/data/freqtable2018.freq
35 | dest: /usr/local/share/domain-stats/
36 | tags: domainstats
37 |
38 | - name: Install Top 1M records
39 | shell: domain-stats-utils -i /usr/local/lib/python3.6/site-packages/domain_stats/data/top1m.import -nx /usr/local/share/domain-stats/
40 |
41 | - name: Enable domain-stats service
42 | ansible.builtin.systemd_service:
43 | name: /usr/local/sof-elk/lib/systemd/domain-stats.service
44 | enabled: true
45 | tags: domainstats
46 |
47 | - name: Start domain-stats service
48 | ansible.builtin.systemd_service:
49 | name: domain-stats
50 | state: started
51 | tags: domainstats
52 |
--------------------------------------------------------------------------------
/ansible/roles/elastalert/defaults/main.yml:
--------------------------------------------------------------------------------
1 | ---
2 | # Ansible file supporting the build of a SOF-ELK(R) standalone system
3 | # (C)2024 Lewes Technology Consulting, LLC
4 |
5 | elastalert_version: '0.2.4'
6 | elastalert_pip_modules:
7 | - setuptools
8 | - texttable
9 | - thehive4py
10 | - elastalert=={{ elastalert_version }}
11 |
--------------------------------------------------------------------------------
/ansible/roles/elastalert/tasks/main.yml:
--------------------------------------------------------------------------------
1 | ---
2 | # Ansible file supporting the build of a SOF-ELK(R) standalone system
3 | # (C)2024 Lewes Technology Consulting, LLC
4 |
5 | - name: Install Elastalert rpm dependencies
6 | yum:
7 | name: python3-devel
8 | state: present
9 | tags: elastalert
10 |
11 | - name: Create elastalert user
12 | user:
13 | name: 'elastalert'
14 | system: true
15 | state: present
16 | tags: elastalert
17 |
18 | - name: Upgrade pip
19 | pip:
20 | name: pip
21 | executable: pip3
22 | state: latest
23 | tags: elastalert
24 |
25 | - name: Install Elastalert and pip dependencies
26 | pip:
27 | name: '{{ elastalert_pip_modules }}'
28 | executable: pip3
29 | state: present
30 | tags: elastalert
31 |
32 | - name: Configure Elastalert
33 | copy:
34 | src: 'elastalert_config.yml'
35 | dest: '/etc/sysconfig/elastalert_config.yml'
36 | owner: root
37 | group: root
38 | mode: 0644
39 | tags: elastalert
40 |
41 | - name: Create Elastalert index
42 | command: 'elastalert-create-index --host 127.0.0.1 --port 9200 --no-ssl --no-auth --url-prefix "" --index "elastalert_status" --old-index "" --config /etc/sysconfig/elastalert_config.yml'
43 | tags: elastalert
44 |
45 | - name: Enable Elastalert service
46 | ansible.builtin.systemd_service:
47 | name: /usr/local/sof-elk/lib/systemd/elastalert.service
48 | enabled: true
49 | tags: elastalert
50 |
51 | - name: Start Elastalert service
52 | ansible.builtin.systemd_service:
53 | name: elastalert
54 | state: started
55 | tags: elastalert
56 |
--------------------------------------------------------------------------------
/ansible/roles/elastic-apt-source/defaults/main.yml:
--------------------------------------------------------------------------------
1 | ---
2 | # Ansible file supporting the build of a SOF-ELK(R) standalone system
3 | # (C)2024 Lewes Technology Consulting, LLC
4 |
5 | elastic_gpg_keyring: /usr/share/keyrings/elastic-keyring.gpg
6 | elastic_temp_armored_keyring: /tmp/elastic-keyring.asc
--------------------------------------------------------------------------------
/ansible/roles/elastic-apt-source/tasks/main.yml:
--------------------------------------------------------------------------------
1 | ---
2 | # Ansible file supporting the build of a SOF-ELK(R) standalone system
3 | # (C)2024 Lewes Technology Consulting, LLC
4 |
5 | - name: Determine if gpg keyring already exists
6 | ansible.builtin.stat:
7 | path: "{{ elastic_gpg_keyring }}"
8 | register: es_gpg_keyring_file
9 | tags:
10 | - elasticsearch
11 | - logstash
12 | - kibana
13 | - filebeat
14 |
15 | - name: Download armored Elastic GPG key
16 | ansible.builtin.get_url:
17 | url: https://artifacts.elastic.co/GPG-KEY-elasticsearch
18 | dest: "{{ elastic_temp_armored_keyring }}"
19 | mode: 0644
20 | when: not es_gpg_keyring_file.stat.exists
21 | register: elastic_armored_gpg_key_downloaded
22 | tags:
23 | - elasticsearch
24 | - logstash
25 | - kibana
26 | - filebeat
27 |
28 | - name: Convert Elastic GPG key to binary form
29 | ansible.builtin.command: gpg --dearmor -o {{ elastic_gpg_keyring }} {{ elastic_temp_armored_keyring }}
30 | when: elastic_armored_gpg_key_downloaded.changed
31 | tags:
32 | - elasticsearch
33 | - logstash
34 | - kibana
35 | - filebeat
36 |
37 | - name: Remove armored Elastic GPG key
38 | ansible.builtin.file:
39 | path: "{{ elastic_temp_armored_keyring }}"
40 | state: absent
41 | when: elastic_armored_gpg_key_downloaded.changed
42 | tags:
43 | - elasticsearch
44 | - logstash
45 | - kibana
46 | - filebeat
47 |
48 | - name: Install Elastic source definition file
49 | ansible.builtin.template:
50 | src: elastic-8.x.list.j2
51 | dest: /etc/apt/sources.list.d/elastic-8.x.list
52 | owner: root
53 | group: root
54 | mode: 0644
55 | register: es_source
56 | tags:
57 | - elasticsearch
58 | - logstash
59 | - kibana
60 | - filebeat
61 |
62 | - name: Update apt sources
63 | ansible.builtin.command: apt-get update
64 | when: es_source.changed
65 | tags:
66 | - elasticsearch
67 | - logstash
68 | - kibana
69 | - filebeat
70 |
--------------------------------------------------------------------------------
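The stat / get_url / gpg --dearmor / template chain above is the standard signed-by keyring setup for apt. As a hedged aside — not what this repo does — ansible-core 2.15+ ships ansible.builtin.deb822_repository, which collapses the key download, dearmoring, and source definition into a single task:

- name: Configure Elastic 8.x apt source (deb822 form)
  ansible.builtin.deb822_repository:
    name: elastic-8.x
    types: deb
    uris: https://artifacts.elastic.co/packages/8.x/apt
    suites: stable
    components: main
    signed_by: https://artifacts.elastic.co/GPG-KEY-elasticsearch
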
/ansible/roles/elastic-apt-source/templates/elastic-8.x.list.j2:
--------------------------------------------------------------------------------
1 | deb [signed-by={{ elastic_gpg_keyring }}] https://artifacts.elastic.co/packages/8.x/apt stable main
2 |
--------------------------------------------------------------------------------
/ansible/roles/elasticsearch/files/systemd_override.conf:
--------------------------------------------------------------------------------
1 | [Service]
2 | LimitMEMLOCK=infinity
3 | TimeoutStartSec=900
--------------------------------------------------------------------------------
/ansible/roles/elasticsearch/meta/main.yml:
--------------------------------------------------------------------------------
1 | ---
2 | # Ansible file supporting the build of a SOF-ELK(R) standalone system
3 | # (C)2024 Lewes Technology Consulting, LLC
4 |
5 | dependencies:
6 | - role: elastic-apt-source
7 |
--------------------------------------------------------------------------------
/ansible/roles/elasticsearch/tasks/main.yml:
--------------------------------------------------------------------------------
1 | ---
2 | # Ansible file supporting the build of a SOF-ELK(R) standalone system
3 | # (C)2024 Lewes Technology Consulting, LLC
4 |
5 | - name: Install Elasticsearch
6 | ansible.builtin.apt:
7 | name: elasticsearch={{ elastic_stack_version }}
8 | state: present
9 | tags: elasticsearch
10 |
11 | - name: Set permissions on Elasticsearch data directory
12 | ansible.builtin.file:
13 | path: /var/lib/elasticsearch
14 | state: directory
15 | mode: 0755
16 | owner: elasticsearch
17 | group: elasticsearch
18 | tags: elasticsearch
19 |
20 | - name: Create Elasticsearch systemd override directory
21 | ansible.builtin.file:
22 | dest: '/etc/systemd/system/elasticsearch.service.d'
23 | state: directory
24 | mode: 0755
25 | owner: root
26 | group: root
27 | tags: elasticsearch
28 |
29 | - name: Override Elasticsearch systemd limits
30 | ansible.builtin.copy:
31 | src: 'systemd_override.conf'
32 | dest: '/etc/systemd/system/elasticsearch.service.d/override.conf'
33 | owner: root
34 | group: root
35 | mode: 0644
36 | register: elasticsearch_systemd_config
37 | tags: elasticsearch
38 |
39 | - name: Configure Elasticsearch
40 | ansible.builtin.copy:
41 | src: 'elasticsearch.yml'
42 | dest: '/etc/elasticsearch/elasticsearch.yml'
43 | owner: root
44 | group: elasticsearch
45 | mode: 0660
46 | register: elasticsearch_config
47 | tags: elasticsearch
48 |
49 | - name: Start and Enable Elasticsearch service
50 | ansible.builtin.systemd_service:
51 | daemon_reload: true
52 | name: elasticsearch
53 | enabled: true
54 | state: started
55 | register: enablestart_elasticsearch
56 | tags: elasticsearch
57 |
58 | - name: Restart Elasticsearch
59 | ansible.builtin.systemd_service:
60 | name: elasticsearch
61 | state: restarted
62 | when: (not enablestart_elasticsearch.changed) and (elasticsearch_systemd_config.changed or elasticsearch_config.changed)
63 | tags: elasticsearch
64 |
--------------------------------------------------------------------------------
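The register/when pairing above restarts Elasticsearch only when one of the configuration tasks changed something and the service was not just freshly started. The same restart-on-change behavior is often expressed with a handler, as the cerebro role earlier in this tree does; a minimal sketch under that alternative structure:

- name: Configure Elasticsearch
  ansible.builtin.copy:
    src: elasticsearch.yml
    dest: /etc/elasticsearch/elasticsearch.yml
  notify: restart elasticsearch

# and in handlers/main.yml:
- name: restart elasticsearch
  ansible.builtin.systemd_service:
    name: elasticsearch
    state: restarted
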
/ansible/roles/filebeat/meta/main.yml:
--------------------------------------------------------------------------------
1 | ---
2 | # Ansible file supporting the build of a SOF-ELK(R) standalone system
3 | # (C)2024 Lewes Technology Consulting, LLC
4 |
5 | dependencies:
6 | - role: elastic-apt-source
7 |
--------------------------------------------------------------------------------
/ansible/roles/filebeat/tasks/main.yml:
--------------------------------------------------------------------------------
1 | ---
2 | # Ansible file supporting the build of a SOF-ELK(R) standalone system
3 | # (C)2024 Lewes Technology Consulting, LLC
4 |
5 | - name: Install Filebeat
6 | ansible.builtin.apt:
7 | name: filebeat={{ elastic_stack_version }}
8 | state: present
9 | tags: filebeat
10 |
11 | - name: Configure filebeat
12 | ansible.builtin.file:
13 | src: '/usr/local/sof-elk/lib/configfiles/filebeat.yml'
14 | dest: '/etc/filebeat/filebeat.yml'
15 | state: link
16 | force: true
17 | register: filebeat_config
18 | tags: filebeat
19 |
20 | - name: Create filebeat input base directory
21 | ansible.builtin.file:
22 | dest: '/logstash'
23 | state: directory
24 | owner: root
25 | group: root
26 | mode: 0755
27 |
28 | - name: Create filebeat input directories
29 | ansible.builtin.file:
30 | dest: '/logstash/{{ item }}'
31 | state: directory
32 | owner: root
33 | group: root
34 | mode: 01777
35 | with_items:
36 | - syslog
37 | - nfarch
38 | - httpd
39 | - passivedns
40 | - zeek
41 | - kape
42 | - plaso
43 | - microsoft365
44 | - azure
45 | - aws
46 | - gcp
47 | - gws
48 | - kubernetes
49 | register: filebeat_input_directories
50 | tags: filebeat
51 |
52 | - name: Start and enable Filebeat service
53 | ansible.builtin.systemd_service:
54 | name: filebeat
55 | enabled: true
56 | state: started
57 | register: enablestart_filebeat
58 | tags: filebeat
59 |
60 | - name: Restart Filebeat
61 | ansible.builtin.systemd_service:
62 | daemon_reload: true
63 | name: filebeat
64 | state: restarted
65 | when: (not enablestart_filebeat.changed) and (filebeat_config.changed or filebeat_input_directories.changed)
66 | tags: filebeat
67 |
--------------------------------------------------------------------------------
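Mode 01777 gives the /logstash/* ingest directories the same sticky-bit, world-writable permissions as /tmp: any local user can drop evidence files in, but only a file's owner (or root) can remove or rename them. A hedged verification sketch, not part of the role itself:

- name: Stat an ingest directory
  ansible.builtin.stat:
    path: /logstash/syslog
  register: ingest_dir

- name: Confirm sticky, world-writable mode
  ansible.builtin.assert:
    that:
      - ingest_dir.stat.mode == '1777'
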
/ansible/roles/gcp-cli/defaults/main.yml:
--------------------------------------------------------------------------------
1 | ---
2 | # Ansible file supporting the build of a SOF-ELK(R) standalone system
3 | # (C)2024 Lewes Technology Consulting, LLC
4 |
5 | # this is needed to avoid a python version dependency problem
6 | gcp_version: 502.0.0-0
7 |
--------------------------------------------------------------------------------
/ansible/roles/gcp-cli/meta/main.yml:
--------------------------------------------------------------------------------
1 | ---
2 | # Ansible file supporting the build of a SOF-ELK(R) standalone system
3 | # (C)2024 Lewes Technology Consulting, LLC
4 |
5 | dependencies:
6 | - role: google-cloud-apt-source
7 |
--------------------------------------------------------------------------------
/ansible/roles/gcp-cli/tasks/main.yml:
--------------------------------------------------------------------------------
1 | ---
2 | # Ansible file supporting the build of a SOF-ELK(R) standalone system
3 | # (C)2024 Lewes Technology Consulting, LLC
4 |
5 | - name: Install Google Cloud packages
6 | ansible.builtin.apt:
7 | name: google-cloud-cli={{ gcp_version }}
8 | state: present
9 | tags: gcp-cli
10 |
--------------------------------------------------------------------------------
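Because the version is pinned to sidestep the python dependency problem noted in the role defaults, a later routine apt upgrade could still move google-cloud-cli forward. A hedged sketch of holding the package at the pinned version — an addition this excerpt does not show the repo making:

- name: Hold google-cloud-cli at the pinned version
  ansible.builtin.dpkg_selections:
    name: google-cloud-cli
    selection: hold
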
/ansible/roles/google-cloud-apt-source/defaults/main.yml:
--------------------------------------------------------------------------------
1 | ---
2 | # Ansible file supporting the build of a SOF-ELK(R) standalone system
3 | # (C)2024 Lewes Technology Consulting, LLC
4 |
5 | googlecloud_gpg_keyring: /usr/share/keyrings/gcp-keyring.gpg
6 | googlecloud_temp_armored_keyring: /tmp/gcp-keyring.asc
7 |
--------------------------------------------------------------------------------
/ansible/roles/google-cloud-apt-source/tasks/main.yml:
--------------------------------------------------------------------------------
1 | ---
2 | # Ansible file supporting the build of a SOF-ELK(R) standalone system
3 | # (C)2024 Lewes Technology Consulting, LLC
4 |
5 | - name: Determine if Google Cloud gpg keyring already exists
6 | ansible.builtin.stat:
7 | path: "{{ googlecloud_gpg_keyring }}"
8 | register: googlecloud_gpg_keyring_file
9 | tags: gcp-cli
10 |
11 | - name: Download armored Google Cloud GPG key
12 | ansible.builtin.get_url:
13 | url: https://packages.cloud.google.com/apt/doc/apt-key.gpg
14 | dest: "{{ googlecloud_temp_armored_keyring }}"
15 | mode: 0644
16 | when: not googlecloud_gpg_keyring_file.stat.exists
17 | register: armored_googlecloud_gpg_key_downloaded
18 | tags: gcp-cli
19 |
20 | - name: Convert Google Cloud GPG key to binary form
21 | ansible.builtin.command: gpg --dearmor -o {{ googlecloud_gpg_keyring }} {{ googlecloud_temp_armored_keyring }}
22 | when: armored_googlecloud_gpg_key_downloaded.changed
23 | tags: gcp-cli
24 |
25 | - name: Remove armored Google Cloud GPG key
26 | ansible.builtin.file:
27 | path: "{{ googlecloud_temp_armored_keyring }}"
28 | state: absent
29 | when: armored_googlecloud_gpg_key_downloaded.changed
30 | tags: gcp-cli
31 |
32 | - name: Install Google Cloud source definition file
33 | ansible.builtin.template:
34 | src: google-cloud-sdk.list.j2
35 | dest: /etc/apt/sources.list.d/google-cloud-sdk.list
36 | mode: 0644
37 | register: googlecloud_source
38 | tags: gcp-cli
39 |
40 | - name: Update apt sources
41 | ansible.builtin.command: apt-get update
42 | when: googlecloud_source.changed
43 | tags: gcp-cli
44 |
--------------------------------------------------------------------------------
/ansible/roles/google-cloud-apt-source/templates/google-cloud-sdk.list.j2:
--------------------------------------------------------------------------------
1 | deb [signed-by={{ googlecloud_gpg_keyring }}] https://packages.cloud.google.com/apt cloud-sdk main
--------------------------------------------------------------------------------
/ansible/roles/kibana/meta/main.yml:
--------------------------------------------------------------------------------
1 | ---
2 | # Ansible file supporting the build of a SOF-ELK(R) standalone system
3 | # (C)2024 Lewes Technology Consulting, LLC
4 |
5 | dependencies:
6 | - role: elastic-apt-source
7 |
--------------------------------------------------------------------------------
/ansible/roles/kibana/tasks/main.yml:
--------------------------------------------------------------------------------
1 | ---
2 | # Ansible file supporting the build of a SOF-ELK(R) standalone system
3 | # (C)2024 Lewes Technology Consulting, LLC
4 |
5 | - name: Install Kibana
6 | ansible.builtin.apt:
7 | name: kibana={{ elastic_stack_version }}
8 | state: present
9 | tags: kibana
10 |
11 | - name: Configure Kibana
12 | ansible.builtin.copy:
13 | src: 'kibana.yml'
14 | dest: '/etc/kibana/kibana.yml'
15 | owner: root
16 | group: root
17 | mode: 0644
18 | register: kibana_config
19 | tags: kibana
20 |
21 | - name: Open firewall ports
22 | ansible.posix.firewalld:
23 | port: 5601/tcp
24 | permanent: true
25 | state: enabled
26 | register: firewall_port
27 | #when: not (ansible_system_vendor == 'Microsoft Corporation' and ansible_virtualization_type == 'VirtualPC')
28 | tags: kibana
29 |
30 | - name: Restart firewalld
31 | ansible.builtin.systemd_service:
32 | name: firewalld
33 | state: reloaded
34 | when: firewall_port.changed
35 | tags: kibana
36 |
37 | - name: Enable and start Kibana service
38 | ansible.builtin.systemd_service:
39 | daemon_reload: true
40 | name: kibana
41 | enabled: true
42 | state: started
43 | register: enablestart_kibana
44 | tags: kibana
45 |
46 | - name: Restart Kibana
47 | ansible.builtin.systemd_service:
48 | daemon_reload: true
49 | name: kibana
50 | state: restarted
51 | when: (not enablestart_kibana.changed) and (kibana_config.changed)
52 | tags: kibana
53 |
--------------------------------------------------------------------------------
/ansible/roles/logstash/defaults/main.yaml:
--------------------------------------------------------------------------------
1 | ---
2 | # Ansible file supporting the build of a SOF-ELK(R) standalone system
3 | # (C)2024 Lewes Technology Consulting, LLC
4 |
5 | logstash_plugins:
6 | - logstash-input-relp
7 | - logstash-input-google_pubsub
8 | - logstash-filter-tld
9 | - logstash-filter-rest
10 | - logstash-filter-json_encode
11 |
12 | gcp_gpg_keyring: /usr/share/keyrings/gcp-keyring.gpg
13 | gcp_armored_keyring: /tmp/gcp-keyring.asc
14 |
15 | # this is needed to avoid a python version dependency problem
16 | gcp_version: 502.0.0-0
17 |
--------------------------------------------------------------------------------
/ansible/roles/logstash/meta/main.yml:
--------------------------------------------------------------------------------
1 | ---
2 | # Ansible file supporting the build of a SOF-ELK(R) standalone system
3 | # (C)2024 Lewes Technology Consulting, LLC
4 |
5 | dependencies:
6 | - role: elastic-apt-source
7 |
--------------------------------------------------------------------------------
/ansible/roles/logstash/tasks/main.yml:
--------------------------------------------------------------------------------
1 | ---
2 | # Ansible file supporting the build of a SOF-ELK(R) standalone system
3 | # (C)2024 Lewes Technology Consulting, LLC
4 |
5 | - name: Install Logstash
6 | ansible.builtin.apt:
7 | name: logstash=1:{{ elastic_stack_version }}-1
8 | state: present
9 | tags: logstash
10 |
11 | - name: Install Logstash Plugins
12 | community.general.logstash_plugin:
13 | name: '{{ item }}'
14 | state: present
15 | with_items: '{{ logstash_plugins }}'
16 | register: logstash_plugin_installation
17 | tags: logstash
18 |
19 | - name: Set Logstash JVM options
20 | ansible.builtin.lineinfile:
21 | dest: '/etc/logstash/jvm.options'
22 | regexp: "^{{ item.parameter | regex_escape() }}"
23 | line: "{{ item.parameter }}{{ item.value }}"
24 | with_items:
25 | - { parameter: '-Xms', value: '750m' }
26 | - { parameter: '-Xss', value: '4m' }
27 | register: logstash_jvm_config
28 | tags: logstash
29 |
30 | - name: Get list of all configfiles
31 | ansible.builtin.find:
32 | paths: '/usr/local/sof-elk/configfiles/'
33 | file_type: file
34 | register: configfile_results
35 | tags: logstash
36 |
37 | - name: Set symlinks for pipeline configuration
38 | ansible.builtin.file:
39 | state: link
40 | src: '/usr/local/sof-elk/configfiles/{{ item.path | basename }}'
41 | dest: '/etc/logstash/conf.d/{{ item.path | basename }}'
42 | with_items:
43 | - '{{ configfile_results.files }}'
44 | register: logstash_config_symlinks
45 | tags: logstash
46 |
47 | - name: Configure Logstash
48 | ansible.builtin.copy:
49 | src: 'logstash.yml'
50 | dest: '/etc/logstash/logstash.yml'
51 | owner: root
52 | group: root
53 | mode: 0644
54 | register: logstash_config
55 | tags: logstash
56 |
57 | - name: Start and Enable Logstash service
58 | ansible.builtin.systemd_service:
59 | name: logstash
60 | enabled: true
61 | state: started
62 | register: enablestart_logstash
63 | tags: logstash
64 |
65 | - name: Restart Logstash
66 | ansible.builtin.systemd_service:
67 | name: logstash
68 | state: restarted
69 | when: (not enablestart_logstash.changed) and (logstash_plugin_installation.changed or logstash_jvm_config.changed or logstash_config_symlinks.changed or logstash_config.changed)
70 | tags: logstash
71 |
--------------------------------------------------------------------------------
/ansible/roles/microsoft-apt-source/defaults/main.yml:
--------------------------------------------------------------------------------
1 | ---
2 | # Ansible file supporting the build of a SOF-ELK(R) standalone system
3 | # (C)2024 Lewes Technology Consulting, LLC
4 |
5 | msprod_source_package_url: https://packages.microsoft.com/config/ubuntu/24.04/packages-microsoft-prod.deb
6 | msprod_temp_local_path: /tmp/packages-microsoft-prod.deb
7 |
--------------------------------------------------------------------------------
/ansible/roles/microsoft-apt-source/tasks/main.yml:
--------------------------------------------------------------------------------
1 | ---
2 | # Ansible file supporting the build of a SOF-ELK(R) standalone system
3 | # (C)2024 Lewes Technology Consulting, LLC
4 |
5 | - name: Gather package facts
6 | ansible.builtin.package_facts:
7 | manager: auto
8 | tags:
9 | - azure-cli
10 | - powershell
11 |
12 | - name: Download MSProd source package
13 | ansible.builtin.get_url:
14 | url: "{{ msprod_source_package_url }}"
15 | dest: "{{ msprod_temp_local_path }}"
16 | mode: 0644
17 | when: "'packages-microsoft-prod' not in ansible_facts.packages"
18 | register: msprod_source_package_downloaded
19 | tags:
20 | - azure-cli
21 | - powershell
22 |
23 | - name: Install MSProd source package
24 | ansible.builtin.command: dpkg -i {{ msprod_temp_local_path }}
25 | when: msprod_source_package_downloaded.changed
26 | tags:
27 | - azure-cli
28 | - powershell
29 |
30 | - name: Remove MSProd source package
31 | ansible.builtin.file:
32 | path: "{{ msprod_temp_local_path }}"
33 | state: absent
34 | when: msprod_source_package_downloaded.changed
35 | register: msprod_source
36 | tags:
37 | - azure-cli
38 | - powershell
39 |
40 | - name: Update apt sources
41 | ansible.builtin.command: apt-get update
42 | when: msprod_source.changed
43 | tags:
44 | - azure-cli
45 | - powershell
46 |
--------------------------------------------------------------------------------
/ansible/roles/powershell/defaults/main.yml:
--------------------------------------------------------------------------------
1 | ---
2 | # Ansible file supporting the build of a SOF-ELK(R) standalone system
3 | # (C)2024 Lewes Technology Consulting, LLC
4 |
5 | # x86 uses apt install
6 | powershell_apt_packages:
7 | - powershell
8 |
9 | # arm uses .tar.gz install. (related; WHY, MICROSOFT, WHY?!??!?!?)
10 | powershell_version: 7.4.6
11 | powershell_archive: powershell-{{ powershell_version }}-linux-arm64.tar.gz
12 | powershell_installation_directory: /opt/microsoft/powershell/7
13 | powershell_archive_url: https://github.com/PowerShell/PowerShell/releases/download/v{{ powershell_version }}/{{ powershell_archive }}
14 |
--------------------------------------------------------------------------------
/ansible/roles/powershell/meta/main.yml:
--------------------------------------------------------------------------------
1 | ---
2 | # Ansible file supporting the build of a SOF-ELK(R) standalone system
3 | # (C)2024 Lewes Technology Consulting, LLC
4 |
5 | dependencies:
6 | - role: microsoft-apt-source
7 |
--------------------------------------------------------------------------------
/ansible/roles/powershell/tasks/amd64.yml:
--------------------------------------------------------------------------------
1 | ---
2 | # Ansible file supporting the build of a SOF-ELK(R) standalone system
3 | # (C)2024 Lewes Technology Consulting, LLC
4 |
5 | # See https://learn.microsoft.com/en-us/powershell/scripting/install/install-ubuntu?view=powershell-7.4
6 | # Note the GPG key is already installed from the microsoft-apt-source role (even
7 | # though the ARM version does not install with apt)
8 |
9 | - name: Install Powershell packages
10 | ansible.builtin.apt:
11 | name: '{{ powershell_apt_packages }}'
12 | state: present
13 | tags: powershell
14 |
--------------------------------------------------------------------------------
/ansible/roles/powershell/tasks/arm64.yml:
--------------------------------------------------------------------------------
1 | ---
2 | # Ansible file supporting the build of a SOF-ELK(R) standalone system
3 | # (C)2024 Lewes Technology Consulting, LLC
4 |
5 | # See https://learn.microsoft.com/en-us/powershell/scripting/install/install-other-linux?view=powershell-7.4
6 |
7 | - name: Determine if PowerShell is already installed
8 | ansible.builtin.stat:
9 | path: /usr/bin/pwsh
10 | register: powershell_symlink
11 | tags: powershell
12 |
13 | - name: Create PowerShell installation directory
14 | ansible.builtin.file:
15 | path: "{{ powershell_installation_directory }}"
16 | state: directory
17 | mode: 0755
18 | owner: root
19 | group: root
20 | when: not powershell_symlink.stat.exists
21 | tags: powershell
22 |
23 | - name: Extract PowerShell archive to installation directory
24 | ansible.builtin.unarchive:
25 | src: "{{ powershell_archive_url }}"
26 | dest: "{{ powershell_installation_directory }}"
27 | remote_src: true
28 | when: not powershell_symlink.stat.exists
29 | tags: powershell
30 |
31 | - name: Set PowerShell binary execute permissions
32 | ansible.builtin.file:
33 | path: /opt/microsoft/powershell/7/pwsh
34 | mode: 0755
35 | when: not powershell_symlink.stat.exists
36 | tags: powershell
37 |
38 | - name: Create PowerShell symlink
39 | ansible.builtin.file:
40 | state: link
41 | src: /opt/microsoft/powershell/7/pwsh
42 | dest: /usr/bin/pwsh
43 | when: not powershell_symlink.stat.exists
44 | tags: powershell
45 |
--------------------------------------------------------------------------------
/ansible/roles/powershell/tasks/main.yml:
--------------------------------------------------------------------------------
1 | ---
2 | # Ansible file supporting the build of a SOF-ELK(R) standalone system
3 | # (C)2024 Lewes Technology Consulting, LLC
4 |
5 | - name: Install {{ ubuntu_architecture }} PowerShell
6 | ansible.builtin.include_tasks:
7 | file: "{{ ubuntu_architecture }}.yml"
8 | tags: powershell
9 |
--------------------------------------------------------------------------------
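The include above branches on ubuntu_architecture (amd64 or arm64), which is defined somewhere outside this excerpt. Purely as an illustrative assumption, such a fact could be derived from Ansible's built-in hardware facts like so:

- name: Derive the Debian-style architecture name (hypothetical)
  ansible.builtin.set_fact:
    ubuntu_architecture: "{{ 'arm64' if ansible_architecture == 'aarch64' else 'amd64' }}"
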
/ansible/roles/sof-elk_base/defaults/main.yml:
--------------------------------------------------------------------------------
1 | ---
2 | # Ansible file supporting the build of a SOF-ELK(R) standalone system
3 | # (C)2024 Lewes Technology Consulting, LLC
4 |
5 | base_apt_packages:
6 | - at
7 | - bc
8 | - bind9-dnsutils
9 | - btop
10 | - bzip2
11 | - cifs-utils
12 | - cron
13 | - eject
14 | - exfat-fuse
15 | - file
16 | - firewalld
17 | - gcc
18 | - geoip-bin
19 | - geoipupdate
20 | - git
21 | - htop
22 | - jq
23 | - locate
24 | - logrotate
25 | - lsof
26 | - net-tools
27 | - nfdump
28 | - p7zip
29 | - psmisc
30 | - python3-dev
31 | - python3-elasticsearch
32 | - python3-pip
33 | - rsync
34 | - screen
35 | - tcpdump
36 | - tree
37 | - unzip
38 | - vim
39 | - whois
40 |
41 | vmware_apt_packages:
42 | - open-vm-tools
43 |
44 | revision_string: "<%REVNO%>"
45 |
--------------------------------------------------------------------------------
/ansible/roles/sof-elk_base/files/GeoIP.conf.default:
--------------------------------------------------------------------------------
1 | # Please see https://dev.maxmind.com/geoip/updating-databases?lang=en for
2 | # instructions on setting up geoipupdate, including information on how to
3 | # download a pre-filled GeoIP.conf file.
4 |
5 | # Replace YOUR_ACCOUNT_ID_HERE and YOUR_LICENSE_KEY_HERE with an active account
6 | # ID and license key combination associated with your MaxMind account. These
7 | # are available from https://www.maxmind.com/en/my_license_key.
8 | AccountID <%ACCOUNT_ID%>
9 | LicenseKey <%LICENSE_KEY%>
10 |
11 | # Enter the edition IDs of the databases you would like to update.
12 | # Multiple edition IDs are separated by spaces.
13 | EditionIDs GeoLite2-Country GeoLite2-City GeoLite2-ASN
14 |
15 | # The remaining settings are OPTIONAL.
16 |
17 | # The directory to store the database files. Defaults to /usr/share/GeoIP
18 | DatabaseDirectory /usr/local/share/GeoIP
19 |
20 | # The server to use. Defaults to "updates.maxmind.com".
21 | # Host updates.maxmind.com
22 |
23 | # The proxy host name or IP address. You may optionally specify a
24 | # port number, e.g., 127.0.0.1:8888. If no port number is specified, 1080
25 | # will be used.
26 | # Proxy 127.0.0.1:8888
27 |
28 | # The user name and password to use with your proxy server.
29 | # ProxyUserPassword username:password
30 |
31 | # Whether to preserve modification times of files downloaded from the server.
32 | # Defaults to "0".
33 | # PreserveFileTimes 0
34 |
35 | # The lock file to use. This ensures only one geoipupdate process can run at a
36 | # time.
37 | # Note: Once created, this lockfile is not removed from the filesystem.
38 | # Defaults to ".geoipupdate.lock" under the DatabaseDirectory.
39 | # LockFile /usr/local/share/GeoIP/.geoipupdate.lock
40 |
41 | # The amount of time to retry for when errors during HTTP transactions are
42 | # encountered. It can be specified as a (possibly fractional) decimal number
43 | # followed by a unit suffix. Valid time units are "ns", "us" (or "µs"), "ms",
44 | # "s", "m", "h".
45 | # Defaults to "5m" (5 minutes).
46 | # RetryFor 5m
47 |
--------------------------------------------------------------------------------
/ansible/roles/sof-elk_base/files/no-cache.conf:
--------------------------------------------------------------------------------
1 | #
2 | # E-Workbook configuration to make sure the latest files are always accessed
3 | # filename: /etc/apache2/conf-available/no-cache.conf
4 | #
5 |
6 | ExpiresActive On
7 | ExpiresDefault "access plus 1 seconds"
8 |
9 |
--------------------------------------------------------------------------------
/ansible/roles/sof-elk_base/files/sof-elk_post-merge.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | /usr/local/sbin/post_merge.sh
3 |
--------------------------------------------------------------------------------
/ansible/roles/sof-elk_base/files/sof-elk_sysctl.conf:
--------------------------------------------------------------------------------
1 | vm.swappiness=1
2 |
--------------------------------------------------------------------------------
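vm.swappiness=1 tells the kernel to avoid swapping anonymous memory except under severe pressure, which complements the Elasticsearch LimitMEMLOCK=infinity override earlier in this tree. A hedged sketch of applying the same setting with the ansible.posix collection instead of shipping this file (the role's actual task is not shown in this excerpt):

- name: Minimize swapping
  ansible.posix.sysctl:
    name: vm.swappiness
    value: '1'
    sysctl_file: /etc/sysctl.d/sof-elk_sysctl.conf
    state: present
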
/ansible/roles/sof-elk_base/files/sshd-keygen.service:
--------------------------------------------------------------------------------
1 | # Adapted from https://github.com/Dlackware/systemd/blob/master/openssh/sshd-keygen.service
2 |
3 | [Unit]
4 | Description=SSH Key Generation
5 | ConditionPathExists=|!/etc/ssh/ssh_host_dsa_key
6 | ConditionPathExists=|!/etc/ssh/ssh_host_dsa_key.pub
7 | ConditionPathExists=|!/etc/ssh/ssh_host_ecdsa_key
8 | ConditionPathExists=|!/etc/ssh/ssh_host_ecdsa_key.pub
9 | ConditionPathExists=|!/etc/ssh/ssh_host_ed25519_key
10 | ConditionPathExists=|!/etc/ssh/ssh_host_ed25519_key.pub
11 | ConditionPathExists=|!/etc/ssh/ssh_host_rsa_key
12 | ConditionPathExists=|!/etc/ssh/ssh_host_rsa_key.pub
13 |
14 | [Service]
15 | ExecStart=/usr/bin/ssh-keygen -A
16 | Type=oneshot
17 | RemainAfterExit=yes
18 |
19 | [Install]
20 | WantedBy=multi-user.target
21 |
--------------------------------------------------------------------------------
/ansible/roles/sof-elk_base/files/workbook-redirect.conf:
--------------------------------------------------------------------------------
1 | #
2 | # This configuration file redirects the root URL to the Electronic Workbook
3 | # filename: /etc/apache2/conf-available/workbook-redirect.conf
4 | #
5 |
6 | RedirectMatch 302 ^/$ /workbook/
7 |
8 |
--------------------------------------------------------------------------------
/ansible/roles/sof-elk_base/templates/GeoIP.conf.j2:
--------------------------------------------------------------------------------
1 | AccountID {{ geoip_accountid | default('0') }}
2 | LicenseKey {{ geoip_licensekey | default('0') }}
3 | EditionIDs GeoLite2-Country GeoLite2-City GeoLite2-ASN
4 | DatabaseDirectory /usr/local/share/GeoIP
5 |
--------------------------------------------------------------------------------
/ansible/roles/sof-elk_base/templates/issue.prep.j2:
--------------------------------------------------------------------------------
1 | \S
2 | Kernel \r on an \m
3 |
4 | SOF-ELK: Security Operations and Forensic ELK (Elasticsearch,
5 |          Logstash, and Kibana) Virtual machine
6 | {% if course_id == "for509" %}
7 | Used in: SANS FOR509: Cloud Forensics and Incident Response
8 |          SANS FOR572: Advanced Network Forensics and Analysis
9 |          SANS FOR589: Cybercrime Intelligence
10 |          SANS Aviata: Cloud Solo Flight Challenge
11 | {% else %}
12 | Used in: SANS FOR572: Advanced Network Forensics and Analysis
13 |          SANS FOR509: Cloud Forensics and Incident Response
14 |          SANS FOR589: Cybercrime Intelligence
15 |          SANS Aviata: Cloud Solo Flight Challenge
16 | {% endif %}
17 | To learn more: http://for572.com/sof-elk-readme
18 | Revision: {{ revision_string }}
19 |
20 | -------------------------------------------------------------------------------
21 | {% if course_id == "for509" %}
22 | This VM is hosting the FOR509 Electronic Workbook.
23 | - You can access it with a browser at http://\4
24 |
25 | {% endif %}
26 | This VM is running the Logstash and Kibana applications.
27 | - You can access Kibana at http://\4:5601
28 | - You can SSH to this system at IP address \4
29 |
30 | The VM will ingest log files placed into the "/logstash/*/"
31 | directories. See the Kibana homepage (link above) for specific guidance.
32 |
33 |
--------------------------------------------------------------------------------
/ansible/roles/sof-elk_base/templates/user_ssh_config.j2:
--------------------------------------------------------------------------------
1 | Host github.com
2 | User git
3 | IdentityFile ~/.ssh/{{ course_id | lower }}_workbook_id_{{ ssh_keytype }}
4 |
--------------------------------------------------------------------------------
/ansible/roles/sof-elk_base/templates/user_ssh_known_hosts.j2:
--------------------------------------------------------------------------------
1 | github.com ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQCj7ndNxQowgcQnjshcLrqPEiiphnt+VTTvDP6mHBL9j1aNUkY4Ue1gvwnGLVlOhGeYrnZaMgRK6+PKCUXaDbC7qtbW8gIkhL7aGCsOr/C56SJMy/BCZfxd1nWzAOxSDPgVsmerOBYfNqltV9/hWCqBywINIR+5dIg6JTJ72pcEpEjcYgXkE2YEFXV1JHnsKgbLWNlhScqb2UmyRkQyytRLtL+38TGxkxCflmO+5Z8CSSNY7GidjMIZ7Q4zMjA2n1nGrlTDkzwDCsw+wqFPGQA179cnfGWOWRVruj16z6XyvxvjJwbz0wQZ75XK5tKSb7FNyeIEs4TT4jk+S4dhPeAUC5y+bDYirYgM4GC7uEnztnZyaVWQ7B381AK4Qdrwt51ZqExKbQpTUNn+EjqoTwvqNj4kqx5QUCI0ThS/YkOxJCXmPUWZbhjpCg56i+2aB6CmK2JGhn57K5mj0MNdBXA4/WnwH6XoPWJzK5Nyu2zB3nAZp+S5hpQs+p1vN1/wsjk=
2 | github.com ecdsa-sha2-nistp256 AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBEmKSENjQEezOmxkZMy7opKgwFB9nkt5YRrYMjNuG5N87uRgg6CLrbo5wAdT/y6v0mKV0U2w0WZ2YB/++Tpockg=
3 | github.com ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIOMqqnkVzrm0SdG6UOoqKLsabgH5C9okWi0dh2l9GKJl
4 |
--------------------------------------------------------------------------------
/ansible/roles/sof-elk_base/vars/for509.yml:
--------------------------------------------------------------------------------
1 | ---
2 | # Ansible file supporting the build of a SOF-ELK(R) standalone system
3 | # (C)2025 Lewes Technology Consulting, LLC
4 |
5 | course_apt_packages:
6 | - apache2
7 | - netcat-traditional
8 |
9 | apache2_confs:
10 | - no-cache
11 | - workbook-redirect
12 |
13 | apache2_mods:
14 | - rewrite
15 | - expires
16 | - headers
17 |
18 | ssh_confs:
19 | - known_hosts
20 | - config
21 |
22 | ssh_keytype: ed25519
23 |
--------------------------------------------------------------------------------
/ansible/roles/sof-elk_finalize/tasks/main.yml:
--------------------------------------------------------------------------------
1 | ---
2 | # Ansible file supporting the build of a SOF-ELK(R) standalone system
3 | # (C)2024 Lewes Technology Consulting, LLC
4 |
5 | - name: Wait for Elasticsearch to be ready
6 | ansible.builtin.command: '/usr/local/sof-elk/supporting-scripts/wait_for_es.sh'
7 | tags: sof-elk_finalize
8 |
9 | - name: Run the post-merge script, which also loads all dashboards
10 | ansible.builtin.command: '/usr/local/sof-elk/supporting-scripts/post_merge.sh'
11 | tags: sof-elk_finalize
12 |
13 | - name: Restart Kibana
14 | ansible.builtin.systemd_service:
15 | name: kibana
16 | state: restarted
17 | tags: sof-elk_finalize
18 |
--------------------------------------------------------------------------------
/configfiles-UNSUPPORTED/0003-input-windows_json.conf:
--------------------------------------------------------------------------------
1 | # Author: Justin Henderson
2 | # Email: jhenderson@tekrefresh.com
3 | # Last Update: 5/14/2016
4 | #
5 | input {
6 | tcp {
7 | port => 6052
8 | type => "windows"
9 | tags => [ "json" ]
10 | codec => json {
11 | charset => "CP1252"
12 | }
13 | }
14 | }
15 |
--------------------------------------------------------------------------------
/configfiles-UNSUPPORTED/0005-input-suricata.conf:
--------------------------------------------------------------------------------
1 | # Author: Justin Henderson
2 | # Email: jhenderson@tekrefresh.com
3 | # Last Update: 5/14/2016
4 | #
5 | input {
6 | tcp {
7 | port => 6053
8 | codec => json
9 | type => "suricata"
10 | }
11 | }
12 |
--------------------------------------------------------------------------------
/configfiles-UNSUPPORTED/0035-input-alexa.conf:
--------------------------------------------------------------------------------
1 | input {
2 | file {
3 | path => "/lib/logstash_data/top-1m.csv"
4 | type => "alexa"
5 | }
6 | }
7 |
--------------------------------------------------------------------------------
/configfiles-UNSUPPORTED/0300-input-httpdlog.conf:
--------------------------------------------------------------------------------
1 | # SOF-ELK® Configuration File
2 | # (C)2016 Lewes Technology Consulting, LLC
3 | #
4 | # This file contains inputs for the HTTP Access Log parsers
5 |
6 | input {
7 | # HTTP access logs, in any of the following formats:
8 | #
9 | #
10 | #
11 | #
12 | #
13 |
14 | # HTTP access logs using syslog protocol via udp and tcp port
15 | syslog {
16 | port => 5515
17 | type => "httpdlog"
18 | use_labels => false
19 | }
20 |
21 | # HTTP access logs using relp protocol via tcp port
22 | relp {
23 | port => 5517
24 | type => "httpdlog"
25 | }
26 | }
27 |
--------------------------------------------------------------------------------
/configfiles-UNSUPPORTED/1029-preprocess-esxi.conf:
--------------------------------------------------------------------------------
1 | # Author: Justin Henderson
2 | # Email: jhenderson@tekrefresh.com
3 | # Last Update: 10/17/2016
4 | #
5 | # This configuration file takes ESXi syslog messages and filters them. There is no input, as the logs would have come in via syslog
6 | filter {
7 | # This is an example of using an IP address range to classify a syslog message to a specific type of log
8 | # This is helpful as so many devices only send logs via syslog
9 | if [host] =~ "10\.[0-1]\.9\." {
10 | mutate {
11 | replace => { "type" => "esxi" }
12 | }
13 | }
14 | if [host] =~ "\.234$" {
15 | mutate {
16 | replace => { "type" => "esxi" }
17 | }
18 | }
19 | if [type] == "esxi" {
20 | grok {
21 | match => [ "message", "(?:%{SYSLOGTIMESTAMP:timestamp}|%{TIMESTAMP_ISO8601:timestamp8601}) (?:%{SYSLOGHOST:logsource}) (?:%{SYSLOGPROG}): (?<esxi_message_body>(?:\[(?<esxi_thread>[0-9A-Z]{8,8}) %{DATA:esxi_loglevel} \'%{DATA:esxi_service}\'\] %{GREEDYDATA:esxi_message}|%{GREEDYDATA}))" ]
22 | }
23 | }
24 | }
25 |
26 |
--------------------------------------------------------------------------------
/configfiles-UNSUPPORTED/1030-preprocess-greensql.conf:
--------------------------------------------------------------------------------
1 | # Author: Justin Henderson
2 | # Email: jhenderson@tekrefresh.com
3 | # Last Update: 10/17/2016
4 | #
5 | # This configuration file is for parsing out GreenSQL data. There is no input, as the log should come in via syslog
6 | filter {
7 | if [type] == "greensql" {
8 | # This section is parsing out the fields for GreenSQL syslog data
9 | grok {
10 | match => { "message" => "<%{INT:Code}>%{DATA:Category}\[%{INT:Transaction}\]:\s*Database=%{DATA:Database}\sUser=%{DATA:UserName}\sApplication Name=%{DATA:Application}\sSource IP=%{IPV4:SrcIp}\sSource Port=%{INT:SrcPort}\sTarget IP=?%{IPV4:DstIp}\sTarget Port=%{DATA:DstPort}\sQuery=%{GREEDYDATA:Query}"}
11 | match => { "message" => "<%{INT:Code}>%{DATA:Category}\[%{INT:Transaction}\]:\sAdmin_Name=%{DATA:UserName}\sIP_Address=%{IPV4:SrcIp}\sUser_Agent=%{DATA:UserAgent}\sMessage=%{DATA:StatusMessage}\sDescription=%{DATA:Description}\sSeverity=%{GREEDYDATA:Severity}"}
12 | }
13 | # Remove the message field as it is unnecessary
14 | #mutate {
15 | # remove_field => [ "message"]
16 | #}
17 | }
18 | }
19 |
--------------------------------------------------------------------------------
/configfiles-UNSUPPORTED/1031-preprocess-iis.conf:
--------------------------------------------------------------------------------
1 | # Author: Justin Henderson
2 | # Email: jhenderson@tekrefresh.com
3 | # Last Update: 10/17/2016
4 | #
5 | # This conf file is based on accepting logs for IIS
6 | filter {
7 | if [type] == "iis" {
8 | # The log is expected to have come from NXLog in JSON format. This allows for automatic parsing of fields
9 | json {
10 | source => "message"
11 | }
12 | # This removes the message field as it is unnecessary and tags the packet as web
13 | mutate {
14 | # remove_field => [ "message"]
15 | add_tag => [ "web" ]
16 | }
17 | }
18 | }
19 |
--------------------------------------------------------------------------------
/configfiles-UNSUPPORTED/1032-preprocess-mcafee.conf:
--------------------------------------------------------------------------------
1 | # Author: Justin Henderson
2 | # Email: jhenderson@tekrefresh.com
3 | # Last Update: 10/17/2016
4 | #
5 | # This file looks for McAfee EPO logs coming from a NXLog client
6 | filter {
7 | if [type] == "mcafee" {
8 | # NXLog should be sending the logs in JSON format so they auto parse
9 | json {
10 | source => "message"
11 | }
12 | # This section converts the UTC fields to the proper time format
13 | date {
14 | match => [ "ReceivedUTC", "YYYY-MM-dd HH:mm:ss" ]
15 | target => [ "ReceivedUTC" ]
16 | }
17 | date {
18 | match => [ "DetectedUTC", "YYYY-MM-dd HH:mm:ss" ]
19 | target => [ "DetectedUTC" ]
20 | }
21 | }
22 | }
23 |
--------------------------------------------------------------------------------
/configfiles-UNSUPPORTED/1035-preprocess-alexa.conf:
--------------------------------------------------------------------------------
1 | # Author: Justin Henderson
2 | # Email: jhenderson@tekrefresh.com
3 | # Last Update: 10/31/2016
4 | #
5 | # This conf file is used to parse the top-1m.csv for alexa domains
6 | filter {
7 | if [type] == "alexa" {
8 | csv {
9 | columns => [ "alexa_number", "site" ]
10 | separator => ","
11 | remove_field => [ "message" ]
12 | }
13 | mutate {
14 | convert => { "alexa_number" => "integer" }
15 | }
16 | }
17 | }
18 |
--------------------------------------------------------------------------------
/configfiles-UNSUPPORTED/2060-sflow.conf:
--------------------------------------------------------------------------------
1 | filter {
2 | if [type] == "sflow" {
3 | if [message] =~ /CNTR/ {
4 | mutate {
5 | add_tag => [ "dropcandidate" ]
6 | }
7 | }
8 |
9 | grok {
10 | match => { "message" => "%{WORD:sample_type},%{IP:sflow_source_ip},%{WORD:in_port:int},%{WORD:out_port:int},%{WORD:source_mac},%{WORD:destination_mac},%{WORD:ether_type},%{NUMBER:in_vlan:int},%{NUMBER:out_vlan:int},%{IP:source_ip},%{IP:destination_ip},%{NUMBER:protocol:int},%{WORD:type_of_service},%{WORD:ttl:int},%{NUMBER:source_port:int},%{NUMBER:destination_port:int},%{DATA:tcp_flags},%{NUMBER:packet_size:int},%{NUMBER:ip_size:int},%{NUMBER:sample_rate:int}" }
11 | }
12 |
13 | if "_grokparsefailure" in [tags] {
14 | mutate {
15 | add_tag => [ "dropcandidate" ]
16 | }
17 | }
18 |
19 | mutate {
20 | add_field => {
21 | "[source_hostname]" => "%{source_ip}"
22 | "[destination_hostname]" => "%{destination_ip}"
23 | "[sflow_source_hostname]" => "%{sflow_source_ip}"
24 | }
25 | }
26 |
27 | translate {
28 | source => "[source_port]"
29 | target => "[source_service]"
30 | dictionary_path => "/usr/local/sof-elk/lib/dictionaries/service_int2iana.yaml"
31 | }
32 |
33 | translate {
34 | source => "[destination_port]"
35 | target => "[destination_service]"
36 | dictionary_path => "/usr/local/sof-elk/lib/dictionaries/service_int2iana.yaml"
37 | }
38 |
39 | translate {
40 | source => "[protocol]"
41 | target => "[protocol_name]"
42 | dictionary_path => "/usr/local/sof-elk/lib/dictionaries/port_proto_int2iana.yaml"
43 | }
44 |
45 | # translate {
46 | # source => "[tcp_flags]"
47 | # target => "[tcp_flag]"
48 | # dictionary_path => "/usr/local/sof-elk/lib/dictionaries/tcp_flags_hex2full.yaml"
49 | # }
50 |
51 | dns {
52 | reverse => [ "source_hostname" ]
53 | action => "replace"
54 | }
55 |
56 | dns {
57 | reverse => [ "destination_hostname" ]
58 | action => "replace"
59 | }
60 |
61 | dns {
62 | reverse => [ "sflow_source_hostname" ]
63 | action => "replace"
64 | }
65 |
66 | mutate {
67 | add_field => { "ips" => [ "%{sflow_source_ip}" ] }
68 | }
69 | }
70 | }
71 |
--------------------------------------------------------------------------------
/configfiles-UNSUPPORTED/6020-custom_wordpress.conf:
--------------------------------------------------------------------------------
1 | # SOF-ELK® Configuration File
2 | # (C)2016 Lewes Technology Consulting, LLC
3 | #
4 | # This file contains filters, transforms, and enrichments for custom wordpress login messages from a private plugin. You probably don't need this file :)
5 | # Note that this file is UNSUPPORTED. By default, it is not enabled in SOF-ELK.
6 |
7 | filter {
8 | if ([type] == "syslog") {
9 | if [syslog_program] =~ /httpd/ and [message] =~ /^WordPress login:/ {
10 | #WordPress login: username@domain (role) from srcip -> result
11 | grok {
12 | match => [ "message", "^WordPress login: %{NOTSPACE:user} \((?:%{NOTSPACE:role})?\) from %{IPORHOST:source_ip} -> %{NOTSPACE:login_result}$" ]
13 | add_tag => [ "got_wp_login", "parse_done" ]
14 | }
15 | if ! [role] {
16 | mutate {
17 | add_field => { "role" => "None" }
18 | }
19 | }
20 | }
21 | }
22 | }
23 |
--------------------------------------------------------------------------------
/configfiles-UNSUPPORTED/6022-courier.conf:
--------------------------------------------------------------------------------
1 | # SOF-ELK® Configuration File
2 | # (C)2016 Lewes Technology Consulting, LLC
3 | #
4 | # This file contains filters, transforms, and enrichments for Courier IMAP/POP3 messages
5 | # Note that this file is UNSUPPORTED. By default, it is not enabled in SOF-ELK.
6 |
7 | filter {
8 | if [type] == "syslog" {
9 | if [syslog_program] == "courier-pop3d" or [syslog_program] =~ /courier-imap(d|s)/ {
10 | # LOGIN, user=user@host.tld, ip=[::ffff:1.2.3.4], port=[63641]
11 | # LOGIN FAILED, user=user@host.tld, ip=[::ffff:1.2.3.4]
12 | # TIMEOUT, user=user@host.tld, ip=[::ffff:1.2.3.4], headers=0, body=0, rcvd=740, sent=4580, time=7558, starttls=1
13 | # Connection, ip=[::ffff:1.2.3.4]
14 | # Disconnected, ip=[::ffff:1.2.3.4]
15 | # Maximum connection limit reached for ::ffff:1.2.3.4
16 | grok {
17 | match => [ "message", "^%{DATA:event},(?: user=%{NOTSPACE:user},)? ip=\[::ffff:%{IP:source_ip}\](?:, port=\[%{POSINT:source_port}\])?" ]
18 | match => [ "message", "^%{DATA:event} for ::ffff:%{IP:source_ip}" ]
19 | add_tag => [ "got_courier_event", "parse_done" ]
20 | tag_on_failure => [ "_gpfail_courierevent" ]
21 | }
22 | }
23 | }
24 | }
25 |
--------------------------------------------------------------------------------
/configfiles-UNSUPPORTED/6024-xinetd.conf:
--------------------------------------------------------------------------------
1 | # SOF-ELK® Configuration File
2 | # (C)2016 Lewes Technology Consulting, LLC
3 | #
4 | # This file contains filters, transforms, and enrichments for xinetd messages
5 | # Note that this file is UNSUPPORTED. By default, it is not enabled in SOF-ELK.
6 |
7 | filter {
8 | if [type] == "syslog" {
9 | if [syslog_program] == "xinetd" {
10 | # START: smtp pid=14902 from=::ffff:182.73.192.201
11 | # EXIT: smtp status=0 pid=14902 duration=5(sec)
12 | grok {
13 | match => [ "message", "%{WORD:event}: %{WORD:service} %{GREEDYDATA:message_remainder}" ]
14 | tag_on_failure => [ "gpfail_xi1" ]
15 | }
16 |
17 | if [message_remainder] {
18 | kv {
19 | field_split => " "
20 | add_tag => [ "parse_done" ]
21 | }
22 | mutate {
23 | remove_field => [ "message_remainder" ]
24 | }
25 | }
26 |
27 | if [from] {
28 | grok {
29 | match => [ "from", "(?:::ffff:)?%{IP:source_ip}" ]
30 | tag_on_failure => [ "gpfail_xi2" ]
31 | }
32 | }
33 |
34 | if [duration] {
35 | grok {
36 | match => [ "duration", "^%{INT:durint}" ]
37 | }
38 | if [durint] {
39 | mutate {
40 | replace => { "duration" => "%{durint}" }
41 | }
42 | mutate {
43 | remove_field => [ "durint" ]
44 | }
45 | }
46 | }
47 | }
48 | }
49 | }
50 |
--------------------------------------------------------------------------------
/configfiles-UNSUPPORTED/6025-crond.conf:
--------------------------------------------------------------------------------
1 | # SOF-ELK® Configuration File
2 | # (C)2016 Lewes Technology Consulting, LLC
3 | #
4 | # This file contains filters, transforms, and enrichments for cron job scheduler messages
5 | # Note that this file is UNSUPPORTED. By default, it is not enabled in SOF-ELK.
6 |
7 | # run-parts(/etc/cron.hourly):
8 | # starting 0anacron
9 | # starting dynip_update.sh
10 | # finished dynip_update.sh
11 | # finished 0yum-hourly.cron
12 |
13 | # crond:
14 | # (root) CMD (/usr/lib64/sa/sa1 1 1)
15 | # (root) CMD (run-parts /etc/cron.hourly)
16 |
17 | filter {
18 | if [type] == "syslog" {
19 | if [syslog_program] == "crond" {
20 | grok {
21 | match => [ "message", "\(%{USER:cron_user}\) CMD \(%{GREEDYDATA:cron_command}\)" ]
22 | add_tag => [ "parse_done" ]
23 | tag_on_failure => [ "gpfail_crond" ]
24 | }
25 | }
26 | }
27 | }
28 |
38 |
--------------------------------------------------------------------------------
/configfiles-UNSUPPORTED/6027-spamassassin.conf:
--------------------------------------------------------------------------------
1 | # SOF-ELK® Configuration File
2 | # (C)2016 Lewes Technology Consulting, LLC
3 | #
4 | # This file contains filters, transforms, and enrichments for spamassassin messages
5 | # Note that this file is UNSUPPORTED. By default, it is not enabled in SOF-ELK.
6 |
7 | # spamd: result: . 0 - AWL,BAYES_00,HTML_IMAGE_RATIO_02,HTML_MESSAGE,SPF_PASS,SUBJ_DOLLARS,T_RP_MATCHES_RCVD scantime=1.1,size=70246,user=amy,uid=105,required_score=5.0,rhost=localhost.localdomain,raddr=127.0.0.1,rport=52689,mid=,bayes=0.000000,autolearn=no
8 |
--------------------------------------------------------------------------------
/configfiles-UNSUPPORTED/6028-fail2ban.conf:
--------------------------------------------------------------------------------
1 | # SOF-ELK® Configuration File
2 | # (C)2016 Lewes Technology Consulting, LLC
3 | #
4 | # This file contains filters, transforms, and enrichments for fail2ban logwatch/action messages
5 | # Note that this file is UNSUPPORTED. By default, it is not enabled in SOF-ELK.
6 |
7 | # WARNING [ssh-iptables] Ban 190.60.211.94
8 | # WARNING Determined IP using DNS Lookup: ec2-54-93-46-228.eu-central-1.compute.amazonaws.com = ['54.93.46.228']
9 | # ERROR Unable to remove PID file: [Errno 2] No such file or directory: '/var/run/fail2ban/fail2ban.pid'
10 | # INFO Jail 'roundcube' started
11 | # INFO Added logfile = /var/log/roundcube/logs/userlogins
12 | # INFO Jail 'sendmail-spam' uses pyinotify
13 | # INFO Set maxRetry = 5
14 | # INFO Set findtime = 3600
15 | # INFO [ssh-iptables] 111.73.46.22 already banned
16 | # INFO Creating new jail 'sendmail'
17 |
--------------------------------------------------------------------------------
/configfiles-UNSUPPORTED/6029-sudo.conf:
--------------------------------------------------------------------------------
1 | # SOF-ELK® Configuration File
2 | # (C)2016 Lewes Technology Consulting, LLC
3 | #
4 | # This file contains filters, transforms, and enrichments for sudo privilege elevation messages
5 | # Note that this file is UNSUPPORTED. By default, it is not enabled in SOF-ELK.
6 |
7 | # ansible : TTY=pts/0 ; PWD=/home/ansible ; USER=root ; COMMAND=/bin/sh -c echo BECOME-SUCCESS-ppdnyrutbxjzntymofadmhlygksqeqet; LANG=en_US.UTF-8 LC_ALL=en_US.UTF-8 LC_MESSAGES=en_US.UTF-8 /usr/bin/python /home/ansible/.ansible/tmp/ansible-tmp-1462890297.9-153280200789687/command; rm -rf "/home/ansible/.ansible/tmp/ansible-tmp-1462890297.9-153280200789687/" > /dev/null 2>&1
8 |
--------------------------------------------------------------------------------
/configfiles-UNSUPPORTED/6030-ansible.conf:
--------------------------------------------------------------------------------
1 | # SOF-ELK® Configuration File
2 | # (C)2016 Lewes Technology Consulting, LLC
3 | #
4 | # This file contains filters, transforms, and enrichments for ansible system configuration messages
5 | # Note that this file is UNSUPPORTED. By default, it is not enabled in SOF-ELK.
6 |
7 | # ansible-command:
8 | # Invoked with warn=True executable=None _uses_shell=True _raw_params=if [ $( rpm -q kernel | sort -nr | head -n 1 ) != kernel-$( uname -r ) ]; then echo 'reboot'; else echo 'no'; fi removes=None creates=None chdir=None
9 | # ansible-yum:
10 | # Invoked with name=['*'] list=None disable_gpg_check=False conf_file=None install_repoquery=True state=latest disablerepo=None update_cache=True enablerepo=None exclude=None
11 | # ansible-setup:
12 | # Invoked with filter=* fact_path=/etc/ansible/facts.d
13 | # ansible-service:
14 | # Invoked with name=rsyslog pattern=None enabled=True state=started sleep=None arguments= runlevel=default
15 | # ansible-file:
16 | # Invoked with directory_mode=None force=False remote_src=None path=/etc/ owner=root follow=False group=root state=None content=NOT_LOGGING_PARAMETER serole=None diff_peek=None setype=None dest=/etc/ selevel=None original_basename=rsyslog.conf regexp=None validate=None src=rsyslog.conf seuser=None recurse=False delimiter=None mode=0644 backup=None
17 | # ansible-stat:
18 | # Invoked with checksum_algorithm=sha1 mime=False get_checksum=True path=/etc/rsyslog.conf checksum_algo=sha1 follow=False get_md5=False
19 | # ansible-ping:
20 | # Invoked with data=None
21 |
--------------------------------------------------------------------------------
/configfiles-UNSUPPORTED/6031-yum.conf:
--------------------------------------------------------------------------------
1 | # SOF-ELK® Configuration File
2 | # (C)2016 Lewes Technology Consulting, LLC
3 | #
4 | # This file contains filters, transforms, and enrichments for yum software package management messages
5 | # Note that this file is UNSUPPORTED. By default, it is not enabled in SOF-ELK.
6 |
7 | # Updated: libestr-0.1.10-1.el7.x86_64
8 | # Erased: tmux
9 | # Installed: libevent-1.4.13-1.x86_64
10 |
--------------------------------------------------------------------------------
/configfiles-UNSUPPORTED/6101-switch_brocade.conf:
--------------------------------------------------------------------------------
1 | filter {
2 | if [type] == "brocade" {
3 | grok {
4 | match => [ "message", "<%{DATA}>%{GREEDYDATA:sys_message}" ]
5 | }
6 | grok {
7 | match => [ "sys_message", "%{SYSLOGTIMESTAMP:syslog_timestamp} %{SYSLOGHOST:[log][syslog][hostname]} %{DATA:[log][syslog][program]}(?:\[%{POSINT:[log][syslog][procid]:int}\])?: %{GREEDYDATA:syslog_message}" ]
8 | add_field => { "received_at" => "%{@timestamp}" }
9 | }
10 | if [syslog_message] =~ "Interface ethernet" or [log][syslog][program] == "PORT" {
11 | grok {
12 | match => { "syslog_message" => "%{DATA}%{INT:unit}\/%{INT:interface_type}\/%{INT:interface:int}" }
13 | }
14 | mutate {
15 | add_field => { "interface_port" => "%{unit}/%{interface_type}/%{interface}" }
16 | }
17 | }
18 | date {
19 | match => [ "syslog_timestamp", "MMM d HH:mm:ss", "MMM dd HH:mm:ss" ]
20 | timezone => "America/Chicago"
21 | remove_field => [ "syslog_timestamp", "received_at" ]
22 | }
24 | }
25 | }
26 |
--------------------------------------------------------------------------------
/configfiles-UNSUPPORTED/6302-android.conf:
--------------------------------------------------------------------------------
1 | filter {
2 | if [type] == "android" {
3 | }
4 | }
5 |
--------------------------------------------------------------------------------
/configfiles-UNSUPPORTED/8002-postprocess-tags.conf:
--------------------------------------------------------------------------------
1 | # Author: Justin Henderson
2 | # Email: jhenderson@tekrefresh.com
3 | # Last Update: 11/18/2015
4 | #
5 | # This is a configuration file for applying tags across many log sources
6 | filter {
7 | if [SrcIP] {
8 | # Look for broadcast IPs within SrcIP and tag
9 |
10 | # If SrcIP ends in .255, tag as possible_broadcast
11 | # The reason for "possible" broadcast is that an IPv4
12 | # address ending in .255 can be a valid host address in a subnet
13 | # larger than 255.255.255.0 (/24); e.g. 10.1.2.255 is a usable host in 10.1.2.0/23
14 | if [SrcIP] =~ "\.255$" {
15 | mutate {
16 | add_tag => [ "possible_broadcast" ]
17 | }
18 | }
19 | # If SrcIP is 255.255.255.255 tag as broadcast
20 | if [SrcIP] == "255.255.255.255" {
21 | mutate {
22 | add_tag => [ "full_broadcast" ]
23 | }
24 | }
25 | }
26 | if [DstIP] {
27 | # Look for broadcast IPs within DstIP and tag
28 |
29 | # If DstIP ends in .255, tag as possible_broadcast
30 | # The reason for "possible" broadcast is that an IPv4
31 | # address ending in .255 can be a valid host address in a subnet
32 | # larger than 255.255.255.0 (/24), as noted above
33 | if [DstIP] =~ "\.255$" {
34 | mutate {
35 | add_tag => [ "possible_broadcast" ]
36 | }
37 | }
38 | # If DstIP is 255.255.255.255 tag as broadcast
39 | # This is most commonly seen in DHCP requests
40 | if [DstIP] == "255.255.255.255" {
41 | mutate {
42 | add_tag => [ "full_broadcast" ]
43 | }
44 | }
45 | }
46 | }
47 |
--------------------------------------------------------------------------------
/configfiles-UNSUPPORTED/8007-postprocess-dns_alexa_tagging.conf:
--------------------------------------------------------------------------------
1 | filter {
2 | if [type] == "dns" {
3 | if [highest_registered_domain] {
4 | elasticsearch {
5 | hosts => [ "172.16.1.8" ]
6 | index => "alexa"
7 | query => 'site:"%{[highest_registered_domain]}"'
8 | fields => { site => "site" }
9 | tag_on_failure => []
10 | }
11 | if [site] != "" and [site] {
12 | mutate {
13 | add_tag => [ "alexa" ]
14 | remove_field => [ "site" ]
15 | }
16 | }
17 | }
18 | }
19 | }
20 |
--------------------------------------------------------------------------------
/configfiles-UNSUPPORTED/8503-postprocess-freq_analysis_zeek_http.conf:
--------------------------------------------------------------------------------
1 | # Author: Justin Henderson
2 | # Email: jhenderson@tekrefresh.com
3 | #
4 | # Updated for new domain_stats functionality (C)2024 Lewes Technology Consulting, LLC
5 | #
6 | # This conf file is based on accepting logs for http.log from Zeek systems
7 | filter {
8 | if [type] == "zeek_http" {
9 | # If uri exists run a frequency analysis against it. In order for this to work you must have
10 | # freq.py and the corresponding frequency table in /opt/freq/. This is a huge boost to security
11 | # and I highly recommend you set this up. Example, if a frequency score less than 6 exists
12 | # then there is a likelihood that something malicious is happening.
13 | #
14 | # For higher accuracy, please generate your own frequency tables. For questions on setup,
15 | # please refer to https://github.com/SMAPPER
16 | if [virtual_host] {
17 | if [sub_domain] and [sub_domain_length] > 5 {
18 | http {
19 | url => "http://localhost:5730/%{virtual_host}"
22 | target_body => "raw_virtual_host_frequency_score"
23 | }
24 | if [raw_virtual_host_frequency_score] {
25 | json {
26 | source => "raw_virtual_host_frequency_score"
27 | target => "json_virtual_host_frequency_score"
28 | }
29 | }
30 | if [json_virtual_host_frequency_score][freq_score] {
31 | mutate {
32 | convert => { "[json_virtual_host_frequency_score][freq_score]" => "float" }
33 | add_field => {
34 | "virtual_host_frequency_score" => "%{[json_virtual_host_frequency_score][freq_score]}"
35 | "frequency_scores" => "%{[json_virtual_host_frequency_score][freq_score]}"
36 | }
37 | }
38 | mutate {
39 | remove_field => [
40 | "raw_virtual_host_frequency_score"
41 | "json_virtual_host_frequency_score"
42 | ]
43 | }
44 | }
45 | }
46 | }
47 | }
48 | }
49 |
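50 | # Illustrative follow-on (a sketch, not part of this config): per the comments above,
51 | # freq_score values below ~6 suggest DGA-like randomness, so one could tag such events
52 | # for review. The "low_frequency_score" tag name is an assumption, not a SOF-ELK convention:
53 | # if [virtual_host_frequency_score] {
54 | #   mutate { convert => { "virtual_host_frequency_score" => "float" } }
55 | #   if [virtual_host_frequency_score] < 6 {
56 | #     mutate { add_tag => [ "low_frequency_score" ] }
57 | #   }
58 | # }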
--------------------------------------------------------------------------------
/configfiles-UNSUPPORTED/8505-postprocess-freq_analysis_zeek_x509.conf:
--------------------------------------------------------------------------------
1 | # Author: Justin Henderson
2 | # Email: jhenderson@tekrefresh.com
3 | #
4 | # Updated for new domain_stats functionality (C)2024 Lewes Technology Consulting, LLC
5 | #
6 |
7 | filter {
8 | if [type] == "zeek_x509" {
9 | # If SubjectCommonName exists run a frequency analysis against it. In order for this to work you must have
10 | # freq.py and the corresponding frequency table in /opt/freq/. This is a huge boost to security
11 | # and I highly recommend you set this up. Example, if a frequency score less than 6 exists
12 | # then there is a likelihood that something malicious is happening.
13 | #
14 | # For higher accuracy, please generate your own frequency tables. For questions on setup,
15 | # please refer to https://github.com/SMAPPER
16 | if [subject_common_name] {
17 |
18 | http {
19 | url => "http://localhost:5730/%{subject_common_name}"
22 | target_body => "raw_x509_common_name_frequency_score"
23 | }
24 | if [raw_x509_common_name_frequency_score] {
25 | json {
26 | source => "raw_x509_common_name_frequency_score"
27 | target => "json_x509_common_name_frequency_score"
28 | }
29 | }
30 | if [json_x509_common_name_frequency_score][freq_score] {
31 | mutate {
32 | convert => { "[json_x509_common_name_frequency_score][freq_score]" => "float" }
33 | add_field => {
34 | "x509_common_name_frequency_score" => "%{[json_x509_common_name_frequency_score][freq_score]}"
35 | "frequency_scores" => "%{[json_x509_common_name_frequency_score][freq_score]}"
36 | }
37 | }
38 | mutate {
39 | remove_field => [ "raw_x509_common_name_frequency_score", "json_x509_common_name_frequency_score" ]
40 | }
41 | }
42 | }
43 | }
44 | }
45 |
--------------------------------------------------------------------------------
/configfiles-UNSUPPORTED/8900-postprocess-ip_tagging.conf:
--------------------------------------------------------------------------------
1 | filter {
2 | if [destination_ip] {
3 | if [destination_ip] == "198.41.0.4" or [destination_ip] == "192.228.79.201" or [destination_ip] == "192.33.4.12" or [destination_ip] == "199.7.91.13" or [destination_ip] == "192.203.230.10" or [destination_ip] == "192.5.5.241" or [destination_ip] == "192.112.36.4" or [destination_ip] == "198.97.190.53" or [destination_ip] == "192.36.148.17" or [destination_ip] == "192.58.128.30" or [destination_ip] == "193.0.14.129" or [destination_ip] == "199.7.83.42" or [destination_ip] == "202.12.27.33" {
4 | mutate {
5 | add_tag => [ "root_dns_server" ]
6 | }
7 | }
8 |
9 | if [destination_ip] == "208.91.112.134" {
10 | mutate {
11 | add_tag => [ "fortinet_fortiguard" ]
12 | }
13 | }
14 | }
15 | }
16 |
--------------------------------------------------------------------------------
/configfiles-UNSUPPORTED/9031-output-iis.conf:
--------------------------------------------------------------------------------
1 | # Author: Justin Henderson
2 | # Email: jhenderson@tekrefresh.com
3 | # Last Update: 10/17/2016
4 | #
5 | # This conf file is based on accepting logs for IIS
6 | output {
7 | if [type] == "iis" {
8 | elasticsearch {
9 | ilm_enabled => false
10 | }
11 | }
12 | }
13 |
--------------------------------------------------------------------------------
/configfiles-UNSUPPORTED/9901-output-alexa.conf:
--------------------------------------------------------------------------------
1 | output {
2 | if [type] == "alexa" {
3 | #stdout { codec => rubydebug }
4 | elasticsearch {
5 | ilm_enabled => false
6 | index => "logstash-alexa-%{+YYYY.MM}"
7 | }
8 | }
9 | }
10 |
--------------------------------------------------------------------------------
/configfiles-UNSUPPORTED/9999-output-alerts.conf:
--------------------------------------------------------------------------------
1 | output {
2 | if "alert_data" in [tags] {
3 | # gelf {
4 | # host => "127.0.0.1"
5 | # port => 12201
6 | # short_message => "full_log"
7 | # }
8 | }
9 | }
10 |
--------------------------------------------------------------------------------
/configfiles-UNSUPPORTED/alexa_readme.txt:
--------------------------------------------------------------------------------
1 | # To set up alexa top 1 million checking against your records, do the following:
2 | # copy all alexa files to /etc/logstash/conf.d
3 |
4 | cd /etc/logstash/conf.d
5 | sudo wget https://github.com/SMAPPER/Logstash-Configs/raw/master/configfiles-setup_required/0035_input_alexa.conf
6 | sudo wget https://github.com/SMAPPER/Logstash-Configs/raw/master/configfiles-setup_required/1035_preprocess_alexa.conf
7 | sudo wget https://github.com/SMAPPER/Logstash-Configs/raw/master/configfiles-setup_required/8007_postprocess_dns_alexa_tagging.conf
8 | sudo wget https://github.com/SMAPPER/Logstash-Configs/raw/master/configfiles-setup_required/9901_output_alexa.conf
9 | sudo service logstash restart
10 |
11 | # Then schedule alexa_update.sh to run once a day
12 |
13 | sudo mkdir /scripts
14 | cd /scripts
15 | sudo wget https://github.com/SMAPPER/Logstash-Configs/raw/master/scripts/alexa_update.sh
16 |
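17 | # For example, an illustrative crontab entry (assumed path and schedule; adjust as needed):
18 | # 0 3 * * * /scripts/alexa_update.sh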
19 | # Add an entry like the one above to the crontab (e.g. via "crontab -e")

--------------------------------------------------------------------------------
/configfiles-UNSUPPORTED/broker/0997-input-broker_kafka.conf:
--------------------------------------------------------------------------------
1 | input {
2 | kafka {
3 | zk_connect => "buffer.test.int:2181"
4 | topic_id => [ "logstash" ]
5 | tags => [ "queue_logstash" ]
6 | }
7 | kafka {
8 | zk_connect => "buffer.test.int:2181"
9 | topic_id => [ "syslog" ]
10 | tags => [ "queue_syslog" ]
11 | }
12 | kafka {
13 | zk_connect => "buffer.test.int:2181"
14 | topic_id => [ "zeek" ]
15 | tags => [ "queue_zeek" ]
16 | }
17 | kafka {
18 | zk_connect => "buffer.test.int:2181"
19 | topic_id => [ "windows" ]
20 | tags => [ "queue_windows" ]
21 | }
22 | kafka {
23 | zk_connect => "buffer.test.int:2181"
24 | topic_id => [ "sflow" ]
25 | tags => [ "queue_sflow" ]
26 | }
27 | }
28 |
--------------------------------------------------------------------------------
/configfiles-UNSUPPORTED/broker/0998-input-broker_rabbitmq.conf:
--------------------------------------------------------------------------------
1 | input {
2 | rabbitmq {
3 | key => "logstashkey"
4 | queue => "logstashqueue"
5 | durable => true
6 | exchange => "logstashexchange"
7 | user => "logstash"
8 | password => "password_goes_here"
9 | host => "buffer.test.int"
10 | port => 5672
11 | tags => [ "queue_logstash" ]
12 | }
13 | rabbitmq {
14 | key => "zeek"
15 | queue => "zeek"
16 | durable => true
17 | exchange => "zeek"
18 | user => "logstash"
19 | password => "password_goes_here"
20 | host => "buffer.test.int"
21 | port => 5672
22 | threads => 4
23 | tags => [ "queue_zeek" ]
24 | }
25 | rabbitmq {
26 | key => "windows"
27 | queue => "windows"
28 | durable => true
29 | exchange => "windows"
30 | user => "logstash"
31 | password => "password_goes_here"
32 | host => "buffer.test.int"
33 | port => 5672
34 | tags => [ "queue_windows" ]
35 | }
36 | rabbitmq {
37 | key => "sflow"
38 | queue => "sflow"
39 | durable => true
40 | exchange => "sflow"
41 | user => "logstash"
42 | password => "password_goes_here"
43 | host => "buffer.test.int"
44 | port => 5672
45 | threads => 4
46 | tags => [ "queue_sflow" ]
47 | }
48 | }
49 |
--------------------------------------------------------------------------------
/configfiles-UNSUPPORTED/broker/9997-output-broker_kafka.conf:
--------------------------------------------------------------------------------
1 | output {
2 | if [type] == "syslog" {
3 | kafka {
4 | bootstrap_servers => "buffer.test.int:9092"
5 | topic_id => "syslog"
6 | }
7 | }
8 | if [type] == "windows" {
9 | kafka {
10 | bootstrap_servers => "buffer.test.int:9092"
11 | topic_id => "windows"
12 | }
13 | }
14 | if [type] == "zeek" {
15 | kafka {
16 | bootstrap_servers => "buffer.test.int:9092"
17 | topic_id => "zeek"
18 | }
19 | }
20 | if [type] == "sflow" {
21 | kafka {
22 | bootstrap_servers => "buffer.test.int:9092"
23 | topic_id => "sflow"
24 | }
25 | }
26 | if [type] != "syslog" and [type] != "windows" and [type] != "sflow" and "zeek" not in [tags] {
27 | kafka {
28 | bootstrap_servers => "buffer.test.int:9092"
29 | topic_id => "logstash"
30 | }
31 | }
32 | }
33 |
--------------------------------------------------------------------------------
/configfiles-UNSUPPORTED/broker/9998-output-broker_rabbitmq.conf:
--------------------------------------------------------------------------------
1 | output {
2 | rabbitmq {
3 | key => "routing_key_goes_here"
4 | exchange => "exchange_name_goes_here"
5 | exchange_type => "direct"
6 | user => "user_name_goes_here"
7 | password => "password_goes_here"
8 | host => "rabbitmq.test.int"
9 | port => 5672
10 | durable => true
11 | persistent => true
12 | }
13 | }
14 |
--------------------------------------------------------------------------------
/configfiles-UNSUPPORTED/prereq_readme.txt:
--------------------------------------------------------------------------------
1 | # In order for ALL of these configuration files to work, you will need to have logstash installed
2 | # and to place these conf files into /etc/logstash/conf.d
3 |
4 | # Also, you will need to install the community plugins below (the translate, tld, and
5 | # elasticsearch filters). This can be done by running these commands:
6 | #
7 | # These have been tested on Logstash 2.4. Older versions that do not have bin/logstash-plugin and
8 | # instead use bin/plugin have issues.
9 |
10 | sudo /opt/logstash/bin/logstash-plugin install logstash-filter-translate
11 | sudo /opt/logstash/bin/logstash-plugin install logstash-filter-tld
12 | sudo /opt/logstash/bin/logstash-plugin install logstash-filter-elasticsearch
13 |
14 | # Also follow the readmes for specific things you wish to setup such as frequency_analysis, alexa lookups,
15 | # message queuing, etc.
16 |
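17 | # For example, to stage everything (assuming the conf files are in the current directory):
18 | # sudo cp *.conf /etc/logstash/conf.d/ && sudo service logstash restart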
--------------------------------------------------------------------------------
/configfiles-UNSUPPORTED/sflow/0004-input-pipe_sflow.conf:
--------------------------------------------------------------------------------
1 | input {
2 | pipe {
3 | type => "sflow"
4 | command => "/usr/local/bin/sflowtool_wrapper.sh -l -p 6343"
5 | }
6 | }
7 |
--------------------------------------------------------------------------------
/configfiles-UNSUPPORTED/sflow/sflowtool_readme.txt:
--------------------------------------------------------------------------------
1 | # If you have sflow and want to collect the data to logstash, I recommend downloading and installing
2 | # sflowtool on one or more systems. Then download my sflowtool_init_script.txt, rename it, and
3 | # put it in /etc/init.d/. I recommend just naming it sflowtool. Then set it to auto start and point
4 | # all your sflow devices at it using port 6343. This will convert the sflow into netflow v5 data
5 | # that logstash can parse.
6 |
7 | # Installing sflowtool
8 | # May need to change autoreconf-2.13 to whatever your distro has for autoreconf
9 | apt-get install build-essential make autoreconf-2.13 git
10 | git clone https://github.com/sflow/sflowtool.git
11 | cd sflowtool
12 | ./boot.sh
13 | ./configure
14 | make
15 | make install
16 |
17 | # In the current incarnation of the sflowtool_init_script I'm assuming you put sflowtool
18 | # on the logstash hosts so it sends the converted netflow v5 data to 127.0.0.1 on the port
19 | # logstash is expecting (based on my netflow.conf file).
20 |
21 | # Example of setup (assumes you've installed sflowtool first):
22 |
23 | wget https://github.com/SMAPPER/Logstash-Configs/raw/master/misc/sflowtool_init_script.txt
24 | mv sflowtool_init_script.txt /etc/init.d/sflowtool
25 | update-rc.d sflowtool defaults
26 | chmod +x /etc/init.d/sflowtool
27 | service sflowtool start
28 |
--------------------------------------------------------------------------------
/configfiles-templates/04xx-input-google_pubsub.conf.sample:
--------------------------------------------------------------------------------
1 | # SOF-ELK® Configuration File
2 | # (C)2021 Lewes Technology Consulting, LLC
3 | #
4 | # This template file can be used to build inputs for Google Cloud Platform
5 | # (GCP) sources using the pub/sub method.
6 |
7 | # To use this file:
8 | # 1. Create a copy and place it in the /etc/logstash/conf.d/ directory.
9 | # Rename the new file with an incremental number and remove the ".sample"
10 | # suffix. For example: "0400-input-google_pubsub.conf". Use file
11 | # permissions of at least 0444.
12 | # 2. From your GCP account, follow the platform's instructions to create a
13 | # service account (https://cloud.google.com/solutions/exporting-stackdriver-logging-elasticsearch).
14 | # 3. Using the project_id name (not number), subscription name, topic, and
15 | # the JSON key generated using the GCP instructions, uncomment and populate
16 | # the contents of the copy of this file template. Complete all template
17 | # fields. Note that the "tags" field is optional and the "type" field
18 | # MUST be "gcp"
19 | # 4. If you wish to use multiple inputs, add additional input {} stanzas in
20 | # this file or create additional files. Using incremental numbers in the
21 | # filename is recommended but not required (e.g. they could all be
22 | # "0400-...conf" without affecting functionality.)
23 | # 5. Restart the logstash service
24 |
25 | input {
26 | # google_pubsub {
27 | # project_id => "<%PROJECT_ID%>"
28 | # topic => "<%TOPIC_NAME%>"
29 | # subscription => "<%SUBSCRIPTION_NAME%>"
30 | # json_key_file => "/path/to/gcp-es-service-account.json"
31 | # add_field => { "[labels][type]" => "gcp" }
32 | # tags => [ "json" ]
33 | # }
34 | }
35 |
--------------------------------------------------------------------------------
/configfiles/0000-input-beats.conf:
--------------------------------------------------------------------------------
1 | # SOF-ELK® Configuration File
2 | # (C)2024 Lewes Technology Consulting, LLC
3 | #
4 | # This file contains the Elastic Beats input
5 |
6 | input {
7 | # live beats protocol via tcp port
8 | # assign [labels][type] on the shipper!
9 | beats {
10 | port => 5044
11 | tags => [ "process_archive", "filebeat" ]
12 | }
13 | }
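14 |
15 | # Shipper-side sketch (an assumed filebeat.yml fragment, not part of this file),
16 | # showing one way to assign [labels][type] before the event reaches this input:
17 | # processors:
18 | #   - add_fields:
19 | #       target: labels
20 | #       fields:
21 | #         type: syslog   # pick the value appropriate to the shipped logs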
--------------------------------------------------------------------------------
/configfiles/0000-input-stdin.conf:
--------------------------------------------------------------------------------
1 | # SOF-ELK® Configuration File
2 | # (C)2020 Lewes Technology Consulting, LLC
3 | #
4 | # This file is a stub for the stdin input
5 |
6 | input {
7 | # stdin {
8 | # type => "test"
9 | # tags => [ "process_stdin" ]
10 | # }
11 | }
12 |
--------------------------------------------------------------------------------
/configfiles/0004-input-syslog.conf:
--------------------------------------------------------------------------------
1 | # SOF-ELK® Configuration File
2 | # (C)2024 Lewes Technology Consulting, LLC
3 | #
4 | # This file contains inputs for the syslog parsers
5 |
6 | input {
7 | # live syslog protocol via udp and tcp port
8 | syslog {
9 | port => 5514
10 | add_field => { "[labels][type]" => "syslog" }
11 | tags => [ "process_live", "syslog" ]
12 | }
13 |
14 | # live relp protocol via tcp port
15 | relp {
16 | port => 5516
17 | add_field => { "[labels][type]" => "syslog" }
18 | tags => [ "process_live", "relp" ]
19 | }
20 | }
21 |
--------------------------------------------------------------------------------
/configfiles/1001-preprocess-json.conf:
--------------------------------------------------------------------------------
1 | # SOF-ELK® Configuration File
2 | # (C)2024 Lewes Technology Consulting, LLC
3 | #
4 | # This file preprocesses JSON-formatted logs
5 |
6 | filter {
7 | if "json" in [tags] {
8 |
9 | # lowercase and unquote all true/false values so downstream picks them up as booleans
10 | mutate {
11 | gsub => [
12 | "message", '(?i)""true""', 'true',
13 | "message", '(?i)""false""', 'false',
14 | "message", '(?i)"true"', 'true',
15 | "message", '(?i)"false"', 'false'
16 | ]
17 | }
18 |
19 | # when the input is an array of json objects on a multiline input, we need to unwrap the array items
20 | # this is a mess.. super annoying and prone to collateral damage
21 | # therefore, only apply this to types where the special attention is known to be required
22 | # maybe this is better suited to a separate pre-pre-processor? ugh...
23 | if [labels][type] in [ "gcp", "archive-netflow" ] {
24 | if [message] =~ "^ },$" {
25 | mutate {
26 | gsub => [ "message", "},\z", "}" ]
27 | }
28 | if [message] =~ "^$" {
29 | drop { }
30 | }
31 | }
32 | }
33 |
34 | # this will take the entire json object and place it under a "raw" field.
35 | # then the processors can pick and pull from raw.* to keep fields, then drop "raw" when done.
36 | json {
37 | source => "message"
38 | target => "raw"
39 | remove_tag => [ "json" ]
40 | remove_field => [ "message" ]
41 | }
42 |
43 | # if tags were in the original json record, merge them to the top-level field and remove from [raw]
44 | if [raw][tags] {
45 | mutate {
46 | merge => { "tags" => "[raw][tags]" }
47 | remove_field => [ "[raw][tags]" ]
48 | }
49 | }
50 |
51 | # conditionally tag on successful json parsing
52 | if "zeek" in [tags] and [raw] {
53 | mutate {
54 | add_tag => [ "zeek_json" ]
55 | }
56 | }
57 | }
58 | }
59 |
--------------------------------------------------------------------------------
/configfiles/1002-preprocess-xml.conf:
--------------------------------------------------------------------------------
1 | # SOF-ELK® Configuration File
2 | # (C)2024 Lewes Technology Consulting, LLC
3 | #
4 | # This file preprocesses XML-formatted logs
5 |
6 | filter {
7 | if "xml" in [tags] {
8 | mutate {
9 | gsub => [
10 | "message", "^\"", "",
11 | "message", "\"$", ""
12 | ]
13 | }
14 | xml {
15 | source => "message"
16 | target => "raw"
17 | force_content => true
18 | force_array => false
19 | remove_tag => [ "xml" ]
20 | remove_field => [ "message" ]
21 | }
22 | }
23 | }
24 |
--------------------------------------------------------------------------------
/configfiles/1010-preprocess-snare.conf:
--------------------------------------------------------------------------------
1 | # SOF-ELK® Configuration File
2 | # (C)2024 Lewes Technology Consulting, LLC
3 | #
4 | # This file contains preprocessing steps for Windows Event Log messages sent via the Snare utility
5 |
6 | filter {
7 | if [labels][type] == "syslog" and [message] =~ /MSWinEventLog/ {
8 | mutate {
9 | gsub => [
10 | # delete this field, as it's static and no longer needed
11 | "message", "MSWinEventLog", "",
12 | # protect quotes since we use the csv processor downstream
13 | # "message", "\"", "'",
14 | # replace hex-encoded tabs and carriage returns and literal tabs with double-pipe for CSV handling later
15 | "message", "(?:#011)+", "||",
16 | "message", "\t+", "||",
17 | # remove any carriage return characters
18 | "message", "#015$", ""
19 | ]
20 | add_tag => [ "snare_log" ]
21 | }
22 | }
23 | }
24 |
--------------------------------------------------------------------------------
/configfiles/1200-preprocess-zeek.conf:
--------------------------------------------------------------------------------
1 | # SOF-ELK® Configuration File
2 | # (C)2023 Lewes Technology Consulting, LLC
3 | #
4 | # This file contains preprocessing steps for Zeek logs
5 | # Some portions orginally contributed by Justin Henderson
6 |
7 | filter {
8 | if "zeek" in [tags] {
9 | # this tag is added when json is detected and has been successfully parsed.
10 | # otherwise, we're dealing with (ugh) TSV...
11 | if "zeek_json" not in [tags] {
12 | if [Message] {
13 | mutate {
14 | rename => { "Message" => "message" }
15 | }
16 | }
17 |
18 | # If a log comes in with a message starting with # then drop it as it doesn't
19 | # contain anything and is the header of a rotated Zeek log
20 | if [message] =~ /^#/ {
21 | drop { }
22 | }
23 | }
24 | }
25 | }
--------------------------------------------------------------------------------
/configfiles/1601-preprocess-plaso.conf:
--------------------------------------------------------------------------------
1 | # SOF-ELK® Configuration File
2 | # (C)2024 Lewes Technology Consulting, LLC
3 | #
4 | # This file contains preprocessing steps for Plaso
5 |
6 | filter {
7 | if [labels][type] == "plaso" {
8 | mutate {
9 | gsub => [
10 | # protect quotes since we use the csv processor downstream
11 | "message", "\"", "'"
12 | ]
13 | }
14 | }
15 | }
16 |
--------------------------------------------------------------------------------
/configfiles/1701-preprocess-microsoft365.conf:
--------------------------------------------------------------------------------
1 | # SOF-ELK® Configuration File
2 | # (C)2024 Lewes Technology Consulting, LLC
3 | #
4 | # This file contains preprocessors for Microsoft Office 365 logs
5 |
6 | # moved the lowercasing of True/False and equivalent strings to 1001-preprocess-json.conf
7 |
8 | filter {
9 | if [labels][type] == "microsoft365" {
10 |
11 | # this is very noisy and not useful at this time
12 | if [raw][Operation] == "MDCRegulatoryComplianceAssessments" {
13 | drop {}
14 | }
15 | }
16 | }
17 |
--------------------------------------------------------------------------------
/configfiles/1950-preprocess-gcp.conf:
--------------------------------------------------------------------------------
1 | # SOF-ELK® Configuration File
2 | # (C)2025 Lewes Technology Consulting, LLC
3 | #
4 | # This file preprocesses JSON-formatted GCP logs
5 |
6 | filter {
7 | if [labels][type] == "gcp" {
8 |
9 | # these seem to be records from various GCP-internal events that do not have intrinsic meaning (that we know of)
10 | # since the strings are not valid IP addresses, the ES insert will fail, so drop them instead.
11 | if [raw][protoPayload][requestMetadata][callerIp] and ([raw][protoPayload][requestMetadata][callerIp] in [ "private", "gce-internal-ip"]) {
12 | drop { }
13 | }
14 |
15 | }
16 | }
17 |
--------------------------------------------------------------------------------
/configfiles/6012-dhcpd.conf:
--------------------------------------------------------------------------------
1 | # SOF-ELK® Configuration File
2 | # (C)2024 Lewes Technology Consulting, LLC
3 | #
4 | # This file contains filters, transforms, and enrichments for DHCP messages
5 |
6 | filter {
7 | # dhcp messages (NAK, REQUEST, ACK, etc)
8 | if ([labels][type] == "syslog") and [log][syslog][appname] == "dhcpd" and [message] =~ /^DHCP[A-Z]+ / {
9 | grok {
10 | match => [ "message", "%{WORD:[dhcp][message_type]} %{NOTSPACE} %{IP:[client][ip]}(?: %{NOTSPACE})? (?:\()?%{MAC:[client][mac]}(?:\))? (\(%{HOSTNAME:[client][domain]}\))?" ]
11 | match => [ "message", "%{WORD:[dhcp][message_type]} %{NOTSPACE} %{IP:[client][ip]}" ]
12 | add_tag => [ "parse_done" ]
13 | tag_on_failure => [ "_grokparsefailure_sl09" ]
14 | }
15 |
16 | if [dhcp][message_type] and [client][mac] and [client][ip] {
17 | mutate {
18 | replace => { "message" => "%{[dhcp][message_type]}: %{[client][mac]} / %{[client][ip]}" }
19 | }
20 | if [client][domain] {
21 | mutate {
22 | replace => { "message" => "%{message} (%{[client][domain]})" }
23 | }
24 | }
25 |
26 | } else if [dhcp][message_type] and [client][ip] {
27 | mutate {
28 | replace => { "message" => "%{[dhcp][message_type]}: %{[client][ip]}" }
29 | }
30 | }
31 | }
32 | }
33 |
--------------------------------------------------------------------------------
/configfiles/6013-bindquery.conf:
--------------------------------------------------------------------------------
1 | # SOF-ELK® Configuration File
2 | # (C)2024 Lewes Technology Consulting, LLC
3 | #
4 | # This file contains filters, transforms, and enrichments for BIND query log messages
5 |
6 | filter {
7 | # dns query log messages
8 | if [labels][type] == "syslog" and [log][syslog][appname] == "named" and [message] =~ / query: / {
9 | grok {
10 | match => [ "message", "client %{IP:[source][ip]}#%{POSINT}: query: %{HOSTNAME:[dns][question][name]} %{NOTSPACE} %{NOTSPACE:[dns][question][type]}" ]
11 | add_tag => [ "dns_record", "parse_done" ]
12 | tag_on_failure => [ "_grokparsefailure_sl10" ]
13 | }
14 | }
15 | }
16 |
--------------------------------------------------------------------------------
/configfiles/6016-pam.conf:
--------------------------------------------------------------------------------
1 | # SOF-ELK® Configuration File
2 | # (C)2024 Lewes Technology Consulting, LLC
3 | #
4 | # This file contains filters, transforms, and enrichments for Linux PAM messages
5 |
6 | filter {
7 | # PAM authentication messages
8 | if [labels][type] == "syslog" {
9 | if [message] =~ /^pam_/ {
10 | grok {
11 | match => [ "message", "%{WORD:[pam][module]}\(%{DATA:[pam][service]}:%{WORD:[pam][sessiontype]}\): %{GREEDYDATA:message_remainder}" ]
12 |
13 | add_tag => [ "got_pam", "parse_done" ]
14 | tag_on_failure => [ "_grokparsefailure_pam" ]
15 | }
16 | }
17 |
18 | if [message_remainder] {
19 | grok {
20 | match => [ "message_remainder", "session %{WORD:[pam][event]} for user %{USER:[user][name]}(?: by \(uid=%{INT:[user][id]}\))?" ]
21 | match => [ "message_remainder", "error retrieving information about user %{USER:[user][name]}"]
22 | }
23 | }
24 | mutate {
25 | remove_field => [ "message_remainder" ]
26 | }
27 | }
28 | }
29 |
--------------------------------------------------------------------------------
/configfiles/6017-iptables.conf:
--------------------------------------------------------------------------------
1 | # SOF-ELK® Configuration File
2 | # (C)2024 Lewes Technology Consulting, LLC
3 | #
4 | # This file contains filters, transforms, and enrichments for iptables firewall messages
5 |
6 | filter {
7 | if [labels][type] == "syslog" and [log][syslog][appname] == "kernel" and !("parse_done" in [tags]) {
8 | grok {
9 | patterns_dir => [ "/usr/local/sof-elk/grok-patterns" ]
10 | match => [
11 | # iptables firewall messages
12 | "message", "(?:\[%{BASE10NUM:[iptables][uptime]}\]%{SPACE})?%{IPTABLES}"
13 | ]
14 | add_tag => [ "got_iptables", "parse_done" ]
15 | tag_on_failure => [ "_defaultparse" ]
16 | }
17 | }
18 | if "got_iptables" in [tags] {
19 | mutate {
20 | convert => {
21 | "[iptables][uptime]" => "float"
22 | "[source][bytes]" => "integer"
23 | }
24 | lowercase => [ "[network][transport]" ]
25 | copy => { "[source][bytes]" => "[network][bytes]" }
26 | }
27 | }
28 | }
29 |
--------------------------------------------------------------------------------
/configfiles/6018-cisco_asa.conf:
--------------------------------------------------------------------------------
1 | # SOF-ELK® Configuration File
2 | # (C)2024 Lewes Technology Consulting, LLC
3 | #
4 | # This file contains filters, transforms, and enrichments for Cisco ASA messages
5 |
6 | filter {
7 | if [labels][type] == "syslog" and !("parse_done" in [tags]) {
8 | # Cisco ASA messages
9 | grok {
10 | patterns_dir => [ "/usr/local/sof-elk/grok-patterns" ]
11 | match => [ "message", "%{CISCO_ASA_MSG}" ]
12 | add_tag => [ "got_cisco", "parse_done" ]
13 | }
14 | }
15 |
16 | if "got_cisco" in [tags] {
17 | mutate {
18 | lowercase => [
19 | "[cisco][asa][action]",
20 | "[network][direction]",
21 | "[network][transport]",
22 | "[network][tcp_flags]"
23 | ]
24 | }
25 |
26 | if [network][transport] {
27 | translate {
28 | dictionary_path => "/usr/local/sof-elk/lib/dictionaries/ip_proto_name2int.yaml"
29 | source => "[network][transport]"
30 | target => "[network][iana_number]"
31 | }
32 | }
33 |
34 | if [network][tcp_flags] {
35 | mutate {
36 | split => { "[network][tcp_flags]" => " " }
37 | }
38 |
39 | ruby {
40 | path => "/usr/local/sof-elk/supporting-scripts/tcp_flags_to_array.rb"
41 | script_params => {
42 | "source_field" => "[network][tcp_flags]"
43 | "source_type" => "arr"
44 | }
45 | }
46 | }
47 | }
48 | }
49 |
--------------------------------------------------------------------------------
/configfiles/6202-zeek_files.conf:
--------------------------------------------------------------------------------
1 | # SOF-ELK® Configuration File
2 | # (C)2024 Lewes Technology Consulting, LLC
3 | #
4 | # This file contains processing steps for Zeek's files.log
5 |
6 | # Reference: https://docs.zeek.org/en/master/scripts/base/frameworks/files/main.zeek.html#type-Files::Info
7 |
8 | filter {
9 | if [labels][type] == "zeek_files" and "zeek_json" in [tags] {
10 | mutate {
11 | rename => {
12 | "[raw][fuid]" => "[zeek][files][fuid]"
13 | "[raw][uid]" => "[zeek][session_id]"
14 | "[raw][id.orig_h]" => "[source][ip]"
15 | "[raw][id.orig_p]" => "[source][port]"
16 | "[raw][id.resp_h]" => "[destination][ip]"
17 | "[raw][id.resp_p]" => "[destination][port]"
18 | "[raw][source]" => "[zeek][files][source]"
19 | "[raw][depth]" => "[zeek][files][depth]"
20 | "[raw][analyzers]" => "[zeek][files][analyzers]"
21 | "[raw][mime_type]" => "[zeek][files][mime_type]"
22 | "[raw][filename]" => "[zeek][files][filename]"
23 | "[raw][duration]" => "[zeek][files][duration]"
24 | "[raw][local_orig]" => "[zeek][files][local_orig]"
25 | "[raw][is_orig]" => "[zeek][files][is_orig]"
26 | "[raw][seen_bytes]" => "[zeek][files][seen_bytes]"
27 | "[raw][missing_bytes]" => "[zeek][files][missing_bytes]"
28 | "[raw][overflow_bytes]" => "[zeek][files][overflow_bytes]"
29 | "[raw][parent_fuid]" => "[zeek][files][parent_fuid]"
30 | "[raw][md5]" => "[zeek][files][md5]"
31 | "[raw][sha1]" => "[zeek][files][sha1]"
32 | "[raw][sha256]" => "[zeek][files][sha256]"
33 | "[raw][extracted]" => "[zeek][files][extracted]"
34 | "[raw][extracted_cutoff]" => "[zeek][files][extracted_cutoff]"
35 | "[raw][extracted_size]" => "[zeek][files][extracted_size]"
36 | "[raw][entropy]" => "[zeek][files][entropy]"
37 | }
38 | }
39 |
40 | # populate the @timestamp field with the ts value
41 | date {
42 | match => [ "[raw][ts]", "UNIX" ]
43 | }
44 |
45 | mutate {
46 | convert => {
47 | "[zeek][files][depth]" => "integer"
48 | "[zeek][files][duration]" => "float"
49 | "[zeek][files][seen_bytes]" => "integer"
50 | "[zeek][files][missing_bytes]" => "integer"
51 | "[zeek][files][overflow_bytes]" => "integer"
52 | "[zeek][files][extracted_size]" => "integer"
53 | }
54 | remove_field => "raw"
55 | }
56 | }
57 | }
58 |
--------------------------------------------------------------------------------
/configfiles/6203-zeek_ssl.conf:
--------------------------------------------------------------------------------
1 | # SOF-ELK® Configuration File
2 | # (C)2024 Lewes Technology Consulting, LLC
3 | #
4 | # This file contains processing steps for Zeek's ssl.log
5 |
6 | # Reference: https://docs.zeek.org/en/master/scripts/base/protocols/ssl/main.zeek.html#type-SSL::Info
7 |
8 | filter {
9 | if [labels][type] == "zeek_ssl" and "zeek_json" in [tags] {
10 | mutate {
11 | rename => {
12 | "[raw][uid]" => "[zeek][session_id]"
13 | "[raw][id.orig_h]" => "[source][ip]"
14 | "[raw][id.orig_p]" => "[source][port]"
15 | "[raw][id.resp_h]" => "[destination][ip]"
16 | "[raw][id.resp_p]" => "[destination][port]"
17 | "[raw][version]" => "[tls][version]"
18 | "[raw][cipher]" => "[tls][cipher]"
19 | "[raw][curve]" => "[tls][curve]"
20 | "[raw][server_name]" => "[tls][client][server_name]"
21 | "[raw][resumed]" => "[tls][resumed]"
22 | "[raw][next_protocol]" => "[tls][next_protocol]"
23 | "[raw][established]" => "[tls][established]"
24 | "[raw][logged]" => "[zeek][ssl][already_logged]"
25 | "[raw][ssl_history]" => "[zeek][ssl][history]"
26 | "[raw][cert_chain]" => "[tls][server][certificate_chain]"
27 | "[raw][cert_chain_fps]" => "[tls][server][hash][sha256]"
28 | "[raw][client_cert_chain]" => "[tls][client][certificate_chain]"
29 | "[raw][client_cert_chain_fps]" => "[tls][client][hash][sha256]"
30 | "[raw][subject]" => "[tls][server][subject]"
31 | "[raw][issuer]" => "[tls][server][issuer]"
32 | "[raw][client_subject]" => "[tls][client][subject]"
33 | "[raw][client_issuer]" => "[tls][client][issuer]"
34 | "[raw][sni_matches_cert]" => "[tls][ssl_sni_matches_cert]"
35 | }
36 | }
37 |
38 | # populate the @timestamp field with the ts value
39 | date {
40 | match => [ "[raw][ts]", "UNIX" ]
41 | }
42 |
43 | mutate {
44 | remove_field => "raw"
45 | }
46 | }
47 | }
48 |
--------------------------------------------------------------------------------
/configfiles/6204-zeek_x509.conf:
--------------------------------------------------------------------------------
1 | # SOF-ELK® Configuration File
2 | # (C)2024 Lewes Technology Consulting, LLC
3 | #
4 | # This file contains processing steps for Zeek's x509.log
5 |
6 | # Reference: https://docs.zeek.org/en/master/scripts/base/files/x509/main.zeek.html#type-X509::Info
7 |
8 | filter {
9 | if [labels][type] == "zeek_x509" and "zeek_json" in [tags] {
10 | mutate {
11 | rename => {
12 | "[raw][fingerprint]" => "[tls][cert_container][x509][hash][sha256]"
13 | "[raw][certificate.version]" => "[tls][cert_container][x509][version_number]"
14 | "[raw][certificate.serial]" => "[tls][cert_container][x509][serial_number]"
15 | "[raw][certificate.subject]" => "[tls][cert_container][x509][subject][common_name]"
16 | "[raw][certificate.key_alg]" => "[tls][cert_container][x509][certificate_key_algorithm]"
17 | "[raw][certificate.sig_alg]" => "[tls][cert_container][x509][signature_algorithm]"
18 | "[raw][certificate.key_type]" => "[tls][cert_container][x509][public_key_algorithm]"
19 | "[raw][certificate.key_length]" => "[tls][cert_container][x509][public_key_size]"
20 | "[raw][certificate.exponent]" => "[tls][cert_container][x509][public_key_exponent]"
21 | "[raw][san.dns]" => "[tls][cert_container][x509][alternative_names]"
22 | }
23 | }
24 |
25 | # populate the @timestamp field with the ts value
26 | date {
27 | match => [ "[raw][ts]", "UNIX" ]
28 | }
29 |
30 | # handle cert validity dates
31 | date {
32 | match => [ "[raw][certificate.not_valid_before]", "UNIX" ]
33 | target => "[tls][cert_container][x509][not_before]"
34 | }
35 | date {
36 | match => [ "[raw][certificate.not_valid_after]", "UNIX" ]
37 | target => "[tls][cert_container][x509][not_after]"
38 | }
39 |
40 | # reparent the fields based on the certificate type
41 | if [raw][host_cert] == "true" {
42 | mutate { rename => { "[tls][cert_container]" => "[tls][server]" } }
43 | } else if [raw][basic_constraints.ca] == "true" {
44 | mutate { rename => { "[tls][cert_container]" => "[tls][ca]" } }
45 | } else if [raw][client_cert] {
46 | mutate { rename => { "[tls][cert_container]" => "[tls][client]" } }
47 | }
48 |
49 | mutate {
50 | remove_field => "raw"
51 | }
52 | }
53 | }
54 |
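55 | # Illustration of the reparenting above: a host certificate ends up under
56 | # [tls][server][x509][...], a CA certificate under [tls][ca][x509][...], and a
57 | # client certificate under [tls][client][x509][...].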
--------------------------------------------------------------------------------
/configfiles/6276-zeek_weird.conf:
--------------------------------------------------------------------------------
1 | # SOF-ELK® Configuration File
2 | # (C)2024 Lewes Technology Consulting, LLC
3 | #
4 | # This file contains processing steps for Zeek's weird.log
5 |
6 | # Reference: https://docs.zeek.org/en/master/scripts/base/frameworks/notice/weird.zeek.html#type-Weird::Info
7 |
8 | filter {
9 | if [labels][type] == "zeek_weird" and "zeek_json" in [tags] {
10 | mutate {
11 | # map raw weird.log fields to their SOF-ELK equivalents
12 | rename => {
13 | "[raw][uid]" => "[zeek][session_id]"
14 | "[raw][id.orig_h]" => "[source][ip]"
15 | "[raw][id.orig_p]" => "[source][port]"
16 | "[raw][id.resp_h]" => "[destination][ip]"
17 | "[raw][id.resp_p]" => "[destination][port]"
18 | "[raw][name]" => "[zeek][weird][name]"
19 | "[raw][addl]" => "[zeek][weird][additional_info]"
20 | "[raw][notice]" => "[zeek][weird][notice]"
21 | "[raw][peer]" => "[zeek][weird][peer]"
22 | "[raw][source]" => "[zeek][weird][source]"
23 | "[raw][identifier]" => "[zeek][weird][identifier]"
24 | }
25 | }
26 |
27 | # populate the @timestamp field with the ts value
28 | date {
29 | match => [ "[raw][ts]", "UNIX" ]
30 | }
31 |
32 | mutate {
33 | remove_field => "raw"
34 | }
35 | }
36 | }
37 |
--------------------------------------------------------------------------------
/configfiles/6952-kubernetes.conf:
--------------------------------------------------------------------------------
1 | # SOF-ELK® Configuration File
2 | # (C)2023 Lewes Technology Consulting, LLC
3 | #
4 | # This file parses timestamp+KV Kubernetes logs
5 |
6 | filter {
7 | if [labels][type] == "kubernetes" {
8 |
9 | if ( "k8s_json" in [tags] ) {
10 |
11 | date {
12 | match => [ "[k8s][requestReceivedTimestamp]", "ISO8601" ]
13 | }
14 |
15 | mutate {
16 | rename => {
17 | "[k8s][sourceIPs]" => "[source][ip]"
18 | "[k8s][verb]" => "[http][request][method]"
19 | "[k8s][requestURI]" => "[url][path]"
20 | "[k8s][userAgent]" => "[user_agent][original]"
21 | "[k8s][responseStatus][code]" => "[http][response][status_code]"
22 | }
23 | uppercase => [ "[http][request][method]" ]
24 | }
25 |
26 | } else {
27 | # pull client IP and optional port apart.
28 | if [k8s][client] {
29 | grok {
30 | patterns_dir => [ "/usr/local/sof-elk/grok-patterns" ]
31 | match => {
32 | "[k8s][client]" => [ "\[?%{IP:[client][ip]}\]?(?::%{INT:[client][port]})?" ]
33 | }
34 | }
35 | }
36 |
37 | # rename remaining desired subfields from the k8s[] array
38 | mutate {
39 | rename => {
40 | "[k8s][accesskeyid]" => "[k8s][access_key_id]"
41 | "[k8s][accountid]" => "[k8s][account_id]"
42 | "[k8s][arn]" => "[k8s][arn]"
43 | "[k8s][userid]" => "[user][id]"
44 | "[k8s][username]" => "[user][name]"
45 | }
46 | }
47 |
48 | # split "groups" into an array if it exists but if it's an empty "[]", drop it
49 | if [k8s][groups] == "[]" {
50 | mutate {
51 | remove_field => [ "[k8s][groups]" ]
52 | }
53 | } else if [k8s][groups] {
54 | mutate {
55 | gsub => [ "[k8s][groups]", "[\[\]]", "" ]
56 | split => { "[k8s][groups]" => " " }
57 | }
58 | }
59 |
60 | # # remove the raw[] array and anything left in it
61 | # mutate {
62 | # remove_field => [ "raw" ]
63 | # }
64 | }
65 | }
66 | }
67 |
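68 | # Illustration (hypothetical value): [k8s][groups] = "[system:masters system:authenticated]"
69 | # has its brackets stripped by the gsub, then the split produces the array
70 | # ["system:masters", "system:authenticated"]; a bare "[]" is removed entirely.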
--------------------------------------------------------------------------------
/configfiles/8000-postprocess-zeek.conf:
--------------------------------------------------------------------------------
1 | # SOF-ELK® Configuration File
2 | # (C)2024 Lewes Technology Consulting, LLC
3 | #
4 | # This file handles postprocessing for Zeek logs
5 |
6 | filter {
7 | if "zeek" in [tags] {
8 | # if "_grokparsefailure" not in [tags] and "_csvparsefailure" not in [tags] and "_jsonparsefailure" not in [tags] and "_dateparsefailure" not in [tags] {
9 | # mutate {
10 | # remove_field => [ "message" ]
11 | # }
12 | # }
13 |
14 | if [log][file][path] {
15 | mutate {
16 | add_field => { "[sof-elk][path]" => "Zeek log: %{[log][file][path]}" }
17 | }
18 | } else {
19 | mutate {
20 | add_field => { "[sof-elk][path]" => "Unknown Zeek log" }
21 | }
22 | }
23 | }
24 | }
25 |
--------------------------------------------------------------------------------
/configfiles/8006-postprocess-dns.conf:
--------------------------------------------------------------------------------
1 | # SOF-ELK® Configuration File
2 | # (C)2024 Lewes Technology Consulting, LLC
3 | #
4 | # This file contains postprocessing steps for DNS records
5 |
6 | filter {
7 | if "dns_record" in [tags] {
8 | if [source][ip] and [dns][question][name] and [dns][answers][data] {
9 | mutate {
10 | replace => { "message" => "DNS: %{[source][ip]} (%{[dns][question][name]} %{[dns][question][type]} -> %{[dns][answers][data]})" }
11 | }
12 | } else if [source][ip] and [dns][question][name] and [dns][rejected] {
13 | mutate {
14 | replace => { "message" => "DNS: %{[source][ip]} (%{[dns][question][name]} %{[dns][question][type]} -> Rejected)"}
15 | }
16 | } else if [source][ip] and [dns][question][name] {
17 | mutate {
18 | replace => { "message" => "DNS: %{[source][ip]} (%{[dns][question][name]} %{[dns][question][type]} -> No response)"}
19 | }
20 | } else {
21 | mutate {
22 | replace => { "message" => "DNS: Unhandled input. See event.original." }
23 | }
24 | }
25 | }
26 | }
27 |
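28 | # Illustration (hypothetical lookup): a query from 10.0.0.5 for example.com that
29 | # answered 93.184.216.34 would produce the summary message
30 | # DNS: 10.0.0.5 (example.com A -> 93.184.216.34)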
--------------------------------------------------------------------------------
/configfiles/8051-postprocess-ports.conf:
--------------------------------------------------------------------------------
1 | # SOF-ELK® Configuration File
2 | # (C)2024 Lewes Technology Consulting, LLC
3 | #
4 | # This file normalizes port fields and collects them into a deduplicated [related][port] array
5 |
6 | filter {
7 | mutate {
8 | convert => {
9 | "[source][port]" => "integer"
10 | "[destination][port]" => "integer"
11 | "[client][port]" => "integer"
12 | "[remote][port]" => "integer"
13 | "[backend][port]" => "integer"
14 | "[ftp_data_destination][port]" => "integer"
15 | }
16 | }
17 |
18 | if [source][port] {
19 | mutate { merge => { "[related][port]" => "[source][port]" } }
20 | }
21 |
22 | if [destination][port] {
23 | mutate { merge => { "[related][port]" => "[destination][port]" } }
24 | }
25 |
26 | if [client][port] {
27 | mutate { merge => { "[related][port]" => "[client][port]" } }
28 | }
29 |
30 | if [remote][port] {
31 | mutate { merge => { "[related][port]" => "[remote][port]" } }
32 | }
33 |
34 | if [backend][port] {
35 | mutate { merge => { "[related][port]" => "[backend][port]" } }
36 | }
37 |
38 | if [ftp_data_destination][port] {
39 | mutate { merge => { "[related][port]" => "[ftp_data_destination][port]" } }
40 | }
41 |
42 | # de-duplicate the [related][port] array
43 | ruby {
44 | code => "
45 | a = event.get('[related][port]')
46 | if a.is_a? Array
47 | event.remove('[related][port]')
48 | event.set('[related][port]', a.uniq)
49 | end"
50 | }
51 | }
52 |
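53 | # Illustration: an event with [source][port] = 51515 and [destination][port] = 443
54 | # ends up with [related][port] = [51515, 443]; the ruby block above then strips
55 | # any duplicate values (e.g. when two of the merged port fields are identical).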
--------------------------------------------------------------------------------
/configfiles/8052-postprocess-community-id.conf:
--------------------------------------------------------------------------------
1 | # SOF-ELK® Configuration File
2 | # (C)2023 Lewes Technology Consulting, LLC
3 | #
4 | # This file adds a Community ID field for records with all of the necessary components
5 | # Original script from RockNSM was used as a basis for this functionality
6 |
7 | filter {
8 | if ([source][ip] and [destination][ip] and [network][iana_number] and [source][port] and [destination][port]) and ![network][community_id] {
9 | ruby {
10 | path => "/usr/local/sof-elk/supporting-scripts/community-id.rb"
11 | script_params => {
12 | "target_field" => "[network][community_id]"
13 | }
14 | tag_on_exception => "_rubyexception-community-id"
15 | }
16 | }
17 | }
18 |
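19 | # Illustration: for a qualifying event, community-id.rb (derived from the
20 | # RockNSM script noted above) is expected to produce a value of the standard
21 | # Community ID v1 form "1:<base64 digest>", matching the IDs that Zeek and
22 | # Suricata emit for the same flow.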
--------------------------------------------------------------------------------
/configfiles/8053-postprocess-mac.conf:
--------------------------------------------------------------------------------
1 | # SOF-ELK® Configuration File
2 | # (C)2023 Lewes Technology Consulting, LLC
3 | #
4 | # This file normalizes MAC address fields and collects them into a deduplicated [related][mac] array
5 |
6 | filter {
7 | # normalize MAC address notation, then collect all observed MACs into
8 | # [related][mac], mirroring the [related][ip] handling in IP postprocessing:
9 | # mutate { merge => { "[related][ip]" => "[source][ip]" } }
10 | # the same merge pattern is applied below for each MAC-bearing field, e.g.:
11 | # mutate { merge => { "[related][mac]" => "[source][mac]" } }
12 |
13 | mutate {
14 | gsub => [
15 | "[source][mac]", ":", "-",
16 | "[destination][mac]", ":", "-",
17 | "[client][mac]", ":", "-"
18 | ]
19 | uppercase => [ "[source][mac]", "[destination][mac]", "[client][mac]" ]
20 | }
21 | if [source][mac] {
22 | mutate { merge => { "[related][mac]" => "[source][mac]" } }
23 | }
24 |
25 | if [destination][mac] {
26 | mutate { merge => { "[related][mac]" => "[destination][mac]" } }
27 | }
28 |
29 | if [client][mac] {
30 | mutate { merge => { "[related][mac]" => "[client][mac]" } }
31 | }
32 |
33 | # de-duplicate the [related][mac] array
34 | ruby {
35 | code => "
36 | a = event.get('[related][mac]')
37 | if a.is_a? Array
38 | event.remove('[related][mac]')
39 | event.set('[related][mac]', a.uniq)
40 | end"
41 | }
42 |
43 | }
44 |
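45 | # Illustration (hypothetical address): [source][mac] = "00:0c:29:12:34:56" is
46 | # rewritten to "00-0C-29-12-34-56" and merged into the [related][mac] array.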
--------------------------------------------------------------------------------
/configfiles/8054-postprocess-netflow.conf:
--------------------------------------------------------------------------------
1 | # SOF-ELK® Configuration File
2 | # (C)2024 Lewes Technology Consulting, LLC
3 | #
4 | # This file contains postprocessing steps for NetFlow records
5 |
6 | filter {
7 | if [labels][type] in [ "netflow", "archive-netflow", "zeek_conn" ] {
8 | # create copies for visualized fields (e.g. 1024 -> 1k)
9 | mutate {
10 | copy => {
11 | "[netflow][delta_flow_count]" => "[netflow][delta_flow_count_vis]"
12 | "[destination][bytes]" => "[destination][bytes_vis]"
13 | "[destination][packets]" => "[destination][packets_vis]"
14 | "[network][missed_bytes]" => "[network][missed_bytes_vis]"
15 | "[source][bytes]" => "[source][bytes_vis]"
16 | "[source][packets]" => "[source][packets_vis]"
17 | }
18 | }
19 | }
20 | }
21 |
--------------------------------------------------------------------------------
/configfiles/8060-postprocess-useragent.conf:
--------------------------------------------------------------------------------
1 | # SOF-ELK® Configuration File
2 | # (C)2023 Lewes Technology Consulting, LLC
3 | #
4 | # This file enriches a useragent field
5 |
6 | filter {
7 | # enrich the useragent field
8 | if [user_agent][original] {
9 | mutate {
10 | # remove quotes from quoted string
11 | # convert + sign to space
12 | gsub => [
13 | "[user_agent][original]", "\"", "",
14 | "[user_agent][original]", "\+", " "
15 | ]
16 | }
17 | useragent {
18 | source => "[user_agent][original]"
19 | target => "[user_agent]"
20 | }
21 | }
22 | }
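23 |
24 | # Illustration (hypothetical UA string): "Mozilla/5.0+(Windows+NT+10.0)" is
25 | # first rewritten to "Mozilla/5.0 (Windows NT 10.0)", after which the useragent
26 | # filter parses it into subfields such as [user_agent][name] and [user_agent][os].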
--------------------------------------------------------------------------------
/configfiles/8100-postprocess-syslog.conf:
--------------------------------------------------------------------------------
1 | # SOF-ELK® Configuration File
2 | # (C)2016 Lewes Technology Consulting, LLC
3 | #
4 | # This file contains final (cleanup) processing for syslog messages
5 |
6 | filter {
7 | if "parse_done" in [tags] and "_defaultparse" in [tags] {
8 | mutate {
9 | remove_tag => [ "_defaultparse" ]
10 | }
11 | }
12 | }
13 |
--------------------------------------------------------------------------------
/configfiles/8110-postprocess-httpd.conf:
--------------------------------------------------------------------------------
1 | # SOF-ELK® Configuration File
2 | # (C)2024 Lewes Technology Consulting, LLC
3 | #
4 | # This file contains postprocessing for HTTPD access log messages
5 |
6 | filter {
7 | if [labels][type] in [ "httpdlog", "zeek_http" ] {
8 |
9 | if [http][request][referrer] {
10 | mutate {
11 | # remove quotes from quoted string
12 | gsub => [
13 | "[http][request][referrer]", "\"", ""
14 | ]
15 | }
16 | }
17 |
18 | if [http][request][method] == "CONNECT" {
19 | mutate {
20 | replace => { "[url][path]" => "(Tunnel setup request)" }
21 | }
22 | }
23 |
24 | if [destination][bytes] == "-" {
25 | mutate {
26 | replace => { "[destination][bytes]" => 0 }
27 | }
28 | }
29 | if [aws][s3access][object_size] == "-" {
30 | mutate {
31 | replace => { "[aws][s3access][object_size]" => 0 }
32 | }
33 | }
34 | if [event][duration] == "-" {
35 | mutate {
36 | replace => { "[event][duration]" => 0 }
37 | }
38 | }
39 | mutate {
40 | convert => {
41 | "[http][version]" => "float"
42 | "[http][response][status_code]" => "integer"
43 | "[source][bytes]" => "integer"
44 | "[destination][bytes]" => "integer"
45 | "[aws][s3access][object_size]" => "integer"
46 | "[event][duration]" => "integer"
47 | }
48 | }
49 |
50 | # look at %{[url][path]} and see if it ends in a "not page resource" extension. Issue #25.
51 | if [url][path] {
52 | grok {
53 | match => [ "[url][path]", "%{URIPATH:requestpath}" ]
54 | add_tag => [ "find_pagenotpage" ]
55 | }
56 | if "find_pagenotpage" in [tags] {
57 | grok {
58 | match => [ "requestpath", "\.(css|js|class|gif|jpg|jpeg|png|bmp|ico|rss|xml|swf|woff2|ttf|eot)$" ]
59 | add_tag => [ "not_page" ]
60 | tag_on_failure => [ "page" ]
61 | }
62 | mutate {
63 | remove_field => [ "requestpath" ]
64 | remove_tag => [ "find_pagenotpage" ]
65 | }
66 | }
67 | }
68 | }
69 | }
70 |
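71 | # Illustration: a request for /app/login.html fails the extension match and is
72 | # tagged "page", while /assets/style.css matches and is tagged "not_page".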
--------------------------------------------------------------------------------
/configfiles/8701-postprocess-microsoft365.conf:
--------------------------------------------------------------------------------
1 | # SOF-ELK® Configuration File
2 | # (C)2024 Lewes Technology Consulting, LLC
3 | #
4 | # This file contains postprocessing for Microsoft365 log messages
5 |
6 | filter {
7 | if [labels][type] == "microsoft365" {
8 | mutate {
9 | # force these to strings so the first auto-assigned mapping type doesn't conflict with later records
10 | convert => {
11 | "[o365][audit][Parameters]" => "string"
12 | "[o365][audit][OperationProperties]" => "string"
13 | "[o365][audit][ExtendedProperties]" => "string"
14 | "[o365][audit][DeviceProperties]" => "string"
15 | "[o365][audit][ModifiedProperties]" => "string"
16 | }
17 | }
18 | }
19 | }
20 |
--------------------------------------------------------------------------------
/configfiles/8951-postprocess-gws.conf:
--------------------------------------------------------------------------------
1 | # SOF-ELK® Configuration File
2 | # (C)2024 Lewes Technology Consulting, LLC
3 | #
4 | # This file contains postprocessing for Google Workspace log messages
5 |
6 | filter {
7 | if [labels][type] == "gws" {
8 | mutate {
9 | # convert byte counts to integers; force event_parameters to strings so the first auto-assigned mapping type doesn't conflict with later records
10 | convert => {
11 | "[email][bytes]" => "integer"
12 | "[google_workspace][admin][event_parameters]" => "string"
13 | "[google_workspace][drive][event_parameters]" => "string"
14 | "[google_workspace][login][event_parameters]" => "string"
15 | "[google_workspace][user_accounts][event_parameters]" => "string"
16 | "[google_workspace][token][event_parameters]" => "string"
17 | "[google_workspace][mobile][event_parameters]" => "string"
18 | "[google_workspace][chat][event_parameters]" => "string"
19 | }
20 | }
21 | }
22 | }
23 |
--------------------------------------------------------------------------------
/configfiles/8952-postprocess-kubernetes.conf:
--------------------------------------------------------------------------------
1 | # SOF-ELK® Configuration File
2 | # (C)2024 Lewes Technology Consulting, LLC
3 | #
4 | # This file contains postprocessing for Kubernetes log messages
5 |
6 | filter {
7 | if [labels][type] == "kubernetes" {
8 | mutate {
9 | convert => {
10 | "[k8s][line]" => "integer"
11 | "[k8s][thread]" => "integer"
12 | "[k8s][evaluatedNodes]" => "integer"
13 | "[k8s][feasibleNodes]" => "integer"
14 | "[k8s][node_filesystem_utilization]" => "float"
15 | }
16 | }
17 | }
18 | }
--------------------------------------------------------------------------------
/configfiles/8999-postprocess-all.conf:
--------------------------------------------------------------------------------
1 | # SOF-ELK® Configuration File
2 | # (C)2023 Lewes Technology Consulting, LLC
3 | #
4 | # This file contains postprocessing for all items
5 |
6 | filter {
7 |
8 | ruby {
9 | code => "event.set('sof-elk.processing_time', (Time.now.to_f - event.get('[@metadata][ingest_start]')).round(8))"
10 | }
11 |
12 | # if [source] =~ /^\/logstash\// {
13 | # mutate {
14 | # gsub => [
15 | # "source", "^/logstash/\w+/", ""
16 | # ]
17 | # }
18 | # }
19 | }
20 |
--------------------------------------------------------------------------------
/configfiles/9000-output-elastic-consolidated.conf:
--------------------------------------------------------------------------------
1 | # SOF-ELK® Configuration File
2 | # (C)2023 Lewes Technology Consulting, LLC
3 | #
4 | # This file contains the consolidated Elasticsearch output used for the "common" index patterns
5 | # Using a single ES output stanza is more resource-efficient than running many
6 | # The [sof-elk][base_index] field is populated upstream with the base index name
7 |
8 | output {
9 | if [sof-elk][base_index] {
10 | elasticsearch {
11 | ilm_enabled => false
12 | index => "%{[sof-elk][base_index]}-%{+YYYY.MM}"
13 | ecs_compatibility => "disabled"
14 | }
15 | }
16 | }
17 |
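18 | # Illustration: an event with [sof-elk][base_index] = "netflow" and an April
19 | # 2024 @timestamp is written to the "netflow-2024.04" index.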
--------------------------------------------------------------------------------
/configfiles/9999-output-stdout.conf:
--------------------------------------------------------------------------------
1 | # SOF-ELK® Configuration File
2 | # (C)2016 Lewes Technology Consulting, LLC
3 | #
4 | # This file contains output to STDOUT, which is only used for testing purposes. It should generally not be enabled, so everything is commented out.
5 |
6 | output {
7 | # stdout {
8 | # codec => rubydebug
9 | # codec => json
10 | # }
11 | }
12 |
--------------------------------------------------------------------------------
/grok-patterns/archive-netflow:
--------------------------------------------------------------------------------
1 | # SOF-ELK® Supporting file
2 | # (C)2016 Lewes Technology Consulting, LLC
3 | #
4 | # This file contains grok patterns used for NetFlow parsers
5 |
6 | TIMESTAMP_NETFLOW %{YEAR}-%{MONTHNUM}-%{MONTHDAY} %{HOUR}:%{MINUTE}:%{SECOND}\.%{INT:MSECOND}
7 | ICMP_TYPECODE %{NONNEGINT:icmp_type:int}\.%{NONNEGINT:icmp_code:int}
--------------------------------------------------------------------------------
/kibana/data_views/all-records.json:
--------------------------------------------------------------------------------
1 | {
2 | "data_view": {
3 | "allowHidden": false,
4 | "allowNoIndex": false,
5 | "fieldFormats": {},
6 | "id": "all-records",
7 | "name": "all-records",
8 | "timeFieldName": "@timestamp",
9 | "title": "logstash-*,netflow-*,httpdlog-*,aws-*,azure-*,gws-*,evtxlogs-*,filesystem-*,lnkfiles-*,kubernetes-*,microsoft365-*,plaso-*,zeek-*"
10 | }
11 | }
12 |
--------------------------------------------------------------------------------
/kibana/data_views/aws.json:
--------------------------------------------------------------------------------
1 | {
2 | "data_view": {
3 | "allowHidden": false,
4 | "allowNoIndex": true,
5 | "fieldFormats": {
6 | "@timestamp": {
7 | "id": "date",
8 | "params": {
9 | "pattern": "YYYY-MM-DD HH:mm:ss.SSS\\Z"
10 | }
11 | }
12 | },
13 | "id": "aws",
14 | "name": "aws-*",
15 | "timeFieldName": "@timestamp",
16 | "title": "aws-*"
17 | }
18 | }
19 |
--------------------------------------------------------------------------------
/kibana/data_views/azure.json:
--------------------------------------------------------------------------------
1 | {
2 | "data_view": {
3 | "allowHidden": false,
4 | "allowNoIndex": true,
5 | "fieldFormats": {
6 | "@timestamp": {
7 | "id": "date",
8 | "params": {
9 | "pattern": "YYYY-MM-DD HH:mm:ss.SSS\\Z"
10 | }
11 | }
12 | },
13 | "id": "azure",
14 | "name": "azure-*",
15 | "timeFieldName": "@timestamp",
16 | "title": "azure-*"
17 | }
18 | }
19 |
--------------------------------------------------------------------------------
/kibana/data_views/evtxlogs.json:
--------------------------------------------------------------------------------
1 | {
2 | "data_view": {
3 | "allowHidden": false,
4 | "allowNoIndex": true,
5 | "fieldFormats": {
6 | "@timestamp": {
7 | "id": "date",
8 | "params": {
9 | "pattern": "YYYY-MM-DD HH:mm:ss.SSS\\Z"
10 | }
11 | }
12 | },
13 | "id": "evtxlogs",
14 | "name": "evtxlogs-*",
15 | "timeFieldName": "@timestamp",
16 | "title": "evtxlogs-*"
17 | }
18 | }
19 |
--------------------------------------------------------------------------------
/kibana/data_views/filesystem.json:
--------------------------------------------------------------------------------
1 | {
2 | "data_view": {
3 | "allowHidden": false,
4 | "allowNoIndex": true,
5 | "fieldFormats": {
6 | "@timestamp": {
7 | "id": "date",
8 | "params": {
9 | "pattern": "YYYY-MM-DD HH:mm:ss.SSS\\Z"
10 | }
11 | },
12 | "file.size_vis": {
13 | "id": "bytes",
14 | "params": {
15 | "pattern": "0,0.[0]b"
16 | }
17 | }
18 | },
19 | "id": "filesystem",
20 | "name": "filesystem-*",
21 | "timeFieldName": "@timestamp",
22 | "title": "filesystem-*"
23 | }
24 | }
25 |
--------------------------------------------------------------------------------
/kibana/data_views/gcp.json:
--------------------------------------------------------------------------------
1 | {
2 | "data_view": {
3 | "allowHidden": false,
4 | "allowNoIndex": true,
5 | "fieldFormats": {
6 | "@timestamp": {
7 | "id": "date",
8 | "params": {
9 | "pattern": "YYYY-MM-DD HH:mm:ss.SSS\\Z"
10 | }
11 | }
12 | },
13 | "id": "gcp",
14 | "name": "gcp-*",
15 | "timeFieldName": "@timestamp",
16 | "title": "gcp-*"
17 | }
18 | }
19 |
--------------------------------------------------------------------------------
/kibana/data_views/gws.json:
--------------------------------------------------------------------------------
1 | {
2 | "data_view": {
3 | "allowHidden": false,
4 | "allowNoIndex": true,
5 | "fieldFormats": {
6 | "@timestamp": {
7 | "id": "date",
8 | "params": {
9 | "pattern": "YYYY-MM-DD HH:mm:ss.SSS\\Z"
10 | }
11 | }
12 | },
13 | "id": "gws",
14 | "name": "gws-*",
15 | "timeFieldName": "@timestamp",
16 | "title": "gws-*"
17 | }
18 | }
19 |
--------------------------------------------------------------------------------
/kibana/data_views/httpdlog.json:
--------------------------------------------------------------------------------
1 | {
2 | "data_view": {
3 | "allowHidden": false,
4 | "allowNoIndex": true,
5 | "fieldFormats": {
6 | "@timestamp": {
7 | "id": "date",
8 | "params": {
9 | "pattern": "YYYY-MM-DD HH:mm:ss.SSS\\Z"
10 | }
11 | },
12 | "destination.port": {
13 | "id": "number",
14 | "params": {
15 | "pattern": "00"
16 | }
17 | },
18 | "related.port": {
19 | "id": "number",
20 | "params": {
21 | "pattern": "00"
22 | }
23 | },
24 | "source.geo.asnstr": {
25 | "id": "string",
26 | "params": {
27 | "transform": "false"
28 | }
29 | },
30 | "source.port": {
31 | "id": "number",
32 | "params": {
33 | "pattern": "00"
34 | }
35 | }
36 | },
37 | "id": "httpdlog",
38 | "name": "httpdlog-*",
39 | "timeFieldName": "@timestamp",
40 | "title": "httpdlog-*"
41 | }
42 | }
43 |
--------------------------------------------------------------------------------
/kibana/data_views/kape.json:
--------------------------------------------------------------------------------
1 | {
2 | "data_view": {
3 | "allowHidden": false,
4 | "allowNoIndex": false,
5 | "fieldFormats": {},
6 | "id": "kape",
7 | "name": "kape-*",
8 | "timeFieldName": "@timestamp",
9 | "title": "filefolderaccess-*,filesystem-*,lnkfiles-*,evtxlogs-*"
10 | }
11 | }
12 |
--------------------------------------------------------------------------------
/kibana/data_views/kubernetes.json:
--------------------------------------------------------------------------------
1 | {
2 | "data_view": {
3 | "allowHidden": false,
4 | "allowNoIndex": true,
5 | "fieldFormats": {
6 | "@timestamp": {
7 | "id": "date",
8 | "params": {
9 | "pattern": "YYYY-MM-DD HH:mm:ss.SSS\\Z"
10 | }
11 | }
12 | },
13 | "id": "kubernetes",
14 | "name": "kubernetes-*",
15 | "timeFieldName": "@timestamp",
16 | "title": "kubernetes-*"
17 | }
18 | }
19 |
--------------------------------------------------------------------------------
/kibana/data_views/lnkfiles.json:
--------------------------------------------------------------------------------
1 | {
2 | "data_view": {
3 | "allowHidden": false,
4 | "allowNoIndex": true,
5 | "fieldFormats": {
6 | "@timestamp": {
7 | "id": "date",
8 | "params": {
9 | "pattern": "YYYY-MM-DD HH:mm:ss.SSS\\Z"
10 | }
11 | }
12 | },
13 | "id": "lnkfiles",
14 | "name": "lnkfiles-*",
15 | "timeFieldName": "@timestamp",
16 | "title": "lnkfiles-*"
17 | }
18 | }
19 |
--------------------------------------------------------------------------------
/kibana/data_views/logstash.json:
--------------------------------------------------------------------------------
1 | {
2 | "data_view": {
3 | "allowHidden": false,
4 | "allowNoIndex": true,
5 | "fieldFormats": {
6 | "@timestamp": {
7 | "id": "date",
8 | "params": {
9 | "pattern": "YYYY-MM-DD HH:mm:ss.SSS\\Z"
10 | }
11 | },
12 | "destination.port": {
13 | "id": "number",
14 | "params": {
15 | "pattern": "00"
16 | }
17 | },
18 | "related.port": {
19 | "id": "number",
20 | "params": {
21 | "pattern": "00"
22 | }
23 | },
24 | "source.geo.asnstr": {
25 | "id": "string",
26 | "params": {
27 | "transform": "false"
28 | }
29 | },
30 | "source.port": {
31 | "id": "number",
32 | "params": {
33 | "pattern": "00"
34 | }
35 | }
36 | },
37 | "id": "logstash",
38 | "name": "logstash-*",
39 | "timeFieldName": "@timestamp",
40 | "title": "logstash-*"
41 | }
42 | }
43 |
--------------------------------------------------------------------------------
/kibana/data_views/microsoft365.json:
--------------------------------------------------------------------------------
1 | {
2 | "data_view": {
3 | "allowHidden": false,
4 | "allowNoIndex": true,
5 | "fieldFormats": {
6 | "@timestamp": {
7 | "id": "date",
8 | "params": {
9 | "pattern": "YYYY-MM-DD HH:mm:ss.SSS\\Z"
10 | }
11 | }
12 | },
13 | "id": "microsoft365",
14 | "name": "microsoft365-*",
15 | "timeFieldName": "@timestamp",
16 | "title": "microsoft365-*"
17 | }
18 | }
19 |
--------------------------------------------------------------------------------
/kibana/data_views/plaso.json:
--------------------------------------------------------------------------------
1 | {
2 | "data_view": {
3 | "allowHidden": false,
4 | "allowNoIndex": true,
5 | "fieldFormats": {
6 | "@timestamp": {
7 | "id": "date",
8 | "params": {
9 | "pattern": "YYYY-MM-DD HH:mm:ss.SSS\\Z"
10 | }
11 | }
12 | },
13 | "id": "plaso",
14 | "name": "plaso-*",
15 | "timeFieldName": "@timestamp",
16 | "title": "plaso-*"
17 | }
18 | }
19 |
--------------------------------------------------------------------------------
/kibana/data_views/zeek.json:
--------------------------------------------------------------------------------
1 | {
2 | "data_view": {
3 | "allowHidden": false,
4 | "allowNoIndex": true,
5 | "fieldFormats": {
6 | "@timestamp": {
7 | "id": "date",
8 | "params": {
9 | "pattern": "YYYY-MM-DD HH:mm:ss.SSS\\Z"
10 | }
11 | }
12 | },
13 | "id": "zeek",
14 | "name": "zeek-*",
15 | "timeFieldName": "@timestamp",
16 | "title": "zeek-*"
17 | }
18 | }
19 |
--------------------------------------------------------------------------------
/kibana/lens/02a030a4-0331-41c1-af27-670ffc546b86.json:
--------------------------------------------------------------------------------
1 | {
2 | "attributes": {
3 | "description": "",
4 | "state": {
5 | "adHocDataViews": {},
6 | "datasourceStates": {
7 | "formBased": {
8 | "layers": {
9 | "0ead8c18-9b3d-466c-9423-0f77749ea4ec": {
10 | "columnOrder": [
11 | "8d00caaa-8c1f-4d09-aea6-b872f5e5fcd5"
12 | ],
13 | "columns": {
14 | "8d00caaa-8c1f-4d09-aea6-b872f5e5fcd5": {
15 | "customLabel": true,
16 | "dataType": "number",
17 | "isBucketed": false,
18 | "label": "Total Bytes",
19 | "operationType": "sum",
20 | "params": {
21 | "emptyAsNull": true
22 | },
23 | "scale": "ratio",
24 | "sourceField": "network.bytes_vis"
25 | }
26 | },
27 | "incompleteColumns": {}
28 | }
29 | }
30 | },
31 | "indexpattern": {
32 | "layers": {}
33 | },
34 | "textBased": {
35 | "layers": {}
36 | }
37 | },
38 | "filters": [],
39 | "internalReferences": [],
40 | "query": {
41 | "language": "kuery",
42 | "query": ""
43 | },
44 | "visualization": {
45 | "layerId": "0ead8c18-9b3d-466c-9423-0f77749ea4ec",
46 | "layerType": "data",
47 | "metricAccessor": "8d00caaa-8c1f-4d09-aea6-b872f5e5fcd5",
48 | "subtitle": "",
49 | "titlesTextAlign": "center",
50 | "valueFontMode": "fit",
51 | "valuesTextAlign": "center"
52 | }
53 | },
54 | "title": "Total Bytes",
55 | "visualizationType": "lnsMetric"
56 | },
57 | "coreMigrationVersion": "8.8.0",
58 | "id": "02a030a4-0331-41c1-af27-670ffc546b86",
59 | "managed": false,
60 | "namespaces": [
61 | "default"
62 | ],
63 | "references": [
64 | {
65 | "id": "netflow",
66 | "name": "indexpattern-datasource-layer-0ead8c18-9b3d-466c-9423-0f77749ea4ec",
67 | "type": "index-pattern"
68 | }
69 | ],
70 | "type": "lens",
71 | "typeMigrationVersion": "8.9.0"
72 | }
73 |
--------------------------------------------------------------------------------
/kibana/lens/138278b4-4534-4aa2-a1db-41e37631a092.json:
--------------------------------------------------------------------------------
1 | {
2 | "attributes": {
3 | "description": "",
4 | "state": {
5 | "adHocDataViews": {},
6 | "datasourceStates": {
7 | "formBased": {
8 | "layers": {
9 | "765ebfc1-e38b-45f1-9038-e865463eda93": {
10 | "columnOrder": [
11 | "7e0e1d44-0118-46ef-a7b0-ceae171b6895"
12 | ],
13 | "columns": {
14 | "7e0e1d44-0118-46ef-a7b0-ceae171b6895": {
15 | "customLabel": true,
16 | "dataType": "number",
17 | "isBucketed": false,
18 | "label": "Bytes",
19 | "operationType": "sum",
20 | "params": {
21 | "emptyAsNull": true
22 | },
23 | "scale": "ratio",
24 | "sourceField": "file.size_vis"
25 | }
26 | },
27 | "ignoreGlobalFilters": false,
28 | "incompleteColumns": {}
29 | }
30 | }
31 | },
32 | "indexpattern": {
33 | "layers": {}
34 | },
35 | "textBased": {
36 | "layers": {}
37 | }
38 | },
39 | "filters": [],
40 | "internalReferences": [],
41 | "query": {
42 | "language": "kuery",
43 | "query": ""
44 | },
45 | "visualization": {
46 | "layerId": "765ebfc1-e38b-45f1-9038-e865463eda93",
47 | "layerType": "data",
48 | "metricAccessor": "7e0e1d44-0118-46ef-a7b0-ceae171b6895"
49 | }
50 | },
51 | "title": "Total File Size",
52 | "visualizationType": "lnsMetric"
53 | },
54 | "coreMigrationVersion": "8.8.0",
55 | "id": "138278b4-4534-4aa2-a1db-41e37631a092",
56 | "managed": false,
57 | "namespaces": [
58 | "default"
59 | ],
60 | "references": [
61 | {
62 | "id": "filesystem",
63 | "name": "indexpattern-datasource-layer-765ebfc1-e38b-45f1-9038-e865463eda93",
64 | "type": "index-pattern"
65 | }
66 | ],
67 | "type": "lens",
68 | "typeMigrationVersion": "8.9.0"
69 | }
70 |
--------------------------------------------------------------------------------
/kibana/lens/e1683eea-191a-4b6a-bf27-cd96595ffae0.json:
--------------------------------------------------------------------------------
1 | {
2 | "attributes": {
3 | "description": "",
4 | "state": {
5 | "adHocDataViews": {},
6 | "datasourceStates": {
7 | "formBased": {
8 | "layers": {
9 | "21d6e708-8466-49b2-8314-8f512b717bd6": {
10 | "columnOrder": [
11 | "3b851116-1a9d-43a7-9c7d-c0a68e85cf62"
12 | ],
13 | "columns": {
14 | "3b851116-1a9d-43a7-9c7d-c0a68e85cf62": {
15 | "customLabel": true,
16 | "dataType": "number",
17 | "isBucketed": false,
18 | "label": " ",
19 | "operationType": "count",
20 | "params": {
21 | "emptyAsNull": true,
22 | "format": {
23 | "id": "number",
24 | "params": {
25 | "decimals": 0
26 | }
27 | }
28 | },
29 | "scale": "ratio",
30 | "sourceField": "___records___"
31 | }
32 | },
33 | "ignoreGlobalFilters": false,
34 | "incompleteColumns": {},
35 | "sampling": 1
36 | }
37 | }
38 | },
39 | "indexpattern": {
40 | "layers": {}
41 | },
42 | "textBased": {
43 | "layers": {}
44 | }
45 | },
46 | "filters": [],
47 | "internalReferences": [],
48 | "query": {
49 | "language": "kuery",
50 | "query": ""
51 | },
52 | "visualization": {
53 | "layerId": "21d6e708-8466-49b2-8314-8f512b717bd6",
54 | "layerType": "data",
55 | "metricAccessor": "3b851116-1a9d-43a7-9c7d-c0a68e85cf62"
56 | }
57 | },
58 | "title": "Syslog Record Count",
59 | "visualizationType": "lnsMetric"
60 | },
61 | "coreMigrationVersion": "8.8.0",
62 | "id": "e1683eea-191a-4b6a-bf27-cd96595ffae0",
63 | "managed": false,
64 | "namespaces": [
65 | "default"
66 | ],
67 | "references": [
68 | {
69 | "id": "logstash",
70 | "name": "indexpattern-datasource-layer-21d6e708-8466-49b2-8314-8f512b717bd6",
71 | "type": "index-pattern"
72 | }
73 | ],
74 | "type": "lens",
75 | "typeMigrationVersion": "8.9.0"
76 | }
77 |
--------------------------------------------------------------------------------
/kibana/lens/e3e7b3e3-ac26-4a8c-be6b-6a3bdf97b1f5.json:
--------------------------------------------------------------------------------
1 | {
2 | "attributes": {
3 | "description": "",
4 | "state": {
5 | "adHocDataViews": {},
6 | "datasourceStates": {
7 | "formBased": {
8 | "layers": {
9 | "eb6a1c32-a998-44dc-8603-f481ae7e35d0": {
10 | "columnOrder": [
11 | "888e2e0d-59eb-4195-852e-412ab1081fc3"
12 | ],
13 | "columns": {
14 | "888e2e0d-59eb-4195-852e-412ab1081fc3": {
15 | "customLabel": true,
16 | "dataType": "number",
17 | "isBucketed": false,
18 | "label": "Files",
19 | "operationType": "count",
20 | "params": {
21 | "emptyAsNull": true
22 | },
23 | "scale": "ratio",
24 | "sourceField": "___records___"
25 | }
26 | },
27 | "ignoreGlobalFilters": false,
28 | "incompleteColumns": {}
29 | }
30 | }
31 | },
32 | "indexpattern": {
33 | "layers": {}
34 | },
35 | "textBased": {
36 | "layers": {}
37 | }
38 | },
39 | "filters": [],
40 | "internalReferences": [],
41 | "query": {
42 | "language": "kuery",
43 | "query": ""
44 | },
45 | "visualization": {
46 | "layerId": "eb6a1c32-a998-44dc-8603-f481ae7e35d0",
47 | "layerType": "data",
48 | "metricAccessor": "888e2e0d-59eb-4195-852e-412ab1081fc3"
49 | }
50 | },
51 | "title": "File Count",
52 | "visualizationType": "lnsMetric"
53 | },
54 | "coreMigrationVersion": "8.8.0",
55 | "id": "e3e7b3e3-ac26-4a8c-be6b-6a3bdf97b1f5",
56 | "managed": false,
57 | "namespaces": [
58 | "default"
59 | ],
60 | "references": [
61 | {
62 | "id": "filesystem",
63 | "name": "indexpattern-datasource-layer-eb6a1c32-a998-44dc-8603-f481ae7e35d0",
64 | "type": "index-pattern"
65 | }
66 | ],
67 | "type": "lens",
68 | "typeMigrationVersion": "8.9.0"
69 | }
70 |
--------------------------------------------------------------------------------
/kibana/search/80bd16f0-51bf-11e9-8b54-fde301eff14e.json:
--------------------------------------------------------------------------------
1 | {
2 | "attributes": {
3 | "columns": [
4 | "file.directory",
5 | "file.name",
6 | "file.size"
7 | ],
8 | "description": "",
9 | "grid": {},
10 | "hideChart": false,
11 | "hits": 0,
12 | "isTextBasedQuery": false,
13 | "kibanaSavedObjectMeta": {
14 | "searchSourceJSON": "{\"filter\":[],\"highlightAll\":true,\"indexRefName\":\"kibanaSavedObjectMeta.searchSourceJSON.index\",\"query\":{\"language\":\"kuery\",\"query\":\"\"},\"version\":true}"
15 | },
16 | "sort": [
17 | [
18 | "@timestamp",
19 | "desc"
20 | ]
21 | ],
22 | "timeRestore": false,
23 | "title": "Filesystem Discovery",
24 | "version": 1
25 | },
26 | "coreMigrationVersion": "8.8.0",
27 | "id": "80bd16f0-51bf-11e9-8b54-fde301eff14e",
28 | "managed": false,
29 | "namespaces": [
30 | "default"
31 | ],
32 | "references": [
33 | {
34 | "id": "filesystem",
35 | "name": "kibanaSavedObjectMeta.searchSourceJSON.index",
36 | "type": "index-pattern"
37 | }
38 | ],
39 | "type": "search",
40 | "typeMigrationVersion": "10.5.0"
41 | }
42 |
--------------------------------------------------------------------------------
/kibana/search/8d421a80-7a76-11e9-92d0-dfa499d9fb15.json:
--------------------------------------------------------------------------------
1 | {
2 | "attributes": {
3 | "columns": [
4 | "file.path",
5 | "file.mtime",
6 | "file.target.absolute_path",
7 | "file.target.mtime"
8 | ],
9 | "description": "",
10 | "grid": {},
11 | "hideChart": false,
12 | "hits": 0,
13 | "isTextBasedQuery": false,
14 | "kibanaSavedObjectMeta": {
15 | "searchSourceJSON": "{\"filter\":[],\"highlightAll\":true,\"indexRefName\":\"kibanaSavedObjectMeta.searchSourceJSON.index\",\"query\":{\"language\":\"kuery\",\"query\":\"\"},\"version\":true}"
16 | },
17 | "sort": [
18 | [
19 | "@timestamp",
20 | "desc"
21 | ]
22 | ],
23 | "timeRestore": false,
24 | "title": "LNKfile Discovery",
25 | "version": 1
26 | },
27 | "coreMigrationVersion": "8.8.0",
28 | "id": "8d421a80-7a76-11e9-92d0-dfa499d9fb15",
29 | "managed": false,
30 | "namespaces": [
31 | "default"
32 | ],
33 | "references": [
34 | {
35 | "id": "lnkfiles",
36 | "name": "kibanaSavedObjectMeta.searchSourceJSON.index",
37 | "type": "index-pattern"
38 | }
39 | ],
40 | "type": "search",
41 | "typeMigrationVersion": "10.5.0"
42 | }
43 |
--------------------------------------------------------------------------------
/kibana/search/9744c560-2f17-11e8-9faf-f77fbc81a50d.json:
--------------------------------------------------------------------------------
1 | {
2 | "attributes": {
3 | "columns": [
4 | "source.ip",
5 | "http.request.method",
6 | "url.domain",
7 | "url.path",
8 | "http.response.status_code"
9 | ],
10 | "description": "",
11 | "hits": 0,
12 | "kibanaSavedObjectMeta": {
13 | "searchSourceJSON": "{\"filter\":[],\"highlightAll\":true,\"indexRefName\":\"kibanaSavedObjectMeta.searchSourceJSON.index\",\"query\":{\"language\":\"kuery\",\"query\":\"\"},\"version\":true}"
14 | },
15 | "sort": [
16 | [
17 | "@timestamp",
18 | "desc"
19 | ]
20 | ],
21 | "title": "HTTPD Discovery",
22 | "version": 1
23 | },
24 | "coreMigrationVersion": "8.8.0",
25 | "id": "9744c560-2f17-11e8-9faf-f77fbc81a50d",
26 | "managed": false,
27 | "namespaces": [
28 | "default"
29 | ],
30 | "references": [
31 | {
32 | "id": "httpdlog",
33 | "name": "kibanaSavedObjectMeta.searchSourceJSON.index",
34 | "type": "index-pattern"
35 | }
36 | ],
37 | "type": "search",
38 | "typeMigrationVersion": "10.5.0"
39 | }
40 |
--------------------------------------------------------------------------------
/kibana/search/c80386c0-286e-11eb-bf98-fb32763e0d19.json:
--------------------------------------------------------------------------------
1 | {
2 | "attributes": {
3 | "columns": [
4 | "operation_name",
5 | "status",
6 | "initiator",
7 | "resource_group",
8 | "resource_type",
9 | "resource"
10 | ],
11 | "description": "",
12 | "hits": 0,
13 | "kibanaSavedObjectMeta": {
14 | "searchSourceJSON": "{\"filter\":[],\"highlightAll\":true,\"indexRefName\":\"kibanaSavedObjectMeta.searchSourceJSON.index\",\"query\":{\"language\":\"kuery\",\"query\":\"\"},\"version\":true}"
15 | },
16 | "sort": [
17 | [
18 | "@timestamp",
19 | "desc"
20 | ]
21 | ],
22 | "title": "Azure Activity Log Discovery",
23 | "version": 1
24 | },
25 | "coreMigrationVersion": "8.8.0",
26 | "id": "c80386c0-286e-11eb-bf98-fb32763e0d19",
27 | "managed": false,
28 | "namespaces": [
29 | "default"
30 | ],
31 | "references": [
32 | {
33 | "id": "azure",
34 | "name": "kibanaSavedObjectMeta.searchSourceJSON.index",
35 | "type": "index-pattern"
36 | }
37 | ],
38 | "type": "search",
39 | "typeMigrationVersion": "10.5.0"
40 | }
41 |
--------------------------------------------------------------------------------
/kibana/search/e45686d0-3021-11e8-9faf-f77fbc81a50d.json:
--------------------------------------------------------------------------------
1 | {
2 | "attributes": {
3 | "columns": [
4 | "log.syslog.hostname",
5 | "log.syslog.appname",
6 | "message"
7 | ],
8 | "description": "",
9 | "grid": {
10 | "columns": {
11 | "log.syslog.appname": {
12 | "width": 140
13 | },
14 | "log.syslog.hostname": {
15 | "width": 140
16 | }
17 | }
18 | },
19 | "hideChart": false,
20 | "hits": 0,
21 | "isTextBasedQuery": false,
22 | "kibanaSavedObjectMeta": {
23 | "searchSourceJSON": "{\"filter\":[],\"highlightAll\":true,\"indexRefName\":\"kibanaSavedObjectMeta.searchSourceJSON.index\",\"query\":{\"language\":\"kuery\",\"query\":\"\"},\"version\":true}"
24 | },
25 | "sort": [
26 | [
27 | "@timestamp",
28 | "desc"
29 | ]
30 | ],
31 | "timeRestore": false,
32 | "title": "Syslog Discovery",
33 | "usesAdHocDataView": false,
34 | "version": 1
35 | },
36 | "coreMigrationVersion": "8.8.0",
37 | "id": "e45686d0-3021-11e8-9faf-f77fbc81a50d",
38 | "managed": false,
39 | "namespaces": [
40 | "default"
41 | ],
42 | "references": [
43 | {
44 | "id": "logstash",
45 | "name": "kibanaSavedObjectMeta.searchSourceJSON.index",
46 | "type": "index-pattern"
47 | }
48 | ],
49 | "type": "search",
50 | "typeMigrationVersion": "10.5.0"
51 | }
52 |
--------------------------------------------------------------------------------
/kibana/search/eb2ab450-8d08-11e9-a03c-33ad7fc2c4d0.json:
--------------------------------------------------------------------------------
1 | {
2 | "attributes": {
3 | "columns": [
4 | "winlog.computer_name",
5 | "winlog.user.name",
6 | "winlog.event_id",
7 | "winlog.channel",
8 | "message"
9 | ],
10 | "description": "",
11 | "grid": {
12 | "columns": {
13 | "winlog.event_id": {
14 | "width": 154
15 | }
16 | }
17 | },
18 | "hideChart": false,
19 | "hits": 0,
20 | "isTextBasedQuery": false,
21 | "kibanaSavedObjectMeta": {
22 | "searchSourceJSON": "{\"filter\":[],\"highlightAll\":true,\"indexRefName\":\"kibanaSavedObjectMeta.searchSourceJSON.index\",\"query\":{\"language\":\"kuery\",\"query\":\"\"},\"version\":true}"
23 | },
24 | "sort": [
25 | [
26 | "@timestamp",
27 | "desc"
28 | ]
29 | ],
30 | "timeRestore": false,
31 | "title": "Eventlog Discovery",
32 | "version": 1
33 | },
34 | "coreMigrationVersion": "8.8.0",
35 | "id": "eb2ab450-8d08-11e9-a03c-33ad7fc2c4d0",
36 | "managed": false,
37 | "namespaces": [
38 | "default"
39 | ],
40 | "references": [
41 | {
42 | "id": "evtxlogs",
43 | "name": "kibanaSavedObjectMeta.searchSourceJSON.index",
44 | "type": "index-pattern"
45 | }
46 | ],
47 | "type": "search",
48 | "typeMigrationVersion": "10.5.0"
49 | }
50 |
--------------------------------------------------------------------------------
/kibana/search/fa3332b0-72b0-11e8-9159-894bd7d62352.json:
--------------------------------------------------------------------------------
1 | {
2 | "attributes": {
3 | "columns": [
4 | "netflow.exporter.ip",
5 | "network.transport",
6 | "source.ip",
7 | "source.port",
8 | "destination.ip",
9 | "destination.port",
10 | "network.bytes",
11 | "network.packets"
12 | ],
13 | "description": "",
14 | "hits": 0,
15 | "kibanaSavedObjectMeta": {
16 | "searchSourceJSON": "{\"filter\":[],\"highlightAll\":true,\"indexRefName\":\"kibanaSavedObjectMeta.searchSourceJSON.index\",\"query\":{\"language\":\"kuery\",\"query\":\"\"},\"version\":true}"
17 | },
18 | "sort": [
19 | [
20 | "@timestamp",
21 | "desc"
22 | ]
23 | ],
24 | "title": "NetFlow Discovery",
25 | "version": 1
26 | },
27 | "coreMigrationVersion": "8.8.0",
28 | "id": "fa3332b0-72b0-11e8-9159-894bd7d62352",
29 | "managed": false,
30 | "namespaces": [
31 | "default"
32 | ],
33 | "references": [
34 | {
35 | "id": "netflow",
36 | "name": "kibanaSavedObjectMeta.searchSourceJSON.index",
37 | "type": "index-pattern"
38 | }
39 | ],
40 | "type": "search",
41 | "typeMigrationVersion": "10.5.0"
42 | }
43 |
--------------------------------------------------------------------------------
/kibana/sof-elk_config.json:
--------------------------------------------------------------------------------
1 | {
2 | "attributes": {
3 | "dateFormat": "YYYY-MM-DD HH:mm:ss.SSS Z",
4 | "dateFormat:tz": "UTC",
5 | "dateNanosFormat": "YYYY-MM-DD HH:mm:ss.SSSSSSSSS Z",
6 | "defaultIndex": "logstash",
7 | "defaultRoute": "/app/dashboards#/view/6d272a20-7319-11e8-9f32-cf7527ac183d",
8 | "securitySolution:enableNewsFeed": false,
9 | "theme:darkMode": "system",
10 | "securitySolution:ipReputationLinks": "[{ \"name\": \"virustotal.com\", \"url_template\": \"https://www.virustotal.com/gui/search/{{ip}}\" }, { \"name\": \"talosIntelligence.com\", \"url_template\": \"https://talosintelligence.com/reputation_center/lookup?search={{ip}}\" }, { \"name\": \"SANS ISC\", \"url_template\": \"https://isc.sans.edu/ipinfo.html?ip={{ip}}\" }]",
11 | "discover:rowHeightOption": -1,
12 | "discover:sampleSize": 5000
13 | }
14 | }
--------------------------------------------------------------------------------
/kibana/visualization/4061ca00-2874-11eb-bf98-fb32763e0d19.json:
--------------------------------------------------------------------------------
1 | {
2 | "attributes": {
3 | "description": "",
4 | "kibanaSavedObjectMeta": {
5 | "searchSourceJSON": "{}"
6 | },
7 | "savedSearchRefName": "search_0",
8 | "title": "Azure Activity Log Initiator",
9 | "uiStateJSON": "{\"vis\":{\"params\":{\"sort\":{\"columnIndex\":null,\"direction\":null}}}}",
10 | "version": 1,
11 | "visState": "{\"aggs\":[{\"enabled\":true,\"id\":\"1\",\"params\":{},\"schema\":\"metric\",\"type\":\"count\"},{\"enabled\":true,\"id\":\"2\",\"params\":{\"customLabel\":\"Initiator\",\"field\":\"initiator.keyword\",\"missingBucket\":false,\"missingBucketLabel\":\"Missing\",\"order\":\"desc\",\"orderBy\":\"1\",\"otherBucket\":false,\"otherBucketLabel\":\"Other\",\"size\":10},\"schema\":\"bucket\",\"type\":\"terms\"}],\"params\":{\"dimensions\":{\"buckets\":[],\"metrics\":[{\"accessor\":0,\"aggType\":\"count\",\"format\":{\"id\":\"number\"},\"label\":\"Count\",\"params\":{}}]},\"perPage\":10,\"percentageCol\":\"\",\"showMetricsAtAllLevels\":false,\"showPartialRows\":false,\"showToolbar\":true,\"showTotal\":false,\"sort\":{\"columnIndex\":null,\"direction\":null},\"totalFunc\":\"sum\"},\"title\":\"Azure Activity Log Initiator\",\"type\":\"table\"}"
12 | },
13 | "coreMigrationVersion": "8.8.0",
14 | "id": "4061ca00-2874-11eb-bf98-fb32763e0d19",
15 | "managed": false,
16 | "namespaces": [
17 | "default"
18 | ],
19 | "references": [
20 | {
21 | "id": "c80386c0-286e-11eb-bf98-fb32763e0d19",
22 | "name": "search_0",
23 | "type": "search"
24 | }
25 | ],
26 | "type": "visualization",
27 | "typeMigrationVersion": "8.5.0"
28 | }
29 |
--------------------------------------------------------------------------------
/kibana/visualization/98309f20-8873-11e8-b4c2-17db52b8990d.json:
--------------------------------------------------------------------------------
1 | {
2 | "attributes": {
3 | "description": "",
4 | "kibanaSavedObjectMeta": {
5 | "searchSourceJSON": "{\"filter\":[],\"query\":{\"language\":\"kuery\",\"query\":\"\"}}"
6 | },
7 | "title": "SOF-ELK®",
8 | "uiStateJSON": "{}",
9 | "version": 1,
10 | "visState": "{\"aggs\":[],\"params\":{\"fontSize\":12,\"markdown\":\"\",\"openLinksInNewTab\":false},\"title\":\"SOF-ELK®\",\"type\":\"markdown\"}"
11 | },
12 | "coreMigrationVersion": "8.8.0",
13 | "id": "98309f20-8873-11e8-b4c2-17db52b8990d",
14 | "managed": false,
15 | "namespaces": [
16 | "default"
17 | ],
18 | "references": [],
19 | "type": "visualization",
20 | "typeMigrationVersion": "8.5.0"
21 | }
22 |
--------------------------------------------------------------------------------
/kibana/visualization/afda9520-2873-11eb-bf98-fb32763e0d19.json:
--------------------------------------------------------------------------------
1 | {
2 | "attributes": {
3 | "description": "",
4 | "kibanaSavedObjectMeta": {
5 | "searchSourceJSON": "{}"
6 | },
7 | "savedSearchRefName": "search_0",
8 | "title": "Azure Activity Log Operations",
9 | "uiStateJSON": "{}",
10 | "version": 1,
11 | "visState": "{\"aggs\":[{\"enabled\":true,\"id\":\"1\",\"params\":{},\"schema\":\"metric\",\"type\":\"count\"},{\"enabled\":true,\"id\":\"2\",\"params\":{\"customLabel\":\"Event Category\",\"field\":\"event_category.keyword\",\"missingBucket\":false,\"missingBucketLabel\":\"Missing\",\"order\":\"desc\",\"orderBy\":\"1\",\"otherBucket\":false,\"otherBucketLabel\":\"Other\",\"size\":15},\"schema\":\"segment\",\"type\":\"terms\"},{\"enabled\":true,\"id\":\"3\",\"params\":{\"customLabel\":\"Operation Name\",\"field\":\"operation_name.keyword\",\"missingBucket\":false,\"missingBucketLabel\":\"Missing\",\"order\":\"desc\",\"orderBy\":\"1\",\"otherBucket\":true,\"otherBucketLabel\":\"Other\",\"size\":15},\"schema\":\"segment\",\"type\":\"terms\"}],\"params\":{\"addTooltip\":true,\"dimensions\":{\"metric\":{\"accessor\":0,\"aggType\":\"count\",\"format\":{\"id\":\"number\"},\"label\":\"Count\",\"params\":{}}},\"distinctColors\":true,\"isDonut\":true,\"labels\":{\"last_level\":true,\"show\":false,\"truncate\":100,\"values\":true},\"legendDisplay\":\"hide\",\"legendPosition\":\"right\",\"palette\":{\"name\":\"kibana_palette\",\"type\":\"palette\"},\"type\":\"pie\"},\"title\":\"Azure Activity Log Operations\",\"type\":\"pie\"}"
12 | },
13 | "coreMigrationVersion": "8.8.0",
14 | "id": "afda9520-2873-11eb-bf98-fb32763e0d19",
15 | "managed": false,
16 | "namespaces": [
17 | "default"
18 | ],
19 | "references": [
20 | {
21 | "id": "c80386c0-286e-11eb-bf98-fb32763e0d19",
22 | "name": "search_0",
23 | "type": "search"
24 | }
25 | ],
26 | "type": "visualization",
27 | "typeMigrationVersion": "8.5.0"
28 | }
29 |
--------------------------------------------------------------------------------
/kibana/visualization/bdecdc70-731e-11e8-9f32-cf7527ac183d.json:
--------------------------------------------------------------------------------
1 | {
2 | "attributes": {
3 | "description": "",
4 | "kibanaSavedObjectMeta": {
5 | "searchSourceJSON": "{\"filter\":[],\"query\":{\"language\":\"lucene\",\"query\":\"\"}}"
6 | },
7 | "title": "SOF-ELK® VM Intro",
8 | "uiStateJSON": "{}",
9 | "version": 1,
10 | "visState": "{\"aggs\":[],\"params\":{\"fontSize\":11,\"markdown\":\"## Welcome to SOF-ELK®\\n\\nThis platform consists of a pre-configured, fully functional Elastic Stack configuration. It ingests various log formats and includes several dashboards to present the data in useful formats. While the platform and project were created for various SANS Courses, SOF-ELK is maintained as community resource.\\n\\nThis dashboard displays a summary of data that has been loaded. If you do not see summary information below, you either haven't loaded any data yet or you may need to adjust the time frame in scope with the time range selector above.\\n\\nWhen you're ready, [select a dashboard from the list](/app/dashboards#/list) to start exploring data.\",\"openLinksInNewTab\":false},\"title\":\"SOF-ELK® VM Intro\",\"type\":\"markdown\"}"
11 | },
12 | "coreMigrationVersion": "8.8.0",
13 | "id": "bdecdc70-731e-11e8-9f32-cf7527ac183d",
14 | "managed": false,
15 | "namespaces": [
16 | "default"
17 | ],
18 | "references": [],
19 | "type": "visualization",
20 | "typeMigrationVersion": "8.5.0"
21 | }
22 |
--------------------------------------------------------------------------------
/lib/configfiles/filebeat.yml:
--------------------------------------------------------------------------------
1 | # SOF-ELK® Supporting script
2 | # (C)2023 Lewes Technology Consulting, LLC
3 | #
4 | # This file configures the filebeat daemon
5 |
6 | ###################### Filebeat Configuration #########################
7 |
8 | # You can find the full configuration reference here:
9 | # https://www.elastic.co/guide/en/beats/filebeat/index.html
10 |
11 | # For more available modules and options, please see the filebeat.reference.yml sample
12 | # configuration file.
13 |
14 | # ============================== Filebeat inputs ===============================
15 | filebeat.config.inputs:
16 | enabled: true
17 | path: /usr/local/sof-elk/lib/filebeat_inputs/*.yml
18 |
19 | #============================= Filebeat modules ===============================
20 |
21 | filebeat.config.modules:
22 | # Glob pattern for configuration loading
23 | path: ${path.config}/modules.d/*.yml
24 |
25 | # Set to true to enable config reloading
26 | reload.enabled: false
27 |
28 | # Period on which files under path should be checked for changes
29 | #reload.period: 10s
30 |
31 | # ================================ Outputs =====================================
32 |
33 | # ------------------------------ Logstash Output -------------------------------
34 | output.logstash:
35 | # The Logstash hosts
36 | hosts: ["localhost:5044"]
37 |
38 | # Optional SSL. By default is off.
39 | # List of root certificates for HTTPS server verifications
40 | #ssl.certificate_authorities: ["/etc/pki/root/ca.pem"]
41 |
42 | # Certificate for SSL client authentication
43 | #ssl.certificate: "/etc/pki/client/cert.pem"
44 |
45 | # Client Certificate Key
46 | #ssl.key: "/etc/pki/client/cert.key"
47 |
48 | # ================================= Processors =================================
49 | # processors:
50 | # - add_host_metadata:
51 | # when.not.contains.tags: forwarded
52 | # - add_cloud_metadata: ~
53 | # - add_docker_metadata: ~
54 | # - add_kubernetes_metadata: ~
55 |
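56 | # Illustration (hypothetical input definition): a file placed in the inputs
57 | # path above might look like the following, using the standard filebeat "log"
58 | # input type; the path and tag shown here are examples only:
59 | #
60 | # - type: log
61 | #   paths:
62 | #     - /logstash/syslog/*
63 | #   tags: [ "example" ]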
--------------------------------------------------------------------------------
/lib/dictionaries/dns_type_code2name.yaml:
--------------------------------------------------------------------------------
1 | "0": "Reserved"
2 | "1": "A"
3 | "2": "NS"
4 | "3": "MD"
5 | "4": "MF"
6 | "5": "CNAME"
7 | "6": "SOA"
8 | "7": "MB"
9 | "8": "MG"
10 | "9": "MR"
11 | "10": "NULL"
12 | "11": "WKS"
13 | "12": "PTR"
14 | "13": "HINFO"
15 | "14": "MINFO"
16 | "15": "MX"
17 | "16": "TXT"
18 | "17": "RP"
19 | "18": "AFSDB"
20 | "19": "X25"
21 | "20": "ISDN"
22 | "21": "RT"
23 | "22": "NSAP"
24 | "23": "NSAP-PTR"
25 | "24": "SIG"
26 | "25": "KEY"
27 | "26": "PX"
28 | "27": "GPOS"
29 | "28": "AAAA"
30 | "29": "LOC"
31 | "30": "NXT"
32 | "31": "EID"
33 | "32": "NIMLOC"
34 | "33": "SRV"
35 | "34": "ATMA"
36 | "35": "NAPTR"
37 | "36": "KX"
38 | "37": "CERT"
39 | "38": "A6"
40 | "39": "DNAME"
41 | "40": "SINK"
42 | "41": "OPT"
43 | "42": "APL"
44 | "43": "DS"
45 | "44": "SSHFP"
46 | "45": "IPSECKEY"
47 | "46": "RRSIG"
48 | "47": "NSEC"
49 | "48": "DNSKEY"
50 | "49": "DHCID"
51 | "50": "NSEC3"
52 | "51": "NSEC3PARAM"
53 | "52": "TLSA"
54 | "53": "SMIMEA"
55 | "54": "Unassigned"
56 | "55": "HIP"
57 | "56": "NINFO"
58 | "57": "RKEY"
59 | "58": "TALINK"
60 | "59": "CDS"
61 | "60": "CDNSKEY"
62 | "61": "OPENPGPKEY"
63 | "62": "CSYNC"
64 | "63": "ZONEMD"
65 | "64": "SVCB"
66 | "65": "HTTPS"
67 | "99": "SPF"
68 | "100": "UINFO"
69 | "101": "UID"
70 | "102": "GID"
71 | "103": "UNSPEC"
72 | "104": "NID"
73 | "105": "L32"
74 | "106": "L64"
75 | "107": "LP"
76 | "108": "EUI48"
77 | "109": "EUI64"
78 | "128": "NXNAME"
79 | "249": "TKEY"
80 | "250": "TSIG"
81 | "251": "IXFR"
82 | "252": "AXFR"
83 | "253": "MAILB"
84 | "254": "MAILA"
85 | "255": "*"
86 | "256": "URI"
87 | "257": "CAA"
88 | "258": "AVC"
89 | "259": "DOA"
90 | "260": "AMTRELAY"
91 | "261": "RESINFO"
92 | "262": "WALLET"
93 | "263": "CLA"
94 | "264": "IPN"
95 | "32768": "TA"
96 | "32769": "DLV"
97 | "65535": "Reserved"
98 |
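This dictionary (like the others in this directory) is built for lookup-style enrichment: a numeric DNS record type is translated to its mnemonic. A minimal Python sketch of that lookup, assuming PyYAML:

    # sketch: translate a numeric DNS query type using this dictionary
    import yaml  # PyYAML

    with open("dns_type_code2name.yaml") as f:
        type_names = yaml.safe_load(f)  # keys load as strings, e.g. "28"

    print(type_names["28"])                   # -> "AAAA"
    print(type_names.get("9999", "unknown"))  # fallback for unmapped codes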
--------------------------------------------------------------------------------
/lib/dictionaries/eventid2desc.yaml:
--------------------------------------------------------------------------------
1 | # https://learn.microsoft.com/en-us/sysinternals/downloads/sysmon
2 | "1": "Process creation"
3 | "2": "Process changed file creation time"
4 | "3": "Network connection"
5 | "4": "Sysmon service state changed"
6 | "5": "Process terminated"
7 | "6": "Driver loaded"
8 | "7": "Image loaded"
9 | "8": "CreateRemoteThread"
10 | "9": "RawAccessRead"
11 | "10": "ProcessAccess"
12 | "11": "FileCreate"
13 | "12": "RegistryEvent (Object create and delete)"
14 | "13": "RegistryEvent (Value set)"
15 | "14": "RegistryEvent (Key and value rename)"
16 | "15": "FileCreateStreamHash"
17 | "16": "ServiceConfigurationChange"
18 | "17": "PipeEvent (Pipe created)"
19 | "18": "PipeEvent (Pipe connected)"
20 | "19": "WmiEvent (WmiEventFilter activity detected)"
21 | "20": "WmiEvent (WmiEventConsumer activity detected)"
22 | "21": "WmiEvent (WmiEventConsumerToFilter activity detected)"
23 | "22": "DNSEvent (DNS query)"
24 | "23": "FileDelete (File Delete archived)"
25 | "24": "ClipboardChange (New content in the clipboard)"
26 | "25": "ProcessTampering (Process image change)"
27 | "26": "FileDeleteDetected (File Delete logged)"
28 | "27": "FileBlockExecutable"
29 | "28": "FileBlockShredding"
30 | "255": "Error"
31 |
--------------------------------------------------------------------------------
/lib/dictionaries/ip_proto_zeek2int.yaml:
--------------------------------------------------------------------------------
1 | "icmp": 1
2 | "tcp": 6
3 | "udp": 17
4 | "unknown_transport": 0
5 |
--------------------------------------------------------------------------------
/lib/dictionaries/makeyaml.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/python3
2 | # Generates the YAML dictionary entries that map a TCP flag byte (0-255) to a
3 | # display string: either the nfdump-style form ("UAPRSF", with a dot for each
4 | # unset bit) or a long form ("FIN-SYN-...").  Entries are printed to STDOUT.
5 |
6 | # bit value -> long flag name (includes ECE/CWR/NS above the low six bits)
7 | long = {}
8 | long[1] = 'FIN'
9 | long[2] = 'SYN'
10 | long[4] = 'RST'
11 | long[8] = 'PSH'
12 | long[16] = 'ACK'
13 | long[32] = 'URG'
14 | long[64] = 'ECE'
15 | long[128] = 'CWR'
16 | long[256] = 'NS'
17 |
18 | # bit value -> nfdump single-letter abbreviation (low six bits only)
19 | nfdump = {}
20 | nfdump[1] = 'F'
21 | nfdump[2] = 'S'
22 | nfdump[4] = 'R'
23 | nfdump[8] = 'P'
24 | nfdump[16] = 'A'
25 | nfdump[32] = 'U'
26 |
27 |
28 | def convert(val, style='nfdump'):
29 |     longflags = []
30 |     nfdumpflags = ''
31 |     # walk the bits from highest to lowest so the nfdump string comes out in
32 |     # the conventional "UAPRSF" order
33 |     for flag in (256, 128, 64, 32, 16, 8, 4, 2, 1):
34 |         if val & flag != 0:
35 |             longflags.append(long[flag])
36 |             if flag <= 63:
37 |                 nfdumpflags += nfdump[flag]
38 |         elif flag <= 63:
39 |             # unset bit within the low six: placeholder dot
40 |             nfdumpflags += '.'
41 |
42 |     if longflags == []:
43 |         longflags.append('NULL')
44 |     longflags.reverse()
45 |     longflags_string = '-'.join(longflags)
46 |
47 |     # nfdump has no letters for ECE/CWR/NS, so fall back to hex notation
48 |     if val > 63:
49 |         nfdumpflags = '0x%X' % val
50 |
51 |     if style == 'long':
52 |         print('"0x%X": "%s"' % (val, longflags_string))
53 |     elif style == 'nfdump':
54 |         print('"%d": "%s"' % (val, nfdumpflags))
55 |
56 |
57 | for i in range(0, 256):
58 |     convert(i, 'nfdump')
59 |
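For reference, running python3 makeyaml.py > tcp_flags_int2nfdump.yaml (the output filename here is illustrative) emits one entry per possible flag byte, such as "2": "....S." for a lone SYN and "18": ".A..S." for SYN/ACK; values above 63 fall back to the hex form, e.g. "65": "0x41".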
--------------------------------------------------------------------------------
/lib/dictionaries/service_svc2listtype.yaml:
--------------------------------------------------------------------------------
1 | Windows Update: whitelist
2 | SEC555 Service: whitelist
3 | Evil Service: blacklist
4 |
--------------------------------------------------------------------------------
/lib/dictionaries/syslog_facility2int.yaml:
--------------------------------------------------------------------------------
1 | "kernel": 0
2 | "user": 1
3 | "mail": 2
4 | "daemon": 3
5 | "auth": 4
6 | "syslog": 5
7 | "lpr": 6
8 | "news": 7
9 | "uucp": 8
10 | "cron": 9
11 | "authpriv": 10
12 | "ftp": 11
13 | "ntp": 12
14 | "security": 13
15 | "console": 14
16 | "solaris-cron": 15
17 | "local0": 16
18 | "local1": 17
19 | "local2": 18
20 | "local3": 19
21 | "local4": 20
22 | "local5": 21
23 | "local6": 22
24 | "local7": 23
25 |
--------------------------------------------------------------------------------
/lib/dictionaries/syslog_severity2int.yaml:
--------------------------------------------------------------------------------
1 | "emerg": 0
2 | "alert": 1
3 | "crit": 2
4 | "err": 3
5 | "warning": 4
6 | "notice": 5
7 | "info": 6
8 | "debug": 7
9 |
--------------------------------------------------------------------------------
/lib/dictionaries/zeek_connection_state2desc.yaml:
--------------------------------------------------------------------------------
1 | "S0" : "Connection attempt seen, no reply"
2 | "S1" : "Connection established, not terminated"
3 | "S2" : "Connection established and close attempt by originator seen (but no reply from responder)"
4 | "S3" : "Connection established and close attempt by responder seen (but no reply from originator)"
5 | "SF" : "Normal SYN/FIN completion"
6 | "REJ" : "Connection attempt rejected"
7 | "RSTO" : "Connection established, originator aborted (sent a RST)"
8 | "RSTR" : "Established, responder aborted"
9 | "RSTOS0" : "Originator sent a SYN followed by a RST, we never saw a SYN-ACK from the responder"
10 | "RSTRH" : "Responder sent a SYN ACK followed by a RST, we never saw a SYN from the (purported) originator"
11 | "SH" : "Originator sent a SYN followed by a FIN, we never saw a SYN ACK from the responder (hence the connection was 'half' open)"
12 | "SHR" : "Responder sent a SYN ACK followed by a FIN, we never saw a SYN from the originator"
13 | "OTH" : "No SYN seen, just midstream traffic (a 'partial connection' that was not later closed)"
--------------------------------------------------------------------------------
/lib/dictionaries/zeek_dns_class2classabbrev.yaml:
--------------------------------------------------------------------------------
1 | "C_INTERNET" : "IN"
2 | "C_CSNET" : "CS"
3 | "C_CHAOS" : "CH"
4 | "C_HESIOD" : "HS"
5 | "C_NONE" : "NONE"
6 | "C_ANY" : "ANY"
--------------------------------------------------------------------------------
/lib/elastalert_rules/README.txt:
--------------------------------------------------------------------------------
1 | Any *.yaml files placed in this directory will be activated in ElastAlert.
2 |
--------------------------------------------------------------------------------
/lib/elasticsearch_templates/component_templates/component-mappings-corecommon.json:
--------------------------------------------------------------------------------
1 | {
2 | "template": {
3 | "mappings": {
4 | "properties": {
5 | "@timestamp": {
6 | "type": "date"
7 | },
8 | "@version": {
9 | "type": "keyword"
10 | },
11 | "prospector": {
12 | "properties": {
13 | "type": {
14 | "type": "keyword"
15 | }
16 | },
17 | "type": "object"
18 | },
19 | "network": {
20 | "properties": {
21 | "transport": {
22 | "type": "keyword"
23 | },
24 | "community_id": {
25 | "type": "keyword"
26 | },
27 | "tcp_flags": {
28 | "type": "keyword"
29 | },
30 | "tcp_flags_hex": {
31 | "type": "keyword"
32 | }
33 | }
34 | },
35 | "original_message": {
36 | "type": "keyword"
37 | },
38 | "path": {
39 | "type": "keyword"
40 | },
41 | "sof-elk": {
42 | "type": "object",
43 | "properties": {
44 | "base_index": {
45 | "type": "keyword"
46 | },
47 | "path": {
48 | "type": "keyword"
49 | }
50 | }
51 | }
52 | },
53 | "dynamic_templates": [
54 | {
55 | "rawvalue_fields": {
56 | "match": "_rawvalue",
57 | "mapping": {
58 | "type": "text"
59 | }
60 | }
61 | }
62 | ]
63 | }
64 | },
65 | "_meta": {
66 | "description": "Core/common fields",
67 | "provider": "SOF-ELK"
68 | }
69 | }
70 |
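Component templates such as this one must exist in Elasticsearch before the index templates that reference them are loaded. A hedged sketch of that call, assuming the local single-node instance on port 9200 and the component name "mappings-corecommon" used by the index templates below:

    # sketch: load a component template via the _component_template API
    import json
    import requests

    with open("component-mappings-corecommon.json") as f:
        body = json.load(f)

    resp = requests.put(
        "http://localhost:9200/_component_template/mappings-corecommon",
        json=body,
    )
    resp.raise_for_status()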
--------------------------------------------------------------------------------
/lib/elasticsearch_templates/component_templates/component-mappings-final.json:
--------------------------------------------------------------------------------
1 | {
2 | "template": {
3 | "mappings": {
4 | "dynamic_templates": [
5 | {
6 | "string_fields": {
7 | "mapping": {
8 | "fields": {
9 | "keyword": {
10 | "ignore_above": 256,
11 | "type": "keyword"
12 | }
13 | },
14 | "norms": false,
15 | "type": "text"
16 | },
17 | "match": "*",
18 | "match_mapping_type": "string"
19 | }
20 | }
21 | ]
22 | }
23 | },
24 | "_meta": {
25 | "description": "String fields",
26 | "provider": "SOF-ELK"
27 | }
28 | }
29 |
--------------------------------------------------------------------------------
/lib/elasticsearch_templates/component_templates/component-mappings-guid.json:
--------------------------------------------------------------------------------
1 | {
2 | "template": {
3 | "mappings": {
4 | "dynamic_templates": [
5 | {
6 | "guid_fields": {
7 | "mapping": {
8 | "type": "keyword"
9 | },
10 | "match": "*_guid"
11 | }
12 | }
13 | ]
14 | }
15 | },
16 | "_meta": {
17 | "description": "GUID fields",
18 | "provider": "SOF-ELK"
19 | }
20 | }
21 |
--------------------------------------------------------------------------------
/lib/elasticsearch_templates/component_templates/component-mappings-ip_address.json:
--------------------------------------------------------------------------------
1 | {
2 | "template": {
3 | "mappings": {
4 | "dynamic_templates": [
5 | {
6 | "ip_address_fields": {
7 | "match": "ip",
8 | "mapping": {
9 | "type": "ip"
10 | }
11 | }
12 | }
13 | ]
14 | }
15 | },
16 | "_meta": {
17 | "description": "IP Address fields",
18 | "provider": "SOF-ELK"
19 | }
20 | }
21 |
--------------------------------------------------------------------------------
/lib/elasticsearch_templates/component_templates/component-mappings-port.json:
--------------------------------------------------------------------------------
1 | {
2 | "template": {
3 | "mappings": {
4 | "dynamic_templates": [
5 | {
6 | "port_fields": {
7 | "match": "port",
8 | "mapping": {
9 | "type": "integer"
10 | }
11 | }
12 | }
13 | ]
14 | }
15 | },
16 | "_meta": {
17 | "description": "Port number fields",
18 | "provider": "SOF-ELK"
19 | }
20 | }
21 |
--------------------------------------------------------------------------------
/lib/elasticsearch_templates/component_templates/component-mappings-string.json:
--------------------------------------------------------------------------------
1 | {
2 | "template": {
3 | "mappings": {
4 | }
5 | },
6 | "_meta": {
7 | "description": "String fields",
8 | "provider": "SOF-ELK"
9 | }
10 | }
11 |
--------------------------------------------------------------------------------
/lib/elasticsearch_templates/component_templates/component-mappings-zeekuid.json:
--------------------------------------------------------------------------------
1 | {
2 | "template": {
3 | "mappings": {
4 | "dynamic_templates": [
5 | {
6 | "guid_fields": {
7 | "mapping": {
8 | "type": "keyword"
9 | },
10 | "match": "*uid"
11 | }
12 | }
13 | ]
14 | }
15 | },
16 | "_meta": {
17 | "description": "Zeek UID fields",
18 | "provider": "SOF-ELK"
19 | }
20 | }
21 |
--------------------------------------------------------------------------------
/lib/elasticsearch_templates/component_templates/component-settings-common.json:
--------------------------------------------------------------------------------
1 | {
2 | "template": {
3 | "settings": {
4 | "index": {
5 | "auto_expand_replicas": "0-1",
6 | "codec": "best_compression",
7 | "number_of_shards": "1",
8 | "number_of_replicas": 0
9 | }
10 | }
11 | },
12 | "_meta": {
13 | "description": "Index settings",
14 | "provider": "SOF-ELK"
15 | }
16 | }
17 |
--------------------------------------------------------------------------------
/lib/elasticsearch_templates/index_templates/000_index-example.json.sample:
--------------------------------------------------------------------------------
1 | {
2 | "index_patterns": [
3 | "logtype-*"
4 | ],
5 | "composed_of": [
6 | "settings-component_template",
7 | "mappings-corecommon-component_template",
8 | "mappings-string-component_template",
9 | "mappings-ip_address-component_template",
10 | "mappings-geohash-component_template"
11 | ],
12 | "template": {
13 | "settings": {
14 | "index": {
15 | "number_of_replicas": 0,
16 | "mapping": { /* only include this if absolutely needed - keep fields to default of 1000 if at all possible! */
17 | "total_fields": {
18 | "limit": 5000
19 | }
20 | }
21 | }
22 | },
23 | "mappings": {
24 | "properties": {
25 | /* place any required field type definitions here */
26 |
27 | }
28 | }
29 | },
30 | "_meta": {
31 | "provider": "SOF-ELK"
32 | }
33 | }
34 |
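When Elasticsearch applies a composable template like this, the entries in composed_of are merged in order (later components override earlier ones), and the index template's own template block takes the highest precedence. Loading mirrors the component-template sketch above, though note that this .sample file carries /* ... */ comments that must be stripped before it is valid JSON:

    # sketch: load an index template via the _index_template API
    # ("logtype" is an illustrative template name matching the sample pattern;
    #  assumes a comment-free copy of the sample file has been prepared)
    import json
    import requests

    with open("000_index-example.json") as f:  # hypothetical cleaned copy
        body = json.load(f)

    requests.put(
        "http://localhost:9200/_index_template/logtype", json=body
    ).raise_for_status()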
--------------------------------------------------------------------------------
/lib/elasticsearch_templates/index_templates/index-aws.json:
--------------------------------------------------------------------------------
1 | {
2 | "index_patterns": [
3 | "aws-*"
4 | ],
5 | "composed_of": [
6 | "settings-common",
7 | "mappings-corecommon",
8 | "mappings-string",
9 | "mappings-ip_address",
10 | "mappings-geohash",
11 | "mappings-final"
12 | ],
13 | "template": {
14 | "mappings": {
15 | "dynamic_templates": [
16 | {
17 | "request_parameters_fields": {
18 | "path_match": "request_parameters.*",
19 | "mapping": {
20 | "type": "keyword"
21 | }
22 | }
23 | }
24 | ],
25 | "properties": {
26 | "type": {
27 | "type": "keyword"
28 | },
29 | "version": {
30 | "type": "integer"
31 | }
32 | }
33 | }
34 | },
35 | "_meta": {
36 | "provider": "SOF-ELK"
37 | }
38 | }
39 |
--------------------------------------------------------------------------------
/lib/elasticsearch_templates/index_templates/index-azure.json:
--------------------------------------------------------------------------------
1 | {
2 | "index_patterns": [
3 | "azure-*"
4 | ],
5 | "composed_of": [
6 | "settings-common",
7 | "mappings-corecommon",
8 | "mappings-string",
9 | "mappings-ip_address",
10 | "mappings-geohash",
11 | "mappings-guid",
12 | "mappings-final"
13 | ],
14 | "template": {
15 | "mappings": {
16 | "properties": {
17 | "event_data": {
18 | "type": "object"
19 | },
20 | "type": {
21 | "type": "keyword"
22 | },
23 | "version": {
24 | "type": "integer"
25 | }
26 | }
27 | }
28 | },
29 | "_meta": {
30 | "provider": "SOF-ELK"
31 | }
32 | }
33 |
--------------------------------------------------------------------------------
/lib/elasticsearch_templates/index_templates/index-evtxlogs.json:
--------------------------------------------------------------------------------
1 | {
2 | "index_patterns": [
3 | "evtxlogs-*"
4 | ],
5 | "composed_of": [
6 | "settings-common",
7 | "mappings-corecommon",
8 | "mappings-string",
9 | "mappings-final"
10 | ],
11 | "template": {
12 | "settings": {
13 | "index": {
14 | "mapping": {
15 | "total_fields": {
16 | "limit": 5000
17 | }
18 | }
19 | }
20 | }
21 | },
22 | "_meta": {
23 | "provider": "SOF-ELK"
24 | }
25 | }
--------------------------------------------------------------------------------
/lib/elasticsearch_templates/index_templates/index-filesystem.json:
--------------------------------------------------------------------------------
1 | {
2 | "index_patterns": [
3 | "filesystem-*"
4 | ],
5 | "composed_of": [
6 | "settings-common",
7 | "mappings-corecommon",
8 | "mappings-string",
9 | "mappings-final"
10 | ],
11 | "template": {
12 | "settings": {
13 | "index": {
14 | "mapping": {
15 | "total_fields": {
16 | "limit": 5000
17 | }
18 | }
19 | }
20 | },
21 | "mappings": {
22 | "runtime": {
23 | "file.size_vis": {
24 | "type": "long",
25 | "script": {
26 | "source": "emit(doc['file.size'].value);"
27 | }
28 | }
29 | }
30 | }
31 | },
32 | "_meta": {
33 | "provider": "SOF-ELK"
34 | }
35 | }
36 |
--------------------------------------------------------------------------------
/lib/elasticsearch_templates/index_templates/index-gcp.json:
--------------------------------------------------------------------------------
1 | {
2 | "index_patterns": [
3 | "gcp-*"
4 | ],
5 | "composed_of": [
6 | "settings-common",
7 | "mappings-corecommon",
8 | "mappings-string",
9 | "mappings-ip_address",
10 | "mappings-geohash",
11 | "mappings-final"
12 | ],
13 | "template": {
14 | "mappings": {
15 | "properties": {
16 | "type": {
17 | "type": "keyword"
18 | },
19 | "version": {
20 | "type": "integer"
21 | }
22 | }
23 | }
24 | },
25 | "_meta": {
26 | "provider": "SOF-ELK"
27 | }
28 | }
29 |
--------------------------------------------------------------------------------
/lib/elasticsearch_templates/index_templates/index-gws.json:
--------------------------------------------------------------------------------
1 | {
2 | "index_patterns": [
3 | "gws-*"
4 | ],
5 | "composed_of": [
6 | "settings-common",
7 | "mappings-corecommon",
8 | "mappings-string",
9 | "mappings-ip_address",
10 | "mappings-geohash",
11 | "mappings-final"
12 | ],
13 | "template": {
14 | "mappings": {
15 | "properties": {
16 | "google_workspace":{
17 | "properties": {
18 | "admin": {
19 | "properties": {
20 | "event_parameters": {
21 | "properties": {
22 | "OLD_VALUE": {
23 | "type": "text"
24 | },
25 | "NEW_VALUE": {
26 | "type": "text"
27 | }
28 | }
29 | }
30 | }
31 | }
32 | }
33 | },
34 | "type": {
35 | "type": "keyword"
36 | },
37 | "version": {
38 | "type": "integer"
39 | }
40 | }
41 | }
42 | },
43 | "_meta": {
44 | "provider": "SOF-ELK"
45 | }
46 | }
47 |
--------------------------------------------------------------------------------
/lib/elasticsearch_templates/index_templates/index-httpdlog.json:
--------------------------------------------------------------------------------
1 | {
2 | "index_patterns": [
3 | "httpdlog-*"
4 | ],
5 | "composed_of": [
6 | "settings-common",
7 | "mappings-corecommon",
8 | "mappings-string",
9 | "mappings-ip_address",
10 | "mappings-geohash",
11 | "mappings-port",
12 | "mappings-zeekuid",
13 | "mappings-final"
14 | ],
15 | "template": {
16 | "mappings": {
17 | "properties": {
18 | "http": {
19 | "properties": {
20 | "request": {
21 | "properties": {
22 | "method": {
23 | "type": "keyword"
24 | }
25 | }
26 | },
27 | "response": {
28 | "properties": {
29 | "status_code": {
30 | "type": "short",
31 | "ignore_malformed": false,
32 | "coerce": true
33 | }
34 | }
35 | },
36 | "version": {
37 | "type": "keyword"
38 | }
39 | }
40 | },
41 | "client": {
42 | "properties": {
43 | "user": {
44 | "properties": {
45 | "name": {
46 | "type": "keyword"
47 | }
48 | }
49 | }
50 | }
51 | }
52 | }
53 | }
54 | },
55 | "_meta": {
56 | "provider": "SOF-ELK"
57 | }
58 | }
59 |
--------------------------------------------------------------------------------
/lib/elasticsearch_templates/index_templates/index-kubernetes.json:
--------------------------------------------------------------------------------
1 | {
2 | "index_patterns": [
3 | "kubernetes-*"
4 | ],
5 | "composed_of": [
6 | "settings-common",
7 | "mappings-corecommon",
8 | "mappings-string",
9 | "mappings-ip_address",
10 | "mappings-geohash",
11 | "mappings-port",
12 | "mappings-final"
13 | ],
14 | "template": {
15 | "mappings": {
16 | "properties": {
17 | "type": {
18 | "type": "keyword"
19 | },
20 | "version": {
21 | "type": "integer"
22 | }
23 | }
24 | }
25 | },
26 | "_meta": {
27 | "provider": "SOF-ELK"
28 | }
29 | }
30 |
--------------------------------------------------------------------------------
/lib/elasticsearch_templates/index_templates/index-lnkfiles.json:
--------------------------------------------------------------------------------
1 | {
2 | "index_patterns": [
3 | "lnkfiles-*"
4 | ],
5 | "composed_of": [
6 | "settings-common",
7 | "mappings-corecommon",
8 | "mappings-string",
9 | "mappings-final"
10 | ],
11 | "_meta": {
12 | "provider": "SOF-ELK"
13 | }
14 | }
15 |
--------------------------------------------------------------------------------
/lib/elasticsearch_templates/index_templates/index-logstash.json:
--------------------------------------------------------------------------------
1 | {
2 | "index_patterns": [
3 | "logstash-*"
4 | ],
5 | "composed_of": [
6 | "settings-common",
7 | "mappings-corecommon",
8 | "mappings-string",
9 | "mappings-ip_address",
10 | "mappings-geohash",
11 | "mappings-port",
12 | "mappings-zeekuid",
13 | "mappings-final"
14 | ],
15 | "template": {
16 | "mappings": {
17 | "properties": {
18 | "event": {
19 | "properties": {
20 | "auth_result": {
21 | "type": "keyword"
22 | }
23 | }
24 | },
25 | "ttl": {
26 | "type": "integer"
27 | },
28 | "key_id": {
29 | "type": "keyword"
30 | }
31 | }
32 | }
33 | },
34 | "_meta": {
35 | "provider": "SOF-ELK"
36 | }
37 | }
38 |
--------------------------------------------------------------------------------
/lib/elasticsearch_templates/index_templates/index-netflow.json:
--------------------------------------------------------------------------------
1 | {
2 | "index_patterns": [
3 | "netflow-*"
4 | ],
5 | "composed_of": [
6 | "settings-common",
7 | "mappings-corecommon",
8 | "mappings-string",
9 | "mappings-ip_address",
10 | "mappings-geohash",
11 | "mappings-port",
12 | "mappings-zeekuid",
13 | "mappings-final"
14 | ],
15 | "template": {
16 | "mappings": {
17 | "runtime": {
18 | "network.bytes": {
19 | "type": "long",
20 | "script": {
21 | "source": "emit(doc['source.bytes'].value + doc['destination.bytes'].value);"
22 | }
23 | },
24 | "network.bytes_vis": {
25 | "type": "long",
26 | "script": {
27 | "source": "emit(doc['source.bytes'].value + doc['destination.bytes'].value);"
28 | }
29 | },
30 | "network.packets": {
31 | "type": "long",
32 | "script": {
33 | "source": "emit(doc['source.packets'].value + doc['destination.packets'].value);"
34 | }
35 | },
36 | "network.packets_vis": {
37 | "type": "long",
38 | "script": {
39 | "source": "emit(doc['source.packets'].value + doc['destination.packets'].value);"
40 | }
41 | },
42 | "network.pcr": {
43 | "type": "double",
44 | "script": {
45 | "source": "if (doc.containsKey('source.bytes') && doc.containsKey('destination.bytes')) { long total_bytes = doc['source.bytes'].value + doc['destination.bytes'].value; if (total_bytes == 0) { emit(0); } else { emit( (float)(doc['source.bytes'].value - doc['destination.bytes'].value) / (float)total_bytes ); } }"
46 | }
47 | }
48 | },
49 | "properties": {
50 | "netflow": {
51 | "properties": {
52 | "exporter": {
53 | "properties": {
54 | "ip": {
55 | "type": "ip"
56 | }
57 | }
58 | },
59 | "flow_duration": {
60 | "type": "double"
61 | },
62 | "flow_encrypted": {
63 | "type": "boolean"
64 | }
65 | }
66 | }
67 | }
68 | }
69 | },
70 | "_meta": {
71 | "provider": "SOF-ELK"
72 | }
73 | }
74 |
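The network.pcr runtime field above computes the producer/consumer ratio, PCR = (source.bytes - destination.bytes) / (source.bytes + destination.bytes), which ranges from -1 (pure consumer) to +1 (pure producer). For example, source.bytes = 1000 and destination.bytes = 250 gives 750 / 1250 = 0.6. The same calculation in Python, for reference:

    # sketch: producer/consumer ratio as computed by the Painless script above
    def pcr(src_bytes: int, dst_bytes: int) -> float:
        total = src_bytes + dst_bytes
        return 0.0 if total == 0 else (src_bytes - dst_bytes) / total

    assert pcr(1000, 250) == 0.6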
--------------------------------------------------------------------------------
/lib/elasticsearch_templates/index_templates/index-plaso.json:
--------------------------------------------------------------------------------
1 | {
2 | "index_patterns": [
3 | "plaso-*"
4 | ],
5 | "composed_of": [
6 | "settings-common",
7 | "mappings-corecommon",
8 | "mappings-string",
9 | "mappings-final"
10 | ],
11 | "_meta": {
12 | "provider": "SOF-ELK"
13 | }
14 | }
15 |
--------------------------------------------------------------------------------
/lib/elasticsearch_templates/index_templates/index-zeek.json:
--------------------------------------------------------------------------------
1 | {
2 | "index_patterns": [
3 | "zeek-*"
4 | ],
5 | "composed_of": [
6 | "settings-common",
7 | "mappings-corecommon",
8 | "mappings-string",
9 | "mappings-ip_address",
10 | "mappings-geohash",
11 | "mappings-port",
12 | "mappings-zeekuid",
13 | "mappings-final"
14 | ],
15 | "template": {
16 | "mappings": {
17 | "properties": {
18 | "md5": {
19 | "type": "keyword"
20 | },
21 | "sha1": {
22 | "type": "keyword"
23 | },
24 | "sha256": {
25 | "type": "keyword"
26 | }
27 | }
28 | }
29 | },
30 | "_meta": {
31 | "provider": "SOF-ELK"
32 | }
33 | }
34 |
--------------------------------------------------------------------------------
/lib/filebeat_inputs/aws.yml:
--------------------------------------------------------------------------------
1 | # SOF-ELK® Configuration File
2 | # (C)2024 Lewes Technology Consulting, LLC
3 | #
4 | # This is a sample Filebeat prospector file that looks into the /logstash/aws/ directory for files to load.
5 | # These are sent to localhost:5044, where Logstash is listening.
6 |
7 | - type: filestream
8 | id: aws-json
9 | paths:
10 | - /logstash/aws/**/*.json
11 | close.on_state_change.inactive: 5m
12 | clean_removed: true
13 | processors:
14 | - add_labels:
15 | labels:
16 | type: aws
17 | tags: [ 'json' ]
18 |
--------------------------------------------------------------------------------
/lib/filebeat_inputs/azure.yml:
--------------------------------------------------------------------------------
1 | # SOF-ELK® Configuration File
2 | # (C)2024 Lewes Technology Consulting, LLC
3 | #
4 | # This is a sample Filebeat prospector file that looks into the /logstash/azure/ directory for files to load.
5 | # These are sent to localhost:5044, where Logstash is listening.
6 |
7 | - type: filestream
8 | id: azure-csv
9 | paths:
10 | - /logstash/azure/**/*.csv
11 | close.on_state_change.inactive: 5m
12 | clean_removed: true
13 | processors:
14 | - add_labels:
15 | labels:
16 | type: azure
17 | tags: [ 'csv' ]
18 |
19 | - type: filestream
20 | id: azure-json
21 | paths:
22 | - /logstash/azure/**/*.json
23 | close.on_state_change.inactive: 5m
24 | clean_removed: true
25 | processors:
26 | - add_labels:
27 | labels:
28 | type: azure
29 | tags: [ 'json' ]
30 |
31 | - type: filestream
32 | id: azure-xml
33 | paths:
34 | - /logstash/azure/**/*.xml
35 | close.on_state_change.inactive: 5m
36 | clean_removed: true
37 | parsers:
38 | - multiline:
39 | type: pattern
40 | pattern: "^\"?
8 |
9 | # Keyboard country codes are typically two letters (IT, DE, JP, etc.)
10 | # The script will error out if an incorrect code is provided.
11 |
12 | # First, let's verify they are running as root
13 | if [ "$EUID" -ne 0 ]
14 | then echo "Please run as root"
15 | exit
16 | fi
17 |
18 | # Temporary keymap change
19 | loadkeys "$1" > /dev/null
20 | echo "Changed keymap for this session to ${1}"
21 |
22 | # Permanent keymap change
23 | sed -i "s/^keymap=.*/keymap=${1}/" /etc/default/grub
24 | grub2-mkconfig -o /boot/grub2/grub.cfg > /dev/null 2>/dev/null
25 | echo "Changed keymap persistently to ${1}"
--------------------------------------------------------------------------------
/supporting-scripts/cronjobs/git-remote-update.cron:
--------------------------------------------------------------------------------
1 | SHELL=/bin/bash
2 | PATH=/sbin:/bin:/usr/sbin:/usr/bin
3 | MAILTO=root
4 | HOME=/
5 |
6 | # For details see man 4 crontabs
7 |
8 | # Example of job definition:
9 | # .---------------- minute (0 - 59)
10 | # | .------------- hour (0 - 23)
11 | # | | .---------- day of month (1 - 31)
12 | # | | | .------- month (1 - 12) OR jan,feb,mar,apr ...
13 | # | | | | .---- day of week (0 - 6) (Sunday=0 or 7) OR sun,mon,tue,wed,thu,fri,sat
14 | # | | | | |
15 | # * * * * * user-name command to be executed
16 | 10 15 * * * root /usr/local/sof-elk/supporting-scripts/git-remote-update.sh
17 | 0 0 * * * root echo '/usr/local/sof-elk/supporting-scripts/vm-update-check.sh > /dev/null' | at now + $(( RANDOM \% 1440 )) minutes 2> /dev/null
18 |
--------------------------------------------------------------------------------
/supporting-scripts/dhcp-renew-login-screen.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | # SOF-ELK® Supporting script
3 | # (C)2024 Lewes Technology Consulting, LLC
4 | #
5 | # This runs scripts after a dhcp lease is secured or renewed
6 |
7 | /usr/sbin/agetty --reload
8 |
--------------------------------------------------------------------------------
/supporting-scripts/elk_user_login.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | # SOF-ELK® Supporting script
3 | # (C)2024 Lewes Technology Consulting, LLC
4 | #
5 | # This script is read at login time (local and remote) for the elk_user user account
6 | # It is designed to display useful information for the user/student
7 |
8 | [ -r /etc/lsb-release ] && . /etc/lsb-release
9 |
10 | if [ -z "$DISTRIB_DESCRIPTION" ] && [ -x /usr/bin/lsb_release ]; then
11 | # Fall back to using the very slow lsb_release utility
12 | DISTRIB_DESCRIPTION=$(lsb_release -s -d)
13 | fi
14 |
15 | echo
16 | echo "Welcome to the SOF-ELK® VM Distribution"
17 | printf "Built on %s (%s %s %s)\n" "$DISTRIB_DESCRIPTION" "$(uname -o)" "$(uname -r)" "$(uname -m)"
18 | echo "--------------------------------------"
19 | echo "Here are some useful commands:"
20 | echo " sof-elk_clear.py"
21 | echo " Forcibly removes all records from the Elasticsearch index."
22 | echo " Use '-h' for usage."
23 | echo " load_all_dashboards.sh"
24 | echo " Resets all Kibana dashboards to the versions on disk in the"
25 | echo " /usr/local/sof-elk/ directory."
26 | echo
27 |
28 | /usr/local/sof-elk/supporting-scripts/git-check-pull-needed.sh
29 |
30 | PATH=$PATH:/usr/local/sof-elk/supporting-scripts
31 | export PATH
--------------------------------------------------------------------------------
/supporting-scripts/es_ls_commands.txt:
--------------------------------------------------------------------------------
1 | systemctl stop logstash.service ; rm -f /var/log/logstash/* /var/db/logstash/sincedb ; curl -XDELETE 'http://localhost:9200/logstash-*?pretty' ; curl -XDELETE 'http://localhost:9200/netflow-*?pretty' ; curl -XDELETE 'http://localhost:9200/httpdlog-*?pretty' ; curl -XDELETE 'http://localhost:9200/_template/*?pretty' ; systemctl start logstash.service
2 |
3 |
4 |
5 |
6 | curl -s -XGET 'http://localhost:9200/_cat/indices/'|sort
7 |
8 |
9 | for type in search dashboard query; do
10 | curl -s -XDELETE http://localhost:9200/.kibana/${type}/_query?q=*\&pretty
11 | done
12 |
13 | curl -XDELETE http://localhost:9200/.kibana/search/_query?q=*\&pretty
14 | curl -XDELETE http://localhost:9200/.kibana/dashboard/_query?q=*\&pretty
15 | curl -XDELETE http://localhost:9200/.kibana/visualization/_query?q=*\&pretty
16 |
17 | NOTE: after deleting documents, free space with a forcemerge:
18 | curl -XPOST 'http://localhost:9200/_forcemerge?only_expunge_deletes=true'
19 |
20 |
21 |
22 |
23 | vim command to convert arrays to hashes:
24 | %s/convert => \[ "\(.\+\)", "\(.\+\)" \]/convert => { "\1" => "\2" }/
25 |
26 | Raw results in JSON (including event count per file):
27 | INDEX='logstash-*'
28 | curl -s -XGET -H "Content-Type: application/json" "http://127.0.0.1:9200/${INDEX}/_search" -d '{ "size": "0", "aggs": { "unique_filesource": { "terms": { "field": "log.file.path.keyword"}}}}'
29 |
30 | Just the filenames:
31 | INDEX='logstash-*'
32 | curl -s -XGET -H "Content-Type: application/json" "http://127.0.0.1:9200/${INDEX}/_search" -d '{ "size": "0", "aggs": { "unique_filesource": { "terms": { "field": "log.file.path.keyword"}}}}'|jq -c '.aggregations.unique_filesource.buckets[].key'
33 |
--------------------------------------------------------------------------------
/supporting-scripts/es_plugin_update.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | # SOF-ELK® Supporting script
3 | # (C)2024 Lewes Technology Consulting, LLC
4 | #
5 | # This script is run after an elasticsearch update
6 |
7 | # core concepts/syntax from: https://github.com/jjfalling/update-elasticsearch-plugins
8 |
9 | set -e
10 |
11 | elasticsearchPluginDir='/usr/share/elasticsearch/plugins'
12 | elasticsearchPlugin='/usr/share/elasticsearch/bin/elasticsearch-plugin'
13 | #elasticsearchPluginPage='https://www.elastic.co/guide/en/elasticsearch/reference/current/modules-plugins.html'
14 |
15 | declare -A customPlugins
16 | customPlugins=( ["head"]="mobz/elasticsearch-head" )
17 |
18 | # Make sure only root can run our script
19 | if [ "$(id -u)" != "0" ]; then
20 | printf "This script needs to run as root to run the updates!\n" 1>&2
21 | exit 1
22 | fi
23 |
24 | #ensure the elasticsearch plugin command exists
25 | command -v "${elasticsearchPlugin}" >/dev/null 2>&1 || { printf "\nERROR: elasticsearch plugin command %s does not exist\n\n" ${elasticsearchPlugin}; exit 1; }
26 |
27 | #ensure the es plugin dir exists
28 | if [ ! -d "${elasticsearchPluginDir}" ]; then
29 | printf "\nERROR: elasticsearchPluginDir %s does not exist\n" ${elasticsearchPluginDir}
30 | exit 1
31 | fi
32 |
33 | #look at each installed plugin, then remove and reinstall it to pick up the latest version
34 | for currentInstalledPlugin in $( "${elasticsearchPlugin}" list ); do
35 | if [[ "${currentInstalledPlugin}" == "preupgrade"* ]]; then
36 | continue
37 | fi
38 |
39 | if [[ ${customPlugins["$currentInstalledPlugin"]} ]]; then
40 | "${elasticsearchPlugin}" remove "${currentInstalledPlugin}" > /dev/null
41 | "${elasticsearchPlugin}" install "${customPlugins["$currentInstalledPlugin"]}" > /dev/null
42 |
43 | else
44 | "${elasticsearchPlugin}" remove "${currentInstalledPlugin}" > /dev/null
45 | "${elasticsearchPlugin}" install "${currentInstalledPlugin}" > /dev/null
46 |
47 | fi
48 | done
49 |
--------------------------------------------------------------------------------
/supporting-scripts/fw_modify.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | # SOF-ELK® Supporting script
3 | # (C)2024 Lewes Technology Consulting, LLC
4 | #
5 | # This script opens or closes a firewall port/protocol pair using firewall-cmd
6 |
7 | # bash functionality to get command-line parameters
8 | # source: http://stackoverflow.com/questions/192249/how-do-i-parse-command-line-arguments-in-bash
9 | # Use -gt 1 to consume two arguments per pass in the loop (e.g. each
10 | # argument has a corresponding value to go with it).
11 | # Use -gt 0 to consume one or more arguments per pass in the loop (e.g.
12 | # some arguments don't have a corresponding value to go with it such
13 | # as in the --default example).
14 | while [[ $# -gt 1 ]]; do
15 | key="$1"
16 |
17 | case $key in
18 | -a|--action)
19 | ACTION="$2"
20 | shift # past argument
21 | ;;
22 | -p|--port)
23 | PORT="$2"
24 | shift # past argument
25 | ;;
26 | -r|--protocol)
27 | PROTOCOL="$2"
28 | shift # past argument
29 | ;;
30 | *)
31 | # unknown option
32 | ;;
33 | esac
34 | shift # past argument or value
35 | done
36 |
37 | if [[ -z "${ACTION}" || ( "${ACTION}" != "open" && "${ACTION}" != "close" ) ]]; then
38 | echo
39 | echo "Please specify the firewall action to take with the '-a' option. Options are 'open' or 'closed'."
40 | echo
41 | echo "Example: $0 -a open -p 5514 -r tcp"
42 | echo
43 | exit 2
44 | fi
45 |
46 | if [[ -z "${PORT}" ]]; then
47 | echo
48 | echo "Please specify the port to act on with the '-p' option."
49 | echo
50 | echo "Example: $0 -a open -p 5514 -r tcp"
51 | echo
52 | exit 3
53 | fi
54 |
55 | re='^[0-9]+$'
56 | if ! [[ "${PORT}" =~ ${re} ]] ; then
57 | echo
58 | echo "Error - ${PORT} is not a number. Exiting."
59 | echo
60 | exit 4
61 | fi
62 |
63 | if [[ -z "${PROTOCOL}" || ( "${PROTOCOL}" != "tcp" && "${PROTOCOL}" != "udp" ) ]]; then
64 | echo
65 | echo "Please specify the protocol to act on with the '-r' option. Options are 'tcp' or 'udp'."
66 | echo
67 | echo "Example: $0 -a open -p 5514 -r tcp"
68 | echo
69 | exit 2
70 | fi
71 |
72 | if [[ "${ACTION}" == "open" ]]; then
73 |     firewall-cmd --zone=public --add-port="${PORT}"/"${PROTOCOL}" --permanent
74 | else
75 |     firewall-cmd --zone=public --remove-port="${PORT}"/"${PROTOCOL}" --permanent
76 | fi
77 | firewall-cmd --reload
78 |
--------------------------------------------------------------------------------
/supporting-scripts/geoip_update_logstash.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | # SOF-ELK® Supporting script
3 | # (C)2021 Lewes Technology Consulting, LLC
4 | #
5 | # This script will update GeoIP databases using an existing GeoIP.conf file
6 | # If any of the databases have changed, it will restart the Logstash service
7 |
8 | GEOIP_CONFIG=/etc/GeoIP.conf
9 |
10 | if [ ! -f "${GEOIP_CONFIG}" ]; then
11 | echo "The GeoIP configuration file has not been created - exiting."
12 | echo
13 | echo "No updates can be downloaded without this file."
14 | echo "Run 'geoip_bootstrap.sh' as root to configure this system for"
15 | echo "automatic updates."
16 | echo
17 | echo "You will need an Account ID and License Key from a free MaxMind"
18 | echo "account to enable them."
19 | exit
20 | fi
21 |
22 | # identify the database directory or use the standard default if not set
23 | DBDIR=$( grep ^DatabaseDirectory "${GEOIP_CONFIG}" | awk '{$1=""; print}' | sed -e 's/^[[:space:]]*//' )
24 | if [ -z "${DBDIR}" ]; then
25 | DBDIR=/usr/local/share/GeoIP/
26 | fi
27 |
28 | # identify the configured databases
29 | DATABASES=$( grep ^EditionIDs "${GEOIP_CONFIG}" | awk '{$1=""; print}' | sed -e 's/^[[:space:]]*//' -e 's/[[:space:]]*$//' )
30 |
31 | # if there are no databases, there's no need to update anything!
32 | if [ -n "${DATABASES}" ]; then
33 | # default/empty variables
34 | UPDATES=0
35 | declare -A md5=()
36 | declare -A md5_check=()
37 |
38 | for DATABASE in ${DATABASES}; do
39 | md5["${DATABASE}"]=$( md5sum -b "${DBDIR}"/"${DATABASE}".mmdb | awk '{print $1}' )
40 | done
41 |
42 | # run the updater
43 | geoipupdate -f "${GEOIP_CONFIG}"
44 |
45 | # compare md5s of what was there before the update to what is there now
46 | for DATABASE in ${DATABASES}; do
47 | md5_check[${DATABASE}]=$( md5sum -b "${DBDIR}"/"${DATABASE}".mmdb | awk '{print $1}' )
48 | if [ "${md5["${DATABASE}"]}" != "${md5_check["${DATABASE}"]}" ]; then
49 | UPDATES=1
50 | fi
51 | done
52 |
53 | # if there were any updates, run the expensive Logstash restart
54 | if [ ${UPDATES} == 1 ]; then
55 | systemctl restart logstash.service
56 | fi
57 | fi
58 |
--------------------------------------------------------------------------------
/supporting-scripts/git-check-pull-needed.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | # SOF-ELK® Supporting script
3 | # (C)2017 Lewes Technology Consulting, LLC
4 | #
5 | # This script displays a message to STDOUT if the origin has a newer version than the local checkout
6 |
7 | vm_update_status_file=/var/run/sof-elk_vm_update
8 |
9 | cd /usr/local/sof-elk/
10 |
11 | UPSTREAM=${1:-'@{u}'}
12 | LOCAL=$(git rev-parse @{0})
13 | REMOTE=$(git rev-parse "$UPSTREAM")
14 | BASE=$(git merge-base @{0} "$UPSTREAM")
15 |
16 | if [ $LOCAL = $REMOTE ]; then
17 | # up to date, nothing to do
18 | true
19 |
20 | elif [ $LOCAL = $BASE ]; then
21 | echo "Upstream Updates Available!!!!"
22 | echo "------------------------------"
23 | echo
24 | echo "There are upstream updates to the SOF-ELK® configuration files available"
25 | echo "in the Github repository. These are not required, but if desired,"
26 | echo "run the following command to retrieve and activate them:"
27 | echo
28 | echo "sudo sof-elk_update.sh"
29 | echo
30 |
31 | elif [ $REMOTE = $BASE ]; then
32 | # this should never happen - local copies won't push to origin
33 | echo "ERROR: You have local commits that are past the Github-based origin."
34 | echo " Automatic updates not possible."
35 |
36 | else
37 | # there should be no other case - this means some weird error occurred in gathering the SHA hashes
38 | echo "ERROR: Something very unexpected occurred when determining if there are any"
39 | echo " upstream updates. Ensure you have internet connectivity and please"
40 | echo " try again later."
41 | fi
42 |
43 | if [ -f ${vm_update_status_file} ]; then
44 | echo "A new version of the SOF-ELK VM is available!!!"
45 | echo "-----------------------------------------------"
46 | echo
47 | echo "There is a new VM version available for download. Please see the release"
48 | echo "information at https://for572.com/sof-elk-readme"
49 | echo
50 | fi
51 |
--------------------------------------------------------------------------------
/supporting-scripts/git-remote-update.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | # SOF-ELK® Supporting script
3 | # (C)2017 Lewes Technology Consulting, LLC
4 | #
5 | # This script is used to update the origin for the SOF-ELK® repository files
6 |
7 | RUNNOW=0
8 |
9 | # parse any command line arguments
10 | if [ $# -gt 0 ]; then
11 | while true; do
12 | if [ $1 ]; then
13 | if [ $1 == '-now' ]; then
14 | RUNNOW=1
15 | fi
16 | shift
17 | else
18 | break
19 | fi
20 | done
21 | fi
22 |
23 | if [[ $EUID -ne 0 ]]; then
24 | echo "This script must be run as root. Exiting."
25 | exit 1
26 | fi
27 |
28 | if [ $RUNNOW -eq 0 ]; then
29 | # wait up to 30 minutes to start, so all these VMs don't hit the server at the same exact time
30 | randomNumber=$RANDOM
31 | let "randomNumber %= 1800"
32 | sleep ${randomNumber}
33 | fi
34 |
35 | cd /usr/local/sof-elk/
36 | git remote update > /dev/null 2>&1
37 |
--------------------------------------------------------------------------------
/supporting-scripts/hex_to_integer.rb:
--------------------------------------------------------------------------------
1 | # This script will convert a string with a hex representation of a number to
2 | # its integer value, overwriting the source
3 |
4 | # the value of `params` is the value of the hash passed to `script_params`
5 | # in the logstash configuration
6 | def register(params)
7 | @source_field = params["source_field"]
8 | end
9 |
10 | # the filter method receives an event and must return a list of events.
11 | # Dropping an event means not including it in the return array,
12 | # while creating new ones only requires you to add a new instance of
13 | # LogStash::Event to the returned array
14 | def filter(event)
15 | hex_string = event.get(@source_field)
16 |
17 | event.set(@source_field, hex_string.hex)
18 |
19 | return [event]
20 | end
21 |
--------------------------------------------------------------------------------
/supporting-scripts/kick-aws-logs.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | # SOF-ELK® Supporting script
3 | # (C)2021 Lewes Technology Consulting, LLC
4 | #
5 | # This script will add a newline to any *.json file under /logstash/aws/ if it doesn't already have one.
6 | # This is utterly absurd, but AWS CloudTrail logs place all Records on one line within a JSON array, with no trailing newline character.
7 | # That treachery results in filebeat's reasonable line-based logic never seeing a file as "ready to read".
8 |
9 | find /logstash/aws/ -type f -name '*.json' -print0 | xargs -0 -L1 bash -c 'test "$(tail -c 1 "$0")" && echo >> "$0"'
10 |
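The tail -c 1 test above reads the last byte of each file; because command substitution strips a trailing newline, the test only succeeds (and a newline is appended) when the file does not already end with one. The same logic in Python, as an illustrative equivalent:

    # sketch: append a newline to any /logstash/aws/ JSON file missing one
    from pathlib import Path

    for p in Path("/logstash/aws").rglob("*.json"):
        data = p.read_bytes()
        if data and not data.endswith(b"\n"):
            with p.open("ab") as f:
                f.write(b"\n")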
--------------------------------------------------------------------------------
/supporting-scripts/ls_plugin_update.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | # SOF-ELK® Supporting script
3 | # (C)2024 Lewes Technology Consulting, LLC
4 | #
5 | # This script is run after logstash installs or updates
6 |
7 | # core concepts/syntax from: https://github.com/jjfalling/update-elasticsearch-plugins
8 | set -e
9 |
10 | PLUGINBIN=/usr/share/logstash/bin/logstash-plugin
11 |
12 | lsplugins="logstash-input-relp logstash-input-google_pubsub logstash-filter-tld logstash-filter-json_encode logstash-filter-cidr"
13 |
14 | for lsplugin in ${lsplugins}; do
15 | $PLUGINBIN install ${lsplugin}
16 | done
17 |
--------------------------------------------------------------------------------
/supporting-scripts/sof-elk_update.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | # SOF-ELK® Supporting script
3 | # (C)2024 Lewes Technology Consulting, LLC
4 | #
5 | # This script is used to update the repository from its git origin
6 | # It will not overwrite any local changes unless -f (force) is specified
7 |
8 | FORCE=0
9 |
10 | # bash function to echo to STDERR instead of STDOUT
11 | # source: https://stackoverflow.com/a/2990533/1400064
12 | echoerr() {
13 | echo "$@" 1>&2;
14 | }
15 |
16 | if [[ $EUID -ne 0 ]]; then
17 | echo "This script must be run as root. Exiting."
18 | exit 1
19 | fi
20 |
21 | while getopts ":f" opt; do
22 | case "${opt}" in
23 | f) FORCE=1 ;;
24 | ?)
25 | echoerr "ERROR: Invalid option: -${OPTARG}."
26 | exit 2
27 | ;;
28 | esac
29 | done
30 |
31 | cd /usr/local/sof-elk/ || exit 3
32 | if [[ $( git status --porcelain ) && $FORCE -eq 0 ]]; then
33 | echoerr "ERROR: You have local changes to this repository - will not overwrite without '-f' to force."
34 | echoerr " Run 'git status' from the /usr/local/sof-elk/ directory to identify the local changes."
35 | echoerr " Note that using '-f' will delete any modifications that have been made in this directory."
36 | exit 2
37 | fi
38 |
39 | /usr/local/sof-elk/supporting-scripts/git-remote-update.sh -now
40 | # This method is adapted from: https://stackoverflow.com/a/3278427
41 | LOCAL=$(git rev-parse "@{0}")
42 | REMOTE=$(git rev-parse "@{u}")
43 | BASE=$(git merge-base "@{0}" "@{u}")
44 |
45 | if [[ "${LOCAL}" == "${REMOTE}" ]]; then
46 | echo "Up-to-date"
47 |
48 | elif [[ "${LOCAL}" == "${BASE}" ]]; then
49 | # Need to pull
50 | git reset --hard > /dev/null
51 | git clean -fdx > /dev/null
52 | git pull origin
53 |
54 | /usr/local/sof-elk/supporting-scripts/git-remote-update.sh -now
55 | for lspid in $( pgrep -u logstash java ); do
56 | kill -s HUP "${lspid}"
57 | done
58 |
59 | elif [[ "${REMOTE}" == "${BASE}" ]]; then
60 | echo "Need to push - this should never happen"
61 |
62 | else
63 | echo "Diverged - this should never happen"
64 | fi
65 |
--------------------------------------------------------------------------------
/supporting-scripts/split_kv_multi_to_fields.rb:
--------------------------------------------------------------------------------
1 | # SOF-ELK® Supporting script
2 | # (C)2021 Lewes Technology Consulting, LLC
3 | #
4 | # This script takes an array of "{ name: x, value1: y, value2: z }" elements and creates "x: { y, z }" fields
5 | # for example:
6 | # - source: [{"Name":"Identity","Value1":"jvandyne","Value2":"admin"}]
7 | # - result: [{"Identity":{"Value1":"jvandyne","Value2":"admin"}]
8 |
9 | # the value of `params` is the value of the hash passed to `script_params`
10 | # in the logstash configuration
11 | def register(params)
12 | @source_field = params["source_field"]
13 | @destination_field = params["destination_field"]
14 | @key_field = params["key_field"]
15 | end
16 |
17 | # the filter method receives an event and must return a list of events.
18 | # Dropping an event means not including it in the return array,
19 | # while creating new ones only requires you to add a new instance of
20 | # LogStash::Event to the returned array
21 | def filter(event)
22 | # if source field is not present
23 | if event.get(@source_field).nil?
24 | event.tag("#{@source_field}_not_found")
25 | return [event]
26 | end
27 | source_data = event.get(@source_field)
28 |
29 | # create empty hash to hold new result
30 | output = Hash.new()
31 |
32 | for item in source_data
33 | new_value = Marshal.load(Marshal.dump(item))
34 | key = new_value.delete(@key_field)
35 |
36 | output[key] = new_value
37 | end
38 |
39 | event.set(@destination_field, output)
40 |
41 | return [event]
42 | end
--------------------------------------------------------------------------------
/supporting-scripts/split_kv_to_fields.rb:
--------------------------------------------------------------------------------
1 | # SOF-ELK® Supporting script
2 | # (C)2022 Lewes Technology Consulting, LLC
3 | #
4 | # This script takes an array of "name: x, value: y" pairs and creates "x: y" fields
5 | # for example:
6 | # - source: [{"Name":"Identity","Value":"jvandyne"}]
7 | # - result: [{"Identity":"jvandyne"}]
8 |
9 | # the value of `params` is the value of the hash passed to `script_params`
10 | # in the logstash configuration
11 | def register(params)
12 | @source_field = params["source_field"]
13 | @destination_field = params["destination_field"]
14 | @key_field = params["key_field"]
15 | @val_field = params["val_field"]
16 | end
17 |
18 | # the filter method receives an event and must return a list of events.
19 | # Dropping an event means not including it in the return array,
20 | # while creating new ones only requires you to add a new instance of
21 | # LogStash::Event to the returned array
22 | def filter(event)
23 | # if source field is not present
24 | if event.get(@source_field).nil?
25 | event.tag("#{@source_field}_not_found")
26 | return [event]
27 | end
28 | source_data = event.get(@source_field)
29 |
30 | # create empty hash to hold new result
31 | output = Hash.new()
32 |
33 | if source_data.is_a?(Array)
34 | for item in source_data
35 | if item.key?(@val_field) && !(item[@val_field] == "")
36 | output[item[@key_field]] = item[@val_field]
37 | end
38 | end
39 |
40 | elsif source_data.is_a?(Hash)
41 | item = source_data
42 | if item.key?(@val_field) && !(item[@val_field] == "")
43 | output[item[@key_field]] = item[@val_field]
44 | end
45 |
46 | # PJH: This should probably have a final "else" stanza to raise an exception
47 | # if the source is not a hash or an array of hashes
48 | end
49 |
50 | event.set(@destination_field, output)
51 |
52 | return [event]
53 | end
54 |
--------------------------------------------------------------------------------
/supporting-scripts/vm-update-check.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | # SOF-ELK® Supporting script
3 | # (C)2024 Lewes Technology Consulting, LLC
4 | #
5 | # This script creates a file if a new version of the SOF-ELK VM is available
6 | # This is ONLY run for public/community versions
7 |
8 | VM_UPDATE_STATUS_FILE="/var/run/sof-elk_vm_update"
9 |
10 | cd /usr/local/sof-elk/ || exit 1
11 |
12 | current_branch=$( git branch | grep ^\* | awk '{print $2}' )
13 |
14 | if ! echo "${current_branch}" | grep -q "^public\/v[0-9]\{8\}$" ; then
15 | # not on a public/community edition branch
16 | exit
17 | fi
18 |
19 | if [ -f "${VM_UPDATE_STATUS_FILE}" ]; then
20 | # already checked since boot, no need to do it again
21 | exit
22 | fi
23 |
24 | current_release=$( echo "${current_branch}" | sed -e 's/^public\/v\([0-9]\{8\}\).*/\1/' )
25 | latest_release=$( curl -s --head --referer "${current_release}" https://for572.com/sof-elk-versioncheck | grep "^Location: " | sed -e 's/^Location: .*v\([0-9]\{8\}\).*/\1/' )
26 |
27 | if [[ ${current_release} < ${latest_release} ]]; then
28 | # there is a new public version available
29 | touch "${VM_UPDATE_STATUS_FILE}"
30 | fi
31 |
--------------------------------------------------------------------------------
/supporting-scripts/wait_for_es.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | # SOF-ELK® Supporting script
3 | # (C)2024 Lewes Technology Consulting, LLC
4 | #
5 | # This script will wait for elasticsearch to be available, preventing silly kibana errors in the browser
6 |
7 | eshost=127.0.0.1
8 | esport=9200
9 |
10 | while ! curl --output /dev/null --silent --head --fail http://"${eshost}":"${esport}"; do
11 | sleep 1 && echo -n '.'
12 | done
13 |
--------------------------------------------------------------------------------