├── .DS_Store ├── 0_build_images.sh ├── 1_push_images_to_test.sh ├── 2_push_images_to_stable.sh ├── README.md ├── so-acng ├── Dockerfile ├── files │ └── so-entrypoint.sh └── repos │ ├── acng.conf │ ├── acng.conf.au │ ├── acng.conf.uk │ ├── acng.conf.us │ ├── apache_mirrors │ ├── backends_apache.au │ ├── backends_apache.uk │ ├── backends_apache.us │ ├── backends_centos.au │ ├── backends_centos.uk │ ├── backends_centos.us │ ├── backends_debian │ ├── backends_epel.au │ ├── backends_epel.uk │ ├── backends_epel.us │ ├── backends_fedora.au │ ├── backends_fedora.uk │ ├── backends_fedora.us │ ├── backends_ubuntu.au │ ├── backends_ubuntu.uk │ ├── backends_ubuntu.us │ ├── centos_mirrors │ ├── epel_mirrors │ ├── fedora_mirrors │ └── mirrors_alpine ├── so-core ├── Dockerfile ├── README.md ├── conf │ ├── daemon.json │ ├── elasticdownload.conf │ ├── enabledmodules.conf │ ├── securityonion.conf │ ├── securityonion.conf.orig │ └── sensortab.orig ├── files │ ├── .DS_Store │ ├── html │ │ ├── alpha_logo.jpg │ │ ├── favicon-16x16.png │ │ ├── favicon-32x32.png │ │ ├── favicon.ico │ │ ├── index.html │ │ ├── securityonion_logo.jpg │ │ └── so_smaller.png │ └── startso.sh └── get_cyberchef ├── so-curator ├── Dockerfile └── files │ └── curator.repo ├── so-domainstats └── Dockerfile ├── so-elastalert ├── Dockerfile ├── README.md └── files │ ├── bro_conn.yaml │ ├── create_index.py │ ├── elastalert.json │ ├── elastalert_config.conf │ ├── elastalert_supervisord.conf │ ├── ids.yaml │ ├── past_elastalert.json │ └── start-elastalert.sh ├── so-elasticsearch ├── Dockerfile ├── bin │ └── es-docker ├── elasticsearch.yml └── log4j2.properties ├── so-filebeat ├── Dockerfile └── files │ └── docker-entrypoint ├── so-fleet-launcher ├── Dockerfile └── launcher │ ├── generate-packages.sh │ └── src │ ├── config │ ├── launcher-msi.flags │ ├── launcher.flags │ └── secret │ ├── packages │ ├── launcher.deb │ ├── launcher.msi │ ├── launcher.pkg │ └── launcher.rpm │ └── tools │ ├── mkbom │ └── xar ├── so-fleet ├── Dockerfile └── startfleet.sh ├── so-freqserver └── Dockerfile ├── so-grafana ├── Dockerfile └── run.sh ├── so-idstools ├── Dockerfile ├── README.md └── files │ └── so-idstools.sh ├── so-influxdb ├── Dockerfile ├── entrypoint.sh ├── influxdb.conf └── init-influxdb.sh ├── so-kibana ├── Dockerfile ├── bin │ ├── kibana-docker │ └── so-kibana.sh ├── dashboards │ ├── 01600fb0-34e4-11e7-9669-7f1d3242b798.json │ ├── 022713e0-3ab0-11e7-a83b-b1b4da7d15f4.json │ ├── 0de7a390-3644-11e7-a6f7-4f44d7bf1c33.json │ ├── 130017f0-46ce-11e7-946f-1bfb1be7c36b.json │ ├── 1d98d620-7dce-11e7-846a-150cdcaf3374.json │ ├── 230134a0-34c6-11e7-8360-0b86c90983fd.json │ ├── 27f3b380-3583-11e7-a588-05992195c551.json │ ├── 2d315d80-3582-11e7-98ef-19df58fe538b.json │ ├── 2fdf5bf0-3581-11e7-98ef-19df58fe538b.json │ ├── 3a457d70-3583-11e7-a588-05992195c551.json │ ├── 4323af90-76e5-11e7-ab14-e1a4c1bc11e0.json │ ├── 46582d50-3af2-11e7-a83b-b1b4da7d15f4.json │ ├── 468022c0-3583-11e7-a588-05992195c551.json │ ├── 4e108070-46c7-11e7-946f-1bfb1be7c36b.json │ ├── 4f6f3440-6d62-11e7-8ddb-e71eb260f4a3.json │ ├── 50173bd0-3582-11e7-98ef-19df58fe538b.json │ ├── 56a34ce0-3583-11e7-a588-05992195c551.json │ ├── 61d43810-6d62-11e7-8ddb-e71eb260f4a3.json │ ├── 68563ed0-34bf-11e7-9b32-bb903919ead9.json │ ├── 68f738e0-46ca-11e7-946f-1bfb1be7c36b.json │ ├── 6b0d4870-3583-11e7-a588-05992195c551.json │ ├── 6d189680-6d62-11e7-8ddb-e71eb260f4a3.json │ ├── 70c005f0-3583-11e7-a588-05992195c551.json │ ├── 7929f430-3583-11e7-a588-05992195c551.json │ ├── 
7f27a830-34e5-11e7-9669-7f1d3242b798.json │ ├── 85348270-357b-11e7-ac34-8965f6420c51.json │ ├── 8a10e380-3583-11e7-a588-05992195c551.json │ ├── 90b246c0-3583-11e7-a588-05992195c551.json │ ├── 94b52620-342a-11e7-9d52-4f090484f59e.json │ ├── 97f8c3a0-3583-11e7-a588-05992195c551.json │ ├── 9d0e2da0-14e1-11e9-82f7-0da02d93a48b.json │ ├── 9ef20ae0-3583-11e7-a588-05992195c551.json │ ├── AV6-POJSDwoBUzALqKAg.json │ ├── AWAi5k4jAvKNGEbUWFis.json │ ├── AWAi6wvxAvKNGEbUWO_j.json │ ├── AWBLNS3CRuBloj96jxub.json │ ├── a2ab0c40-3b0a-11e7-a6f9-5d3fe735ec2b.json │ ├── ad3c0830-3583-11e7-a588-05992195c551.json │ ├── af0ea750-18d3-11e9-932c-d12d2cf4ee95.json │ ├── b10a9c60-3583-11e7-a588-05992195c551.json │ ├── b3a53710-3aaa-11e7-8b17-0d8709b02c80.json │ ├── b65775e0-46cb-11e7-946f-1bfb1be7c36b.json │ ├── b65c2710-3583-11e7-a588-05992195c551.json │ ├── c2c99c30-3583-11e7-a588-05992195c551.json │ ├── c4bbe040-76b3-11e7-ba96-cba76a1e264d.json │ ├── c6ccfc00-3583-11e7-a588-05992195c551.json │ ├── cb367060-3b04-11e7-a83b-b1b4da7d15f4.json │ ├── cca67b60-3583-11e7-a588-05992195c551.json │ ├── ccfcc540-4638-11e7-a82e-d97152153689.json │ ├── d7b54ae0-3583-11e7-a588-05992195c551.json │ ├── de2da250-3583-11e7-a588-05992195c551.json │ ├── e0a34b90-34e6-11e7-9118-45bd317f0ca4.json │ ├── e5aa7170-3583-11e7-a588-05992195c551.json │ ├── ea211360-46c4-11e7-a82e-d97152153689.json │ ├── ebf5ec90-34bf-11e7-9b32-bb903919ead9.json │ ├── ed6f7e20-e060-11e9-8f0c-2ddbf5ed9290.json │ └── f042ad60-46c6-11e7-946f-1bfb1be7c36b.json └── ssl │ ├── kibana.example.org.crt │ └── kibana.example.org.key ├── so-logstash ├── Dockerfile ├── bin │ ├── docker-entrypoint │ └── docker-entrypoint.old ├── config │ ├── log4j2.properties │ └── logstash.yml ├── env2yaml │ └── env2yaml.go ├── files │ ├── conf.d.so │ │ ├── 0000_input_syslogng.conf │ │ ├── 0001_input_json.conf │ │ ├── 0002_input_windows_json.conf │ │ ├── 0003_input_syslog.conf │ │ ├── 0005_input_suricata.conf │ │ ├── 0006_input_beats.conf │ │ ├── 0007_input_import.conf │ │ ├── 0008_input_eval.conf │ │ ├── 1000_preprocess_log_elapsed.conf │ │ ├── 1001_preprocess_syslogng.conf │ │ ├── 1002_preprocess_json.conf │ │ ├── 1003_preprocess_bro.conf │ │ ├── 1004_preprocess_syslog_types.conf │ │ ├── 1026_preprocess_dhcp.conf │ │ ├── 1029_preprocess_esxi.conf │ │ ├── 1030_preprocess_greensql.conf │ │ ├── 1031_preprocess_iis.conf │ │ ├── 1032_preprocess_mcafee.conf │ │ ├── 1033_preprocess_snort.conf │ │ ├── 1034_preprocess_syslog.conf │ │ ├── 1100_preprocess_bro_conn.conf │ │ ├── 1101_preprocess_bro_dhcp.conf │ │ ├── 1102_preprocess_bro_dns.conf │ │ ├── 1103_preprocess_bro_dpd.conf │ │ ├── 1104_preprocess_bro_files.conf │ │ ├── 1105_preprocess_bro_ftp.conf │ │ ├── 1106_preprocess_bro_http.conf │ │ ├── 1107_preprocess_bro_irc.conf │ │ ├── 1108_preprocess_bro_kerberos.conf │ │ ├── 1109_preprocess_bro_notice.conf │ │ ├── 1110_preprocess_bro_rdp.conf │ │ ├── 1111_preprocess_bro_signatures.conf │ │ ├── 1112_preprocess_bro_smtp.conf │ │ ├── 1113_preprocess_bro_snmp.conf │ │ ├── 1114_preprocess_bro_software.conf │ │ ├── 1115_preprocess_bro_ssh.conf │ │ ├── 1116_preprocess_bro_ssl.conf │ │ ├── 1117_preprocess_bro_syslog.conf │ │ ├── 1118_preprocess_bro_tunnel.conf │ │ ├── 1119_preprocess_bro_weird.conf │ │ ├── 1121_preprocess_bro_mysql.conf │ │ ├── 1122_preprocess_bro_socks.conf │ │ ├── 1123_preprocess_bro_x509.conf │ │ ├── 1124_preprocess_bro_intel.conf │ │ ├── 1125_preprocess_bro_modbus.conf │ │ ├── 1126_preprocess_bro_sip.conf │ │ ├── 1127_preprocess_bro_radius.conf │ │ ├── 1128_preprocess_bro_pe.conf │ 
│ ├── 1129_preprocess_bro_rfb.conf │ │ ├── 1130_preprocess_bro_dnp3.conf │ │ ├── 1131_preprocess_bro_smb_files.conf │ │ ├── 1132_preprocess_bro_smb_mapping.conf │ │ ├── 1133_preprocess_bro_ntlm.conf │ │ ├── 1134_preprocess_bro_dce_rpc.conf │ │ ├── 1998_test_data.conf │ │ ├── 2000_network_flow.conf │ │ ├── 6000_bro.conf │ │ ├── 6001_bro_import.conf │ │ ├── 6002_syslog.conf │ │ ├── 6101_switch_brocade.conf │ │ ├── 6200_firewall_fortinet.conf │ │ ├── 6201_firewall_pfsense.conf │ │ ├── 6300_windows.conf │ │ ├── 6301_dns_windows.conf │ │ ├── 6400_suricata.conf │ │ ├── 6500_ossec.conf │ │ ├── 6501_ossec_sysmon.conf │ │ ├── 6502_ossec_autoruns.conf │ │ ├── 6600_winlogbeat_sysmon.conf │ │ ├── 6700_winlogbeat.conf │ │ ├── 8000_postprocess_bro_cleanup.conf │ │ ├── 8001_postprocess_common_ip_augmentation.conf │ │ ├── 8006_postprocess_dns.conf │ │ ├── 8007_postprocess_dns_top1m_tagging.conf │ │ ├── 8007_postprocess_http.conf │ │ ├── 8008_postprocess_dns_whois_age.conf │ │ ├── 8200_postprocess_tagging.conf │ │ ├── 8502_postprocess_freq_analysis_bro_dns.conf │ │ ├── 8503_postprocess_freq_analysis_bro_http.conf │ │ ├── 8504_postprocess_freq_analysis_bro_ssl.conf │ │ ├── 8505_postprocess_freq_analysis_bro_x509.conf │ │ ├── 8998_postprocess_log_elapsed.conf │ │ └── 8999_postprocess_rename_type.conf │ ├── dictionaries │ │ ├── iana_protocols.yaml │ │ ├── iana_services.yaml │ │ ├── services.yaml │ │ └── tcp_flags.yaml │ ├── domainstats │ │ ├── 8007_postprocess_dns_top1m_tagging.conf │ │ └── 8008_postprocess_dns_whois_age.conf │ └── freq │ │ ├── 8502_postprocess_freq_analysis_bro_dns.conf │ │ ├── 8503_postprocess_freq_analysis_bro_http.conf │ │ ├── 8504_postprocess_freq_analysis_bro_ssl.conf │ │ └── 8505_postprocess_freq_analysis_bro_x509.conf └── pipeline │ └── default.conf ├── so-mysql ├── Dockerfile ├── docker-entrypoint.sh └── healthcheck.sh ├── so-navigator ├── Dockerfile ├── angular.json ├── e2e │ ├── app.e2e-spec.ts │ ├── app.po.ts │ └── tsconfig.e2e.json ├── karma.conf.js ├── package-lock.json ├── package.json ├── patch-webpack.js ├── protractor.conf.js ├── src │ ├── app │ │ ├── app.component.html │ │ ├── app.component.scss │ │ ├── app.component.spec.ts │ │ ├── app.component.ts │ │ ├── app.module.ts │ │ ├── config.service.spec.ts │ │ ├── config.service.ts │ │ ├── data.service.spec.ts │ │ ├── data.service.ts │ │ ├── datatable │ │ │ ├── data-table.component.html │ │ │ ├── data-table.component.scss │ │ │ └── data-table.component.ts │ │ ├── exporter │ │ │ ├── exporter.component.html │ │ │ ├── exporter.component.scss │ │ │ ├── exporter.component.spec.ts │ │ │ └── exporter.component.ts │ │ ├── globals.ts │ │ ├── help │ │ │ ├── help.component.html │ │ │ ├── help.component.scss │ │ │ ├── help.component.spec.ts │ │ │ └── help.component.ts │ │ ├── tab │ │ │ ├── tab.component.html │ │ │ ├── tab.component.scss │ │ │ ├── tab.component.spec.ts │ │ │ └── tab.component.ts │ │ ├── tabs │ │ │ ├── dynamic-tabs.directive.ts │ │ │ ├── tabs.component.html │ │ │ ├── tabs.component.scss │ │ │ ├── tabs.component.spec.ts │ │ │ └── tabs.component.ts │ │ ├── taxii2lib.ts │ │ ├── viewmodels.service.spec.ts │ │ └── viewmodels.service.ts │ ├── assets │ │ ├── NavigatorLayerFileFormatv1.pdf │ │ ├── NavigatorLayerFileFormatv1_1.pdf │ │ ├── NavigatorLayerFileFormatv1_2.pdf │ │ ├── NavigatorLayerFileFormatv1_3.pdf │ │ ├── NavigatorLayerFileFormatv2.pdf │ │ ├── NavigatorLayerFileFormatv2_1.pdf │ │ ├── config.json │ │ ├── icons │ │ │ ├── baseline-grid_on-24px.svg │ │ │ ├── ic_camera_alt_black_24px.svg │ │ │ ├── ic_check_box_black_24px.svg 
│ │ │ ├── ic_check_box_outline_blank_black_24px.svg │ │ │ ├── ic_clear_black_24px.svg │ │ │ ├── ic_clear_gray_24px.svg │ │ │ ├── ic_close_black_24px.svg │ │ │ ├── ic_color_lens_black_24px.svg │ │ │ ├── ic_content_copy_black_24px.svg │ │ │ ├── ic_description_black_24px.svg │ │ │ ├── ic_done_black_24px.svg │ │ │ ├── ic_done_gray_24px.svg │ │ │ ├── ic_file_download_black_24px.svg │ │ │ ├── ic_file_upload_black_24px.svg │ │ │ ├── ic_file_upload_gray_24px.svg │ │ │ ├── ic_filter_list_black_24px.svg │ │ │ ├── ic_format_color_fill_black_24px.svg │ │ │ ├── ic_format_color_fill_black_nobottom_24px.svg │ │ │ ├── ic_format_color_fill_gray_nobottom_24px.svg │ │ │ ├── ic_format_size_black_24px.svg │ │ │ ├── ic_insert_chart_black_24px.svg │ │ │ ├── ic_insert_chart_gray_24px.svg │ │ │ ├── ic_insert_comment_black_24px.svg │ │ │ ├── ic_insert_comment_gray_24px.svg │ │ │ ├── ic_keyboard_arrow_down_black_24px.svg │ │ │ ├── ic_keyboard_arrow_right_black_24px.svg │ │ │ ├── ic_keyboard_arrow_up_black_24px.svg │ │ │ ├── ic_layers_clear_black_24px.svg │ │ │ ├── ic_layers_clear_gray_24px.svg │ │ │ ├── ic_lock_black_24px.svg │ │ │ ├── ic_lock_open_black_24px.svg │ │ │ ├── ic_palette_black_24px.svg │ │ │ ├── ic_photo_size_select_large_black_24px.svg │ │ │ ├── ic_playlist_add_black_24px.svg │ │ │ ├── ic_playlist_add_gray_24px.svg │ │ │ ├── ic_remove_circle_black_24px.svg │ │ │ ├── ic_save_black_24px.svg │ │ │ ├── ic_save_gray_24px.svg │ │ │ ├── ic_search_black_24px.svg │ │ │ ├── ic_search_gray_24px.svg │ │ │ ├── ic_sort_alphabetically_ascending_black_24px.svg │ │ │ ├── ic_sort_alphabetically_black_24px.svg │ │ │ ├── ic_sort_alphabetically_descending_black_24px.svg │ │ │ ├── ic_sort_numerically_ascending_black_24px.svg │ │ │ ├── ic_sort_numerically_black_24px.svg │ │ │ ├── ic_sort_numerically_descending_black_24px.svg │ │ │ ├── ic_texture_black_24px.svg │ │ │ ├── ic_texture_gray_24px.svg │ │ │ ├── ic_view_large_black_24px.svg │ │ │ ├── ic_view_list_black_24px.svg │ │ │ ├── ic_view_list_grey_24px.svg │ │ │ ├── ic_view_medium_black_24px.svg │ │ │ ├── ic_view_small_black_24px.svg │ │ │ ├── ic_visibility_black_24px.svg │ │ │ ├── ic_visibility_gray_24px.svg │ │ │ └── ic_visibility_off_black_24px.svg │ │ └── image_scoreVariableExample.png │ ├── bootstrap.min.css │ ├── colors.scss │ ├── environments │ │ ├── environment.prod.ts │ │ └── environment.ts │ ├── favicon.ico │ ├── faviconO.ico │ ├── index.html │ ├── main.ts │ ├── polyfills.ts │ ├── styles.scss │ ├── test.ts │ ├── tsconfig.app.json │ ├── tsconfig.spec.json │ └── typings.d.ts ├── tsconfig.json └── tslint.json ├── so-nodered ├── Dockerfile ├── README.md ├── docker-make.sh ├── package.json └── scripts │ ├── install_devtools.sh │ └── remove_native_gpio.sh ├── so-playbook ├── Dockerfile └── playbook │ ├── circle_theme.tar.bz2 │ ├── issues_controller.patch │ ├── passenger-nginx-config-template.erb │ └── plugin │ └── redmine_playbook.tar.bz2 ├── so-redis ├── Dockerfile └── files │ └── docker-entrypoint.sh ├── so-soctopus ├── .gitignore ├── Dockerfile ├── requirements.txt └── so-soctopus │ ├── SOCtopus.conf │ ├── SOCtopus.py │ ├── config.py │ ├── destinations.py │ ├── forms.py │ ├── grr.py │ ├── helpers.py │ ├── playbook.py │ ├── playbook │ ├── securityonion-baseline.yml │ ├── securityonion-network.yml │ └── sysmon.yml │ ├── playbook_bulk-update.py │ ├── playbook_play-sync.py │ └── templates │ ├── cancel.html │ ├── hive.html │ ├── postresult.html │ ├── result.html │ ├── strelka.html │ └── update_event.html ├── so-steno ├── Dockerfile ├── README.md └── files │ ├── sensoroni │ 
└── so-steno.sh ├── so-strelka-backend ├── Dockerfile ├── requirements.txt └── setup.py ├── so-strelka-filestream └── Dockerfile ├── so-strelka-frontend └── Dockerfile ├── so-strelka-manager └── Dockerfile ├── so-suricata ├── Dockerfile ├── README.md └── files │ └── so-suricata.sh ├── so-tcpreplay ├── Dockerfile └── files │ └── tcpreplay ├── so-telegraf ├── Dockerfile └── entrypoint.sh ├── so-thehive-cortex ├── Dockerfile └── requirements.txt ├── so-thehive-es ├── Dockerfile ├── bin │ └── es-docker ├── elasticsearch.yml ├── es-docker └── log4j2.properties ├── so-thehive ├── Dockerfile └── bin │ └── so-thehive.sh ├── so-wazuh ├── Dockerfile ├── README.md ├── config │ ├── data_dirs.env │ ├── entrypoint.sh │ ├── init.bash │ ├── ossec.conf │ ├── repos.bash │ └── securityonion_rules.xml ├── logstash │ ├── 1001_preprocess_syslogng.conf │ ├── 9600_output_ossec.conf │ └── logstash-ossec-template.json ├── ossec_agent │ └── ossec_agent.tcl ├── scripts │ ├── so-ossec-list-agents │ ├── so-ossec-logtest │ ├── so-ossec-manage-agents │ ├── so-ossec-restart │ ├── so-ossec-start │ └── so-ossec-stop └── syslog-ng │ └── syslog-ng.conf └── so-zeek ├── Dockerfile ├── README.md └── files └── zeek.sh /.DS_Store: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Security-Onion-Solutions/securityonion-docker-hh/8888b402d1757c8e4801260c8947ac9f90b15ee6/.DS_Store -------------------------------------------------------------------------------- /1_push_images_to_test.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | DOCKERHUB="dougburks" 4 | 5 | echo 6 | echo "This script will push all Docker images to:" 7 | echo "https://hub.docker.com/u/${DOCKERHUB}/." 8 | echo 9 | echo "Press Enter to continue or Ctrl-c to cancel." 10 | read PAUSE 11 | echo 12 | 13 | docker push --disable-content-trust=false ${DOCKERHUB}/so-elasticsearch:latest 14 | docker push --disable-content-trust=false ${DOCKERHUB}/so-logstash:latest 15 | docker push --disable-content-trust=false ${DOCKERHUB}/so-kibana:latest 16 | docker push --disable-content-trust=false ${DOCKERHUB}/so-elastalert:latest 17 | docker push --disable-content-trust=false ${DOCKERHUB}/so-curator:latest 18 | docker push --disable-content-trust=false ${DOCKERHUB}/so-freqserver:latest 19 | docker push --disable-content-trust=false ${DOCKERHUB}/so-domainstats:latest 20 | -------------------------------------------------------------------------------- /2_push_images_to_stable.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | DOCKERHUBTEST="dougburks" 4 | DOCKERHUBPROD="securityonionsolutions" 5 | 6 | echo "This script will push latest images from ${DOCKERHUBTEST} to ${DOCKERHUBPROD}." 7 | echo 8 | echo "Press Enter to continue or Ctrl-c to cancel." 9 | read input 10 | 11 | for i in so-elasticsearch so-logstash so-kibana so-elastalert so-curator so-freqserver so-domainstats; do 12 | docker tag ${DOCKERHUBTEST}/${i}:latest ${DOCKERHUBPROD}/${i}:latest 13 | docker push --disable-content-trust=false ${DOCKERHUBPROD}/${i}:latest 14 | done 15 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # securityonion-docker 2 | This repo contains Docker files for Security Onion. 
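The numbered scripts above imply a build/test/promote flow: 0_build_images.sh builds the images, 1_push_images_to_test.sh pushes them to a test Docker Hub namespace, and 2_push_images_to_stable.sh re-tags and pushes them to the production namespace. 0_build_images.sh itself is not reproduced in this listing, so the sketch below is a hedged illustration of how the pieces presumably fit together: the image names and namespaces come from the push scripts shown above, while the docker build invocation and build-context paths are assumptions.

#!/bin/bash
# Illustrative sketch only -- not a file from this repo.
DOCKERHUBTEST="dougburks"   # test namespace, per 1_push_images_to_test.sh
for i in so-elasticsearch so-logstash so-kibana so-elastalert so-curator so-freqserver so-domainstats; do
  # Assumed build step: each so-* directory contains its own Dockerfile.
  docker build -t ${DOCKERHUBTEST}/${i}:latest ./${i}
  # Push to the test namespace with content trust enabled, as in 1_push_images_to_test.sh.
  docker push --disable-content-trust=false ${DOCKERHUBTEST}/${i}:latest
done
# 2_push_images_to_stable.sh then re-tags each test image into the
# securityonionsolutions namespace and pushes it there.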
3 | -------------------------------------------------------------------------------- /so-acng/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM ubuntu:bionic-20180526 2 | 3 | LABEL maintainer "Security Onion Solutions, LLC" 4 | LABEL description="apt-cacher running in Docker container for use with Security Onion" 5 | 6 | ENV APT_CACHER_NG_VERSION=3.1 \ 7 | APT_CACHER_NG_CACHE_DIR=/var/cache/apt-cacher-ng \ 8 | APT_CACHER_NG_LOG_DIR=/var/log/apt-cacher-ng \ 9 | APT_CACHER_NG_USER=socore 10 | 11 | RUN apt-get update \ 12 | && DEBIAN_FRONTEND=noninteractive apt-get install --no-install-recommends -y \ 13 | apt-cacher-ng=${APT_CACHER_NG_VERSION}* \ 14 | && rm -rf /var/lib/apt/lists/* 15 | 16 | # Create socore user. 17 | RUN addgroup --gid 939 socore && \ 18 | adduser --uid 939 --gid 939 \ 19 | --home /opt/so --no-create-home socore 20 | 21 | COPY files/so-entrypoint.sh /sbin/so-entrypoint.sh 22 | COPY repos/* /etc/apt-cacher-ng/ 23 | 24 | RUN chmod 755 /sbin/so-entrypoint.sh && chown -R 939:939 /etc/apt-cacher-ng 25 | RUN ln -sf /dev/stdout /var/log/apt-cacher-ng/apt-cacher.log; \ 26 | ln -sf /dev/stderr /var/log/apt-cacher-ng/apt-cacher.err; 27 | 28 | EXPOSE 3142/tcp 29 | 30 | ENTRYPOINT ["/sbin/so-entrypoint.sh"] 31 | 32 | CMD ["/usr/sbin/apt-cacher-ng"] 33 | -------------------------------------------------------------------------------- /so-acng/files/so-entrypoint.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | set -e 3 | 4 | create_pid_dir() { 5 | mkdir -p /run/apt-cacher-ng 6 | chmod -R 0755 /run/apt-cacher-ng 7 | chown ${APT_CACHER_NG_USER}:${APT_CACHER_NG_USER} /run/apt-cacher-ng 8 | } 9 | 10 | create_cache_dir() { 11 | mkdir -p ${APT_CACHER_NG_CACHE_DIR} 12 | chmod -R 0755 ${APT_CACHER_NG_CACHE_DIR} 13 | chown -R ${APT_CACHER_NG_USER}:root ${APT_CACHER_NG_CACHE_DIR} 14 | } 15 | 16 | create_log_dir() { 17 | mkdir -p ${APT_CACHER_NG_LOG_DIR} 18 | chmod -R 0755 ${APT_CACHER_NG_LOG_DIR} 19 | chown -R ${APT_CACHER_NG_USER}:${APT_CACHER_NG_USER} ${APT_CACHER_NG_LOG_DIR} 20 | } 21 | 22 | create_pid_dir 23 | create_cache_dir 24 | create_log_dir 25 | 26 | # allow arguments to be passed to apt-cacher-ng 27 | if [[ ${1:0:1} = '-' ]]; then 28 | EXTRA_ARGS="$@" 29 | set -- 30 | elif [[ ${1} == apt-cacher-ng || ${1} == $(which apt-cacher-ng) ]]; then 31 | EXTRA_ARGS="${@:2}" 32 | set -- 33 | fi 34 | 35 | # default behaviour is to launch apt-cacher-ng 36 | if [[ -z ${1} ]]; then 37 | exec start-stop-daemon --start --chuid ${APT_CACHER_NG_USER}:${APT_CACHER_NG_USER} \ 38 | --exec $(which apt-cacher-ng) -- -c /etc/apt-cacher-ng ${EXTRA_ARGS} 39 | else 40 | exec "$@" 41 | fi 42 | -------------------------------------------------------------------------------- /so-acng/repos/backends_apache.au: -------------------------------------------------------------------------------- 1 | http://apache.mirror.amaze.com.au/ 2 | http://apache.mirror.digitalpacific.com.au/ 3 | http://mirror.intergrid.com.au/apache/ 4 | http://apache.mirror.serversaustralia.com.au/ 5 | http://mirror.ventraip.net.au/apache/ 6 | http://apache.melbourneitmirror.net/ 7 | -------------------------------------------------------------------------------- /so-acng/repos/backends_apache.uk: -------------------------------------------------------------------------------- 1 | apache.mirror.anlx.net 2 | mirror.vorboss.net 3 | www.mirrorservice.org 4 | mirror.ox.ac.uk 5 | apache.mirrors.nublue.co.uk 6 | mirrors.ukfast.co.uk 7 | 
-------------------------------------------------------------------------------- /so-acng/repos/backends_apache.us: -------------------------------------------------------------------------------- 1 | http://mirror.cogentco.com/pub/apache/ 2 | http://mirrors.gigenet.com/apache/ 3 | http://mirrors.koehn.com/apache/ 4 | http://download.nextag.com/apache/ 5 | http://apache.mirrors.pair.com/ 6 | http://mirrors.sorengard.com/apache/ 7 | http://mirrors.ocf.berkeley.edu/apache/ 8 | http://mirror.cc.columbia.edu/pub/software/apache/ 9 | http://apache.mirrors.lucidnetworks.net/ 10 | http://mirror.metrocast.net/apache/ 11 | http://mirror.olnevhost.net/pub/apache/ 12 | http://mirrors.sonic.net/apache/ 13 | http://apache.mirrors.tds.net/ 14 | http://apache.claz.org/ 15 | http://mirrors.ibiblio.org/apache/ 16 | http://apache.mirrors.ionfish.org/ 17 | http://apache.osuosl.org/ 18 | http://mirror.stjschools.org/public/apache/ 19 | -------------------------------------------------------------------------------- /so-acng/repos/backends_centos.au: -------------------------------------------------------------------------------- 1 | http://centos.mirror.serversaustralia.com.au/ 2 | http://mirror.aarnet.edu.au/pub/centos/ 3 | http://mirror.optus.net/centos/ 4 | http://mirror.colocity.com/centos/ 5 | -------------------------------------------------------------------------------- /so-acng/repos/backends_centos.uk: -------------------------------------------------------------------------------- 1 | http://centos.mirror.serversaustralia.com.au/ 2 | http://mirror.aarnet.edu.au/pub/centos/ 3 | http://mirror.optus.net/centos/ 4 | http://mirror.colocity.com/centos/ 5 | -------------------------------------------------------------------------------- /so-acng/repos/backends_centos.us: -------------------------------------------------------------------------------- 1 | http://centos.mirror.serversaustralia.com.au/ 2 | http://mirror.aarnet.edu.au/pub/centos/ 3 | http://mirror.optus.net/centos/ 4 | http://mirror.colocity.com/centos/ 5 | -------------------------------------------------------------------------------- /so-acng/repos/backends_debian: -------------------------------------------------------------------------------- 1 | http://cdn-fastly.deb.debian.org/debian 2 | http://cdn-aws.deb.debian.org/debian 3 | -------------------------------------------------------------------------------- /so-acng/repos/backends_epel.au: -------------------------------------------------------------------------------- 1 | http://fedora.melbourneitmirror.net/epel 2 | http://mirror.aarnet.edu.au/pub/epel 3 | -------------------------------------------------------------------------------- /so-acng/repos/backends_epel.uk: -------------------------------------------------------------------------------- 1 | http://mirror.sax.uk.as61049.net/epel 2 | http://mirror.bytemark.co.uk/fedora/epel 3 | http://mirror.freethought-internet.co.uk/epel 4 | http://mirrors.coreix.net/fedora-epel 5 | http://www.mirrorservice.org/sites/dl.fedoraproject.org/pub/epel 6 | http://mirrors.ukfast.co.uk/sites/dl.fedoraproject.org/pub/epel 7 | http://mirror.vorboss.net/fedora/epel 8 | http://uk-mirrors.evowise.com/epel 9 | -------------------------------------------------------------------------------- /so-acng/repos/backends_fedora.au: -------------------------------------------------------------------------------- 1 | http://fedora.melbourneitmirror.net/fedora/linux 2 | http://mirror.aarnet.edu.au/pub/fedora/linux 3 | 
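The backends_* and *_mirrors files in repos/ are consumed by apt-cacher-ng through Remap directives in the acng.conf variants listed in the tree (acng.conf, acng.conf.au/.uk/.us), which this listing does not reproduce. The lines below are a hypothetical excerpt showing apt-cacher-ng's documented Remap syntax using file names from this directory; the repo's actual directives may differ.

# Hypothetical acng.conf excerpt -- illustrates the mechanism, not copied from this repo.
# The list before ';' gives URL patterns merged into one cache path; the list after ';'
# names the backend mirrors actually used for downloads.
Remap-centos: file:centos_mirrors /centos ; file:backends_centos.us
Remap-epel: file:epel_mirrors /epel ; file:backends_epel.us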
-------------------------------------------------------------------------------- /so-acng/repos/backends_fedora.uk: -------------------------------------------------------------------------------- 1 | http://mirror.bytemark.co.uk/fedora/linux 2 | http://www.mirrorservice.org/sites/dl.fedoraproject.org/pub/fedora/linux 3 | http://mirror.sucs.org/pub/linux/fedora 4 | http://mirror.vorboss.net/fedora/linux 5 | -------------------------------------------------------------------------------- /so-acng/repos/backends_ubuntu.au: -------------------------------------------------------------------------------- 1 | http://au.archive.ubuntu.com/ubuntu/ 2 | http://mirror.aarnet.edu.au/pub/ubuntu/ 3 | -------------------------------------------------------------------------------- /so-acng/repos/backends_ubuntu.uk: -------------------------------------------------------------------------------- 1 | http://uk.archive.ubuntu.com/ubuntu/ 2 | -------------------------------------------------------------------------------- /so-acng/repos/backends_ubuntu.us: -------------------------------------------------------------------------------- 1 | http://us.archive.ubuntu.com/ubuntu/ 2 | -------------------------------------------------------------------------------- /so-acng/repos/mirrors_alpine: -------------------------------------------------------------------------------- 1 | http://dl-cdn.alpinelinux.org/alpine/ 2 | -------------------------------------------------------------------------------- /so-core/Dockerfile: -------------------------------------------------------------------------------- 1 | # Copyright 2014,2015,2016,2017,2018 Security Onion Solutions, LLC 2 | 3 | # This program is free software: you can redistribute it and/or modify 4 | # it under the terms of the GNU General Public License as published by 5 | # the Free Software Foundation, either version 3 of the License, or 6 | # (at your option) any later version. 7 | # 8 | # This program is distributed in the hope that it will be useful, 9 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 10 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 11 | # GNU General Public License for more details. 12 | # 13 | # You should have received a copy of the GNU General Public License 14 | # along with this program. If not, see . 15 | 16 | FROM centos:7 17 | 18 | LABEL maintainer "Security Onion Solutions, LLC" 19 | LABEL description="Security Onion Core Functions Docker" 20 | 21 | RUN yum update -y && yum -y install rsync epel-release 22 | RUN yum -y install nginx 23 | RUN yum -y erase epel-release && yum clean all && rm -rf /var/cache/yum 24 | 25 | RUN mkdir -p /opt/socore/html 26 | 27 | COPY files/startso.sh /opt/socore/ 28 | COPY files/html/ /opt/socore/html/ 29 | 30 | RUN chmod +x /opt/socore/startso.sh 31 | 32 | # Create socore user. 
33 | RUN groupadd --gid 939 socore && \ 34 | adduser --uid 939 --gid 939 \ 35 | --home-dir /opt/so --no-create-home socore 36 | RUN setcap cap_net_bind_service=ep /usr/sbin/nginx 37 | 38 | EXPOSE 80 39 | EXPOSE 443 40 | 41 | ENTRYPOINT ["/opt/socore/startso.sh"] 42 | -------------------------------------------------------------------------------- /so-core/README.md: -------------------------------------------------------------------------------- 1 | SO-Core Docker 2 | -------------------------------------------------------------------------------- /so-core/conf/daemon.json: -------------------------------------------------------------------------------- 1 | { 2 | "bip": "172.17.0.1/24" 3 | } 4 | -------------------------------------------------------------------------------- /so-core/conf/elasticdownload.conf: -------------------------------------------------------------------------------- 1 | # Elastic Download 2 | GITREPO="elastic-test" 3 | GITURL="https://github.com/Security-Onion-Solutions/elastic-test.git" 4 | DOCKERHUB="securityonionsolutions" 5 | -------------------------------------------------------------------------------- /so-core/conf/enabledmodules.conf: -------------------------------------------------------------------------------- 1 | syslog-ng 2 | -------------------------------------------------------------------------------- /so-core/conf/sensortab.orig: -------------------------------------------------------------------------------- 1 | #sotest-eth0 1 8000 eth0 2 | sotest-eth1 1 8100 eth1 3 | -------------------------------------------------------------------------------- /so-core/files/.DS_Store: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Security-Onion-Solutions/securityonion-docker-hh/8888b402d1757c8e4801260c8947ac9f90b15ee6/so-core/files/.DS_Store -------------------------------------------------------------------------------- /so-core/files/html/alpha_logo.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Security-Onion-Solutions/securityonion-docker-hh/8888b402d1757c8e4801260c8947ac9f90b15ee6/so-core/files/html/alpha_logo.jpg -------------------------------------------------------------------------------- /so-core/files/html/favicon-16x16.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Security-Onion-Solutions/securityonion-docker-hh/8888b402d1757c8e4801260c8947ac9f90b15ee6/so-core/files/html/favicon-16x16.png -------------------------------------------------------------------------------- /so-core/files/html/favicon-32x32.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Security-Onion-Solutions/securityonion-docker-hh/8888b402d1757c8e4801260c8947ac9f90b15ee6/so-core/files/html/favicon-32x32.png -------------------------------------------------------------------------------- /so-core/files/html/favicon.ico: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Security-Onion-Solutions/securityonion-docker-hh/8888b402d1757c8e4801260c8947ac9f90b15ee6/so-core/files/html/favicon.ico -------------------------------------------------------------------------------- /so-core/files/html/securityonion_logo.jpg: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/Security-Onion-Solutions/securityonion-docker-hh/8888b402d1757c8e4801260c8947ac9f90b15ee6/so-core/files/html/securityonion_logo.jpg -------------------------------------------------------------------------------- /so-core/files/html/so_smaller.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Security-Onion-Solutions/securityonion-docker-hh/8888b402d1757c8e4801260c8947ac9f90b15ee6/so-core/files/html/so_smaller.png -------------------------------------------------------------------------------- /so-core/files/startso.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | echo 4 | #echo "Syncing base files with the host OS" 5 | #rsync --update -raz /opt/socore/ /opt/so 6 | 7 | echo 8 | echo "Running Security Onion" 9 | 10 | #for module in $(cat /opt/so/conf/enabledmodules.conf); 11 | # do 12 | # /opt/so/bin/so-$module-start 13 | #done 14 | nginx 15 | sleep infinity 16 | -------------------------------------------------------------------------------- /so-core/get_cyberchef: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | VERSION="9.11.14" 3 | COREPATH="so-core" 4 | mkdir -p $COREPATH/files/html/cyberchef && \ 5 | wget -O $COREPATH/CyberChef_v$VERSION.zip https://github.com/gchq/CyberChef/releases/download/v$VERSION/CyberChef_v$VERSION.zip && \ 6 | unzip -o $COREPATH/CyberChef_v$VERSION.zip -d $COREPATH/files/html/cyberchef && \ 7 | mv $COREPATH/files/html/cyberchef/CyberChef_v$VERSION.html $COREPATH/files/html/cyberchef/index.html && \ 8 | rm -f $COREPATH/CyberChef_v$VERSION.zip 9 | -------------------------------------------------------------------------------- /so-curator/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM centos:7 2 | 3 | LABEL maintainer "Security Onion Solutions, LLC" 4 | 5 | # Create a common centos update layer 6 | RUN yum update -y && \ 7 | yum clean all 8 | 9 | # Create user 10 | RUN groupadd --gid 934 curator && \ 11 | adduser --uid 934 --gid 934 \ 12 | --home-dir /usr/share/curator --no-create-home \ 13 | curator && \ 14 | rpm --import https://packages.elastic.co/GPG-KEY-elasticsearch 15 | 16 | COPY ./files/curator.repo /etc/yum.repos.d/curator.repo 17 | 18 | RUN yum update -y && \ 19 | yum install -y elasticsearch-curator && \ 20 | chown -R curator: /opt/elasticsearch-curator /usr/bin/curator* && \ 21 | yum clean all 22 | 23 | USER curator 24 | 25 | ENV LC_ALL en_US.UTF-8 26 | 27 | ENTRYPOINT ["/bin/bash"] 28 | -------------------------------------------------------------------------------- /so-curator/files/curator.repo: -------------------------------------------------------------------------------- 1 | [curator-5] 2 | name=CentOS/RHEL 7 repository for Elasticsearch Curator 5.x packages 3 | baseurl=https://packages.elastic.co/curator/5/centos/7 4 | gpgcheck=1 5 | gpgkey=https://packages.elastic.co/GPG-KEY-elasticsearch 6 | enabled=1 7 | -------------------------------------------------------------------------------- /so-domainstats/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM centos:7 2 | 3 | # Originally developed by Justin Henderson justin@hasecuritysolutions.com 4 | LABEL maintainer "Security Onion Solutions, LLC" 5 | LABEL description="Domainstats running in Docker container for use with Security Onion" 6 | 7 | # Create a common centos update 
layer 8 | RUN yum update -y && \ 9 | yum clean all 10 | 11 | # Create a common python/git layer 12 | RUN yum update -y && \ 13 | yum install -y python3 git && \ 14 | yum clean all 15 | 16 | # Create user 17 | RUN groupadd --gid 936 domainstats && \ 18 | adduser --uid 936 --gid 936 \ 19 | --home-dir /usr/share/domainstats --no-create-home \ 20 | domainstats 21 | 22 | # Install and set perms in same layer to save space 23 | RUN curl https://bootstrap.pypa.io/get-pip.py -o get-pip.py && \ 24 | python3 get-pip.py && \ 25 | cd /opt && \ 26 | git clone https://github.com/MarkBaggett/domain_stats.git && \ 27 | pip install python-whois six && \ 28 | mkdir /var/log/domain_stats && \ 29 | ln -sf /dev/stderr /var/log/domain_stats/domain_stats.log && \ 30 | chown -R domainstats: /opt/domain_stats 31 | 32 | USER domainstats 33 | 34 | EXPOSE 20000 35 | 36 | STOPSIGNAL SIGTERM 37 | 38 | CMD /usr/bin/python3 /opt/domain_stats/domain_stats.py -ip 0.0.0.0 20000 -a /opt/domain_stats/top-1m.csv --preload 0 39 | -------------------------------------------------------------------------------- /so-elastalert/README.md: -------------------------------------------------------------------------------- 1 | Docker image for Elastalert, based on CentOS 7, for use with Security Onion 2 | -------------------------------------------------------------------------------- /so-elastalert/files/bro_conn.yaml: -------------------------------------------------------------------------------- 1 | # Alert when the rate of events exceeds a threshold 2 | 3 | es_host: elasticsearch 4 | es_port: 9200 5 | 6 | 7 | # (Required) 8 | # Rule name, must be unique 9 | name: Security Onion ElastAlert -- New Connection Logs Found! 10 | 11 | # (Required) 12 | # Type of alert. 13 | # the frequency rule type alerts when num_events events occur with timeframe time 14 | type: frequency 15 | 16 | # (Required) 17 | # Index to search, wildcard supported 18 | index: logstash-bro* 19 | 20 | use_strftime_index: true 21 | 22 | # (Required, frequency specific) 23 | # Alert when this many documents matching the query occur within a timeframe 24 | num_events: 1 25 | 26 | # (Required, frequency specific) 27 | # num_events must occur within this amount of time to trigger an alert 28 | timeframe: 29 | minutes: 1 30 | 31 | # This option allows you to ignore repeating alerts for a period of time.
32 | realert: 33 | minutes: 5 34 | 35 | # (Required) 36 | # A list of Elasticsearch filters used for find events 37 | # These filters are joined with AND and nested in a filtered query 38 | # For more info: http://www.elasticsearch.org/guide/en/elasticsearch/reference/current/query-dsl.html 39 | filter: 40 | - term: 41 | type: 'bro_conn' 42 | 43 | # (Required) 44 | # The alert is use when a match is found 45 | #alert: 46 | #- "email" 47 | #email: 48 | #- "root@localhost" 49 | alert: 50 | - "debug" 51 | -------------------------------------------------------------------------------- /so-elastalert/files/elastalert.json: -------------------------------------------------------------------------------- 1 | { 2 | "properties": { 3 | "rule_name": { 4 | "type": "keyword" 5 | }, 6 | "@timestamp": { 7 | "type": "date", 8 | "format": "dateOptionalTime" 9 | }, 10 | "alert_time": { 11 | "type": "date", 12 | "format": "dateOptionalTime" 13 | }, 14 | "match_time": { 15 | "type": "date", 16 | "format": "dateOptionalTime" 17 | }, 18 | "match_body": { 19 | "type": "object", 20 | "enabled": "true" 21 | }, 22 | "aggregate_id": { 23 | "type": "keyword" 24 | } 25 | } 26 | } 27 | -------------------------------------------------------------------------------- /so-elastalert/files/elastalert_supervisord.conf: -------------------------------------------------------------------------------- 1 | [unix_http_server] 2 | file=/var/run/elastalert/elastalert_supervisor.sock 3 | chmod=0770 4 | chown=elastalert:elastalert 5 | 6 | [supervisord] 7 | user=elastalert 8 | logfile=/var/log/elastalert_supervisord.log 9 | logfile_maxbytes=1MB 10 | logfile_backups=2 11 | loglevel=debug 12 | nodaemon=false 13 | directory=%(here)s 14 | 15 | [rpcinterface:supervisor] 16 | supervisor.rpcinterface_factory = supervisor.rpcinterface:make_main_rpcinterface 17 | 18 | [supervisorctl] 19 | user=elastalert 20 | serverurl=unix:///var/run/elastalert/elastalert_supervisor.sock 21 | 22 | [program:elastalert] 23 | user=elastalert 24 | # running globally 25 | command = 26 | python elastalert.py 27 | --verbose 28 | # (alternative) using virtualenv 29 | # command=/path/to/venv/bin/elastalert --config /path/to/config.yaml --verbose 30 | process_name=elastalert 31 | autorestart=true 32 | startsecs=15 33 | stopsignal=INT 34 | stopasgroup=true 35 | killasgroup=true 36 | stderr_logfile=/var/log/elastalert_stderr.log 37 | stderr_logfile_maxbytes=5MB 38 | -------------------------------------------------------------------------------- /so-elastalert/files/ids.yaml: -------------------------------------------------------------------------------- 1 | # From example_rules/example_frequency.yaml 2 | es_host: elasticsearch 3 | es_port: 9200 4 | name: Security Onion ElastAlert - New IDS Event! 
5 | type: frequency 6 | index: logstash-ids* 7 | num_events: 1 8 | timeframe: 9 | minutes: 1 10 | filter: 11 | - term: 12 | type: "snort" 13 | alert: 14 | - "debug" 15 | -------------------------------------------------------------------------------- /so-elastalert/files/past_elastalert.json: -------------------------------------------------------------------------------- 1 | { 2 | "properties": { 3 | "rule_name": { 4 | "type": "keyword" 5 | }, 6 | "match_body": { 7 | "type": "object", 8 | "enabled": "true" 9 | }, 10 | "@timestamp": { 11 | "type": "date", 12 | "format": "dateOptionalTime" 13 | }, 14 | "aggregate_id": { 15 | "type": "keyword" 16 | } 17 | } 18 | } 19 | -------------------------------------------------------------------------------- /so-elasticsearch/Dockerfile: -------------------------------------------------------------------------------- 1 | # This Dockerfile was based on the official Elasticsearch Docker image: 2 | # https://github.com/elastic/elasticsearch-docker 3 | 4 | # Copyright 2014,2015,2016,2017,2019,2020 Security Onion Solutions, LLC 5 | 6 | # This program is free software: you can redistribute it and/or modify 7 | # it under the terms of the GNU General Public License as published by 8 | # the Free Software Foundation, either version 3 of the License, or 9 | # (at your option) any later version. 10 | # 11 | # This program is distributed in the hope that it will be useful, 12 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 13 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 14 | # GNU General Public License for more details. 15 | # 16 | # You should have received a copy of the GNU General Public License 17 | # along with this program. If not, see . 18 | ARG FLAVOR 19 | ARG VERSION 20 | 21 | FROM docker.elastic.co/elasticsearch/$FLAVOR:$VERSION 22 | 23 | RUN groupmod -g 930 elasticsearch && \ 24 | usermod -u 930 -g 930 elasticsearch 25 | 26 | WORKDIR /usr/share/elasticsearch 27 | 28 | RUN set -ex && for esdirs in config data logs; do \ 29 | mkdir -p "$esdirs"; \ 30 | chown -R elasticsearch:elasticsearch "$esdirs"; \ 31 | done 32 | 33 | COPY elasticsearch.yml log4j2.properties config/ 34 | COPY bin/es-docker bin/es-docker 35 | 36 | RUN chown elasticsearch:elasticsearch config/elasticsearch.yml config/log4j2.properties bin/es-docker && \ 37 | chmod 0750 bin/es-docker 38 | 39 | USER elasticsearch 40 | CMD ["/bin/bash", "bin/es-docker"] 41 | -------------------------------------------------------------------------------- /so-elasticsearch/bin/es-docker: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Run Elasticsearch and allow setting default settings via env vars 4 | # 5 | # e.g. Setting the env var cluster.name=testcluster 6 | # 7 | # will cause Elasticsearch to be invoked with -Ecluster.name=testcluster 8 | # 9 | # see https://www.elastic.co/guide/en/elasticsearch/reference/current/settings.html#_setting_default_settings 10 | 11 | #declare -a es_opts 12 | 13 | #while IFS='=' read -r envvar_key envvar_value 14 | #do 15 | # Elasticsearch env vars need to have at least two dot separated lowercase words, e.g. `cluster.name` 16 | # if [[ "$envvar_key" =~ ^[a-z]+\.[a-z]+ ]] 17 | # then 18 | # if [[ ! -z $envvar_value ]]; then 19 | # es_opt="-E${envvar_key}=${envvar_value}" 20 | # es_opts+=("${es_opt}") 21 | # fi 22 | # fi 23 | #done < <(env) 24 | 25 | # The virtual file /proc/self/cgroup should list the current cgroup 26 | # membership. 
For each hierarchy, you can follow the cgroup path from 27 | # this file to the cgroup filesystem (usually /sys/fs/cgroup/) and 28 | # introspect the statistics for the cgroup for the given 29 | # hierarchy. Alas, Docker breaks this by mounting the container 30 | # statistics at the root while leaving the cgroup paths as the actual 31 | # paths. Therefore, Elasticsearch provides a mechanism to override 32 | # reading the cgroup path from /proc/self/cgroup and instead uses the 33 | # cgroup path defined the JVM system property 34 | # es.cgroups.hierarchy.override. Therefore, we set this value here so 35 | # that cgroup statistics are available for the container this process 36 | # will run in. 37 | export ES_JAVA_OPTS="-Des.cgroups.hierarchy.override=/ $ES_JAVA_OPTS" 38 | 39 | exec bin/elasticsearch #"${es_opts[@]}" 40 | -------------------------------------------------------------------------------- /so-elasticsearch/elasticsearch.yml: -------------------------------------------------------------------------------- 1 | cluster.name: "docker-cluster" 2 | network.host: 0.0.0.0 3 | 4 | # minimum_master_nodes need to be explicitly set when bound on a public IP 5 | # set to 1 to allow single node clusters 6 | # Details: https://github.com/elastic/elasticsearch/pull/17288 7 | discovery.zen.minimum_master_nodes: 1 8 | -------------------------------------------------------------------------------- /so-elasticsearch/log4j2.properties: -------------------------------------------------------------------------------- 1 | status = error 2 | 3 | appender.console.type = Console 4 | appender.console.name = console 5 | appender.console.layout.type = PatternLayout 6 | appender.console.layout.pattern = [%d{ISO8601}][%-5p][%-25c{1.}] %marker%m%n 7 | 8 | rootLogger.level = info 9 | rootLogger.appenderRef.console.ref = console 10 | -------------------------------------------------------------------------------- /so-filebeat/Dockerfile: -------------------------------------------------------------------------------- 1 | # This Dockerfile was based on the official Filebeat Docker image: 2 | # https://hub.docker.com/r/elastic/filebeat/ 3 | 4 | # Copyright 2014,2015,2016,2017,2019,2020 Security Onion Solutions, LLC 5 | 6 | # This program is free software: you can redistribute it and/or modify 7 | # it under the terms of the GNU General Public License as published by 8 | # the Free Software Foundation, either version 3 of the License, or 9 | # (at your option) any later version. 10 | # 11 | # This program is distributed in the hope that it will be useful, 12 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 13 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 14 | # GNU General Public License for more details. 15 | # 16 | # You should have received a copy of the GNU General Public License 17 | # along with this program. If not, see . 18 | ARG FLAVOR 19 | ARG VERSION 20 | 21 | FROM docker.elastic.co/beats/$FLAVOR:$VERSION 22 | USER root 23 | # Add entrypoint wrapper script 24 | ADD files/docker-entrypoint /usr/local/bin 25 | RUN chmod 755 /usr/local/bin/docker-entrypoint 26 | 27 | # Provide a non-root user. 28 | RUN groupadd --gid 939 socore && \ 29 | useradd -M --uid 939 --gid 939 --home /usr/share/filebeat socore && \ 30 | groupadd -g 945 ossec && \ 31 | usermod -a -G ossec socore 32 | 33 | WORKDIR /usr/share/filebeat 34 | RUN chown -R root:socore . 
&& \ 35 | find /usr/share/filebeat -type d -exec chmod 0750 {} \; && \ 36 | find /usr/share/filebeat -type f -exec chmod 0640 {} \; && \ 37 | chmod 0750 filebeat && \ 38 | chmod 0770 modules.d && \ 39 | chmod 0770 data logs 40 | USER socore 41 | ENTRYPOINT ["/usr/local/bin/docker-entrypoint"] 42 | CMD ["-c", "/usr/share/filebeat/filebeat.yml"] 43 | -------------------------------------------------------------------------------- /so-filebeat/files/docker-entrypoint: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | sleep 30 3 | set -euo pipefail 4 | 5 | # Check if the the user has invoked the image with flags. 6 | # eg. "filebeat -c filebeat.yml" 7 | if [[ -z $1 ]] || [[ ${1:0:1} == '-' ]] ; then 8 | exec filebeat "$@" 9 | else 10 | # They may be looking for a Beat subcommand, like "filebeat setup". 11 | subcommands=$(filebeat help \ 12 | | awk 'BEGIN {RS=""; FS="\n"} /Available Commands:/' \ 13 | | awk '/^\s+/ {print $1}') 14 | 15 | # If we _did_ get a subcommand, pass it to filebeat. 16 | for subcommand in $subcommands; do 17 | if [[ $1 == $subcommand ]]; then 18 | exec filebeat "$@" 19 | fi 20 | done 21 | fi 22 | 23 | # If niether of those worked, then they have specified the binary they want, so 24 | # just do exactly as they say. 25 | exec "$@" 26 | -------------------------------------------------------------------------------- /so-fleet-launcher/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM ubuntu:latest 2 | 3 | WORKDIR /var/launcher 4 | COPY launcher /var/launcher 5 | COPY launcher/src/tools/* /usr/local/bin/ 6 | 7 | #Install the packages we need 8 | RUN apt-get update && \ 9 | apt-get install -y --no-install-recommends \ 10 | gcab \ 11 | msitools \ 12 | ruby \ 13 | ruby-dev \ 14 | rubygems \ 15 | build-essential \ 16 | cpio \ 17 | binutils \ 18 | cpio \ 19 | cabextract \ 20 | rpm && \ 21 | \ 22 | #Install fpm 23 | gem install --no-ri --no-rdoc fpm && \ 24 | \ 25 | #Clean up what we can 26 | apt-get -f -y --auto-remove remove build-essential autoconf libtool && \ 27 | apt-get clean && \ 28 | rm -rf /var/lib/apt/lists/* 29 | 30 | RUN chmod +x /var/launcher/generate-packages.sh 31 | ENTRYPOINT ["/var/launcher/generate-packages.sh"] -------------------------------------------------------------------------------- /so-fleet-launcher/launcher/src/config/launcher-msi.flags: -------------------------------------------------------------------------------- 1 | enroll_secret_path C:\Program Files\Kolide\Launcher-so-launcher\conf\secret 2 | hostname ninja 3 | root_directory C:\Program Files\Kolide\Launcher-so-launcher\data 4 | osqueryd_path C:\Program Files\Kolide\Launcher-so-launcher\bin\osqueryd 5 | -------------------------------------------------------------------------------- /so-fleet-launcher/launcher/src/config/launcher.flags: -------------------------------------------------------------------------------- 1 | enroll_secret_path /etc/so-launcher/secret 2 | hostname ninja 3 | root_directory /var/so-launcher/securityonion 4 | osqueryd_path /usr/local/so-launcher/bin/osqueryd -------------------------------------------------------------------------------- /so-fleet-launcher/launcher/src/config/secret: -------------------------------------------------------------------------------- 1 | 2 | -------------------------------------------------------------------------------- /so-fleet-launcher/launcher/src/packages/launcher.deb: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/Security-Onion-Solutions/securityonion-docker-hh/8888b402d1757c8e4801260c8947ac9f90b15ee6/so-fleet-launcher/launcher/src/packages/launcher.deb -------------------------------------------------------------------------------- /so-fleet-launcher/launcher/src/packages/launcher.msi: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Security-Onion-Solutions/securityonion-docker-hh/8888b402d1757c8e4801260c8947ac9f90b15ee6/so-fleet-launcher/launcher/src/packages/launcher.msi -------------------------------------------------------------------------------- /so-fleet-launcher/launcher/src/packages/launcher.pkg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Security-Onion-Solutions/securityonion-docker-hh/8888b402d1757c8e4801260c8947ac9f90b15ee6/so-fleet-launcher/launcher/src/packages/launcher.pkg -------------------------------------------------------------------------------- /so-fleet-launcher/launcher/src/packages/launcher.rpm: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Security-Onion-Solutions/securityonion-docker-hh/8888b402d1757c8e4801260c8947ac9f90b15ee6/so-fleet-launcher/launcher/src/packages/launcher.rpm -------------------------------------------------------------------------------- /so-fleet-launcher/launcher/src/tools/mkbom: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Security-Onion-Solutions/securityonion-docker-hh/8888b402d1757c8e4801260c8947ac9f90b15ee6/so-fleet-launcher/launcher/src/tools/mkbom -------------------------------------------------------------------------------- /so-fleet-launcher/launcher/src/tools/xar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Security-Onion-Solutions/securityonion-docker-hh/8888b402d1757c8e4801260c8947ac9f90b15ee6/so-fleet-launcher/launcher/src/tools/xar -------------------------------------------------------------------------------- /so-fleet/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM alpine 2 | LABEL maintainer "Security Onion Solutions, LLC" 3 | LABEL description="Fleet running in Docker container for use with Security Onion" 4 | 5 | RUN apk --update add ca-certificates unzip curl 6 | RUN mkdir -p /tmp/fleet && cd /tmp/fleet \ 7 | && curl -OL https://github.com/kolide/fleet/releases/latest/download/fleet.zip \ 8 | && unzip fleet.zip 'linux/*' \ 9 | && cp linux/fleet /usr/bin/fleet \ 10 | && cp linux/fleetctl /usr/bin/fleetctl \ 11 | && cd /tmp && rm -rf /tmp/fleet 12 | 13 | COPY startfleet.sh /startfleet.sh 14 | RUN chmod +x /startfleet.sh 15 | 16 | ENTRYPOINT ["/startfleet.sh"] 17 | -------------------------------------------------------------------------------- /so-fleet/startfleet.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | # Let's initialize the DB if it hasn't been done 4 | /usr/bin/fleet prepare db 5 | /usr/bin/fleet serve 6 | -------------------------------------------------------------------------------- /so-freqserver/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM centos:7 2 | 3 | # Originally developed by Justin 
Henderson justin@hasecuritysolutions.com 4 | LABEL maintainer "Security Onion Solutions, LLC" 5 | LABEL description="Freqserver running in Docker container for use with Security Onion" 6 | 7 | # Create a common centos update layer 8 | RUN yum update -y && \ 9 | yum clean all 10 | 11 | # Create a common python/git layer 12 | RUN yum update -y && \ 13 | yum install -y python3 git && pip3 install six && \ 14 | yum clean all 15 | 16 | # Create user 17 | RUN groupadd --gid 935 freqserver && \ 18 | adduser --uid 935 --gid 935 \ 19 | --home-dir /usr/share/freqserver --no-create-home \ 20 | freqserver 21 | 22 | # Install and set perms in same layer to save space 23 | RUN mkdir -p /opt/freq_server && \ 24 | cd /opt/freq_server && \ 25 | git clone https://github.com/MarkBaggett/freq.git && \ 26 | chown -R freqserver: /opt/freq_server && \ 27 | mkdir /var/log/freq_server && \ 28 | ln -sf /dev/stderr /var/log/freq_server/freq_server.log 29 | 30 | USER freqserver 31 | 32 | EXPOSE 10004 33 | 34 | STOPSIGNAL SIGTERM 35 | 36 | CMD /usr/bin/python3 /opt/freq_server/freq/freq_server.py -s 0 -ip 0.0.0.0 10004 /opt/freq_server/freq/freqtable2018.freq 37 | -------------------------------------------------------------------------------- /so-idstools/Dockerfile: -------------------------------------------------------------------------------- 1 | # Copyright 2014,2015,2016,2017,2018 Security Onion Solutions, LLC 2 | 3 | # This program is free software: you can redistribute it and/or modify 4 | # it under the terms of the GNU General Public License as published by 5 | # the Free Software Foundation, either version 3 of the License, or 6 | # (at your option) any later version. 7 | # 8 | # This program is distributed in the hope that it will be useful, 9 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 10 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 11 | # GNU General Public License for more details. 12 | # 13 | # You should have received a copy of the GNU General Public License 14 | # along with this program. If not, see . 15 | 16 | FROM centos:7 17 | 18 | LABEL maintainer "Security Onion Solutions, LLC" 19 | LABEL description="IDSTools for downloading rules" 20 | 21 | RUN yum update -y && \ 22 | yum clean all 23 | 24 | # Install epel 25 | RUN yum -y install epel-release bash && yum clean all 26 | RUN yum update -y && yum -y install python-idstools \ 27 | && yum clean all && rm -rf /var/cache/yum 28 | 29 | RUN mkdir -p /opt/so/idstools/bin 30 | COPY files/so-idstools.sh /opt/so/idstools/bin 31 | 32 | RUN chmod +x /opt/so/idstools/bin/so-idstools.sh 33 | 34 | # Create socore user. 
35 | RUN groupadd --gid 939 socore && \ 36 | adduser --uid 939 --gid 939 \ 37 | --home-dir /opt/so --no-create-home socore 38 | 39 | ENTRYPOINT ["/opt/so/idstools/bin/so-idstools.sh"] 40 | -------------------------------------------------------------------------------- /so-idstools/README.md: -------------------------------------------------------------------------------- 1 | IDSTOOLS Docker 2 | -------------------------------------------------------------------------------- /so-idstools/files/so-idstools.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | cd /opt/so/idstools/etc && idstools-rulecat 3 | 4 | sleep infinity 5 | -------------------------------------------------------------------------------- /so-influxdb/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM alpine:3.7 2 | LABEL maintainer "Security Onion Solutions, LLC" 3 | LABEL description="InfluxDB running in Docker container for use with Security Onion" 4 | 5 | RUN echo 'hosts: files dns' >> /etc/nsswitch.conf 6 | RUN apk add --no-cache tzdata bash ca-certificates && \ 7 | update-ca-certificates 8 | 9 | ENV INFLUXDB_VERSION 1.7.5 10 | RUN set -ex && \ 11 | apk add --no-cache --virtual .build-deps wget gnupg tar && \ 12 | for key in \ 13 | 05CE15085FC09D18E99EFB22684A14CF2582E0C5 ; \ 14 | do \ 15 | gpg --keyserver ha.pool.sks-keyservers.net --recv-keys "$key" || \ 16 | gpg --keyserver pgp.mit.edu --recv-keys "$key" || \ 17 | gpg --keyserver keyserver.pgp.com --recv-keys "$key" ; \ 18 | done && \ 19 | wget --no-verbose https://dl.influxdata.com/influxdb/releases/influxdb-${INFLUXDB_VERSION}-static_linux_amd64.tar.gz.asc && \ 20 | wget --no-verbose https://dl.influxdata.com/influxdb/releases/influxdb-${INFLUXDB_VERSION}-static_linux_amd64.tar.gz && \ 21 | gpg --batch --verify influxdb-${INFLUXDB_VERSION}-static_linux_amd64.tar.gz.asc influxdb-${INFLUXDB_VERSION}-static_linux_amd64.tar.gz && \ 22 | mkdir -p /usr/src && \ 23 | tar -C /usr/src -xzf influxdb-${INFLUXDB_VERSION}-static_linux_amd64.tar.gz && \ 24 | rm -f /usr/src/influxdb-*/influxdb.conf && \ 25 | chmod +x /usr/src/influxdb-*/* && \ 26 | cp -a /usr/src/influxdb-*/* /usr/bin/ && \ 27 | rm -rf *.tar.gz* /usr/src /root/.gnupg && \ 28 | apk del .build-deps 29 | COPY influxdb.conf /etc/influxdb/influxdb.conf 30 | 31 | EXPOSE 8086 32 | 33 | VOLUME /var/lib/influxdb 34 | 35 | COPY entrypoint.sh /entrypoint.sh 36 | COPY init-influxdb.sh /init-influxdb.sh 37 | RUN chmod +x /entrypoint.sh && chmod +x /init-influxdb.sh 38 | ENTRYPOINT ["/entrypoint.sh"] 39 | CMD ["influxd"] 40 | -------------------------------------------------------------------------------- /so-influxdb/entrypoint.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | set -e 3 | 4 | if [ "${1:0:1}" = '-' ]; then 5 | set -- influxd "$@" 6 | fi 7 | 8 | if [ "$1" = 'influxd' ]; then 9 | /init-influxdb.sh "${@:2}" 10 | fi 11 | 12 | exec "$@" 13 | -------------------------------------------------------------------------------- /so-influxdb/influxdb.conf: -------------------------------------------------------------------------------- 1 | [meta] 2 | dir = "/var/lib/influxdb/meta" 3 | 4 | [data] 5 | dir = "/var/lib/influxdb/data" 6 | engine = "tsm1" 7 | wal-dir = "/var/lib/influxdb/wal" 8 | -------------------------------------------------------------------------------- /so-kibana/dashboards/4f6f3440-6d62-11e7-8ddb-e71eb260f4a3.json: 
-------------------------------------------------------------------------------- 1 | { 2 | "version": "6.7.2", 3 | "objects": [ 4 | { 5 | "id": "4f6f3440-6d62-11e7-8ddb-e71eb260f4a3", 6 | "type": "dashboard", 7 | "updated_at": "2019-09-26T12:53:19.145Z", 8 | "version": "WzQyLDFd", 9 | "attributes": { 10 | "hits": 0, 11 | "timeRestore": false, 12 | "description": "", 13 | "title": "OSSEC", 14 | "uiStateJSON": "{}", 15 | "panelsJSON": "[]", 16 | "optionsJSON": "{\"darkTheme\":false}", 17 | "version": 1, 18 | "kibanaSavedObjectMeta": { 19 | "searchSourceJSON": "{\"filter\":[{\"query\":{\"query_string\":{\"query\":\"*\"}}}],\"highlightAll\":true,\"version\":true}" 20 | } 21 | } 22 | } 23 | ] 24 | } -------------------------------------------------------------------------------- /so-kibana/ssl/kibana.example.org.crt: -------------------------------------------------------------------------------- 1 | -----BEGIN CERTIFICATE----- 2 | MIID/TCCAuWgAwIBAgIJALVJyrUvH8uGMA0GCSqGSIb3DQEBCwUAMIGUMQswCQYD 3 | VQQGEwJBVTERMA8GA1UECAwIVmljdG9yaWExEjAQBgNVBAcMCU1lbGJvdXJuZTEd 4 | MBsGA1UECgwURXhhbXBsZSBPcmdhbml6YXRpb24xGzAZBgNVBAMMEmtpYmFuYS5l 5 | eGFtcGxlLm9yZzEiMCAGCSqGSIb3DQEJARYTZXhhbXBsZUBleGFtcGxlLm9yZzAe 6 | Fw0xNjA5MTgwMzU4NTNaFw0yNjA5MTYwMzU4NTNaMIGUMQswCQYDVQQGEwJBVTER 7 | MA8GA1UECAwIVmljdG9yaWExEjAQBgNVBAcMCU1lbGJvdXJuZTEdMBsGA1UECgwU 8 | RXhhbXBsZSBPcmdhbml6YXRpb24xGzAZBgNVBAMMEmtpYmFuYS5leGFtcGxlLm9y 9 | ZzEiMCAGCSqGSIb3DQEJARYTZXhhbXBsZUBleGFtcGxlLm9yZzCCASIwDQYJKoZI 10 | hvcNAQEBBQADggEPADCCAQoCggEBAK3AKz7Jq8CGR75NcbuUKiLTJhSS0/TBGA9M 11 | k+OmradT5qWhFKbxUDoMwT4YpUTzkUGUzkGEVbUsrKK4XaYXldRoHOm8mS0aBErk 12 | g2ffmpi+TiGk5LHykZ0avmYeEsqVESAjVEMuU3fWJzZd4NfUcTKsBA7Ccqfnb3CB 13 | TS5fbAz/zw1K//XR5ZNCEBOsFD+0oFGKTNLCeMBCwccFNyVI2mNZpDA++HilNiQj 14 | TEJytBXGcMVZaS+Tc0vENfVWHAAtVdk8+dd6jkpNnDhK1W1TB7HvhScewyEGh9yb 15 | cK5asE8TBebEvUIrPaVFz+ef/o3lRev0Gq1QUYAoaG7ps9JlDcsCAwEAAaNQME4w 16 | HQYDVR0OBBYEFIKSLrYMY9fXpCS7OGnyxhS7JzRrMB8GA1UdIwQYMBaAFIKSLrYM 17 | Y9fXpCS7OGnyxhS7JzRrMAwGA1UdEwQFMAMBAf8wDQYJKoZIhvcNAQELBQADggEB 18 | ABiu7+GH8WGNPqPUUHP17HHYK41xAyYoKSzCrELKmFLphYhnUQA6EeBY8rjYr3X0 19 | Z2O0F9J6Cik/kG/uBYW0Oi8PQ0EeSZwzvd+CsAc2EJAryOBpafWRXa2XrOtFkKjq 20 | jKWHSa0QJsBALiu8+JVfcwXYshbhmdVBxrktxMoY9WpGI7EcZkf77SW5enY4LtSF 21 | wuRr2lv29YADsa/Q7JV2I4oAbWUjW3513mR5JsefHPk7hXZbRCXqUxFTmWpnDomF 22 | HDeEDAdSgMgzODnbU/qHI9tmP6iDaCTz/eVfyCMKGYA0oIX1j5GdJ6HXK1OHafJK 23 | W/M9gDRhQKU0bTXC2CoEOAs= 24 | -----END CERTIFICATE----- 25 | -------------------------------------------------------------------------------- /so-kibana/ssl/kibana.example.org.key: -------------------------------------------------------------------------------- 1 | -----BEGIN PRIVATE KEY----- 2 | MIIEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQCtwCs+yavAhke+ 3 | TXG7lCoi0yYUktP0wRgPTJPjpq2nU+aloRSm8VA6DME+GKVE85FBlM5BhFW1LKyi 4 | uF2mF5XUaBzpvJktGgRK5INn35qYvk4hpOSx8pGdGr5mHhLKlREgI1RDLlN31ic2 5 | XeDX1HEyrAQOwnKn529wgU0uX2wM/88NSv/10eWTQhATrBQ/tKBRikzSwnjAQsHH 6 | BTclSNpjWaQwPvh4pTYkI0xCcrQVxnDFWWkvk3NLxDX1VhwALVXZPPnXeo5KTZw4 7 | StVtUwex74UnHsMhBofcm3CuWrBPEwXmxL1CKz2lRc/nn/6N5UXr9BqtUFGAKGhu 8 | 6bPSZQ3LAgMBAAECggEAKE5S+d7ItAGydOf8QTpux+NZix5Agx+kGDB4gi/xSsbA 9 | 051ZcBy8sqqwzVnkROmHwcHmUvaC92NkrN6+AsCn5j08r9ArbHYDlugJv7f0YgiL 10 | kWxyDQGrW9mPX5sMWuhXr6/iZS9C96K7N/ZxKUBD/jA/RLlA5chfZqboI6DJLP5s 11 | u03ZW4toaGItLZtPxF07eElE4MGG/TbkoBZ5t79Y98TJP2oO95IpPQa8omqQcDEM 12 | w0DsbV/dJV9AtyZBfmIRNaG5beiIUH26OQzUJP7FP2eIN4F/CAXLdlMnHtQA1XJz 13 | QRwMxbfBQ+JfKD7YWedqivrjR0iPMx80M4HSU9DFaQKBgQDa+7F7zDRy4WU0GtfP 14 | 
g+ArDUMjQv5pTXA0b2hKjHJRXM3LVo5qNJYWlirHtPjkVL4BJSY+iVl9/lfV2Xrf 15 | gDXQLcUhABx5HNzZ5p+NyDHWrpZamFkfw9wLwQHXZGG8I+dla/iToHRWEH2QYxdz 16 | VvfntpyE7XbxzhPPiS5haHGerwKBgQDLHxPn/jm2dSHa6jAidJXFJCMblcDuwIBy 17 | 7Cm7vnRro9Mbx/hZkRPXYWHQ/4WEVlWO9Sgkf4ambPSmdFg8ksQwBBd+l3K5K4Un 18 | 2AIL/PqgZg+anoEYYn1sqiWSc+YiO2GiRaHh5wOWtekEBBH9HcYkmKIXm3JHVdSw 19 | SpQsryxppQKBgDi7qqDLJ+93a7PTHIRbwzEEL5esHKOFd4NuNZPSzS89HugGVcE4 20 | PIY/g3fX7/+f4NYFHOJozTGyOmlh/c53tfloUkEZeUb7blBZyo5+BkN7WeLAw55S 21 | LOkyqNp33EZU+vcwwObmVB2UCiPlOEceif2TNvvNVdqRrFpBTS3ZVE5rAoGAJivk 22 | 9cgU2HESt60i9paqPq4X2us9oqCSgwZWoW2dO01CMwpVZZ+Z9vPaCgi68q+2zHLN 23 | 5G4Cw+vd3honKtr5+3wJXkTfzmSbVW2GlPIpt1L2w7vdztNTdsRS4z7clLpMEs67 24 | KVTcm8n7zEFnkAW0YtQ8Qet4gmdi3O5nAi65TfECgYEA0Pw2X4R37IrYM7tFDJSz 25 | zDF4mRmk2qfhHBd8IaIDBpsXSgLcVRduro4HBzf/JeCt8rvw3VMLEFyqwyszpHro 26 | xNnqPvbMPMCtOvUcr1VtXnO7CzaFNCJrfjYdV8vmDgvbYmBEHzG+LGbnVZIBUhQa 27 | igVjtqU1uT83+VpR703IcBI= 28 | -----END PRIVATE KEY----- 29 | -------------------------------------------------------------------------------- /so-logstash/bin/docker-entrypoint: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | if [[ -z $1 ]] || [[ ${1:0:1} == '-' ]] ; then 4 | exec logstash $@ 5 | else 6 | exec $@ 7 | fi 8 | -------------------------------------------------------------------------------- /so-logstash/bin/docker-entrypoint.old: -------------------------------------------------------------------------------- 1 | #!/bin/bash -e 2 | 3 | . /usr/share/logstash/rulesets 4 | 5 | rsync --update -raz /usr/share/logstash/pipeline.so/ /usr/share/logstash/pipeline/ 6 | 7 | if [[ $FREQ == 1 ]]; then 8 | rsync --update -raz /usr/share/logstash/pipeline.freq/ /usr/share/logstash/pipeline/ 9 | fi 10 | if [[ $DSTATS == 1 ]]; then 11 | rsync --update -raz /usr/share/logstash/pipeline.dstats/ /usr/share/logstash/pipeline/ 12 | fi 13 | 14 | if [[ -z $1 ]] || [[ ${1:0:1} == '-' ]] ; then 15 | exec logstash $@ 16 | else 17 | exec $@ 18 | fi 19 | -------------------------------------------------------------------------------- /so-logstash/config/log4j2.properties: -------------------------------------------------------------------------------- 1 | status = error 2 | name = LogstashPropertiesConfig 3 | 4 | appender.console.type = Console 5 | appender.console.name = plain_console 6 | appender.console.layout.type = PatternLayout 7 | appender.console.layout.pattern = [%d{ISO8601}][%-5p][%-25c] %m%n 8 | 9 | appender.json_console.type = Console 10 | appender.json_console.name = json_console 11 | appender.json_console.layout.type = JSONLayout 12 | appender.json_console.layout.compact = true 13 | appender.json_console.layout.eventEol = true 14 | 15 | rootLogger.level = ${sys:ls.log.level} 16 | rootLogger.appenderRef.console.ref = ${sys:ls.log.format}_console 17 | -------------------------------------------------------------------------------- /so-logstash/config/logstash.yml: -------------------------------------------------------------------------------- 1 | http.host: "0.0.0.0" 2 | path.config: /usr/share/logstash/pipeline 3 | -------------------------------------------------------------------------------- /so-logstash/files/conf.d.so/0000_input_syslogng.conf: -------------------------------------------------------------------------------- 1 | # Original Author: Justin Henderson 2 | # SANS Instructor and author of SANS SEC555: SIEM and Tactical Analytics 3 | # Updated by: Doug Burks 4 | # Last Update: 5/15/2017 5 | 6 | input { 7 | tcp { 8 | port => 6050 9 
| codec => json 10 | tags => "syslogng" 11 | } 12 | } 13 | filter { 14 | if "syslogng" in [tags] { 15 | mutate { 16 | #add_tag => [ "conf_file_0000"] 17 | } 18 | } 19 | } 20 | -------------------------------------------------------------------------------- /so-logstash/files/conf.d.so/0001_input_json.conf: -------------------------------------------------------------------------------- 1 | # Author: Justin Henderson 2 | # SANS Instructor and author of SANS SEC555: SIEM and Tactical Analytics 3 | # Email: justin@hasecuritysolution.com 4 | # Last Update: 12/9/2016 5 | 6 | input { 7 | tcp { 8 | port => 6051 9 | codec => json 10 | tags => [ "json" ] 11 | } 12 | } 13 | filter { 14 | if "json" in [tags] { 15 | mutate { 16 | #add_tag => [ "conf_file_0001"] 17 | } 18 | } 19 | } -------------------------------------------------------------------------------- /so-logstash/files/conf.d.so/0002_input_windows_json.conf: -------------------------------------------------------------------------------- 1 | # Author: Justin Henderson 2 | # SANS Instructor and author of SANS SEC555: SIEM and Tactical Analytics 3 | # Email: justin@hasecuritysolution.com 4 | # Last Update: 12/9/2016 5 | 6 | input { 7 | tcp { 8 | port => 6052 9 | type => "windows" 10 | tags => [ "json" ] 11 | codec => json { 12 | charset => "CP1252" 13 | } 14 | } 15 | } 16 | filter { 17 | if [type] == "windows" { 18 | mutate { 19 | #add_tag => [ "conf_file_0002"] 20 | } 21 | } 22 | } -------------------------------------------------------------------------------- /so-logstash/files/conf.d.so/0003_input_syslog.conf: -------------------------------------------------------------------------------- 1 | # Original Author: Justin Henderson 2 | # SANS Instructor and author of SANS SEC555: SIEM and Tactical Analytics 3 | # Updated by: Doug Burks 4 | # Last Update: 5/15/2017 5 | 6 | #input { 7 | # udp { 8 | # port => 1514 9 | # tags => "syslog" 10 | # } 11 | #} 12 | #filter { 13 | # if "syslog" in [tags] { 14 | # mutate { 15 | # #add_tag => [ "conf_file_0003"] 16 | # } 17 | # } 18 | #} 19 | -------------------------------------------------------------------------------- /so-logstash/files/conf.d.so/0005_input_suricata.conf: -------------------------------------------------------------------------------- 1 | # Author: Justin Henderson 2 | # SANS Instructor and author of SANS SEC555: SIEM and Tactical Analytics 3 | # Email: justin@hasecuritysolution.com 4 | # Last Update: 12/9/2016 5 | 6 | input { 7 | tcp { 8 | port => 6053 9 | codec => json 10 | type => "suricata" 11 | } 12 | } 13 | filter { 14 | if [type] == "suricata" { 15 | mutate { 16 | #add_tag => [ "conf_file_0005"] 17 | } 18 | } 19 | } -------------------------------------------------------------------------------- /so-logstash/files/conf.d.so/0006_input_beats.conf: -------------------------------------------------------------------------------- 1 | # Author: Justin Henderson 2 | # SANS Instructor and author of SANS SEC555: SIEM and Tactical Analytics 3 | # Email: justin@hasecuritysolutions.com 4 | # Last Update: 12/11/2017 5 | 6 | input { 7 | beats { 8 | port => "5044" 9 | tags => [ "beat" ] 10 | } 11 | } 12 | -------------------------------------------------------------------------------- /so-logstash/files/conf.d.so/1000_preprocess_log_elapsed.conf: -------------------------------------------------------------------------------- 1 | # Author: Justin Henderson 2 | # SANS Instructor and author of SANS SEC555: SIEM and Tactical Analytics 3 | # Email: justin@hasecuritysolution.com 4 | # Last 
Update: 12/9/2016 5 | 6 | filter { 7 | ruby { 8 | code => "event.set('task_start', Time.now.to_f)" 9 | } 10 | mutate { 11 | #add_tag => [ "conf_file_1000"] 12 | } 13 | } 14 | -------------------------------------------------------------------------------- /so-logstash/files/conf.d.so/1001_preprocess_syslogng.conf: -------------------------------------------------------------------------------- 1 | # Updated by: Doug Burks and Wes Lambert 2 | # Last Update: 10/30/2018 3 | 4 | filter { 5 | if "syslogng" in [tags] { 6 | mutate { 7 | rename => { "MESSAGE" => "message" } 8 | rename => { "PROGRAM" => "type" } 9 | rename => { "FACILITY" => "syslog-facility" } 10 | rename => { "FILE_NAME" => "syslog-file_name" } 11 | rename => { "HOST" => "syslog-host" } 12 | rename => { "HOST_FROM" => "syslog-host_from" } 13 | rename => { "LEGACY_MSGHDR" => "syslog-legacy_msghdr" } 14 | rename => { "PID" => "syslog-pid" } 15 | rename => { "PRIORITY" => "syslog-priority" } 16 | rename => { "SOURCEIP" => "syslog-sourceip" } 17 | rename => { "TAGS" => "syslog-tags" } 18 | lowercase => [ "syslog-host_from" ] 19 | remove_field => [ "ISODATE" ] 20 | remove_field => [ "SEQNUM" ] 21 | #add_tag => [ "conf_file_1001"] 22 | } 23 | if "bro_" in [type] { 24 | mutate { 25 | add_tag => [ "bro" ] 26 | } 27 | } else if [type] !~ /ossec.*|snort/ and "firewall" not in [tags] { 28 | mutate { 29 | add_tag => [ "syslog" ] 30 | } 31 | } 32 | } 33 | } 34 | -------------------------------------------------------------------------------- /so-logstash/files/conf.d.so/1002_preprocess_json.conf: -------------------------------------------------------------------------------- 1 | # Author: Justin Henderson 2 | # SANS Instructor and author of SANS SEC555: SIEM and Tactical Analytics 3 | # Email: justin@hasecuritysolution.com 4 | # Last Update: 12/9/2016 5 | 6 | filter { 7 | if "json" in [tags]{ 8 | json { 9 | source => "message" 10 | } 11 | mutate { 12 | remove_tag => [ "json" ] 13 | } 14 | mutate { 15 | #add_tag => [ "conf_file_1002"] 16 | } 17 | } 18 | } 19 | -------------------------------------------------------------------------------- /so-logstash/files/conf.d.so/1003_preprocess_bro.conf: -------------------------------------------------------------------------------- 1 | # Author: Justin Henderson 2 | # SANS Instructor and author of SANS SEC555: SIEM and Tactical Analytics 3 | # Updated by: Doug Burks 4 | # Last Update: 5/15/2017 5 | 6 | filter { 7 | if "bro" in [tags] { 8 | # If a log comes in with a message starting with # then drop it as it doesn't 9 | # contain anything and is the header of a rotated bro log 10 | if [message] =~ /^#/ { 11 | drop { } 12 | } else { 13 | # Replace the host field with the host found in the bro log 14 | if [bro_host] { 15 | # mutate { 16 | # replace => [ "host", "%{bro_host}" ] 17 | # } 18 | } 19 | } 20 | mutate { 21 | #add_tag => [ "conf_file_1003"] 22 | } 23 | } 24 | } 25 | -------------------------------------------------------------------------------- /so-logstash/files/conf.d.so/1004_preprocess_syslog_types.conf: -------------------------------------------------------------------------------- 1 | filter { 2 | if "syslog" in [tags] { 3 | if [host] == "172.16.1.1" { 4 | mutate { 5 | add_field => { "type" => "fortinet" } 6 | add_tag => [ "firewall" ] 7 | } 8 | } 9 | if [host] == "10.0.0.101" { 10 | mutate { 11 | add_field => { "type" => "brocade" } 12 | add_tag => [ "switch" ] 13 | } 14 | } 15 | mutate { 16 | #add_tag => [ "conf_file_1004"] 17 | } 18 | } 19 | } 20 |
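The 1004_preprocess_syslog_types.conf file above is where raw syslog senders get classified: each host match sets a type and adds a tag that the later, type-specific filters key on (1029 matches on esxi, 1030 on greensql, and so on). A minimal sketch of adding another device follows; the address 10.0.0.50 and the ciscoasa type are hypothetical placeholders used only for illustration, not part of the shipped configuration.

filter {
  if "syslog" in [tags] {
    # Hypothetical example: classify a firewall that sends syslog from 10.0.0.50
    if [host] == "10.0.0.50" {
      mutate {
        add_field => { "type" => "ciscoasa" }
        add_tag => [ "firewall" ]
      }
    }
  }
}

Any later filter can then select these events with if [type] == "ciscoasa", following the same pattern the esxi and greensql filters use.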
-------------------------------------------------------------------------------- /so-logstash/files/conf.d.so/1029_preprocess_esxi.conf: -------------------------------------------------------------------------------- 1 | # Author: Justin Henderson 2 | # SANS Instructor and author of SANS SEC555: SIEM and Tactical Analytics 3 | # Email: justin@hasecuritysolution.com 4 | # Last Update: 12/9/2016 5 | # 6 | # This configuration file takes ESXi syslog messages and filters them. There is no input as the logs would have came in via syslog 7 | filter { 8 | # This is an example of using an IP address range to classify a syslog message to a specific type of log 9 | # This is helpful as so many devices only send logs via syslog 10 | if [host] =~ "10\.[0-1]\.9\." { 11 | mutate { 12 | replace => ["type", "esxi"] 13 | } 14 | } 15 | if [host] =~ "\.234$" { 16 | mutate { 17 | replace => ["type", "esxi"] 18 | } 19 | } 20 | if [type] == "esxi" { 21 | grok { 22 | match => { "message" => "(?:%{SYSLOGTIMESTAMP:timestamp}|%{TIMESTAMP_ISO8601:timestamp8601}) (?:%{SYSLOGHOST:logsource}) (?:%{SYSLOGPROG}): (?(?:\[(?[0-9A-Z]{8,8}) %{DATA:esxi_loglevel} \'%{DATA:esxi_service}\'\] %{GREEDYDATA:esxi_message}|%{GREEDYDATA}))"} 23 | 24 | # pattern => ['(?:%{SYSLOGTIMESTAMP:timestamp}|%{TIMESTAMP_ISO8601:timestamp8601}) (?:%{SYSLOGHOST:logsource}) (?:%{SYSLOGPROG}): (?(?:\[(?[0-9A-Z]{8,8}) %{DATA:esxi_loglevel} \'%{DATA:esxi_service}\'\] %{GREEDYDATA:esxi_message}|%{GREEDYDATA}))'] 25 | } 26 | mutate { 27 | #add_tag => [ "conf_file_1029"] 28 | } 29 | } 30 | } 31 | 32 | -------------------------------------------------------------------------------- /so-logstash/files/conf.d.so/1030_preprocess_greensql.conf: -------------------------------------------------------------------------------- 1 | # Author: Justin Henderson 2 | # SANS Instructor and author of SANS SEC555: SIEM and Tactical Analytics 3 | # Email: justin@hasecuritysolution.com 4 | # Last Update: 12/9/2016 5 | 6 | filter { 7 | if [type] == "greensql" { 8 | # This section is parsing out the fields for GreenSQL syslog data 9 | grok { 10 | match => { "message" => "<%{INT:Code}>%{DATA:Category}\[%{INT:Transcation}\]:\s*Database=%{DATA:Database}\sUser=%{DATA:UserName}\sApplication Name=%{DATA:Application}\sSource IP=%{IPV4:SrcIp}\sSource Port=%{INT:SrcPort}\sTarget IP=?%{IPV4:DstIp}\sTarget Port=%{DATA:DstPort}\sQuery=%{GREEDYDATA:Query}"} 11 | match => { "message" => "<%{INT:Code}>%{DATA:Category}\[%{INT:Transcation}\]:\sAdmin_Name=%{DATA:UserName}\sIP_Address=%{IPV4:SrcIp}\sUser_Agent=%{DATA:UserAgent}\sMessage=%{DATA:StatusMessage}\sDescription=%{DATA:Description}\sSeverity=%{GREEDYDATA:Severity}"} 12 | } 13 | # Remove the message field as it is unnecessary 14 | #mutate { 15 | # remove_field => [ "message"] 16 | #} 17 | mutate { 18 | #add_tag => [ "conf_file_1030"] 19 | } 20 | } 21 | } 22 | -------------------------------------------------------------------------------- /so-logstash/files/conf.d.so/1031_preprocess_iis.conf: -------------------------------------------------------------------------------- 1 | # Author: Justin Henderson 2 | # SANS Instructor and author of SANS SEC555: SIEM and Tactical Analytics 3 | # Email: justin@hasecuritysolution.com 4 | # Last Update: 12/9/2016 5 | 6 | filter { 7 | if [type] == "iis" { 8 | # The log is expected to have come from NXLog and in JSON format. 
This allows for automatic parsing of fields 9 | json { 10 | source => "message" 11 | } 12 | # This removes the message field as it is unnecessary and tags the packet as web 13 | mutate { 14 | # remove_field => [ "message"] 15 | add_tag => [ "web" ] 16 | } 17 | mutate { 18 | #add_tag => [ "conf_file_1031"] 19 | } 20 | } 21 | } 22 | -------------------------------------------------------------------------------- /so-logstash/files/conf.d.so/1032_preprocess_mcafee.conf: -------------------------------------------------------------------------------- 1 | # Author: Justin Henderson 2 | # SANS Instructor and author of SANS SEC555: SIEM and Tactical Analytics 3 | # Email: justin@hasecuritysolution.com 4 | # Last Update: 12/9/2016 5 | # 6 | # This file looks for McAfee EPO logs 7 | filter { 8 | if [type] == "mcafee" { 9 | # NXLog should be sending the logs in JSON format so they auto parse 10 | json { 11 | source => "message" 12 | } 13 | # This section converts the UTC fields to the proper time format 14 | date { 15 | match => [ "ReceivedUTC", "YYYY-MM-dd HH:mm:ss" ] 16 | target => [ "ReceivedUTC" ] 17 | } 18 | date { 19 | match => [ "DetectedUTC", "YYYY-MM-dd HH:mm:ss" ] 20 | target => [ "DetectedUTC" ] 21 | } 22 | mutate { 23 | #add_tag => [ "conf_file_1032"] 24 | } 25 | } 26 | } 27 | -------------------------------------------------------------------------------- /so-logstash/files/conf.d.so/1034_preprocess_syslog.conf: -------------------------------------------------------------------------------- 1 | # Author: Justin Henderson 2 | # SANS Instructor and author of SANS SEC555: SIEM and Tactical Analytics 3 | # Updated by: Doug Burks 4 | # Last Update: 5/22/2017 5 | 6 | filter { 7 | if [type] == "syslog" { 8 | # This drops syslog messages regarding licenses. It is commented out by default; uncomment it if you want that behavior. 9 | #if [message] =~ "license" { 10 | # drop { } 11 | #} 12 | mutate { 13 | #convert => [ "status_code", "integer" ] 14 | } 15 | } 16 | } 17 | -------------------------------------------------------------------------------- /so-logstash/files/conf.d.so/1103_preprocess_bro_dpd.conf: -------------------------------------------------------------------------------- 1 | # Author: Justin Henderson 2 | # SANS Instructor and author of SANS SEC555: SIEM and Tactical Analytics 3 | # Updated by: Doug Burks 4 | # Last Update: 2/7/2018 5 | # 6 | # This conf file is based on accepting logs for dpd.log from Bro systems 7 | filter { 8 | if [type] == "bro_dpd" { 9 | # If message looks like json, try to parse it as such. Otherwise, fall back to csv or grok.
10 | if [message] =~ /^{.*}$/ { 11 | json { 12 | source => "message" 13 | } 14 | 15 | mutate { 16 | rename => { "ts" => "timestamp" } 17 | #uid 18 | rename => { "id.orig_h" => "source_ip" } 19 | rename => { "id.orig_p" => "source_port" } 20 | rename => { "id.resp_h" => "destination_ip" } 21 | rename => { "id.resp_p" => "destination_port" } 22 | rename => { "proto" => "protocol" } 23 | #analyzer 24 | #failure_reason 25 | } 26 | } else { 27 | 28 | mutate { 29 | gsub => [ "message", "[\"']", "" ] 30 | } 31 | csv { 32 | columns => ["timestamp","uid","source_ip","source_port","destination_ip","destination_port","protocol","analyzer","failure_reason"] 33 | separator => " " 34 | } 35 | } 36 | 37 | mutate { 38 | #add_tag => [ "conf_file_1103"] 39 | } 40 | 41 | } 42 | } 43 | -------------------------------------------------------------------------------- /so-logstash/files/conf.d.so/1104_preprocess_bro_files.conf: -------------------------------------------------------------------------------- 1 | # Author: Justin Henderson 2 | # SANS Instructor and author of SANS SEC555: SIEM and Tactical Analytics 3 | # Updated by: Doug Burks 4 | # Last Update: 2/7/2018 5 | # 6 | # This conf file is based on accepting logs for files.log from Bro systems 7 | filter { 8 | if [type] == "bro_files" { 9 | # If message looks like json, try to parse it as such. Otherwise, fall back to csv or grok. 10 | if [message] =~ /^{.*}$/ { 11 | json { 12 | source => "message" 13 | } 14 | 15 | mutate { 16 | rename => { "ts" => "timestamp" } 17 | #fuid 18 | rename => { "tx_hosts" => "file_ip" } 19 | rename => { "rx_hosts" => "destination_ip" } 20 | rename => { "conn_uids" => "connection_uids" } 21 | #source field 22 | #depth field 23 | rename => { "analyzers" => "analyzer" } 24 | rename => { "mime_type" => "mimetype" } 25 | rename => { "filename" => "file_name" } 26 | #duration 27 | #local_orig 28 | #is_orig 29 | #seen_bytes 30 | #total_bytes 31 | #missing_bytes 32 | #overflow_bytes 33 | rename => { "timedout" => "timed_out" } 34 | #parent_fuid 35 | #md5 36 | #sha1 37 | #sha256 38 | #extracted 39 | #extracted_cutoff 40 | #extracted_size 41 | } 42 | } else { 43 | 44 | csv { 45 | columns => ["timestamp","fuid","file_ip","destination_ip","connection_uids","source","depth","analyzer","mimetype","file_name","duration","local_orig","is_orig","seen_bytes","total_bytes","missing_bytes","overflow_bytes","timed_out","parent_fuid","md5","sha1","sha256","extracted","extracted_cutoff","extracted_size"] 46 | separator => " " 47 | } 48 | } 49 | 50 | mutate { 51 | #add_tag => [ "conf_file_1104"] 52 | } 53 | } 54 | } 55 | -------------------------------------------------------------------------------- /so-logstash/files/conf.d.so/1105_preprocess_bro_ftp.conf: -------------------------------------------------------------------------------- 1 | # Original Author: Justin Henderson 2 | # SANS Instructor and author of SANS SEC555: SIEM and Tactical Analytics 3 | # Updated by: Doug Burks 4 | # Last Update: 2/7/2018 5 | # 6 | # This conf file is based on accepting logs for ftp.log from Bro systems 7 | filter { 8 | if [type] == "bro_ftp" { 9 | # If message looks like json, try to parse it as such. Otherwise, fall back to csv or grok. 
10 | if [message] =~ /^{.*}$/ { 11 | json { 12 | source => "message" 13 | } 14 | 15 | mutate { 16 | rename => { "ts" => "timestamp" } 17 | #uid 18 | rename => { "id.orig_h" => "source_ip" } 19 | rename => { "id.orig_p" => "source_port" } 20 | rename => { "id.resp_h" => "destination_ip" } 21 | rename => { "id.resp_p" => "destination_port" } 22 | rename => { "user" => "username" } 23 | #password 24 | rename => { "command" => "ftp_command" } 25 | rename => { "arg" => "ftp_argument" } 26 | rename => { "mime_type" => "mimetype" } 27 | #file_size 28 | #reply_code 29 | rename => { "reply_msg" => "reply_message" } 30 | rename => { "data_channel.passive" => "data_channel_passive" } 31 | rename => { "data_channel.orig_h" => "data_channel_source_ip" } 32 | rename => { "data_channel.resp_h" => "data_channel_destination_ip" } 33 | rename => { "data_channel.resp_p" => "data_channel_destination_port" } 34 | #fuid 35 | } 36 | 37 | mutate { 38 | convert => { "reply" => "string" } 39 | } 40 | 41 | } else { 42 | 43 | mutate { 44 | gsub => [ "message", "[\"']", "" ] 45 | } 46 | csv { 47 | columns => ["timestamp","uid","source_ip","source_port","destination_ip","destination_port","username","password","ftp_command","ftp_argument","mimetype","file_size","reply_code","reply_message","data_channel_passive","data_channel_source_ip","data_channel_destination_ip","data_channel_destination_port","fuid"] 48 | separator => " " 49 | } 50 | } 51 | 52 | mutate { 53 | #add_tag => [ "conf_file_1105"] 54 | } 55 | } 56 | } 57 | -------------------------------------------------------------------------------- /so-logstash/files/conf.d.so/1107_preprocess_bro_irc.conf: -------------------------------------------------------------------------------- 1 | # Author: Justin Henderson 2 | # SANS Instructor and author of SANS SEC555: SIEM and Tactical Analytics 3 | # Updated by: Doug Burks 4 | # Last Update: 2/7/2018 5 | # 6 | # This conf file is based on accepting logs for irc.log from Bro systems 7 | filter { 8 | if [type] == "bro_irc" { 9 | # If message looks like json, try to parse it as such. Otherwise, fall back to csv or grok. 
10 | if [message] =~ /^{.*}$/ { 11 | json { 12 | source => "message" 13 | } 14 | 15 | mutate { 16 | rename => { "ts" => "timestamp" } 17 | #uid 18 | rename => { "id.orig_h" => "source_ip" } 19 | rename => { "id.orig_p" => "source_port" } 20 | rename => { "id.resp_h" => "destination_ip" } 21 | rename => { "id.resp_p" => "destination_port" } 22 | #nick 23 | rename => { "user" => "irc_username" } 24 | rename => { "command" => "irc_command" } 25 | #value 26 | rename => { "addl" => "additional_info" } 27 | #dcc_file_name 28 | #dcc_file_size 29 | #dcc_mime_type 30 | #fuid 31 | } 32 | } else { 33 | mutate { 34 | gsub => [ "message", "[\"']", "" ] 35 | } 36 | csv { 37 | columns => ["timestamp","uid","source_ip","source_port","destination_ip","destination_port","nick","irc_username","irc_command","value","additional_info","dcc_file_name","dcc_file_size","dcc_mime_type","fuid"] 38 | separator => " " 39 | } 40 | } 41 | 42 | mutate { 43 | #add_tag => [ "conf_file_1107"] 44 | } 45 | } 46 | } 47 | -------------------------------------------------------------------------------- /so-logstash/files/conf.d.so/1111_preprocess_bro_signatures.conf: -------------------------------------------------------------------------------- 1 | # Author: Justin Henderson 2 | # SANS Instructor and author of SANS SEC555: SIEM and Tactical Analytics 3 | # Updated by: Doug Burks 4 | # Last Update: 2/7/2018 5 | # 6 | # This conf file is based on accepting logs for signatures.log from Bro systems 7 | filter { 8 | if [type] == "bro_signatures" { 9 | # If message looks like json, try to parse it as such. Otherwise, fall back to csv or grok. 10 | if [message] =~ /^{.*}$/ { 11 | json { 12 | source => "message" 13 | } 14 | 15 | mutate { 16 | rename => { "ts" => "timestamp" } 17 | #uid 18 | rename => { "id.orig_h" => "source_ip" } 19 | rename => { "id.orig_p" => "source_port" } 20 | rename => { "id.resp_h" => "destination_ip" } 21 | rename => { "id.resp_p" => "destination_port" } 22 | #note 23 | rename => { "sig_id" => "signature_id" } 24 | rename => { "event_msg" => "event_message" } 25 | rename => { "sub_msg" => "sub_message" } 26 | rename => { "sig_count" => "signature_count" } 27 | #host_count 28 | } 29 | } else { 30 | mutate { 31 | gsub => [ "message", "[\"']", "" ] 32 | } 33 | csv { 34 | columns => ["timestamp","uid","source_ip","source_port","destination_ip","destination_port","note","signature_id","event_message","sub_message","signature_count","host_count"] 35 | separator => " " 36 | } 37 | } 38 | 39 | mutate { 40 | #add_tag => [ "conf_file_1111"] 41 | } 42 | } 43 | } 44 | -------------------------------------------------------------------------------- /so-logstash/files/conf.d.so/1113_preprocess_bro_snmp.conf: -------------------------------------------------------------------------------- 1 | # Author: Justin Henderson 2 | # SANS Instructor and author of SANS SEC555: SIEM and Tactical Analytics 3 | # Updated by: Doug Burks 4 | # Last Update: 2/7/2018 5 | # 6 | # This conf file is based on accepting logs for snmp.log from Bro systems 7 | filter { 8 | if [type] == "bro_snmp" { 9 | # If message looks like json, try to parse it as such. Otherwise, fall back to csv or grok. 
10 | if [message] =~ /^{.*}$/ { 11 | json { 12 | source => "message" 13 | } 14 | 15 | mutate { 16 | rename => { "ts" => "timestamp" } 17 | #uid 18 | rename => { "id.orig_h" => "source_ip" } 19 | rename => { "id.orig_p" => "source_port" } 20 | rename => { "id.resp_h" => "destination_ip" } 21 | rename => { "id.resp_p" => "destination_port" } 22 | #duration 23 | #version 24 | #convert => { "version" => "string" } 25 | #community 26 | #get_requests 27 | #get_bulk_requests 28 | #get_responses 29 | #set_requests 30 | #display_string 31 | #up_since 32 | } 33 | } else { 34 | mutate { 35 | gsub => [ "message", "[\"']", "" ] 36 | } 37 | csv { 38 | columns => ["timestamp","uid","source_ip","source_port","destination_ip","destination_port","duration","version","community","get_requests","get_bulk_requests","get_responses","set_requests","display_string","up_since"] 39 | separator => " " 40 | } 41 | } 42 | 43 | mutate { 44 | #add_tag => [ "conf_file_1113"] 45 | } 46 | } 47 | } 48 | -------------------------------------------------------------------------------- /so-logstash/files/conf.d.so/1114_preprocess_bro_software.conf: -------------------------------------------------------------------------------- 1 | # Author: Justin Henderson 2 | # SANS Instructor and author of SANS SEC555: SIEM and Tactical Analytics 3 | # Updated by: Doug Burks 4 | # Last Update: 2/7/2018 5 | # 6 | # This conf file is based on accepting logs for software.log from Bro systems 7 | filter { 8 | if [type] == "bro_software" { 9 | # If message looks like json, try to parse it as such. Otherwise, fall back to csv or grok. 10 | if [message] =~ /^{.*}$/ { 11 | json { 12 | source => "message" 13 | } 14 | 15 | mutate { 16 | rename => { "ts" => "timestamp" } 17 | #uid 18 | rename => { "host" => "source_ip" } 19 | rename => { "host_p" => "source_port" } 20 | #software_type 21 | #name 22 | rename => { "version.major" => "version_major" } 23 | rename => { "version.minor" => "version_minor" } 24 | rename => { "version.minor2" => "version_minor2" } 25 | rename => { "version.minor3" => "version_minor3" } 26 | rename => { "version.addl" => "version_additional_info" } 27 | #unparsed_version 28 | } 29 | 30 | mutate { 31 | convert => { "version_major" => "string" } 32 | convert => { "version_minor" => "string" } 33 | } 34 | 35 | } else { 36 | mutate { 37 | gsub => [ "message", "[\"']", "" ] 38 | } 39 | csv { 40 | columns => ["timestamp","source_ip","source_port","software_type","name","version_major","version_minor","version_minor2","version_minor3","version_additional_info","unparsed_version"] 41 | separator => " " 42 | } 43 | } 44 | 45 | mutate { 46 | #add_tag => [ "conf_file_1114"] 47 | } 48 | } 49 | } 50 | -------------------------------------------------------------------------------- /so-logstash/files/conf.d.so/1117_preprocess_bro_syslog.conf: -------------------------------------------------------------------------------- 1 | # Author: Justin Henderson 2 | # SANS Instructor and author of SANS SEC555: SIEM and Tactical Analytics 3 | # Updated by: Doug Burks 4 | # Last Update: 2/7/2018 5 | # 6 | # This conf file is based on accepting logs for syslog.log from Bro systems 7 | filter { 8 | if [type] == "bro_syslog" { 9 | # If message looks like json, try to parse it as such. Otherwise, fall back to csv or grok. 
10 | if [message] =~ /^{.*}$/ { 11 | json { 12 | source => "message" 13 | } 14 | 15 | mutate { 16 | rename => { "ts" => "timestamp" } 17 | #uid 18 | rename => { "id.orig_h" => "source_ip" } 19 | rename => { "id.orig_p" => "source_port" } 20 | rename => { "id.resp_h" => "destination_ip" } 21 | rename => { "id.resp_p" => "destination_port" } 22 | rename => { "proto" => "protocol" } 23 | #facility 24 | #severity 25 | #message 26 | } 27 | } else { 28 | mutate { 29 | gsub => [ "message", "[\"']", "" ] 30 | } 31 | csv { 32 | columns => ["timestamp","uid","source_ip","source_port","destination_ip","destination_port","protocol","facility","severity","message"] 33 | separator => " " 34 | } 35 | } 36 | 37 | mutate { 38 | #add_tag => [ "conf_file_1117"] 39 | } 40 | } 41 | } 42 | -------------------------------------------------------------------------------- /so-logstash/files/conf.d.so/1118_preprocess_bro_tunnel.conf: -------------------------------------------------------------------------------- 1 | # Original Author: Justin Henderson 2 | # SANS Instructor and author of SANS SEC555: SIEM and Tactical Analytics 3 | # Updated by: Doug Burks 4 | # Last Update: 2/7/2018 5 | # 6 | # This conf file is based on accepting logs for tunnel.log from Bro systems 7 | # Security Onion syslog-ng.conf sets type to "bro_tunnels" 8 | filter { 9 | if [type] == "bro_tunnels" { 10 | # If message looks like json, try to parse it as such. Otherwise, fall back to csv or grok. 11 | if [message] =~ /^{.*}$/ { 12 | json { 13 | source => "message" 14 | } 15 | 16 | mutate { 17 | rename => { "ts" => "timestamp" } 18 | #uid 19 | rename => { "id.orig_h" => "source_ip" } 20 | rename => { "id.orig_p" => "source_port" } 21 | rename => { "id.resp_h" => "destination_ip" } 22 | rename => { "id.resp_p" => "destination_port" } 23 | #tunnel_type 24 | #action 25 | } 26 | } else { 27 | mutate { 28 | gsub => [ "message", "[\"']", "" ] 29 | } 30 | csv { 31 | columns => ["timestamp","uid","source_ip","source_port","destination_ip","destination_port","tunnel_type","action"] 32 | separator => " " 33 | } 34 | } 35 | 36 | mutate { 37 | #add_tag => [ "conf_file_1118"] 38 | } 39 | } 40 | } 41 | -------------------------------------------------------------------------------- /so-logstash/files/conf.d.so/1119_preprocess_bro_weird.conf: -------------------------------------------------------------------------------- 1 | # Author: Justin Henderson 2 | # SANS Instructor and author of SANS SEC555: SIEM and Tactical Analytics 3 | # Updated by: Doug Burks 4 | # Last Update: 2/7/2018 5 | # 6 | # This conf file is based on accepting logs for weird.log from Bro systems 7 | filter { 8 | if [type] == "bro_weird" { 9 | # If message looks like json, try to parse it as such. Otherwise, fall back to csv or grok. 
10 | if [message] =~ /^{.*}$/ { 11 | json { 12 | source => "message" 13 | } 14 | 15 | mutate { 16 | rename => { "ts" => "timestamp" } 17 | #uid 18 | rename => { "id.orig_h" => "source_ip" } 19 | rename => { "id.orig_p" => "source_port" } 20 | rename => { "id.resp_h" => "destination_ip" } 21 | rename => { "id.resp_p" => "destination_port" } 22 | #name 23 | rename => { "addl" => "additional_info" } 24 | #notice 25 | #peer 26 | } 27 | 28 | mutate { 29 | convert => { "notice" => "string" } 30 | } 31 | 32 | } else { 33 | grok { 34 | match => [ "message", "(?(.*?))\t(?(.*?))\t(?(.*?))\t(?(.*?))\t(?(.*?))\t(?(.*?))\t(?(.*?))\t(?(.*?))\t(?(.*?))\t(?(.*))" ] 35 | } 36 | } 37 | 38 | mutate { 39 | #add_tag => [ "conf_file_1119"] 40 | } 41 | } 42 | } 43 | -------------------------------------------------------------------------------- /so-logstash/files/conf.d.so/1124_preprocess_bro_intel.conf: -------------------------------------------------------------------------------- 1 | # Author: Justin Henderson 2 | # SANS Instructor and author of SANS SEC555: SIEM and Tactical Analytics 3 | # Updated by: Doug Burks 4 | # Last Update: 2/7/2018 5 | # 6 | # This conf file is based on accepting logs for intel.log from Bro systems 7 | filter { 8 | if [type] == "bro_intel" { 9 | # If message looks like json, try to parse it as such. Otherwise, fall back to csv or grok. 10 | if [message] =~ /^{.*}$/ { 11 | json { 12 | source => "message" 13 | } 14 | 15 | mutate { 16 | rename => { "ts" => "timestamp" } 17 | #uid 18 | rename => { "id.orig_h" => "source_ip" } 19 | rename => { "id.orig_p" => "source_port" } 20 | rename => { "id.resp_h" => "destination_ip" } 21 | rename => { "id.resp_p" => "destination_port" } 22 | rename => { "seen.indicator" => "indicator" } 23 | rename => { "seen.indicator_type" => "indicator_type" } 24 | rename => { "seen.where" => "seen_where" } 25 | rename => { "seen.node" => "seen_node" } 26 | #matched 27 | #sources 28 | #fuid 29 | rename => { "file_mime_type" => "mimetype" } 30 | rename => { "file_desc" => "file_description" } 31 | } 32 | } else { 33 | mutate { 34 | gsub => [ "message", "[\"']", "" ] 35 | } 36 | csv { 37 | columns => ["timestamp","uid","source_ip","source_port","destination_ip","destination_port","indicator","indicator_type","seen_where","seen_node","matched","sources","fuid","mimetype","file_description"] 38 | separator => " " 39 | } 40 | } 41 | 42 | mutate { 43 | #add_tag => [ "conf_file_1124"] 44 | } 45 | } 46 | } 47 | -------------------------------------------------------------------------------- /so-logstash/files/conf.d.so/1125_preprocess_bro_modbus.conf: -------------------------------------------------------------------------------- 1 | # Author: Wes Lambert 2 | # Adapted from existing filters provided by Justin Henderson 3 | # 4 | # Updated by: Doug Burks 5 | # Last Update: 2/7/2018 6 | # 7 | # This conf file is based on accepting logs for modbus.log from Bro systems 8 | # 9 | filter { 10 | if [type] == "bro_modbus" { 11 | # If message looks like json, try to parse it as such. Otherwise, fall back to csv or grok. 
12 | if [message] =~ /^{.*}$/ { 13 | json { 14 | source => "message" 15 | } 16 | 17 | mutate { 18 | rename => { "ts" => "timestamp" } 19 | #uid 20 | rename => { "id.orig_h" => "source_ip" } 21 | rename => { "id.orig_p" => "source_port" } 22 | rename => { "id.resp_h" => "destination_ip" } 23 | rename => { "id.resp_p" => "destination_port" } 24 | rename => { "func" => "function" } 25 | #exception 26 | } 27 | } else { 28 | mutate { 29 | gsub => [ "message", "[\"']", "" ] 30 | } 31 | csv { 32 | columns => ["timestamp","uid","source_ip","source_port","destination_ip","destination_port","function","exception"] 33 | separator => " " 34 | } 35 | } 36 | } 37 | } 38 | 39 | # Parse using grok 40 | #filter { 41 | # if [type] == "bro_modbus" { 42 | # grok { 43 | # match => [ "message", "(?(.*?))\t(?(.*?))\t(?(.*?))\t(?(.*?))\t(?(.*?))\t(?(.*?))\t(?(.*?))\t(?(.*?))$" ] 44 | # } 45 | #mutate { 46 | #add_tag => [ "conf_file_1125"] 47 | #} 48 | # } 49 | #} 50 | -------------------------------------------------------------------------------- /so-logstash/files/conf.d.so/1128_preprocess_bro_pe.conf: -------------------------------------------------------------------------------- 1 | # Author: Wes Lambert 2 | # 3 | # Adapted from existing filters provided by Justin Henderson 4 | # 5 | # Updated by: Doug Burks 6 | # 7 | # This conf file is based on accepting logs for pe.log from Bro systems 8 | # 9 | filter { 10 | if [type] == "bro_pe" { 11 | # If message looks like json, try to parse it as such. Otherwise, fall back to csv or grok. 12 | if [message] =~ /^{.*}$/ { 13 | json { 14 | source => "message" 15 | } 16 | 17 | mutate { 18 | rename => { "ts" => "timestamp" } 19 | rename => { "id" => "fuid" } 20 | #machine 21 | #compile_ts 22 | #os 23 | #subsystem 24 | #is_exe 25 | #is_64bit 26 | #uses_aslr 27 | #uses_dep 28 | #uses_code_integrity 29 | #uses_seh 30 | #has_import_table 31 | #has_export_table 32 | #has_cert_table 33 | #has_debug_data 34 | #section_names 35 | } 36 | } else { 37 | mutate { 38 | gsub => [ "message", "[\"']", "" ] 39 | } 40 | csv { 41 | columns => ["timestamp","fuid","machine","compile_ts","os","subsystem","is_exe","is_64bit","uses_aslr","uses_dep","uses_code_integrity","uses_seh","has_import_table","has_export_table","has_cert_table","has_debug_data","section_names"] 42 | separator => " " 43 | } 44 | } 45 | } 46 | } 47 | -------------------------------------------------------------------------------- /so-logstash/files/conf.d.so/1130_preprocess_bro_dnp3.conf: -------------------------------------------------------------------------------- 1 | # Author: Wes Lambert 2 | # 3 | # Adapted from existing filters provided by Justin Henderson 4 | # 5 | # Updated by: Doug Burks 6 | # Last Update: 2/7/2018 7 | # 8 | # This conf file is based on accepting logs for dnp3.log from Bro systems 9 | # 10 | filter { 11 | if [type] == "bro_dnp3" { 12 | # If message looks like json, try to parse it as such. Otherwise, fall back to csv or grok. 
13 | if [message] =~ /^{.*}$/ { 14 | json { 15 | source => "message" 16 | } 17 | 18 | mutate { 19 | rename => { "ts" => "timestamp" } 20 | #uid 21 | rename => { "id.orig_h" => "source_ip" } 22 | rename => { "id.orig_p" => "source_port" } 23 | rename => { "id.resp_h" => "destination_ip" } 24 | rename => { "id.resp_p" => "destination_port" } 25 | #fc_request 26 | #fc_reply 27 | #iin 28 | } 29 | } else { 30 | mutate { 31 | gsub => [ "message", "[\"']", "" ] 32 | } 33 | csv { 34 | columns => ["timestamp","uid","source_ip","source_port","destination_ip","destination_port","fc_request","fc_reply","iin"] 35 | separator => " " 36 | } 37 | } 38 | } 39 | } 40 | 41 | # Parse using grok 42 | #filter { 43 | # if [type] == "bro_dnp3" { 44 | # grok { 45 | # match => [ "message", "(?(.*?))\t(?(.*?))\t(?(.*?))\t(?(.*?))\t(?(.*?))\t(?(.*?))\t(?(.*?))\t(?(.*?))\t(?(.*?))$" ] 46 | # } 47 | # mutate { 48 | # #add_tag => [ "conf_file_1130"] 49 | # } 50 | # } 51 | #} 52 | -------------------------------------------------------------------------------- /so-logstash/files/conf.d.so/1131_preprocess_bro_smb_files.conf: -------------------------------------------------------------------------------- 1 | # Author: Wes Lambert 2 | # 3 | # Adapted from existing filters provided by Justin Henderson 4 | # 5 | # Updated by: Doug Burks 6 | # Last Update: 2/7/2018 7 | # 8 | # This conf file is based on accepting logs for smb_files.log from Bro systems 9 | # 10 | filter { 11 | if [type] == "bro_smb_files" { 12 | # If message looks like json, try to parse it as such. Otherwise, fall back to csv or grok. 13 | if [message] =~ /^{.*}$/ { 14 | json { 15 | source => "message" 16 | } 17 | 18 | mutate { 19 | rename => { "ts" => "timestamp" } 20 | #uid 21 | rename => { "id.orig_h" => "source_ip" } 22 | rename => { "id.orig_p" => "source_port" } 23 | rename => { "id.resp_h" => "destination_ip" } 24 | rename => { "id.resp_p" => "destination_port" } 25 | #fuid 26 | #action 27 | #path 28 | #name 29 | #size 30 | #prev_name 31 | rename => { "times.modified" => "times_modified" } 32 | rename => { "times.accessed" => "times_accessed" } 33 | rename => { "times.created" => "times_created" } 34 | rename => { "times.changed" => "times_changed" } 35 | } 36 | } else { 37 | mutate { 38 | gsub => [ "message", "[\"']", "" ] 39 | } 40 | csv { 41 | columns => ["timestamp","uid","source_ip","source_port","destination_ip","destination_port","fuid","action","path","name","size","prev_name","times_modified","times_accessed","times_created","times_changed"] 42 | separator => " " 43 | } 44 | } 45 | } 46 | } 47 | -------------------------------------------------------------------------------- /so-logstash/files/conf.d.so/1132_preprocess_bro_smb_mapping.conf: -------------------------------------------------------------------------------- 1 | # Author: Wes Lambert 2 | # 3 | # Adapted from existing filters provided by Justin Henderson 4 | # 5 | # Updated by: Doug Burks 6 | # Last Update: 2/7/2018 7 | # 8 | # This conf file is based on accepting logs for smb_mapping.log from Bro systems 9 | # 10 | filter { 11 | if [type] == "bro_smb_mapping" { 12 | # If message looks like json, try to parse it as such. Otherwise, fall back to csv or grok. 
13 | if [message] =~ /^{.*}$/ { 14 | json { 15 | source => "message" 16 | } 17 | 18 | mutate { 19 | rename => { "ts" => "timestamp" } 20 | #uid 21 | rename => { "id.orig_h" => "source_ip" } 22 | rename => { "id.orig_p" => "source_port" } 23 | rename => { "id.resp_h" => "destination_ip" } 24 | rename => { "id.resp_p" => "destination_port" } 25 | #path 26 | #service 27 | #native_file_system 28 | #share_type 29 | } 30 | } else { 31 | mutate { 32 | gsub => [ "message", "[\"']", "" ] 33 | } 34 | csv { 35 | columns => ["timestamp","uid","source_ip","source_port","destination_ip","destination_port","path","service","native_file_system","share_type"] 36 | separator => " " 37 | } 38 | } 39 | } 40 | } 41 | -------------------------------------------------------------------------------- /so-logstash/files/conf.d.so/1133_preprocess_bro_ntlm.conf: -------------------------------------------------------------------------------- 1 | # Author: Wes Lambert 2 | # 3 | # Adapted from existing filters provided by Justin Henderson 4 | # 5 | # Updated by: Doug Burks and Wes Lambert 6 | # Last Update: 1/2/2019 7 | # 8 | # This conf file is based on accepting logs for ntlm.log from Bro systems 9 | # 10 | filter { 11 | if [type] == "bro_ntlm" { 12 | # If message looks like json, try to parse it as such. Otherwise, fall back to csv or grok. 13 | if [message] =~ /^{.*}$/ { 14 | json { 15 | source => "message" 16 | } 17 | 18 | mutate { 19 | rename => { "ts" => "timestamp" } 20 | #uid 21 | rename => { "id.orig_h" => "source_ip" } 22 | rename => { "id.orig_p" => "source_port" } 23 | rename => { "id.resp_h" => "destination_ip" } 24 | rename => { "id.resp_p" => "destination_port" } 25 | #hostname 26 | rename => { "domainname" => "domain_name" } 27 | rename => { "success" => "ntlm_success" } 28 | #status 29 | } 30 | } else { 31 | mutate { 32 | gsub => [ "message", "[\"']", "" ] 33 | } 34 | csv { 35 | columns => [ "timestamp", "uid", "source_ip", "source_port", "destination_ip", "destination_port", "username", "hostname", "domain_name", "server_nb_computer_name", "server_dns_computer_name", "server_tree_name", "ntlm_success"] 36 | separator => " " 37 | } 38 | ruby { 39 | code =>" 40 | hash = event.to_hash.each do |key,value| 41 | if value == '-' 42 | event.remove(key) 43 | end 44 | end" 45 | } 46 | 47 | 48 | } 49 | } 50 | } 51 | -------------------------------------------------------------------------------- /so-logstash/files/conf.d.so/1134_preprocess_bro_dce_rpc.conf: -------------------------------------------------------------------------------- 1 | # Author: Wes Lambert 2 | # 3 | # Adapted from existing filters provided by Justin Henderson 4 | # 5 | # Updated by: Doug Burks 6 | # Last Update: 2/7/2018 7 | # 8 | # This conf file is based on accepting logs for dce_rpc.log from Bro systems 9 | # 10 | filter { 11 | if [type] == "bro_dce_rpc" { 12 | # If message looks like json, try to parse it as such. Otherwise, fall back to csv or grok. 
13 | if [message] =~ /^{.*}$/ { 14 | json { 15 | source => "message" 16 | } 17 | 18 | mutate { 19 | rename => { "ts" => "timestamp" } 20 | #uid 21 | rename => { "id.orig_h" => "source_ip" } 22 | rename => { "id.orig_p" => "source_port" } 23 | rename => { "id.resp_h" => "destination_ip" } 24 | rename => { "id.resp_p" => "destination_port" } 25 | #rtt 26 | #named_pipe 27 | #endpoint 28 | #operation 29 | } 30 | 31 | #mutate { 32 | #convert => { "rtt" => "float" } 33 | #} 34 | } else { 35 | mutate { 36 | gsub => [ "message", "[\"']", "" ] 37 | } 38 | csv { 39 | columns => ["timestamp","uid","source_ip","source_port","destination_ip","destination_port","rtt","named_pipe","endpoint","operation"] 40 | separator => " " 41 | } 42 | 43 | if [rtt] == "-" { 44 | mutate { 45 | remove_field => [ "rtt" ] 46 | } 47 | } 48 | 49 | #mutate { 50 | #convert => [ "rtt", "float" ] 51 | #} 52 | } 53 | } 54 | } 55 | -------------------------------------------------------------------------------- /so-logstash/files/conf.d.so/1998_test_data.conf: -------------------------------------------------------------------------------- 1 | # Author: Justin Henderson 2 | # SANS Instructor and author of SANS SEC555: SIEM and Tactical Analytics 3 | # Email: justin@hasecuritysolution.com 4 | # Last Update: 12/9/2016 5 | 6 | filter { 7 | if [test] == "test" { 8 | mutate { 9 | remove_field => [ "test" ] 10 | add_tag => [ "test_data" ] 11 | } 12 | mutate { 13 | #add_tag => [ "conf_file_1998"] 14 | } 15 | } 16 | } 17 | -------------------------------------------------------------------------------- /so-logstash/files/conf.d.so/6001_bro_import.conf: -------------------------------------------------------------------------------- 1 | # Updated by: Doug Burks 2 | # Last Update: 2/10/2018 3 | # 4 | filter { 5 | if "import" in [tags] and "bro" in [tags] { 6 | 7 | # we're setting timestamp in 6000 now 8 | #date { 9 | # match => [ "timestamp", "UNIX" ] 10 | #} 11 | 12 | mutate { 13 | #add_tag => [ "conf_file_6001"] 14 | } 15 | } 16 | } 17 | -------------------------------------------------------------------------------- /so-logstash/files/conf.d.so/6002_syslog.conf: -------------------------------------------------------------------------------- 1 | # Updated by: Doug Burks 2 | # Last Update: 5/16/2017 3 | # 4 | filter { 5 | if "syslog" in [tags] { 6 | mutate { 7 | #convert => [ "status_code", "integer" ] 8 | #add_tag => [ "conf_file_6002"] 9 | } 10 | } 11 | } 12 | -------------------------------------------------------------------------------- /so-logstash/files/conf.d.so/6101_switch_brocade.conf: -------------------------------------------------------------------------------- 1 | # Author: Justin Henderson 2 | # SANS Instructor and author of SANS SEC555: SIEM and Tactical Analytics 3 | # Email: justin@hasecuritysolution.com 4 | # Last Update: 12/9/2016 5 | 6 | filter { 7 | if [type] == "brocade" { 8 | grok { 9 | match => ["message", "<%{DATA}>%{GREEDYDATA:sys_message}"] 10 | } 11 | grok { 12 | match => { "sys_message" => "%{SYSLOGTIMESTAMP:syslog_timestamp} %{SYSLOGHOST:syslog_hostname} %{DATA:syslog_program}(?:\[%{POSINT:syslog_pid:int}\])?: %{GREEDYDATA:syslog_message}" } 13 | add_field => [ "received_at", "%{@timestamp}" ] 14 | } 15 | if [syslog_message] =~ "Interface ethernet" or [syslog_program] == "PORT" { 16 | grok { 17 | match => { "syslog_message" => "%{DATA}%{INT:unit}\/%{INT:interface_type}\/%{INT:interface:int}" } 18 | } 19 | mutate { 20 | add_field => { "interface_port" => "%{unit}/%{interface_type}/%{interface}" } 21 | } 22 
| } 23 | date { 24 | match => [ "syslog_timestamp", "MMM d HH:mm:ss", "MMM dd HH:mm:ss" ] 25 | timezone => "America/Chicago" 26 | remove_field => "syslog_timestamp" 27 | remove_field => "received_at" 28 | } 29 | mutate { 30 | #add_tag => [ "conf_file_6101"] 31 | } 32 | } 33 | } 34 | -------------------------------------------------------------------------------- /so-logstash/files/conf.d.so/6201_firewall_pfsense.conf: -------------------------------------------------------------------------------- 1 | # Author: Wes Lambert 2 | # Last Update: 02/19/2019 3 | 4 | 5 | filter { 6 | if [type] == "filterlog" { 7 | grok { 8 | match => ["message", "^(%{NONNEGINT:rule_number})?\,(%{NONNEGINT:sub_rule_number})?\,(%{DATA:anchor})?\,(%{NONNEGINT:tracker_id})?\,%{DATA:interface}\,%{DATA:reason}\,%{DATA:action}\,%{DATA:direction}\,%{NONNEGINT:ip_version},%{GREEDYDATA:sub_msg}$"] 9 | } 10 | if [ip_version] =~ "4" { 11 | csv { 12 | source => [sub_msg] 13 | columns => ["ipv4_tos","ipv4_ecn","ipv4_ttl","ipv4_id","ipv4_offset", "ipv4_flags","ipv4_protocol_id","ipv4_protocol","ipv4_protocol_length","source_ip","destination_ip","source_port","destination_port","data_length","tcp_flags","sequence_number","ack","window","urg","options"] 14 | separator => "," 15 | } 16 | } 17 | if [ip_version] =~ "6" { 18 | 19 | grok { 20 | match => ["sub_msg", "^%{DATA:class},%{DATA:flow_label},%{NONNEGINT:hop_limit},%{DATA:protocol},%{GREEDYDATA:ipv6_sub_msg}$"] 21 | } 22 | if [protocol] =~ "Options" { 23 | grok { 24 | match => ["ipv6_sub_msg", "^%{DATA:protocol_id},%{DATA:length},%{DATA:source_ip},%{DATA:destination_ip},%{GREEDYDATA:options}$"] 25 | } 26 | mutate { 27 | split => { "options" => "," } 28 | } 29 | } 30 | else { 31 | csv { 32 | source => [ipv6_sub_msg] 33 | columns => ["protocol_id","length","source_ip","destination_ip","source_port","destination_port","data_length","tcp_flags","sequence_number","ack","window","urg","options"] 34 | separator => "," 35 | } 36 | } 37 | } 38 | mutate { 39 | convert => [ "destination_port", "integer" ] 40 | convert => [ "source_port", "integer" ] 41 | convert => [ "ip_version", "integer" ] 42 | replace => { "type" => "firewall" } 43 | add_tag=> [ "pfsense","firewall" ] 44 | remove_field => [ "sub_msg", "ipv6_sub_msg" ] 45 | } 46 | } 47 | } 48 | -------------------------------------------------------------------------------- /so-logstash/files/conf.d.so/6600_winlogbeat_sysmon.conf: -------------------------------------------------------------------------------- 1 | # Author: Wes Lambert 2 | # 3 | # Last Update: 09/24/2018 4 | # 5 | # This conf file is based on accepting Sysmon logs from winlogbeat 6 | 7 | filter { 8 | if "beat" in [tags] and [source_name] =~ "Microsoft-Windows-Sysmon" { 9 | mutate { 10 | replace => { "type" => "sysmon" } 11 | rename => { "[event_data][User]" => "username" } 12 | rename => { "[event_data][DestinationPort]" => "destination_port" } 13 | rename => { "[event_data][DestinationIp]" => "destination_ip" } 14 | rename => { "[event_data][SourceIp]" => "source_ip" } 15 | rename => { "[event_data][Image]" => "image_path" } 16 | rename => { "[event_data][ParentImage]" => "parent_image_path" } 17 | rename => { "[data][sysmon][targetfilename]" => "target_filename" } 18 | rename => { "[event_data][SourceHostname]" => "source_hostname" } 19 | rename => { "[event_data][DestinationHostname]" => "destination_hostname" } 20 | rename => { "[event_data][TargetFilename]" => "target_filename" } 21 | } 22 | } 23 | } 24 | 
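The Sysmon filter above only fires when an event carries the beat tag (added by 0006_input_beats.conf) and a source_name matching Microsoft-Windows-Sysmon, and all it does is flatten selected [event_data] fields into the top-level names the rest of the pipeline expects. One way to sanity-check those renames without a Windows sensor is to push a hand-built event through a throwaway pipeline; the harness below is a hypothetical sketch (the file name test-sysmon.conf and the sample values are invented), not something shipped in this image.

input {
  stdin { codec => json_lines }
}
filter {
  # Same condition and a subset of the renames from 6600_winlogbeat_sysmon.conf
  if "beat" in [tags] and [source_name] =~ "Microsoft-Windows-Sysmon" {
    mutate {
      replace => { "type" => "sysmon" }
      rename => { "[event_data][DestinationIp]" => "destination_ip" }
      rename => { "[event_data][DestinationPort]" => "destination_port" }
      rename => { "[event_data][Image]" => "image_path" }
    }
  }
}
output {
  stdout { codec => rubydebug }
}

Feeding it a line such as echo '{"tags":["beat"],"source_name":"Microsoft-Windows-Sysmon","event_data":{"DestinationIp":"192.0.2.10","DestinationPort":"443"}}' | bin/logstash -f test-sysmon.conf should print the event with type set to sysmon and destination_ip/destination_port promoted to top-level fields, confirming the rename logic before it runs against real winlogbeat traffic.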
-------------------------------------------------------------------------------- /so-logstash/files/conf.d.so/6700_winlogbeat.conf: -------------------------------------------------------------------------------- 1 | # Author: Doug Burks 2 | # 3 | # Last Update: 09/24/2018 4 | # 5 | # This conf file is for beat data 6 | 7 | filter { 8 | if "beat" in [tags] { 9 | mutate { 10 | # As of beats 6.3.0, host is now an object: 11 | # https://www.elastic.co/guide/en/beats/libbeat/current/release-notes-6.3.0.html 12 | # This creates a conflict with our existing host string. 13 | # So let's rename the host object to beat_host. 14 | rename => { "host" => "beat_host" } 15 | } 16 | } 17 | } 18 | -------------------------------------------------------------------------------- /so-logstash/files/conf.d.so/8000_postprocess_bro_cleanup.conf: -------------------------------------------------------------------------------- 1 | # Author: Justin Henderson 2 | # SANS Instructor and author of SANS SEC555: SIEM and Tactical Analytics 3 | # Email: justin@hasecuritysolution.com 4 | # Last Update: 12/9/2016 5 | 6 | filter { 7 | if "bro" in [tags] { 8 | if "_grokparsefailure" not in [tags] and "_csvparsefailure" not in [tags] and "_jsonparsefailure" not in [tags] { 9 | #mutate { 10 | # remove_field => [ "message" ] 11 | #} 12 | } 13 | mutate { 14 | #add_tag => [ "conf_file_8000"] 15 | } 16 | } 17 | } 18 | -------------------------------------------------------------------------------- /so-logstash/files/conf.d.so/8001_postprocess_common_ip_augmentation.conf: -------------------------------------------------------------------------------- 1 | # Author: Justin Henderson 2 | # SANS Instructor and author of SANS SEC555: SIEM and Tactical Analytics 3 | # Updated by: Doug Burks 4 | # Last Update: 5/20/2017 5 | 6 | filter { 7 | if [source_ip] { 8 | if [source_ip] == "-" { 9 | mutate { 10 | replace => { "source_ip" => "0.0.0.0" } 11 | } 12 | } 13 | if [source_ip] =~ "10\." or [source_ip] =~ "192\.168\." or [source_ip] =~ "172\.(1[6-9]|2[0-9]|3[0-1])\." or [source_ip] =~ "fe80::20c:29ff:fe19:f7d" or [source_ip] =~ "::1" { 14 | mutate { 15 | } 16 | } else { 17 | geoip { 18 | source => "[source_ip]" 19 | target => "source_geo" 20 | } 21 | } 22 | if [source_ip] { 23 | mutate { 24 | add_field => { "ips" => "%{source_ip}" } 25 | add_field => { "source_ips" => [ "%{source_ip}" ] } 26 | } 27 | } 28 | } 29 | if [destination_ip] { 30 | if [destination_ip] == "-" { 31 | mutate { 32 | replace => { "destination_ip" => "0.0.0.0" } 33 | } 34 | } 35 | if [destination_ip] =~ "10\." or [destination_ip] =~ "192\.168\." or [destination_ip] =~ "172\.(1[6-9]|2[0-9]|3[0-1])\." or [destination_ip] =~ "239.255.255.250" or [destination_ip] =~ "224\.0\.0\." or [destination_ip] =~ "255.255.255.255" or [destination_ip] =~ "ff02::fb" or [destination_ip] =~ "fe80::20c:29ff:fe19:f7d" or [destination_ip] =~ "224\.0\.1\." 
{ 36 | mutate { 37 | } 38 | } 39 | else { 40 | geoip { 41 | source => "[destination_ip]" 42 | target => "destination_geo" 43 | } 44 | } 45 | } 46 | if [destination_ip] { 47 | mutate { 48 | add_field => { "ips" => "%{destination_ip}" } 49 | add_field => { "destination_ips" => [ "%{destination_ip}" ] } 50 | } 51 | } 52 | } 53 | #if [source_ip] or [destination_ip] { 54 | # mutate { 55 | #add_tag => [ "conf_file_8001"] 56 | # } 57 | #} 58 | 59 | -------------------------------------------------------------------------------- /so-logstash/files/conf.d.so/8006_postprocess_dns.conf: -------------------------------------------------------------------------------- 1 | # Original Author: Justin Henderson 2 | # SANS Instructor and author of SANS SEC555: SIEM and Tactical Analytics 3 | # Updated by: Doug Burks 4 | # Last Update: 5/13/2017 5 | 6 | filter { 7 | if [type] == "bro_dns" or "dns" in [tags] { 8 | # Used for whois lookups - can create log loop 9 | if [query] =~ "^whois\." { 10 | drop { } 11 | } 12 | # REPLACE test.int with your internal domain 13 | if [query] and [query] !~ "\.test\.int$" { 14 | mutate { 15 | lowercase => [ "query" ] 16 | } 17 | if [query_type_name] != "NB" and [query_type_name] != "TKEY" and [query_type_name] != "NBSTAT" and [query_type_name] != "PTR" { 18 | tld { 19 | source => "query" 20 | } 21 | ruby { 22 | code => "event.set('query_length', event.get('query').length)" 23 | } 24 | mutate { 25 | rename => { "[SubLog][sessionid]" => "sub_session_id" } 26 | rename => { "[tld][domain]" => "highest_registered_domain" } 27 | rename => { "[tld][trd]" => "subdomain" } 28 | rename => { "[tld][tld]" => "top_level_domain" } 29 | rename => { "[tld][sld]" => "parent_domain" } 30 | } 31 | if [parent_domain] { 32 | ruby { 33 | code => "event.set('parent_domain_length', event.get('parent_domain').length)" 34 | } 35 | } 36 | if [subdomain] { 37 | ruby { 38 | code => "event.set('subdomain_length', event.get('subdomain').length)" 39 | } 40 | } 41 | } 42 | } 43 | mutate { 44 | #add_tag => [ "conf_file_8006"] 45 | } 46 | } 47 | } 48 | -------------------------------------------------------------------------------- /so-logstash/files/conf.d.so/8007_postprocess_dns_top1m_tagging.conf: -------------------------------------------------------------------------------- 1 | # Author: Justin Henderson 2 | # SANS Instructor and author of SANS SEC555: SIEM and Tactical Analytics 3 | # Email: justin@hasecuritysolution.com 4 | # Last Update: 4/11/2017 5 | 6 | filter { 7 | if [type] == "dns" or [type] == "bro_dns" { 8 | if [highest_registered_domain] { 9 | rest { 10 | request => { 11 | url => "http://domainstats:20000/alexa/%{highest_registered_domain}" 12 | } 13 | sprintf => true 14 | json => false 15 | target => "site" 16 | } 17 | if [site] != "0" and [site] { 18 | mutate { 19 | add_tag => [ "top-1m" ] 20 | remove_field => [ "site" ] 21 | } 22 | } 23 | } 24 | } 25 | } 26 | -------------------------------------------------------------------------------- /so-logstash/files/conf.d.so/8007_postprocess_http.conf: -------------------------------------------------------------------------------- 1 | # Original Author: Justin Henderson 2 | # SANS Instructor and author of SANS SEC555: SIEM and Tactical Analytics 3 | # Updated by: Doug Burks 4 | # Last Update: 5/13/2017 5 | 6 | filter { 7 | if [type] == "bro_http" { 8 | if [uri] { 9 | ruby { 10 | code => "event.set('uri_length', event.get('uri').length)" 11 | } 12 | } 13 | if [virtual_host] { 14 | ruby { 15 | code => "event.set('virtual_host_length', 
event.get('virtual_host').length)" 16 | } 17 | } 18 | if [useragent] { 19 | ruby { 20 | code => "event.set('useragent_length', event.get('useragent').length)" 21 | } 22 | } 23 | mutate { 24 | ##add_tag => [ "conf_file_8007"] 25 | } 26 | } 27 | } 28 | -------------------------------------------------------------------------------- /so-logstash/files/conf.d.so/8008_postprocess_dns_whois_age.conf: -------------------------------------------------------------------------------- 1 | # Author: Justin Henderson 2 | # SANS Instructor and author of SANS SEC555: SIEM and Tactical Analytics 3 | # Email: justin@hasecuritysolution.com 4 | # Last Update: 7/22/2017 5 | 6 | filter { 7 | if "dns" in [tags] or [type] == "bro_dns" { 8 | if "top-1m" not in [tags] and [highest_registered_domain] { 9 | rest { 10 | request => { 11 | url => "http://domainstats:20000/domain/creation_date/%{highest_registered_domain}" 12 | } 13 | sprintf => true 14 | json => false 15 | target => "domain_age" 16 | } 17 | if [domain_age] and "No whois record" not in [domain_age] { 18 | date { 19 | match => [ "domain_age", "YYYY-MM-dd HH:mm:ss'; '", 20 | "YYYY-MM-dd HH:mm:ss';'", 21 | "YYYY-MM-dd'T'HH:mm:ssZ'; '", 22 | "YYYY-MM-dd'T'HH:mm:ssZ';'", 23 | "YYYY-MM-dd'T'HH:mm:ss'.00Z; '", 24 | "YYYY-MM-dd'T'HH:mm:ss'.00Z;'" ] 25 | target => "creation_date" 26 | remove_field => [ "domain_age" ] 27 | } 28 | } 29 | } 30 | } 31 | } 32 | -------------------------------------------------------------------------------- /so-logstash/files/conf.d.so/8503_postprocess_freq_analysis_bro_http.conf: -------------------------------------------------------------------------------- 1 | # Author: Justin Henderson 2 | # SANS Instructor and author of SANS SEC555: SIEM and Tactical Analytics 3 | # Email: justin@hasecuritysolution.com 4 | # Last Update: 7/22/2017 5 | 6 | filter { 7 | if [type] == "bro_http" { 8 | if [virtual_host] and [virtual_host_length] > 5 { 9 | mutate { 10 | add_field => { "freq_virtual_host" => "%{virtual_host}"} 11 | } 12 | mutate { 13 | gsub => [ "freq_virtual_host", "\W", "" ] 14 | } 15 | rest { 16 | request => { 17 | url => "http://freqserver:10004/measure/%{freq_virtual_host}" 18 | } 19 | sprintf => true 20 | json => false 21 | target => "virtual_host_frequency_score" 22 | } 23 | mutate { 24 | remove_field => [ "virtual_host_domain" ] 25 | } 26 | if [virtual_host_frequency_score] { 27 | mutate { 28 | convert => [ "virtual_host_frequency_score", "float" ] 29 | add_field => { "frequency_scores" => "%{virtual_host_frequency_score}" } 30 | } 31 | } 32 | } 33 | } 34 | } -------------------------------------------------------------------------------- /so-logstash/files/conf.d.so/8998_postprocess_log_elapsed.conf: -------------------------------------------------------------------------------- 1 | # Author: Justin Henderson 2 | # SANS Instructor and author of SANS SEC555: SIEM and Tactical Analytics 3 | # Email: justin@hasecuritysolution.com 4 | # Last Update: 12/9/2016 5 | 6 | filter { 7 | ruby { 8 | code => "event.set('task_end', Time.now.to_f)" 9 | } 10 | ruby { 11 | code => "event.set('logstash_time', event.get('task_end') - event.get('task_start'))" 12 | } 13 | mutate { 14 | remove_field => [ 'task_start', 'task_end' ] 15 | } 16 | mutate { 17 | #add_tag => [ "conf_file_8998"] 18 | } 19 | } 20 | -------------------------------------------------------------------------------- /so-logstash/files/conf.d.so/8999_postprocess_rename_type.conf: -------------------------------------------------------------------------------- 1 | # Author: Doug 
Burks 2 | # Last Update: 12/10/2017 3 | 4 | filter { 5 | mutate { 6 | rename => [ "type", "event_type" ] 7 | } 8 | } 9 | -------------------------------------------------------------------------------- /so-logstash/files/dictionaries/services.yaml: -------------------------------------------------------------------------------- 1 | "Windows Update": whitelist 2 | "SEC555 Service": whitelist 3 | "Evil Service": blacklist 4 | -------------------------------------------------------------------------------- /so-logstash/files/dictionaries/tcp_flags.yaml: -------------------------------------------------------------------------------- 1 | "0x00": NULL 2 | "0x01": FIN 3 | "0x02": SYN 4 | "0x03": FIN-SYN 5 | "0x08": PSH 6 | "0x09": FIN-PSH 7 | "0x0A": SYN-PSH 8 | "0x0B": FIN-SYN-PSH 9 | "0x10": ACK 10 | "0x11": FIN-ACK 11 | "0x12": SYN-ACK 12 | "0x13": FIN-SYN-ACK 13 | "0x18": PSH-ACK 14 | "0x19": FIN-PSH-ACK 15 | "0x1A": SYN-PSH-ACK 16 | "0x1B": FIN-SYN-PSH-ACK 17 | "0x40": ECE 18 | "0x41": FIN-ECE 19 | "0x42": SYN-ECE 20 | "0x43": FIN-SYN-ECE 21 | "0x48": PSH-ECE 22 | "0x49": FIN-PSH-ECE 23 | "0x4A": SYN-PSH-ECE 24 | "0x4B": FIN-SYN-PSH-ECE 25 | "0x50": ACK-ECE 26 | "0x51": FIN-ACK-ECE 27 | "0x52": SYN-ACK-ECE 28 | "0x53": FIN-SYN-ACK-ECE 29 | "0x58": PSH-ACK-ECE 30 | "0x59": FIN-PSH-ACK-ECE 31 | "0x5A": SYN-PSH-ACK-ECE 32 | "0x5B": FIN-SYN-PSH-ACK-ECE 33 | "0x80": CWR 34 | "0x81": FIN-CWR 35 | "0x82": SYN-CWR 36 | "0x83": FIN-SYN-CWR 37 | "0x88": PSH-CWR 38 | "0x89": FIN-PSH-CWR 39 | "0x8A": SYN-PSH-CWR 40 | "0x8B": FIN-SYN-PSH-CWR 41 | "0x90": ACK-CWR 42 | "0x91": FIN-ACK-CWR 43 | "0x92": SYN-ACK-CWR 44 | "0x93": FIN-SYN-ACK-CWR 45 | "0x98": PSH-ACK-CWR 46 | "0x99": FIN-PSH-ACK-CWR 47 | "0x9A": SYN-PSH-ACK-CWR 48 | "0x9B": FIN-SYN-PSH-ACK-CWR 49 | "0xC0": ECE-CWR 50 | "0xC1": FIN-ECE-CWR 51 | "0xC2": SYN-ECE-CWR 52 | "0xC3": FIN-SYN-ECE-CWR 53 | "0xC8": PSH-ECE-CWR 54 | "0xC9": FIN-PSH-ECE-CWR 55 | "0xCA": SYN-PSH-ECE-CWR 56 | "0xCB": FIN-SYN-PSH-ECE-CWR 57 | "0xD0": ACK-ECE-CWR 58 | "0xD1": FIN-ACK-ECE-CWR 59 | "0xD2": SYN-ACK-ECE-CWR 60 | "0xD3": FIN-SYN-ACK-ECE-CWR 61 | "0xD8": PSH-ACK-ECE-CWR 62 | "0xD9": FIN-PSH-ACK-ECE-CWR 63 | "0xDA": SYN-PSH-ACK-ECE-CWR 64 | "0xDB": FIN-SYN-PSH-ACK-ECE-CWR -------------------------------------------------------------------------------- /so-logstash/files/domainstats/8007_postprocess_dns_top1m_tagging.conf: -------------------------------------------------------------------------------- 1 | # Author: Justin Henderson 2 | # SANS Instructor and author of SANS SEC555: SIEM and Tactical Analytics 3 | # Email: justin@hasecuritysolution.com 4 | # Last Update: 4/11/2017 5 | 6 | filter { 7 | if [type] == "dns" or [type] == "bro_dns" { 8 | if [highest_registered_domain] { 9 | rest { 10 | request => { 11 | url => "http://domainstats:20000/alexa/%{highest_registered_domain}" 12 | } 13 | sprintf => true 14 | json => false 15 | target => "site" 16 | } 17 | if [site] != "0" and [site] { 18 | mutate { 19 | add_tag => [ "top-1m" ] 20 | remove_field => [ "site" ] 21 | } 22 | } 23 | } 24 | } 25 | } 26 | -------------------------------------------------------------------------------- /so-logstash/files/domainstats/8008_postprocess_dns_whois_age.conf: -------------------------------------------------------------------------------- 1 | # Author: Justin Henderson 2 | # SANS Instructor and author of SANS SEC555: SIEM and Tactical Analytics 3 | # Email: justin@hasecuritysolution.com 4 | # Last Update: 7/22/2017 5 | 6 | filter { 7 | if "dns" in [tags] or [type] == "bro_dns" { 
8 | if "top-1m" not in [tags] and [highest_registered_domain] { 9 | rest { 10 | request => { 11 | url => "http://domainstats:20000/domain/creation_date/%{highest_registered_domain}" 12 | } 13 | sprintf => true 14 | json => false 15 | target => "domain_age" 16 | } 17 | if [domain_age] and [domain_age] !~ "No whois record"{ 18 | date { 19 | match => [ "domain_age", "YYYY-MM-dd HH:mm:ss'; '", 20 | "YYYY-MM-dd HH:mm:ss';'", 21 | "YYYY-MM-dd'T'HH:mm:ssZ'; '", 22 | "YYYY-MM-dd'T'HH:mm:ssZ';'", 23 | "YYYY-MM-dd'T'HH:mm:ss'.00Z; '", 24 | "YYYY-MM-dd'T'HH:mm:ss'.00Z;'" ] 25 | target => "creation_date" 26 | remove_field => [ "domain_age" ] 27 | } 28 | } 29 | } 30 | } 31 | } 32 | -------------------------------------------------------------------------------- /so-logstash/files/freq/8503_postprocess_freq_analysis_bro_http.conf: -------------------------------------------------------------------------------- 1 | # Author: Justin Henderson 2 | # SANS Instructor and author of SANS SEC555: SIEM and Tactical Analytics 3 | # Email: justin@hasecuritysolution.com 4 | # Last Update: 7/22/2017 5 | 6 | filter { 7 | if [type] == "bro_http" { 8 | if [virtual_host] and [virtual_host_length] > 5 { 9 | mutate { 10 | add_field => { "freq_virtual_host" => "%{virtual_host}"} 11 | } 12 | mutate { 13 | gsub => [ "freq_virtual_host", "\W", "" ] 14 | } 15 | rest { 16 | request => { 17 | url => "http://freqserver:10004/measure/%{freq_virtual_host}" 18 | } 19 | sprintf => true 20 | json => false 21 | target => "virtual_host_frequency_score" 22 | } 23 | mutate { 24 | remove_field => [ "virtual_host_domain" ] 25 | } 26 | if [virtual_host_frequency_score] { 27 | mutate { 28 | convert => [ "virtual_host_frequency_score", "float" ] 29 | add_field => { "frequency_scores" => "%{virtual_host_frequency_score}" } 30 | } 31 | } 32 | } 33 | } 34 | } -------------------------------------------------------------------------------- /so-logstash/pipeline/default.conf: -------------------------------------------------------------------------------- 1 | input { 2 | beats { 3 | port => 5044 4 | } 5 | } 6 | 7 | output { 8 | stdout { 9 | codec => rubydebug 10 | } 11 | } 12 | 13 | -------------------------------------------------------------------------------- /so-mysql/healthcheck.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # Copyright (c) 2017, Oracle and/or its affiliates. All rights reserved. 3 | # 4 | # This program is free software; you can redistribute it and/or modify 5 | # it under the terms of the GNU General Public License as published by 6 | # the Free Software Foundation; version 2 of the License. 7 | # 8 | # This program is distributed in the hope that it will be useful, 9 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 10 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 11 | # GNU General Public License for more details. 
12 | # 13 | # You should have received a copy of the GNU General Public License 14 | # along with this program; if not, write to the Free Software 15 | # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA 16 | 17 | # The mysql-init-complete file is touched by the entrypoint file before the 18 | # main server process is started 19 | if [ -f /mysql-init-complete ]; # The entrypoint script touches this file 20 | then # Ping server to see if it is ready 21 | mysqladmin --defaults-extra-file=/healthcheck.cnf ping 22 | else # Initialization still in progress 23 | exit 1 24 | fi 25 | -------------------------------------------------------------------------------- /so-navigator/Dockerfile: -------------------------------------------------------------------------------- 1 | # Build stage 2 | 3 | FROM node:alpine 4 | 5 | WORKDIR /nav-app/ 6 | 7 | # copy over needed files 8 | COPY . ./ 9 | 10 | # install packages and build 11 | RUN npm install --unsafe-perm 12 | 13 | EXPOSE 4200 14 | 15 | CMD npm start 16 | 17 | # docker run --mount type=bind,source="$(pwd)/nav_layer_playbook.json",target=/nav-app/src/assets/playbook.json -dp 4200:4200 so-navigator:1.0 18 | -------------------------------------------------------------------------------- /so-navigator/e2e/app.e2e-spec.ts: -------------------------------------------------------------------------------- 1 | import { AppPage } from './app.po'; 2 | 3 | describe('nav-app App', () => { 4 | let page: AppPage; 5 | 6 | beforeEach(() => { 7 | page = new AppPage(); 8 | }); 9 | 10 | it('should display welcome message', () => { 11 | page.navigateTo(); 12 | expect(page.getParagraphText()).toEqual('Welcome to app!'); 13 | }); 14 | }); 15 | -------------------------------------------------------------------------------- /so-navigator/e2e/app.po.ts: -------------------------------------------------------------------------------- 1 | import { browser, by, element } from 'protractor'; 2 | 3 | export class AppPage { 4 | navigateTo() { 5 | return browser.get('/'); 6 | } 7 | 8 | getParagraphText() { 9 | return element(by.css('app-root h1')).getText(); 10 | } 11 | } 12 | -------------------------------------------------------------------------------- /so-navigator/e2e/tsconfig.e2e.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": "../tsconfig.json", 3 | "compilerOptions": { 4 | "outDir": "../out-tsc/e2e", 5 | "baseUrl": "./", 6 | "module": "commonjs", 7 | "target": "es5", 8 | "types": [ 9 | "jasmine", 10 | "jasminewd2", 11 | "node" 12 | ] 13 | } 14 | } 15 | -------------------------------------------------------------------------------- /so-navigator/karma.conf.js: -------------------------------------------------------------------------------- 1 | // Karma configuration file, see link for more information 2 | // https://karma-runner.github.io/1.0/config/configuration-file.html 3 | 4 | module.exports = function (config) { 5 | config.set({ 6 | basePath: '', 7 | frameworks: ['jasmine', '@angular-devkit/build-angular'], 8 | plugins: [ 9 | require('karma-jasmine'), 10 | require('karma-chrome-launcher'), 11 | require('karma-jasmine-html-reporter'), 12 | require('karma-coverage-istanbul-reporter'), 13 | require('@angular-devkit/build-angular/plugins/karma') 14 | ], 15 | client:{ 16 | clearContext: false // leave Jasmine Spec Runner output visible in browser 17 | }, 18 | coverageIstanbulReporter: { 19 | dir: require('path').join(__dirname, 'coverage'), reports: [ 'html', 'lcovonly' ], 20 | 
fixWebpackSourcePaths: true 21 | }, 22 | 23 | reporters: ['progress', 'kjhtml'], 24 | port: 9876, 25 | colors: true, 26 | logLevel: config.LOG_INFO, 27 | autoWatch: true, 28 | browsers: ['Chrome'], 29 | singleRun: false, 30 | webpack: { node: { fs: 'empty', } } //https://github.com/angular/angular-cli/issues/8357 31 | }); 32 | }; 33 | -------------------------------------------------------------------------------- /so-navigator/patch-webpack.js: -------------------------------------------------------------------------------- 1 | const fs = require('fs'); 2 | const f = 'node_modules/@angular-devkit/build-angular/src/angular-cli-files/models/webpack-configs/browser.js'; 3 | 4 | fs.readFile(f, 'utf8', function (err,data) { 5 | if (err) { 6 | return console.log(err); 7 | } 8 | let result = data.replace(/node: false/g, "node: {crypto: true, stream: true, fs: 'empty', net: 'empty'}"); 9 | 10 | fs.writeFile(f, result, 'utf8', function (err) { 11 | if (err) return console.log(err); 12 | }); 13 | }); -------------------------------------------------------------------------------- /so-navigator/protractor.conf.js: -------------------------------------------------------------------------------- 1 | // Protractor configuration file, see link for more information 2 | // https://github.com/angular/protractor/blob/master/lib/config.ts 3 | 4 | const { SpecReporter } = require('jasmine-spec-reporter'); 5 | 6 | exports.config = { 7 | allScriptsTimeout: 11000, 8 | specs: [ 9 | './e2e/**/*.e2e-spec.ts' 10 | ], 11 | capabilities: { 12 | 'browserName': 'chrome' 13 | }, 14 | directConnect: true, 15 | baseUrl: 'http://localhost:4200/', 16 | framework: 'jasmine', 17 | jasmineNodeOpts: { 18 | showColors: true, 19 | defaultTimeoutInterval: 30000, 20 | print: function() {} 21 | }, 22 | onPrepare() { 23 | require('ts-node').register({ 24 | project: 'e2e/tsconfig.e2e.json' 25 | }); 26 | jasmine.getEnv().addReporter(new SpecReporter({ spec: { displayStacktrace: true } })); 27 | } 28 | }; 29 | -------------------------------------------------------------------------------- /so-navigator/src/app/app.component.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 7 | 8 | MITRE ATT&CKTM Navigator v{{nav_version}} 9 | 10 | 11 | 14 | -------------------------------------------------------------------------------- /so-navigator/src/app/app.component.scss: -------------------------------------------------------------------------------- 1 | tabs { 2 | // font-family: 'Roboto Mono', monospace; 3 | font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', Roboto, Oxygen, Ubuntu, Cantarell, 'Open Sans', 'Helvetica Neue', sans-serif; 4 | font-size: 9pt; 5 | 6 | } 7 | -------------------------------------------------------------------------------- /so-navigator/src/app/app.component.spec.ts: -------------------------------------------------------------------------------- 1 | import { TestBed, async } from '@angular/core/testing'; 2 | import { AppComponent } from './app.component'; 3 | describe('AppComponent', () => { 4 | beforeEach(async(() => { 5 | TestBed.configureTestingModule({ 6 | declarations: [ 7 | AppComponent 8 | ], 9 | }).compileComponents(); 10 | })); 11 | it('should create the app', async(() => { 12 | const fixture = TestBed.createComponent(AppComponent); 13 | const app = fixture.debugElement.componentInstance; 14 | expect(app).toBeTruthy(); 15 | })); 16 | it(`should have as title 'app'`, async(() => { 17 | const fixture = TestBed.createComponent(AppComponent); 18 | const 
app = fixture.debugElement.componentInstance; 19 | expect(app.title).toEqual('app'); 20 | })); 21 | it('should render title in a h1 tag', async(() => { 22 | const fixture = TestBed.createComponent(AppComponent); 23 | fixture.detectChanges(); 24 | const compiled = fixture.debugElement.nativeElement; 25 | expect(compiled.querySelector('h1').textContent).toContain('Welcome to app!'); 26 | })); 27 | }); 28 | -------------------------------------------------------------------------------- /so-navigator/src/app/app.component.ts: -------------------------------------------------------------------------------- 1 | import { Component, ViewChild, DoCheck, HostListener } from '@angular/core'; 2 | import {DataService, Technique} from './data.service'; //import the DataService component so we can use it 3 | import {TabsComponent} from './tabs/tabs.component'; 4 | import * as globals from "./globals"; 5 | 6 | @Component({ 7 | selector: 'app-root', 8 | templateUrl: './app.component.html', 9 | styleUrls: ['./app.component.scss'], 10 | providers: [DataService] //add this provider to make sure we know we need DataService for this component 11 | }) 12 | export class AppComponent { 13 | @ViewChild(TabsComponent, {static: false}) tabsComponent; 14 | 15 | nav_version: string = globals.nav_version; 16 | 17 | @HostListener('window:beforeunload', ['$event']) 18 | promptNavAway($event) { 19 | //this text only shows in the data, not visible to user as far as I can tell 20 | //however, if it's not included the window doesn't open. 21 | $event.returnValue='Are you sure you want to navigate away? Your data may be lost!'; 22 | } 23 | 24 | constructor(private dataService: DataService) { 25 | Array.prototype.includes = function(value): boolean { 26 | // console.log("checking include") 27 | for (let i = 0; i < this.length; i++) { 28 | if (this[i] === value) return true 29 | } 30 | return false; 31 | } 32 | } 33 | 34 | 35 | } 36 | -------------------------------------------------------------------------------- /so-navigator/src/app/config.service.spec.ts: -------------------------------------------------------------------------------- 1 | import { TestBed, inject } from '@angular/core/testing'; 2 | 3 | import { ConfigService } from './config.service'; 4 | 5 | describe('ConfigService', () => { 6 | beforeEach(() => { 7 | TestBed.configureTestingModule({ 8 | providers: [ConfigService] 9 | }); 10 | }); 11 | 12 | it('should be created', inject([ConfigService], (service: ConfigService) => { 13 | expect(service).toBeTruthy(); 14 | })); 15 | }); 16 | -------------------------------------------------------------------------------- /so-navigator/src/app/data.service.spec.ts: -------------------------------------------------------------------------------- 1 | import { TestBed, inject } from '@angular/core/testing'; 2 | 3 | import { DataService } from './data.service'; 4 | 5 | describe('DataService', () => { 6 | beforeEach(() => { 7 | TestBed.configureTestingModule({ 8 | providers: [DataService] 9 | }); 10 | }); 11 | 12 | it('should be created', inject([DataService], (service: DataService) => { 13 | expect(service).toBeTruthy(); 14 | })); 15 | }); 16 | -------------------------------------------------------------------------------- /so-navigator/src/app/exporter/exporter.component.scss: -------------------------------------------------------------------------------- 1 | .svgcontainer { 2 | overflow-x: auto 3 | } 4 | 5 | .dropdown-container { 6 | padding: 10px; 7 | ul { 8 | padding-left: 0; 9 | li { 10 | list-style: none; 11 | 
text-align: left; 12 | input.has-suffix { 13 | text-align: right; 14 | &::-webkit-inner-spin-button, 15 | &::-webkit-outer-spin-button { 16 | display: none 17 | } 18 | -moz-appearance: textfield; 19 | } 20 | } 21 | } 22 | } 23 | -------------------------------------------------------------------------------- /so-navigator/src/app/exporter/exporter.component.spec.ts: -------------------------------------------------------------------------------- 1 | import { async, ComponentFixture, TestBed } from '@angular/core/testing'; 2 | 3 | import { ExporterComponent } from './exporter.component'; 4 | 5 | describe('ExporterComponent', () => { 6 | let component: ExporterComponent; 7 | let fixture: ComponentFixture; 8 | 9 | beforeEach(async(() => { 10 | TestBed.configureTestingModule({ 11 | declarations: [ ExporterComponent ] 12 | }) 13 | .compileComponents(); 14 | })); 15 | 16 | beforeEach(() => { 17 | fixture = TestBed.createComponent(ExporterComponent); 18 | component = fixture.componentInstance; 19 | fixture.detectChanges(); 20 | }); 21 | 22 | it('should create', () => { 23 | expect(component).toBeTruthy(); 24 | }); 25 | }); 26 | -------------------------------------------------------------------------------- /so-navigator/src/app/globals.ts: -------------------------------------------------------------------------------- 1 | // ___ _ ___ ___ _ _ __ ___ ___ ___ _ ___ _ ___ ___ 2 | // / __| | / _ \| _ ) /_\ | | \ \ / /_\ | _ \_ _| /_\ | _ ) | | __/ __| 3 | // | (_ | |_| (_) | _ \/ _ \| |__ \ V / _ \| /| | / _ \| _ \ |__| _|\__ \ 4 | // \___|____\___/|___/_/ \_\____| \_/_/ \_\_|_\___/_/ \_\___/____|___|___/ 5 | // 6 | 'use strict'; 7 | 8 | export const nav_version: string="2.2.1" 9 | export const layer_version: string="2.1" 10 | -------------------------------------------------------------------------------- /so-navigator/src/app/help/help.component.scss: -------------------------------------------------------------------------------- 1 | @import "../../colors.scss"; 2 | 3 | .help { 4 | font-size: 11pt; 5 | } 6 | 7 | .content { 8 | padding: 5px; 9 | } 10 | 11 | // table of contents 12 | .toc { 13 | list-style: none; 14 | ul { 15 | list-style: none; 16 | } 17 | } 18 | 19 | code { 20 | color: black; 21 | border: 1px solid $panel-dark; 22 | padding: 1px 2px; 23 | } 24 | -------------------------------------------------------------------------------- /so-navigator/src/app/help/help.component.spec.ts: -------------------------------------------------------------------------------- 1 | import { async, ComponentFixture, TestBed } from '@angular/core/testing'; 2 | 3 | import { HelpComponent } from './help.component'; 4 | 5 | describe('HelpComponent', () => { 6 | let component: HelpComponent; 7 | let fixture: ComponentFixture; 8 | 9 | beforeEach(async(() => { 10 | TestBed.configureTestingModule({ 11 | declarations: [ HelpComponent ] 12 | }) 13 | .compileComponents(); 14 | })); 15 | 16 | beforeEach(() => { 17 | fixture = TestBed.createComponent(HelpComponent); 18 | component = fixture.componentInstance; 19 | fixture.detectChanges(); 20 | }); 21 | 22 | it('should create', () => { 23 | expect(component).toBeTruthy(); 24 | }); 25 | }); 26 | -------------------------------------------------------------------------------- /so-navigator/src/app/help/help.component.ts: -------------------------------------------------------------------------------- 1 | import { Component, OnInit } from '@angular/core'; 2 | import * as globals from "../globals"; 3 | @Component({ 4 | selector: 'help', 5 | templateUrl: 
'./help.component.html', 6 | styleUrls: ['./help.component.scss'] 7 | }) 8 | export class HelpComponent { 9 | nav_version: string = globals.nav_version; 10 | constructor() { } 11 | 12 | } 13 | -------------------------------------------------------------------------------- /so-navigator/src/app/tab/tab.component.html: -------------------------------------------------------------------------------- 1 |
5 | -------------------------------------------------------------------------------- /so-navigator/src/app/tab/tab.component.scss: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Security-Onion-Solutions/securityonion-docker-hh/8888b402d1757c8e4801260c8947ac9f90b15ee6/so-navigator/src/app/tab/tab.component.scss -------------------------------------------------------------------------------- /so-navigator/src/app/tab/tab.component.spec.ts: -------------------------------------------------------------------------------- 1 | import { async, ComponentFixture, TestBed } from '@angular/core/testing'; 2 | 3 | import { TabComponent } from './tab.component'; 4 | 5 | describe('TabComponent', () => { 6 | let component: TabComponent; 7 | let fixture: ComponentFixture; 8 | 9 | beforeEach(async(() => { 10 | TestBed.configureTestingModule({ 11 | declarations: [ TabComponent ] 12 | }) 13 | .compileComponents(); 14 | })); 15 | 16 | beforeEach(() => { 17 | fixture = TestBed.createComponent(TabComponent); 18 | component = fixture.componentInstance; 19 | fixture.detectChanges(); 20 | }); 21 | 22 | it('should create', () => { 23 | expect(component).toBeTruthy(); 24 | }); 25 | }); 26 | -------------------------------------------------------------------------------- /so-navigator/src/app/tab/tab.component.ts: -------------------------------------------------------------------------------- 1 | // https://embed.plnkr.co/wWKnXzpm8V31wlvu64od/s 2 | import { Component, Input } from '@angular/core'; 3 | 4 | @Component({ 5 | selector: 'tab', 6 | templateUrl: './tab.component.html', 7 | styleUrls: ['./tab.component.scss'] 8 | }) 9 | export class TabComponent { 10 | @Input('tabTitle') title: string; 11 | @Input() active = false; 12 | @Input() isCloseable = false; 13 | @Input() template; 14 | @Input() dataContext; 15 | @Input() showScoreVariables = false; 16 | @Input() isDataTable: boolean; 17 | 18 | } 19 | -------------------------------------------------------------------------------- /so-navigator/src/app/tabs/dynamic-tabs.directive.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * This directive is used as an anchor to get access 3 | * to the ViewContainerRef which here is exposed via 4 | * the public member `viewContainer` 5 | * 6 | * Theres an ALTERNATIVE to explicitly using the anchor directive. 
7 | * Read in the blog post 8 | */ 9 | 10 | import { Directive, ViewContainerRef } from '@angular/core'; 11 | 12 | @Directive({ 13 | selector: '[dynamic-tabs]' 14 | }) 15 | export class DynamicTabsDirective { 16 | constructor(public viewContainer: ViewContainerRef){} 17 | } 18 | -------------------------------------------------------------------------------- /so-navigator/src/app/tabs/tabs.component.spec.ts: -------------------------------------------------------------------------------- 1 | import { async, ComponentFixture, TestBed } from '@angular/core/testing'; 2 | 3 | import { TabsComponent } from './tabs.component'; 4 | 5 | describe('TabsComponent', () => { 6 | let component: TabsComponent; 7 | let fixture: ComponentFixture; 8 | 9 | beforeEach(async(() => { 10 | TestBed.configureTestingModule({ 11 | declarations: [ TabsComponent ] 12 | }) 13 | .compileComponents(); 14 | })); 15 | 16 | beforeEach(() => { 17 | fixture = TestBed.createComponent(TabsComponent); 18 | component = fixture.componentInstance; 19 | fixture.detectChanges(); 20 | }); 21 | 22 | it('should create', () => { 23 | expect(component).toBeTruthy(); 24 | }); 25 | }); 26 | -------------------------------------------------------------------------------- /so-navigator/src/app/viewmodels.service.spec.ts: -------------------------------------------------------------------------------- 1 | import { TestBed, inject } from '@angular/core/testing'; 2 | 3 | import { ViewModelsService } from './viewmodels.service'; 4 | 5 | describe('ViewmodelsService', () => { 6 | beforeEach(() => { 7 | TestBed.configureTestingModule({ 8 | providers: [ViewModelsService] 9 | }); 10 | }); 11 | 12 | it('should be created', inject([ViewModelsService], (service: ViewModelsService) => { 13 | expect(service).toBeTruthy(); 14 | })); 15 | }); 16 | -------------------------------------------------------------------------------- /so-navigator/src/assets/NavigatorLayerFileFormatv1.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Security-Onion-Solutions/securityonion-docker-hh/8888b402d1757c8e4801260c8947ac9f90b15ee6/so-navigator/src/assets/NavigatorLayerFileFormatv1.pdf -------------------------------------------------------------------------------- /so-navigator/src/assets/NavigatorLayerFileFormatv1_1.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Security-Onion-Solutions/securityonion-docker-hh/8888b402d1757c8e4801260c8947ac9f90b15ee6/so-navigator/src/assets/NavigatorLayerFileFormatv1_1.pdf -------------------------------------------------------------------------------- /so-navigator/src/assets/NavigatorLayerFileFormatv1_2.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Security-Onion-Solutions/securityonion-docker-hh/8888b402d1757c8e4801260c8947ac9f90b15ee6/so-navigator/src/assets/NavigatorLayerFileFormatv1_2.pdf -------------------------------------------------------------------------------- /so-navigator/src/assets/NavigatorLayerFileFormatv1_3.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Security-Onion-Solutions/securityonion-docker-hh/8888b402d1757c8e4801260c8947ac9f90b15ee6/so-navigator/src/assets/NavigatorLayerFileFormatv1_3.pdf -------------------------------------------------------------------------------- /so-navigator/src/assets/NavigatorLayerFileFormatv2.pdf: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/Security-Onion-Solutions/securityonion-docker-hh/8888b402d1757c8e4801260c8947ac9f90b15ee6/so-navigator/src/assets/NavigatorLayerFileFormatv2.pdf -------------------------------------------------------------------------------- /so-navigator/src/assets/NavigatorLayerFileFormatv2_1.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Security-Onion-Solutions/securityonion-docker-hh/8888b402d1757c8e4801260c8947ac9f90b15ee6/so-navigator/src/assets/NavigatorLayerFileFormatv2_1.pdf -------------------------------------------------------------------------------- /so-navigator/src/assets/icons/baseline-grid_on-24px.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | -------------------------------------------------------------------------------- /so-navigator/src/assets/icons/ic_camera_alt_black_24px.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | -------------------------------------------------------------------------------- /so-navigator/src/assets/icons/ic_check_box_black_24px.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | -------------------------------------------------------------------------------- /so-navigator/src/assets/icons/ic_check_box_outline_blank_black_24px.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | -------------------------------------------------------------------------------- /so-navigator/src/assets/icons/ic_clear_black_24px.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | -------------------------------------------------------------------------------- /so-navigator/src/assets/icons/ic_clear_gray_24px.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | -------------------------------------------------------------------------------- /so-navigator/src/assets/icons/ic_close_black_24px.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | -------------------------------------------------------------------------------- /so-navigator/src/assets/icons/ic_color_lens_black_24px.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | -------------------------------------------------------------------------------- /so-navigator/src/assets/icons/ic_content_copy_black_24px.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | -------------------------------------------------------------------------------- /so-navigator/src/assets/icons/ic_description_black_24px.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | -------------------------------------------------------------------------------- /so-navigator/src/assets/icons/ic_done_black_24px.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | -------------------------------------------------------------------------------- /so-navigator/src/assets/icons/ic_done_gray_24px.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 
-------------------------------------------------------------------------------- /so-navigator/src/assets/icons/ic_file_download_black_24px.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | -------------------------------------------------------------------------------- /so-navigator/src/assets/icons/ic_file_upload_black_24px.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | -------------------------------------------------------------------------------- /so-navigator/src/assets/icons/ic_file_upload_gray_24px.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | -------------------------------------------------------------------------------- /so-navigator/src/assets/icons/ic_filter_list_black_24px.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | -------------------------------------------------------------------------------- /so-navigator/src/assets/icons/ic_format_color_fill_black_24px.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | -------------------------------------------------------------------------------- /so-navigator/src/assets/icons/ic_format_color_fill_black_nobottom_24px.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | -------------------------------------------------------------------------------- /so-navigator/src/assets/icons/ic_format_color_fill_gray_nobottom_24px.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | -------------------------------------------------------------------------------- /so-navigator/src/assets/icons/ic_format_size_black_24px.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | -------------------------------------------------------------------------------- /so-navigator/src/assets/icons/ic_insert_chart_black_24px.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | -------------------------------------------------------------------------------- /so-navigator/src/assets/icons/ic_insert_chart_gray_24px.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | -------------------------------------------------------------------------------- /so-navigator/src/assets/icons/ic_insert_comment_black_24px.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | -------------------------------------------------------------------------------- /so-navigator/src/assets/icons/ic_insert_comment_gray_24px.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | -------------------------------------------------------------------------------- /so-navigator/src/assets/icons/ic_keyboard_arrow_down_black_24px.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | -------------------------------------------------------------------------------- /so-navigator/src/assets/icons/ic_keyboard_arrow_right_black_24px.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | -------------------------------------------------------------------------------- 
/so-navigator/src/assets/icons/ic_keyboard_arrow_up_black_24px.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | -------------------------------------------------------------------------------- /so-navigator/src/assets/icons/ic_layers_clear_black_24px.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | -------------------------------------------------------------------------------- /so-navigator/src/assets/icons/ic_layers_clear_gray_24px.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | -------------------------------------------------------------------------------- /so-navigator/src/assets/icons/ic_lock_black_24px.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | -------------------------------------------------------------------------------- /so-navigator/src/assets/icons/ic_lock_open_black_24px.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | -------------------------------------------------------------------------------- /so-navigator/src/assets/icons/ic_palette_black_24px.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | -------------------------------------------------------------------------------- /so-navigator/src/assets/icons/ic_photo_size_select_large_black_24px.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | -------------------------------------------------------------------------------- /so-navigator/src/assets/icons/ic_playlist_add_black_24px.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | -------------------------------------------------------------------------------- /so-navigator/src/assets/icons/ic_playlist_add_gray_24px.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | -------------------------------------------------------------------------------- /so-navigator/src/assets/icons/ic_remove_circle_black_24px.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | -------------------------------------------------------------------------------- /so-navigator/src/assets/icons/ic_save_black_24px.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | -------------------------------------------------------------------------------- /so-navigator/src/assets/icons/ic_save_gray_24px.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | -------------------------------------------------------------------------------- /so-navigator/src/assets/icons/ic_search_black_24px.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | -------------------------------------------------------------------------------- /so-navigator/src/assets/icons/ic_search_gray_24px.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | -------------------------------------------------------------------------------- /so-navigator/src/assets/icons/ic_sort_alphabetically_ascending_black_24px.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 6 | A 7 | Z 8 | 9 | 10 | 11 | 
-------------------------------------------------------------------------------- /so-navigator/src/assets/icons/ic_sort_alphabetically_black_24px.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 6 | A 7 | 8 | 9 | -------------------------------------------------------------------------------- /so-navigator/src/assets/icons/ic_sort_alphabetically_descending_black_24px.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 6 | Z 7 | A 8 | 9 | 10 | -------------------------------------------------------------------------------- /so-navigator/src/assets/icons/ic_sort_numerically_ascending_black_24px.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 6 | 1 7 | 2 8 | 9 | 10 | -------------------------------------------------------------------------------- /so-navigator/src/assets/icons/ic_sort_numerically_black_24px.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 6 | # 7 | 8 | 9 | -------------------------------------------------------------------------------- /so-navigator/src/assets/icons/ic_sort_numerically_descending_black_24px.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 6 | 2 7 | 1 8 | 9 | 10 | 11 | -------------------------------------------------------------------------------- /so-navigator/src/assets/icons/ic_texture_black_24px.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | -------------------------------------------------------------------------------- /so-navigator/src/assets/icons/ic_texture_gray_24px.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | -------------------------------------------------------------------------------- /so-navigator/src/assets/icons/ic_view_large_black_24px.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | -------------------------------------------------------------------------------- /so-navigator/src/assets/icons/ic_view_list_black_24px.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | -------------------------------------------------------------------------------- /so-navigator/src/assets/icons/ic_view_list_grey_24px.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | -------------------------------------------------------------------------------- /so-navigator/src/assets/icons/ic_view_medium_black_24px.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | -------------------------------------------------------------------------------- /so-navigator/src/assets/icons/ic_view_small_black_24px.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | -------------------------------------------------------------------------------- /so-navigator/src/assets/icons/ic_visibility_black_24px.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | -------------------------------------------------------------------------------- /so-navigator/src/assets/icons/ic_visibility_gray_24px.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 
-------------------------------------------------------------------------------- /so-navigator/src/assets/icons/ic_visibility_off_black_24px.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | -------------------------------------------------------------------------------- /so-navigator/src/assets/image_scoreVariableExample.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Security-Onion-Solutions/securityonion-docker-hh/8888b402d1757c8e4801260c8947ac9f90b15ee6/so-navigator/src/assets/image_scoreVariableExample.png -------------------------------------------------------------------------------- /so-navigator/src/colors.scss: -------------------------------------------------------------------------------- 1 | $table-background-color: rgb(255, 255, 255); 2 | $column-hover-color: rgb(235, 235, 235); 3 | $cell-border-color: darken($column-hover-color, 20%); 4 | $cell-highlight-color: rgb(96, 197, 255); 5 | $hover-cell-font-color: rgb(0, 0, 0); 6 | $panel-dark: #ddd; 7 | $panel-light: lighten($panel-dark, 8%); 8 | $tab-text-color: #555; 9 | $button-dark: #b8b8b8; -------------------------------------------------------------------------------- /so-navigator/src/environments/environment.prod.ts: -------------------------------------------------------------------------------- 1 | export const environment = { 2 | production: true 3 | }; 4 | -------------------------------------------------------------------------------- /so-navigator/src/environments/environment.ts: -------------------------------------------------------------------------------- 1 | // The file contents for the current environment will overwrite these during build. 2 | // The build system defaults to the dev environment which uses `environment.ts`, but if you do 3 | // `ng build --env=prod` then `environment.prod.ts` will be used instead. 4 | // The list of which env maps to which file can be found in `.angular-cli.json`. 
5 | 6 | export const environment = { 7 | production: false 8 | }; 9 | -------------------------------------------------------------------------------- /so-navigator/src/favicon.ico: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Security-Onion-Solutions/securityonion-docker-hh/8888b402d1757c8e4801260c8947ac9f90b15ee6/so-navigator/src/favicon.ico -------------------------------------------------------------------------------- /so-navigator/src/faviconO.ico: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Security-Onion-Solutions/securityonion-docker-hh/8888b402d1757c8e4801260c8947ac9f90b15ee6/so-navigator/src/faviconO.ico -------------------------------------------------------------------------------- /so-navigator/src/index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | ATT&CK™ Navigator 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | -------------------------------------------------------------------------------- /so-navigator/src/main.ts: -------------------------------------------------------------------------------- 1 | import { enableProdMode } from '@angular/core'; 2 | import { platformBrowserDynamic } from '@angular/platform-browser-dynamic'; 3 | 4 | import { AppModule } from './app/app.module'; 5 | import { environment } from './environments/environment'; 6 | 7 | if (environment.production) { 8 | enableProdMode(); 9 | } 10 | 11 | platformBrowserDynamic().bootstrapModule(AppModule) 12 | .catch(err => console.log(err)); 13 | -------------------------------------------------------------------------------- /so-navigator/src/test.ts: -------------------------------------------------------------------------------- 1 | // This file is required by karma.conf.js and loads recursively all the .spec and framework files 2 | 3 | import 'zone.js/dist/long-stack-trace-zone'; 4 | import 'zone.js/dist/proxy.js'; 5 | import 'zone.js/dist/sync-test'; 6 | import 'zone.js/dist/jasmine-patch'; 7 | import 'zone.js/dist/async-test'; 8 | import 'zone.js/dist/fake-async-test'; 9 | import { getTestBed } from '@angular/core/testing'; 10 | import { 11 | BrowserDynamicTestingModule, 12 | platformBrowserDynamicTesting 13 | } from '@angular/platform-browser-dynamic/testing'; 14 | 15 | // Unfortunately there's no typing for the `__karma__` variable. Just declare it as any. 16 | declare const __karma__: any; 17 | declare const require: any; 18 | 19 | // Prevent Karma from running prematurely. 20 | __karma__.loaded = function () {}; 21 | 22 | // First, initialize the Angular testing environment. 23 | getTestBed().initTestEnvironment( 24 | BrowserDynamicTestingModule, 25 | platformBrowserDynamicTesting() 26 | ); 27 | // Then we find all the tests. 28 | const context = require.context('./', true, /\.spec\.ts$/); 29 | // And load the modules. 30 | context.keys().map(context); 31 | // Finally, start Karma to run the tests. 
32 | __karma__.start(); 33 | -------------------------------------------------------------------------------- /so-navigator/src/tsconfig.app.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": "../tsconfig.json", 3 | "compilerOptions": { 4 | "allowJs": true, 5 | "outDir": "../out-tsc/app", 6 | "baseUrl": "./", 7 | "module": "es2015", 8 | "types": [] 9 | }, 10 | "exclude": [ 11 | "test.ts", 12 | "**/*.spec.ts" 13 | ] 14 | } 15 | -------------------------------------------------------------------------------- /so-navigator/src/tsconfig.spec.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": "../tsconfig.json", 3 | "compilerOptions": { 4 | "outDir": "../out-tsc/spec", 5 | "baseUrl": "./", 6 | "module": "commonjs", 7 | "target": "es5", 8 | "types": [ 9 | "jasmine", 10 | "node" 11 | ] 12 | }, 13 | "files": [ 14 | "test.ts", 15 | "polyfills.ts" 16 | ], 17 | "include": [ 18 | "**/*.spec.ts", 19 | "**/*.d.ts" 20 | ] 21 | } 22 | -------------------------------------------------------------------------------- /so-navigator/src/typings.d.ts: -------------------------------------------------------------------------------- 1 | /* SystemJS module definition */ 2 | declare var module: NodeModule; 3 | interface NodeModule { 4 | id: string; 5 | } 6 | -------------------------------------------------------------------------------- /so-navigator/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compileOnSave": false, 3 | "compilerOptions": { 4 | "outDir": "./dist/out-tsc", 5 | "sourceMap": true, 6 | "declaration": false, 7 | "moduleResolution": "node", 8 | "emitDecoratorMetadata": true, 9 | "experimentalDecorators": true, 10 | "target": "es5", 11 | "typeRoots": [ 12 | "node_modules/@types" 13 | ], 14 | "lib": [ 15 | "es2017", 16 | "dom" 17 | ], 18 | "module": "es2015", 19 | "baseUrl": "./", 20 | } 21 | } -------------------------------------------------------------------------------- /so-nodered/docker-make.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | export NODE_RED_VERSION=$(grep -oE "\"node-red\": \"(\w*.\w*.\w*.\w*.\w*.)" package.json | cut -d\" -f4) 3 | 4 | echo "#########################################################################" 5 | echo "node-red version: ${NODE_RED_VERSION}" 6 | echo "#########################################################################" 7 | 8 | docker build --no-cache \ 9 | --build-arg ARCH=amd64 \ 10 | --build-arg NODE_VERSION=12 \ 11 | --build-arg NODE_RED_VERSION=${NODE_RED_VERSION} \ 12 | --build-arg OS=alpine \ 13 | --build-arg BUILD_DATE="$(date +"%Y-%m-%dT%H:%M:%SZ")" \ 14 | --build-arg TAG_SUFFIX=default \ 15 | --file Dockerfile \ 16 | --tag soshybridunter/so-nodered:HH1.2.1 . 
17 | -------------------------------------------------------------------------------- /so-nodered/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "node-red-docker", 3 | "version": "1.0.4", 4 | "description": "Low-code programming for event-driven applications", 5 | "homepage": "http://nodered.org", 6 | "license": "Apache-2.0", 7 | "repository": { 8 | "type": "git", 9 | "url": "https://github.com/node-red/node-red-docker.git" 10 | }, 11 | "main": "node_modules/node-red/red/red.js", 12 | "scripts": { 13 | "start": "node $NODE_OPTIONS node_modules/node-red/red.js $FLOWS" 14 | }, 15 | "contributors": [ 16 | { 17 | "name": "Dave Conway-Jones" 18 | }, 19 | { 20 | "name": "Nick O'Leary" 21 | }, 22 | { 23 | "name": "James Thomas" 24 | }, 25 | { 26 | "name": "Raymond Mouthaan" 27 | } 28 | ], 29 | "dependencies": { 30 | "node-red": "1.0.4" 31 | }, 32 | "engines": { 33 | "node": ">=10" 34 | } 35 | } 36 | -------------------------------------------------------------------------------- /so-nodered/scripts/install_devtools.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | set -ex 3 | 4 | # Installing Devtools 5 | if [[ ${TAG_SUFFIX} != "minimal" ]]; then 6 | echo "Installing devtools" 7 | apk add --no-cache --virtual devtools build-base linux-headers udev python python3 8 | else 9 | echo "Skip installing devtools" 10 | fi 11 | -------------------------------------------------------------------------------- /so-nodered/scripts/remove_native_gpio.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | set -ex 3 | 4 | # Remove native GPIO node if exists 5 | if [[ -d "/usr/src/node-red/node_modules/@node-red/nodes/core/hardware" ]]; then 6 | echo "Removing native GPIO node" 7 | rm -r /usr/src/node-red/node_modules/@node-red/nodes/core/hardware 8 | else 9 | echo "Skip removing native GPIO node" 10 | fi 11 | -------------------------------------------------------------------------------- /so-playbook/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM redmine:4-passenger 2 | 3 | LABEL maintainer "Security Onion Solutions, LLC" 4 | LABEL description="Playbook running in Docker container for use with Security Onion" 5 | 6 | WORKDIR /usr/src/redmine 7 | 8 | RUN apt-get update && apt-get install patch && rm -rf /var/lib/apt/lists/* 9 | 10 | ADD playbook/plugin/redmine_playbook.tar.bz2 /usr/src/redmine/plugins 11 | 12 | ADD playbook/circle_theme.tar.bz2 /usr/src/redmine/public/themes 13 | 14 | #ADD playbook/issues_controller.patch /tmp/issues_controller.patch 15 | 16 | RUN git clone https://github.com/suer/redmine_webhook.git /usr/src/redmine/plugins/redmine_webhook 17 | 18 | #RUN git clone https://github.com/serpi90/redmine_webhook.git /usr/src/redmine/plugins/redmine_webhook 19 | 20 | #RUN patch -p1 -i /tmp/issues_controller.patch 21 | 22 | COPY playbook/passenger-nginx-config-template.erb /passenger-nginx-config-template.erb 23 | 24 | CMD ["passenger", "start", "--nginx-config-template", "/passenger-nginx-config-template.erb"] 25 | -------------------------------------------------------------------------------- /so-playbook/playbook/circle_theme.tar.bz2: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Security-Onion-Solutions/securityonion-docker-hh/8888b402d1757c8e4801260c8947ac9f90b15ee6/so-playbook/playbook/circle_theme.tar.bz2 
-------------------------------------------------------------------------------- /so-playbook/playbook/issues_controller.patch: -------------------------------------------------------------------------------- 1 | # In order to provide the same hook chain as for normal updates, a 2 | # hook is needed that is called after an issue is saved in bulk_update. 3 | --- a/app/controllers/issues_controller.rb 2017-03-23 13:49:20.828550346 +0100 4 | +++ b/app/controllers/issues_controller.rb 2017-03-23 13:49:52.909691437 +0100 5 | @@ -323,6 +323,7 @@ 6 | call_hook(:controller_issues_bulk_edit_before_save, { :params => params, :issue => issue }) 7 | if issue.save 8 | saved_issues << issue 9 | + call_hook(:controller_issues_bulk_edit_after_save, { :params => params, :issue => issue, :journal => journal }) 10 | else 11 | unsaved_issues << orig_issue 12 | end 13 | -------------------------------------------------------------------------------- /so-playbook/playbook/passenger-nginx-config-template.erb: -------------------------------------------------------------------------------- 1 | <%= include_passenger_internal_template('global.erb') %> 2 | 3 | worker_processes 1; 4 | events { 5 | worker_connections 4096; 6 | } 7 | 8 | http { 9 | <%= include_passenger_internal_template('http.erb', 4) %> 10 | 11 | default_type application/octet-stream; 12 | types_hash_max_size 2048; 13 | server_names_hash_bucket_size 64; 14 | client_max_body_size 1024m; 15 | access_log off; 16 | keepalive_timeout 60; 17 | underscores_in_headers on; 18 | gzip on; 19 | gzip_comp_level 3; 20 | gzip_min_length 150; 21 | gzip_proxied any; 22 | gzip_types text/plain text/css text/json text/javascript 23 | application/javascript application/x-javascript application/json 24 | application/rss+xml application/vnd.ms-fontobject application/x-font-ttf 25 | application/xml font/opentype image/svg+xml text/xml; 26 | 27 | server { 28 | server_name _; 29 | listen 0.0.0.0:3000; 30 | root '/usr/src/redmine/public'; 31 | passenger_app_env 'production'; 32 | passenger_spawn_method 'smart'; 33 | passenger_load_shell_envvars off; 34 | 35 | location ~ ^/playbook(/.*|$) { 36 | alias /usr/src/redmine/public$1; 37 | passenger_base_uri /playbook; 38 | passenger_app_root /usr/src/redmine; 39 | passenger_document_root /usr/src/redmine/public; 40 | passenger_enabled on; 41 | } 42 | } 43 | 44 | passenger_pre_start http://0.0.0.0:3000/; 45 | } 46 | -------------------------------------------------------------------------------- /so-playbook/playbook/plugin/redmine_playbook.tar.bz2: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Security-Onion-Solutions/securityonion-docker-hh/8888b402d1757c8e4801260c8947ac9f90b15ee6/so-playbook/playbook/plugin/redmine_playbook.tar.bz2 -------------------------------------------------------------------------------- /so-redis/files/docker-entrypoint.sh: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Security-Onion-Solutions/securityonion-docker-hh/8888b402d1757c8e4801260c8947ac9f90b15ee6/so-redis/files/docker-entrypoint.sh -------------------------------------------------------------------------------- /so-soctopus/.gitignore: -------------------------------------------------------------------------------- 1 | __pycache__ 2 | .idea -------------------------------------------------------------------------------- /so-soctopus/Dockerfile: 
-------------------------------------------------------------------------------- 1 | FROM centos:7 2 | 3 | LABEL maintainer="Security Onion Solutions, LLC" 4 | LABEL description="API for automating SOC-related functions" 5 | 6 | RUN yum update -y && yum -y install epel-release 7 | RUN yum -y install https://centos7.iuscommunity.org/ius-release-el7.rpm 8 | #RUN rpm --import /etc/pki/rpm-gpg/IUS-COMMUNITY-GPG-KEY 9 | RUN yum -y makecache && yum -y install python3 python3-pip git && pip3 install --upgrade pip && yum clean all 10 | RUN mkdir -p /SOCtopus 11 | RUN mkdir -p /SOCtopus/templates 12 | RUN mkdir -p /SOCtopus/playbook 13 | RUN mkdir -p /var/log/SOCtopus 14 | WORKDIR /SOCtopus 15 | COPY ./requirements.txt /SOCtopus/ 16 | RUN pip3 install -r requirements.txt 17 | 18 | COPY ./so-soctopus /SOCtopus 19 | ENTRYPOINT ["python3", "SOCtopus.py"] 20 | 21 | -------------------------------------------------------------------------------- /so-soctopus/requirements.txt: -------------------------------------------------------------------------------- 1 | urllib3>=1.25,<1.26 2 | certifi>=2019.11 3 | flask-bootstrap>=3.3.7.0 4 | Flask>=1.1,<1.2 5 | Flask-WTF>=0.14,<0.15 6 | grr-api-client>=3.3,<3.3.0.post8 7 | jsonpickle>=1.2 8 | pymisp>=2.4,<2.5 9 | requests>=2.23.0,<2.24 10 | rt>=2.0,<2.1 11 | ruamel.yaml>=0.16,<0.17 12 | sigmatools>=0.16,<0.17 13 | thehive4py>=1.6,<1.7 14 | 15 | -------------------------------------------------------------------------------- /so-soctopus/so-soctopus/SOCtopus.conf: -------------------------------------------------------------------------------- 1 | [es] 2 | es_url = YOURESURL 3 | es_user = YOURESUSER 4 | es_pass = YOURESPASS 5 | es_index_pattern = so-* 6 | es_verifycert = no 7 | 8 | 9 | [cortex] 10 | auto_analyze_alerts = no 11 | cortex_url = YOURCORTEXURL 12 | cortex_key = YOURCORTEXKEY 13 | supported_analyzers = Urlscan_io_Search,CERTatPassiveDNS 14 | 15 | [fir] 16 | fir_url = YOURFIRURL 17 | fir_token = YOURFIRTOKEN 18 | fir_actor = 3 19 | fir_category = 3 20 | fir_confidentiality = 1 21 | fir_detection = 2 22 | fir_plan = 8 23 | fir_severity = 4 24 | fir_verifycert = no 25 | 26 | [grr] 27 | grr_url = YOURGRRURL 28 | grr_user = YOURGRRUSER 29 | grr_pass = YOURGRRPASS 30 | 31 | [hive] 32 | hive_url = YOURHIVEURL 33 | hive_key = YOURHIVEKEY 34 | hive_tlp = 3 35 | hive_verifycert = no 36 | 37 | [misp] 38 | misp_url = YOURMISPURL 39 | misp_key = YOURMISPKEY 40 | misp_verifycert = no 41 | distrib = 0 42 | threat = 4 43 | analysis = 0 44 | 45 | [rtir] 46 | rtir_url = YOURRTIRURL 47 | rtir_api = REST/1.0/ 48 | rtir_user = YOURRTIRUSER 49 | rtir_pass = YOURRTIRPASS 50 | rtir_queue = Incidents 51 | rtir_creator = root 52 | rtir_verifycert = no 53 | 54 | [slack] 55 | slack_url = YOURSLACKWORKSPACE 56 | slack_webhook = YOURSLACKWEBHOOK 57 | 58 | [playbook] 59 | playbook_verifycert = no 60 | playbook_url = YOURPLAYBOOKURL 61 | playbook_key = YOURPLAYBOOKKEY 62 | 63 | [log] 64 | logfile = /tmp/soctopus.log 65 | -------------------------------------------------------------------------------- /so-soctopus/so-soctopus/config.py: -------------------------------------------------------------------------------- 1 | # Base config 2 | import configparser 3 | 4 | parser = configparser.ConfigParser() 5 | parser.read('SOCtopus.conf') 6 | 7 | es_index = parser.get('es', 'es_index_pattern', fallback='so-*') 8 | 9 | -------------------------------------------------------------------------------- /so-soctopus/so-soctopus/forms.py: 
-------------------------------------------------------------------------------- 1 | from flask import Flask, render_template 2 | from flask_wtf import FlaskForm 3 | from wtforms import StringField 4 | 5 | 6 | class DefaultForm(FlaskForm): 7 | esindex = StringField('esindex') 8 | esid = StringField('esid') 9 | -------------------------------------------------------------------------------- /so-soctopus/so-soctopus/grr.py: -------------------------------------------------------------------------------- 1 | from grr_api_client import api 2 | import json 3 | import requests 4 | from requests.auth import HTTPBasicAuth 5 | 6 | def listProcessFlow(client_id,grr_url,headers,cookies,grr_user,grr_pass): 7 | data = { 8 | "flow": { 9 | "args": { 10 | "@type": "type.googleapis.com/ListProcessesArgs" 11 | }, 12 | "name": "ListProcesses" 13 | } 14 | } 15 | 16 | response = requests.post(grr_url + "/api/v2/clients/" + client_id + "/flows", 17 | headers=headers, data=json.dumps(data), 18 | cookies=cookies, auth=HTTPBasicAuth(grr_user, grr_pass)) 19 | 20 | decoded_response = response.content.decode("utf-8") 21 | result = decoded_response.lstrip(")]}'") 22 | flow_result = json.loads(result) 23 | flow_id = flow_result["flowId"] 24 | 25 | return flow_id 26 | 27 | 28 | def checkFlowStatus(client_id,grr_url,flow_id,headers,cookies,grr_user,grr_pass): 29 | response = requests.get(grr_url + "/api/clients/" + client_id + "/flows/" + flow_id, 30 | headers=headers, 31 | cookies=cookies, auth=HTTPBasicAuth(grr_user, grr_pass)) 32 | 33 | decoded_response = response.content.decode("utf-8") 34 | result = decoded_response.lstrip(")]}'") 35 | status_check = json.loads(result) 36 | status = str(status_check["value"]["state"]["value"].lower()) 37 | 38 | return status 39 | 40 | def downloadFlowResults(client_id,grr_url,flow_id,headers,cookies,grr_user,grr_pass): 41 | response = requests.get(grr_url + "/api/clients/" + client_id + "/flows/" + flow_id + "/exported-results/csv-zip", 42 | headers=headers, 43 | cookies=cookies, auth=HTTPBasicAuth(grr_user, grr_pass)) 44 | filepath = "/tmp/soctopus/" + client_id + ".zip" 45 | with open(filepath, "wb") as compressed_flow_results: 46 | compressed_flow_results.write(response.content) 47 | -------------------------------------------------------------------------------- /so-soctopus/so-soctopus/playbook/securityonion-network.yml: -------------------------------------------------------------------------------- 1 | # Config file for network-based Sigma rules for use with Security Onion 2 | title: SO Network 3 | logsources: 4 | bro: 5 | category: dns 6 | index: logstash-bro-* 7 | firewall: 8 | category: firewall 9 | index: logstash-firewall-* 10 | ids: 11 | category: ids 12 | index: logstash-ids-* 13 | ids-snort: 14 | product: snort 15 | index: logstash-ids-* 16 | ids-suricata: 17 | product: suricata 18 | index: logstash-ids-* 19 | defaultindex: logstash-* 20 | fieldmappings: 21 | query: query 22 | answer: answers 23 | src_ip: source_ip 24 | src_port: source_port 25 | dst_ip: destination_ip 26 | dst_port: destination_port 27 | -------------------------------------------------------------------------------- /so-soctopus/so-soctopus/playbook/sysmon.yml: -------------------------------------------------------------------------------- 1 | title: Conversion of generic rules into Sysmon 2 | order: 10 3 | logsources: 4 | process_creation: 5 | category: process_creation 6 | product: windows 7 | conditions: 8 | EventID: 1 9 | rewrite: 10 | product: windows 11 | service: sysmon 12 | 
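Note: the two YAML files above are Sigma field-mapping/rewrite configs. A minimal sketch of how such configs are typically fed to sigmac (from the sigmatools package pinned in so-soctopus/requirements.txt); the rule paths are assumptions:

    # Network rule: map Sigma fields to the logstash-* indices defined in securityonion-network.yml
    sigmac -t es-qs -c so-soctopus/so-soctopus/playbook/securityonion-network.yml /path/to/rules/network/example_dns_rule.yml

    # Windows rule: apply the generic-to-Sysmon rewrite (sysmon.yml); in practice it is
    # chained with an additional -c field-mapping config for the target indices
    sigmac -t es-qs -c so-soctopus/so-soctopus/playbook/sysmon.yml /path/to/rules/windows/process_creation/example_rule.yml

The -t es-qs target emits an Elasticsearch query string; other targets such as es-dsl work the same way with these configs.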
-------------------------------------------------------------------------------- /so-soctopus/so-soctopus/templates/cancel.html: -------------------------------------------------------------------------------- 1 | {% extends "bootstrap/base.html" %} 2 | {% block content %} 3 | 4 | 5 | 8 | 9 | 10 | {% endblock %} 11 | 12 | -------------------------------------------------------------------------------- /so-soctopus/so-soctopus/templates/hive.html: -------------------------------------------------------------------------------- 1 | {% extends "bootstrap/base.html" %} 2 | {% block content %} 3 | 4 | 5 |
6 | {{ form.csrf_token }} 7 | 19 | 20 | 21 |
22 | 23 | 24 | {% endblock %} 25 | 26 | -------------------------------------------------------------------------------- /so-soctopus/so-soctopus/templates/postresult.html: -------------------------------------------------------------------------------- 1 | {% extends "bootstrap/base.html" %} 2 | {% block content %} 3 | 4 | 5 | 6 | {% for key, value in result.items() %} 7 | 8 | 9 | 10 | 11 | {% endfor %} 12 |
{{ key }} {{ value }}
13 | 14 | 15 | {% endblock %} 16 | 17 | -------------------------------------------------------------------------------- /so-soctopus/so-soctopus/templates/result.html: -------------------------------------------------------------------------------- 1 | {% extends "bootstrap/base.html" %} 2 | {% block content %} 3 | 4 | 5 | ES Result 6 | {% for key in result %} 7 | {% if '@version' not in key %} 8 | {{ key }}: {{ result[key] }}
9 | {% endif %} 10 | {% endfor %} 11 | {{ esindex }} 12 | 13 | 14 | {% endblock %} 15 | 16 | -------------------------------------------------------------------------------- /so-soctopus/so-soctopus/templates/strelka.html: -------------------------------------------------------------------------------- 1 | {% extends "bootstrap/base.html" %} 2 | {% block content %} 3 | 4 | 5 | Strelka File Scan 6 | 7 | 13 | 14 | {% endblock %} 15 | -------------------------------------------------------------------------------- /so-soctopus/so-soctopus/templates/update_event.html: -------------------------------------------------------------------------------- 1 | {% extends "bootstrap/base.html" %} 2 | {% block content %} 3 | 4 | 5 | Update event 6 | 7 | 8 |
9 | {{ form.csrf_token }} 10 | {% for key, value in result.items() %} 11 | {{ key }}:
12 | {% endfor %} 13 | 14 |
15 | 16 | 17 | {% endblock %} 18 | 19 | -------------------------------------------------------------------------------- /so-steno/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM centos:7 2 | 3 | LABEL maintainer "Security Onion Solutions, LLC" 4 | LABEL description="Google Stenographer running in a docker for use with Security Onion." 5 | 6 | RUN yum update -y && \ 7 | yum clean all 8 | 9 | # Install epel 10 | RUN yum -y install epel-release bash libpcap iproute tcpdump && yum clean all 11 | RUN yum -y install https://centos7.iuscommunity.org/ius-release.rpm 12 | RUN yum -y install snappy leveldb jq libaio libseccomp golang which openssl python36u python36u-pip 13 | RUN /usr/bin/pip3.6 install 14 | RUN yum -y erase epel-release && yum clean all && rm -rf /var/cache/yum 15 | 16 | # Install the steno package 17 | RUN rpm -i https://github.com/Security-Onion-Solutions/securityonion-docker-rpm/releases/download/stenographer_20180316/stenographer-0-1.20180316git57b88aa.el7.centos.x86_64.rpm 18 | RUN mkdir -p /opt/sensoroni 19 | 20 | # Copy the Binary in 21 | COPY files/sensoroni /opt/sensoroni/ 22 | RUN chmod +x /opt/sensoroni/sensoroni 23 | 24 | # setcap 25 | RUN setcap 'CAP_NET_RAW+ep CAP_NET_ADMIN+ep CAP_IPC_LOCK+ep CAP_SETGID+ep' /usr/bin/stenotype 26 | 27 | # Fix those perms.. big worm 28 | RUN mkdir -p /nsm/pcap/files \ 29 | && mkdir -p /nsm/pcap/index \ 30 | && chown -R 941:941 /nsm/pcap \ 31 | && chown -R 941:941 /opt/sensoroni \ 32 | && mkdir -p /etc/stenographer/certs \ 33 | && mkdir -p /var/log/stenographer \ 34 | && usermod -s /bin/bash stenographer 35 | 36 | 37 | # Copy over the entry script. 38 | COPY files/so-steno.sh /usr/local/sbin/so-steno.sh 39 | RUN chmod +x /usr/local/sbin/so-steno.sh 40 | 41 | ENTRYPOINT ["/usr/local/sbin/so-steno.sh"] 42 | -------------------------------------------------------------------------------- /so-steno/README.md: -------------------------------------------------------------------------------- 1 | Google Stenographer 2 | -------------------------------------------------------------------------------- /so-steno/files/sensoroni: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Security-Onion-Solutions/securityonion-docker-hh/8888b402d1757c8e4801260c8947ac9f90b15ee6/so-steno/files/sensoroni -------------------------------------------------------------------------------- /so-steno/files/so-steno.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Generate the keys if they have not been already 4 | /usr/bin/stenokeys.sh 941 941 5 | 6 | chown -R 941:941 /etc/stenographer/certs 7 | 8 | #runuser -l stenographer -c '/opt/sensoroni/sensoroni -config /opt/sensoroni/sensoroni.json && /usr/bin/stenographer -syslog=false >> /var/log/stenographer/stenographer.log 2>&1' 9 | runuser -l stenographer -c '/opt/sensoroni/sensoroni -config /opt/sensoroni/sensoroni.json &' 10 | runuser -l stenographer -c '/usr/bin/stenographer -syslog=false >> /var/log/stenographer/stenographer.log 2>&1' 11 | -------------------------------------------------------------------------------- /so-strelka-backend/requirements.txt: -------------------------------------------------------------------------------- 1 | beautifulsoup4==4.7.1 2 | boltons==18.0.1 3 | esprima==4.0.1 4 | grpcio==1.19.0 5 | grpcio-tools==1.19.0 6 | html5lib==1.0.1 7 | inflection==0.3.1 8 | interruptingcow==0.8 9 | jsbeautifier==1.8.9 10 | 
libarchive-c==2.8 11 | lxml==4.3.0 12 | msoffcrypto-tool==4.10.1 13 | olefile==0.46 14 | oletools==0.53.1 15 | M2Crypto==0.33.0 16 | pdfminer.six==20181108 17 | pefile==2019.4.18 18 | pgpdump3==1.5.2 19 | pylzma==0.5.0 20 | pygments==2.3.1 21 | python-docx==0.8.10 22 | git+https://github.com/jshlbrd/python-entropy.git # v0.11 as of this freeze (package installed as 'entropy') 23 | python-magic==0.4.15 24 | pyyaml>=4.2b1 25 | rarfile==3.0 26 | redis==3.2.1 27 | requests==2.21.0 28 | rpmfile==0.1.4 29 | ssdeep==3.3 30 | tnefparse==1.3.0 31 | -------------------------------------------------------------------------------- /so-strelka-backend/setup.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | import setuptools 3 | 4 | setuptools.setup( 5 | name='strelka', 6 | author='Target Brands, Inc.', 7 | description='strelka: container-based file analysis at scale', 8 | license='Apache 2.0', 9 | packages=setuptools.find_packages(), 10 | scripts=['bin/strelka-backend'] 11 | ) 12 | -------------------------------------------------------------------------------- /so-strelka-filestream/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM golang AS build 2 | LABEL maintainer "Security Onion Solutions, LLC" 3 | 4 | RUN CGO_ENABLED=0 go get github.com/target/strelka/src/go/cmd/strelka-filestream 5 | 6 | FROM alpine 7 | COPY --from=build /go/bin/strelka-filestream /usr/local/bin/ 8 | RUN addgroup -g 939 strelka && \ 9 | adduser -u 939 -G strelka strelka --disabled-password \ 10 | -h /etc/strelka --no-create-home strelka 11 | RUN apk add --no-cache jq 12 | USER strelka 13 | 14 | -------------------------------------------------------------------------------- /so-strelka-frontend/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM golang AS build 2 | LABEL maintainer "Security Onion Solutions, LLC" 3 | 4 | RUN CGO_ENABLED=0 go get github.com/target/strelka/src/go/cmd/strelka-frontend 5 | 6 | FROM alpine 7 | COPY --from=build /go/bin/strelka-frontend /usr/local/bin/ 8 | RUN addgroup -g 939 strelka && \ 9 | adduser -u 939 -G strelka strelka --disabled-password \ 10 | -h /etc/strelka --no-create-home strelka && \ 11 | mkdir /var/log/strelka/ && \ 12 | chown -R 939:939 /var/log/strelka/ 13 | USER strelka 14 | 15 | -------------------------------------------------------------------------------- /so-strelka-manager/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM golang AS build 2 | LABEL maintainer "Security Onion Solutions, LLC" 3 | 4 | RUN CGO_ENABLED=0 go get github.com/target/strelka/src/go/cmd/strelka-manager 5 | 6 | FROM alpine 7 | COPY --from=build /go/bin/strelka-manager /usr/local/bin/ 8 | RUN addgroup -g 939 strelka && \ 9 | adduser -u 939 -G strelka strelka --disabled-password \ 10 | -h /etc/strelka --no-create-home strelka 11 | USER strelka 12 | -------------------------------------------------------------------------------- /so-suricata/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM centos:7 2 | 3 | LABEL maintainer "Security Onion Solutions, LLC" 4 | LABEL description="Suricata 4.1.6 running in a docker for use with Security Onion." 
5 | 6 | RUN yum update -y && \ 7 | yum clean all 8 | 9 | # Install epel 10 | RUN yum -y install epel-release bash libpcap iproute 11 | 12 | RUN yum -y install GeoIP luajit libnet jansson libyaml cargo rustc && \ 13 | yum -y erase epel-release && yum clean all && rm -rf /var/cache/yum 14 | 15 | # Install the Suricata package 16 | RUN rpm -i https://github.com/Security-Onion-Solutions/securityonion-docker-rpm/releases/download/securityonion-suricata-4.1.6/securityonion-suricata-4.1.6.0.rpm 17 | # Create Suricata User. 18 | RUN groupadd --gid 940 suricata && \ 19 | adduser --uid 940 --gid 940 \ 20 | --home-dir /etc/suricata --no-create-home suricata 21 | 22 | # Fix those perms.. big worm 23 | RUN chown -R 940:940 /etc/suricata && \ 24 | chown -R 940:940 /var/log/suricata 25 | 26 | # Copy over the entry script. 27 | ADD files/so-suricata.sh /usr/local/sbin/so-suricata.sh 28 | 29 | RUN chmod +x /usr/local/sbin/so-suricata.sh && chown 940:940 /var/run/suricata 30 | 31 | ENTRYPOINT ["/usr/local/sbin/so-suricata.sh"] 32 | -------------------------------------------------------------------------------- /so-suricata/README.md: -------------------------------------------------------------------------------- 1 | Suricata 4.1.6 2 | 3 | NOTE: This will only work on boxes with the elastic features enabled that use eve.json. 4 | 5 | REQUIREMENTS: AF_Packet, Suricata user with uid and gid `940`. 6 | 7 | 8 | To run for testing: 9 | ``` 10 | sudo docker run --privileged=true -e INTERFACE=eth1 \ 11 | -v /opt/so/conf/suricata/suricata.yaml:/usr/local/etc/suricata/suricata.yaml:ro \ 12 | -v /opt/so/conf/suricata/rules:/usr/local/etc/suricata/rules:ro \ 13 | -v /opt/so/log/suricata/:/usr/local/var/log/suricata/:rw \ 14 | --net=host --name=so-suricata -d toosmooth/so-suricata:test2 15 | ``` 16 | 17 | `INTERFACE` is the interface you want to monitor. 18 | 19 | Adjust the volume mounts to match where the required files live on your host.
20 | -------------------------------------------------------------------------------- /so-suricata/files/so-suricata.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Start Suricata - --init-errors-fatal could be added to make it die if rules are wrong 4 | /opt/suricata/bin/suricata -c /etc/suricata/suricata.yaml --af-packet=$INTERFACE --user=940 --group=940 -F /etc/suricata/bpf 5 | -------------------------------------------------------------------------------- /so-tcpreplay/files/tcpreplay: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Security-Onion-Solutions/securityonion-docker-hh/8888b402d1757c8e4801260c8947ac9f90b15ee6/so-tcpreplay/files/tcpreplay -------------------------------------------------------------------------------- /so-telegraf/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM alpine:3.6 2 | LABEL maintainer "Security Onion Solutions, LLC" 3 | LABEL description="Telegraf running in Docker container for use with Security Onion" 4 | 5 | RUN echo 'hosts: files dns' >> /etc/nsswitch.conf 6 | RUN apk add --no-cache iputils ca-certificates net-snmp-tools procps lm_sensors redis curl jq && \ 7 | update-ca-certificates 8 | 9 | ENV TELEGRAF_VERSION 1.10.1 10 | 11 | RUN set -ex && \ 12 | apk add --no-cache --virtual .build-deps wget gnupg tar && \ 13 | for key in \ 14 | 05CE15085FC09D18E99EFB22684A14CF2582E0C5 ; \ 15 | do \ 16 | gpg --keyserver ha.pool.sks-keyservers.net --recv-keys "$key" || \ 17 | gpg --keyserver pgp.mit.edu --recv-keys "$key" || \ 18 | gpg --keyserver keyserver.pgp.com --recv-keys "$key" ; \ 19 | done && \ 20 | wget --no-verbose https://dl.influxdata.com/telegraf/releases/telegraf-${TELEGRAF_VERSION}-static_linux_amd64.tar.gz.asc && \ 21 | wget --no-verbose https://dl.influxdata.com/telegraf/releases/telegraf-${TELEGRAF_VERSION}-static_linux_amd64.tar.gz && \ 22 | gpg --batch --verify telegraf-${TELEGRAF_VERSION}-static_linux_amd64.tar.gz.asc telegraf-${TELEGRAF_VERSION}-static_linux_amd64.tar.gz && \ 23 | mkdir -p /usr/src /etc/telegraf && \ 24 | tar -C /usr/src -xzf telegraf-${TELEGRAF_VERSION}-static_linux_amd64.tar.gz && \ 25 | mv /usr/src/telegraf*/telegraf.conf /etc/telegraf/ && \ 26 | chmod +x /usr/src/telegraf*/* && \ 27 | cp -a /usr/src/telegraf*/* /usr/bin/ && \ 28 | rm -rf *.tar.gz* /usr/src /root/.gnupg && \ 29 | apk del .build-deps 30 | 31 | EXPOSE 8125/udp 8092/udp 8094 32 | 33 | COPY entrypoint.sh /entrypoint.sh 34 | RUN chmod +x /entrypoint.sh 35 | ENTRYPOINT ["/entrypoint.sh"] 36 | CMD ["telegraf"] 37 | -------------------------------------------------------------------------------- /so-telegraf/entrypoint.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | set -e 3 | 4 | if [ "${1:0:1}" = '-' ]; then 5 | set -- telegraf "$@" 6 | fi 7 | 8 | exec "$@" 9 | -------------------------------------------------------------------------------- /so-thehive-cortex/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM centos:7 2 | 3 | LABEL maintainer "Security Onion Solutions, LLC" 4 | LABEL description="Cortex Docker container for use with Security Onion" 5 | RUN yum update -y 6 | RUN yum install -y https://dl.bintray.com/thehive-project/rpm-stable/cortex-3.0.1-1.noarch.rpm 7 | RUN yum -y install cortex wget git 8 | RUN groupmod -g 939 cortex \ 9 | && usermod -u 939 -g 939 cortex 
\ 10 | && ls -la /opt \ 11 | && ls -la /opt/cortex 12 | RUN mkdir -p /opt/cortex/conf 13 | RUN chown -R cortex /opt/cortex \ 14 | /var/log/cortex 15 | 16 | 17 | RUN yum update -y && yum -y install epel-release 18 | RUN yum -y install https://centos7.iuscommunity.org/ius-release-el7.rpm 19 | RUN yum -y makecache && yum -y install python36u python36u-pip && pip3.6 install --upgrade pip && yum clean all 20 | 21 | RUN git clone https://github.com/TheHive-Project/Cortex-Analyzers 22 | 23 | RUN for I in $(find Cortex-Analyzers -name 'requirements.txt'); do pip3 install -r $I || true; done 24 | 25 | USER cortex 26 | 27 | ENTRYPOINT ["/opt/cortex/bin/cortex"] 28 | -------------------------------------------------------------------------------- /so-thehive-cortex/requirements.txt: -------------------------------------------------------------------------------- 1 | cortexutils 2 | shodan 3 | censys 4 | diskcache 5 | cybercrimetracker 6 | python-magic 7 | ssdeep 8 | pyexifinfo 9 | pefile 10 | git+https://github.com/AnyMaster/pehashng 11 | git+https://github.com/Rafiot/pdfid.git 12 | oletools>=0.52 13 | extract-msg 14 | IMAPClient 15 | -------------------------------------------------------------------------------- /so-thehive-es/Dockerfile: -------------------------------------------------------------------------------- 1 | # This Dockerfile was based on the official Elasticsearch Docker image: 2 | # https://github.com/elastic/elasticsearch-docker 3 | 4 | # Copyright 2014,2015,2016,2017,2019,2020 Security Onion Solutions, LLC 5 | 6 | # This program is free software: you can redistribute it and/or modify 7 | # it under the terms of the GNU General Public License as published by 8 | # the Free Software Foundation, either version 3 of the License, or 9 | # (at your option) any later version. 10 | # 11 | # This program is distributed in the hope that it will be useful, 12 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 13 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 14 | # GNU General Public License for more details. 15 | # 16 | # You should have received a copy of the GNU General Public License 17 | # along with this program. If not, see . 18 | 19 | 20 | FROM docker.elastic.co/elasticsearch/elasticsearch-oss:6.8.7 21 | 22 | LABEL maintainer "Security Onion Solutions, LLC" 23 | 24 | ENV PATH /usr/share/elasticsearch/bin:$PATH 25 | 26 | WORKDIR /usr/share/elasticsearch 27 | 28 | RUN groupadd -g 939 socore && adduser -u 939 -g 939 -d /usr/share/elasticsearch socore 29 | RUN set -ex && for esdirs in config data logs; do \ 30 | mkdir -p "$esdirs"; \ 31 | chown -R socore:socore "$esdirs"; \ 32 | done 33 | 34 | USER elasticsearch 35 | 36 | COPY elasticsearch.yml config/ 37 | COPY log4j2.properties config/ 38 | COPY bin/es-docker bin/es-docker 39 | 40 | USER root 41 | 42 | RUN chown socore:socore config/elasticsearch.yml config/log4j2.properties bin/es-docker && \ 43 | chmod 0750 bin/es-docker 44 | 45 | USER elasticsearch 46 | CMD ["/bin/bash", "bin/es-docker"] 47 | -------------------------------------------------------------------------------- /so-thehive-es/bin/es-docker: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Run Elasticsearch and allow setting default settings via env vars 4 | # 5 | # e.g. 
Setting the env var cluster.name=testcluster 6 | # 7 | # will cause Elasticsearch to be invoked with -Ecluster.name=testcluster 8 | # 9 | # see https://www.elastic.co/guide/en/elasticsearch/reference/current/settings.html#_setting_default_settings 10 | 11 | declare -a es_opts 12 | 13 | while IFS='=' read -r envvar_key envvar_value 14 | do 15 | # Elasticsearch env vars need to have at least two dot separated lowercase words, e.g. `cluster.name` 16 | if [[ "$envvar_key" =~ ^[a-z]+\.[a-z]+ ]] 17 | then 18 | if [[ ! -z $envvar_value ]]; then 19 | es_opt="-E${envvar_key}=${envvar_value}" 20 | es_opts+=("${es_opt}") 21 | fi 22 | fi 23 | done < <(env) 24 | 25 | # The virtual file /proc/self/cgroup should list the current cgroup 26 | # membership. For each hierarchy, you can follow the cgroup path from 27 | # this file to the cgroup filesystem (usually /sys/fs/cgroup/) and 28 | # introspect the statistics for the cgroup for the given 29 | # hierarchy. Alas, Docker breaks this by mounting the container 30 | # statistics at the root while leaving the cgroup paths as the actual 31 | # paths. Therefore, Elasticsearch provides a mechanism to override 32 | # reading the cgroup path from /proc/self/cgroup and instead uses the 33 | # cgroup path defined the JVM system property 34 | # es.cgroups.hierarchy.override. Therefore, we set this value here so 35 | # that cgroup statistics are available for the container this process 36 | # will run in. 37 | export ES_JAVA_OPTS="-Des.cgroups.hierarchy.override=/ $ES_JAVA_OPTS" 38 | 39 | exec bin/elasticsearch "${es_opts[@]}" 40 | -------------------------------------------------------------------------------- /so-thehive-es/elasticsearch.yml: -------------------------------------------------------------------------------- 1 | cluster.name: "hive" 2 | network.host: 0.0.0.0 3 | discovery.zen.minimum_master_nodes: 1 4 | # This is a test -- if this is here, then the volume is mounted correctly. 5 | path.logs: /var/log/elasticsearch 6 | action.destructive_requires_name: true 7 | transport.bind_host: 0.0.0.0 8 | transport.publish_host: 0.0.0.0 9 | transport.publish_port: 9500 10 | http.host: 0.0.0.0 11 | http.port: 9400 12 | transport.tcp.port: 9500 13 | transport.host: 0.0.0.0 14 | thread_pool.index.queue_size: 100000 15 | thread_pool.search.queue_size: 100000 16 | thread_pool.bulk.queue_size: 100000 17 | -------------------------------------------------------------------------------- /so-thehive-es/es-docker: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Run Elasticsearch and allow setting default settings via env vars 4 | # 5 | # e.g. Setting the env var cluster.name=testcluster 6 | # 7 | # will cause Elasticsearch to be invoked with -Ecluster.name=testcluster 8 | # 9 | # see https://www.elastic.co/guide/en/elasticsearch/reference/current/settings.html#_setting_default_settings 10 | 11 | declare -a es_opts 12 | 13 | while IFS='=' read -r envvar_key envvar_value 14 | do 15 | # Elasticsearch env vars need to have at least two dot separated lowercase words, e.g. `cluster.name` 16 | if [[ "$envvar_key" =~ ^[a-z]+\.[a-z]+ ]] 17 | then 18 | if [[ ! -z $envvar_value ]]; then 19 | es_opt="-E${envvar_key}=${envvar_value}" 20 | es_opts+=("${es_opt}") 21 | fi 22 | fi 23 | done < <(env) 24 | 25 | # The virtual file /proc/self/cgroup should list the current cgroup 26 | # membership. 
For each hierarchy, you can follow the cgroup path from 27 | # this file to the cgroup filesystem (usually /sys/fs/cgroup/) and 28 | # introspect the statistics for the cgroup for the given 29 | # hierarchy. Alas, Docker breaks this by mounting the container 30 | # statistics at the root while leaving the cgroup paths as the actual 31 | # paths. Therefore, Elasticsearch provides a mechanism to override 32 | # reading the cgroup path from /proc/self/cgroup and instead uses the 33 | # cgroup path defined the JVM system property 34 | # es.cgroups.hierarchy.override. Therefore, we set this value here so 35 | # that cgroup statistics are available for the container this process 36 | # will run in. 37 | export ES_JAVA_OPTS="-Des.cgroups.hierarchy.override=/ $ES_JAVA_OPTS" 38 | 39 | exec bin/elasticsearch "${es_opts[@]}" 40 | -------------------------------------------------------------------------------- /so-thehive-es/log4j2.properties: -------------------------------------------------------------------------------- 1 | status = error 2 | #appender.console.type = Console 3 | #appender.console.name = console 4 | #appender.console.layout.type = PatternLayout 5 | #appender.console.layout.pattern = [%d{ISO8601}][%-5p][%-25c{1.}] %marker%m%n 6 | #rootLogger.level = info 7 | #rootLogger.appenderRef.console.ref = console 8 | # This is a test -- if this here, then the volume is mounted correctly. 9 | appender.rolling.type = RollingFile 10 | appender.rolling.name = rolling 11 | appender.rolling.fileName = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}.log 12 | appender.rolling.layout.type = PatternLayout 13 | appender.rolling.layout.pattern = [%d{ISO8601}][%-5p][%-25c] %.10000m%n 14 | appender.rolling.filePattern = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}-%d{yyyy-MM-dd}.log 15 | appender.rolling.policies.type = Policies 16 | appender.rolling.policies.time.type = TimeBasedTriggeringPolicy 17 | appender.rolling.policies.time.interval = 1 18 | appender.rolling.policies.time.modulate = true 19 | rootLogger.level = info 20 | rootLogger.appenderRef.rolling.ref = rolling 21 | -------------------------------------------------------------------------------- /so-thehive/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM centos:7 2 | 3 | LABEL maintainer "Security Onion Solutions, LLC" 4 | LABEL description="TheHive Docker container for use with Security Onion" 5 | RUN yum update -y 6 | RUN yum install -y https://dl.bintray.com/thehive-project/rpm-stable/thehive-3.4.0-1.noarch.rpm 7 | RUN yum -y install thehive wget 8 | RUN groupmod -g 939 thehive \ 9 | && usermod -u 939 -g 939 thehive \ 10 | && ls -la /opt \ 11 | && ls -la /opt/thehive 12 | RUN mkdir -p /opt/thehive/conf 13 | RUN chown -R thehive /opt/thehive \ 14 | /var/log/thehive 15 | RUN echo "play.http.secret.key=$(tr -dc 'A-Za-z0-9' < /dev/urandom | head -c 49)" >> /opt/thehive/conf/application.conf \ 16 | && echo -e 'search.host = ["elasticsearch:9300"]\n\ 17 | play.http.secret.key = ${?PLAY_SECRET}' >> /opt/thehive/conf/application.conf 18 | COPY bin/so-thehive.sh /opt/thehive/bin/so-thehive.sh 19 | RUN chmod +x /opt/thehive/bin/so-thehive.sh 20 | 21 | USER thehive 22 | 23 | EXPOSE 9000 24 | 25 | ENTRYPOINT ["/opt/thehive/bin/so-thehive.sh"] 26 | -------------------------------------------------------------------------------- /so-thehive/bin/so-thehive.sh: -------------------------------------------------------------------------------- 1 | 
#!/bin/bash 2 | 3 | rm -f /tmp/garbage_file 4 | while ! wget -O /tmp/garbage_file ${ELASTICSEARCH_HOST}:9500 2>/dev/null 5 | do 6 | echo "Waiting for Elasticsearch..." 7 | rm -f /tmp/garbage_file 8 | sleep 1 9 | done 10 | rm -f /tmp/garbage_file 11 | sleep 5 12 | 13 | # Remove the old PID 14 | rm -f /opt/thehive/RUNNING_PID 15 | 16 | /opt/thehive/bin/thehive 17 | -------------------------------------------------------------------------------- /so-wazuh/Dockerfile: -------------------------------------------------------------------------------- 1 | # Based off of https://github.com/wazuh/wazuh-docker 2 | FROM centos:7 3 | 4 | LABEL maintainer "Security Onion Solutions, LLC" 5 | LABEL description="Wazuh Manager and API running in Docker container for use with Security Onion" 6 | 7 | ENV WAZUH_VERSION="3.10.2-1" 8 | 9 | RUN yum update -y 10 | 11 | # Install pre-reqs 12 | RUN yum install -y initscripts expect logrotate openssl 13 | 14 | # Creating ossec users 15 | RUN groupadd -g 945 ossec && \ 16 | useradd -u 943 -g 945 -d /var/ossec -s /sbin/nologin ossecm && \ 17 | useradd -u 944 -g 945 -d /var/ossec -s /sbin/nologin ossecr && \ 18 | useradd -u 945 -g 945 -d /var/ossec -s /sbin/nologin ossec 19 | 20 | # Add Wazuh repo 21 | ADD config/repos.bash /repos.bash 22 | RUN chmod +x /repos.bash 23 | RUN /repos.bash 24 | 25 | # Download wazuh-manager pkg 26 | #RUN rpm -i https://packages.wazuh.com/yum/el/7/x86_64/wazuh-manager-2.0.1-1.el7.x86_64.rpm 27 | 28 | # Install wazuh-manager 29 | RUN yum install -y wazuh-manager-$WAZUH_VERSION 30 | 31 | # Install nodejs and wazuh-api 32 | RUN curl -sL https://rpm.nodesource.com/setup_8.x | bash - 33 | RUN yum install -y nodejs 34 | 35 | #RUN rpm -i https://packages.wazuh.com/yum/el/7/x86_64/wazuh-api-2.0.1-1.el7.x86_64.rpm 36 | RUN yum install -y wazuh-api-$WAZUH_VERSION 37 | 38 | # Add OSSEC config 39 | ADD config/securityonion_rules.xml /var/ossec/ruleset/rules/securityonion_rules.xml 40 | ADD config/ossec.conf /var/ossec/etc/ossec.conf 41 | 42 | # Adding first run script. 43 | ADD config/data_dirs.env /data_dirs.env 44 | ADD config/init.bash /init.bash 45 | 46 | # Sync calls are due to https://github.com/docker/docker/issues/9547 47 | RUN chmod 755 /init.bash &&\ 48 | sync && /init.bash &&\ 49 | sync && rm /init.bash 50 | 51 | # Adding entrypoint 52 | ADD config/entrypoint.sh /entrypoint.sh 53 | RUN chmod 755 /entrypoint.sh 54 | 55 | RUN yum clean all 56 | 57 | ENTRYPOINT ["/entrypoint.sh"] 58 | -------------------------------------------------------------------------------- /so-wazuh/README.md: -------------------------------------------------------------------------------- 1 | # securityonion-docker-wazuh 2 | Wazuh Docker container built on CentOS 7 to be used with Security Onion 3 | -------------------------------------------------------------------------------- /so-wazuh/config/data_dirs.env: -------------------------------------------------------------------------------- 1 | i=0 2 | DATA_DIRS[((i++))]="etc" 3 | DATA_DIRS[((i++))]="ruleset" 4 | DATA_DIRS[((i++))]="logs" 5 | DATA_DIRS[((i++))]="stats" 6 | DATA_DIRS[((i++))]="queue" 7 | DATA_DIRS[((i++))]="var/db" 8 | DATA_DIRS[((i++))]="api" 9 | export DATA_DIRS 10 | -------------------------------------------------------------------------------- /so-wazuh/config/init.bash: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # Wazuh App Copyright (C) 2018 Wazuh Inc. 
(License GPLv2) 3 | 4 | # 5 | # Initialize the custom data directory layout 6 | # 7 | source /data_dirs.env 8 | 9 | cd /var/ossec 10 | for ossecdir in "${DATA_DIRS[@]}"; do 11 | mv ${ossecdir} ${ossecdir}-template 12 | ln -s $(realpath --relative-to=$(dirname ${ossecdir}) data)/${ossecdir} ${ossecdir} 13 | done 14 | -------------------------------------------------------------------------------- /so-wazuh/config/repos.bash: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | cat > /etc/yum.repos.d/wazuh.repo <<\EOF 3 | [wazuh_repo] 4 | gpgcheck=1 5 | gpgkey=https://packages.wazuh.com/key/GPG-KEY-WAZUH 6 | enabled=1 7 | name=Wazuh repository 8 | baseurl=https://packages.wazuh.com/3.x/yum/ 9 | protect=1 10 | EOF 11 | -------------------------------------------------------------------------------- /so-wazuh/logstash/1001_preprocess_syslogng.conf: -------------------------------------------------------------------------------- 1 | # Updated by: Doug Burks 2 | # Last Update: 5/17/2018 3 | 4 | filter { 5 | if "syslogng" in [tags] { 6 | mutate { 7 | rename => { "MESSAGE" => "message" } 8 | rename => { "PROGRAM" => "type" } 9 | rename => { "FACILITY" => "syslog-facility" } 10 | rename => { "FILE_NAME" => "syslog-file_name" } 11 | rename => { "HOST" => "syslog-host" } 12 | rename => { "HOST_FROM" => "syslog-host_from" } 13 | rename => { "LEGACY_MSGHDR" => "syslog-legacy_msghdr" } 14 | rename => { "PID" => "syslog-pid" } 15 | rename => { "PRIORITY" => "syslog-priority" } 16 | rename => { "SOURCEIP" => "syslog-sourceip" } 17 | rename => { "TAGS" => "syslog-tags" } 18 | lowercase => [ "syslog-host_from" ] 19 | remove_field => [ "ISODATE" ] 20 | remove_field => [ "SEQNUM" ] 21 | #add_tag => [ "conf_file_1001"] 22 | } 23 | if "bro_" in [type] { 24 | mutate { 25 | add_tag => [ "bro" ] 26 | } 27 | } else if [type] !~ "wazuh" { 28 | mutate { 29 | add_tag => [ "syslog"] 30 | } 31 | } 32 | } 33 | } 34 | -------------------------------------------------------------------------------- /so-wazuh/logstash/9600_output_ossec.conf: -------------------------------------------------------------------------------- 1 | # Author: Justin Henderson 2 | # SANS Instructor and author of SANS SEC555: SIEM and Tactical Analytics 3 | # Updated by: Doug Burks 4 | # Last Update: 5/15/2017 5 | 6 | filter { 7 | if [event_type] =~ "wazuh-" { 8 | json { 9 | source => "message" 10 | } 11 | } 12 | } 13 | output { 14 | if [event_type] =~ "wazuh-" { 15 | elasticsearch { 16 | hosts => elasticsearch 17 | index => "logstash-ossec-%{+YYYY.MM.dd}" 18 | template_name => "logstash-ossec" 19 | template => "/logstash-ossec-template.json" 20 | template_overwrite => true 21 | } 22 | } 23 | } 24 | -------------------------------------------------------------------------------- /so-wazuh/scripts/so-ossec-list-agents: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | . /usr/sbin/so-common 4 | 5 | if docker ps | grep -q "so-wazuh"; then 6 | docker exec -it so-wazuh /var/ossec/bin/list_agents $1 7 | else 8 | echo "OSSEC is not running." 9 | echo "Start it with --> sudo so-ossec-start" 10 | fi 11 | -------------------------------------------------------------------------------- /so-wazuh/scripts/so-ossec-logtest: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | . 
/usr/sbin/so-common 4 | 5 | if docker ps | grep -q "so-wazuh"; then 6 | docker exec -it so-wazuh /var/ossec/bin/ossec-logtest 7 | else 8 | echo "OSSEC is not running." 9 | echo "Start it with --> sudo so-ossec-start" 10 | fi 11 | -------------------------------------------------------------------------------- /so-wazuh/scripts/so-ossec-manage-agents: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | . /usr/sbin/so-common 4 | 5 | if docker ps | grep -q "so-wazuh"; then 6 | docker exec -it so-wazuh /var/ossec/bin/manage_agents $1 7 | else 8 | echo "OSSEC is not running." 9 | echo "Start it with --> sudo so-ossec-start" 10 | fi 11 | -------------------------------------------------------------------------------- /so-wazuh/scripts/so-ossec-restart: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | . /usr/sbin/so-common 4 | 5 | /usr/sbin/so-ossec-stop && 6 | /usr/sbin/so-ossec-start 7 | -------------------------------------------------------------------------------- /so-wazuh/scripts/so-ossec-start: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | . /usr/sbin/so-common 4 | . /etc/nsm/securityonion.conf 5 | 6 | if docker ps | grep -q "so-wazuh"; then 7 | docker exec -it so-wazuh /var/ossec/bin/ossec-control start 8 | else 9 | docker run --hostname=${HOSTNAME}-docker --name=so-wazuh -it --detach \ 10 | --publish 0.0.0.0:55000:55000 \ 11 | --publish 0.0.0.0:1514:1514/udp \ 12 | --volume /opt/so/wazuh/:/var/ossec/data/ \ 13 | wlambert/so-wazuh 14 | # Connect container to so-elastic-net Docker network 15 | docker network connect --alias wazuh so-elastic-net so-wazuh 16 | fi 17 | -------------------------------------------------------------------------------- /so-wazuh/scripts/so-ossec-stop: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | . /usr/sbin/so-common 4 | 5 | if docker ps | grep -q "so-wazuh"; then 6 | docker exec -it so-wazuh /var/ossec/bin/ossec-control stop 7 | docker stop so-wazuh && docker rm so-wazuh 8 | else 9 | echo "OSSEC already stopped." 10 | fi 11 | -------------------------------------------------------------------------------- /so-zeek/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM centos:7 2 | 3 | LABEL maintainer "Security Onion Solutions, LLC" 4 | LABEL description="Zeek running in a docker with AF_Packet 1.4 for use with Security Onion." 5 | 6 | RUN yum update -y && \ 7 | yum clean all 8 | 9 | # Install epel 10 | RUN yum -y install epel-release bash libpcap iproute && yum clean all 11 | RUN yum -y install jemalloc numactl libnl3 libdnet gdb GeoIP python-ipaddress python3 && \ 12 | yum -y erase epel-release && yum clean all && rm -rf /var/cache/yum 13 | 14 | # Install the Zeek package 15 | RUN rpm -i https://github.com/Security-Onion-Solutions/securityonion-docker-rpm/releases/download/securityonion-zeek-3.0.5.0/securityonion-zeek-3.0.5.0.rpm 16 | 17 | VOLUME ["/nsm/zeek/logs", "/nsm/zeek/spool", "/opt/zeek/share/bro", "/opt/zeek/etc/"] 18 | 19 | # Create Bro User. 20 | RUN groupadd --gid 937 zeek && \ 21 | adduser --uid 937 --gid 937 \ 22 | --home-dir /opt/zeek --no-create-home zeek 23 | 24 | # Fix those perms.. big worm 25 | RUN chown -R 937:937 /opt/zeek && \ 26 | chown -R 937:937 /nsm/zeek 27 | 28 | # Copy over the entry script. 
29 | COPY files/zeek.sh /usr/local/sbin/zeek.sh 30 | RUN chmod +x /usr/local/sbin/zeek.sh 31 | 32 | ENTRYPOINT ["/usr/local/sbin/zeek.sh"] 33 | -------------------------------------------------------------------------------- /so-zeek/README.md: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Security-Onion-Solutions/securityonion-docker-hh/8888b402d1757c8e4801260c8947ac9f90b15ee6/so-zeek/README.md -------------------------------------------------------------------------------- /so-zeek/files/zeek.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | setcap cap_net_raw,cap_net_admin=eip /opt/zeek/bin/zeek 4 | setcap cap_net_raw,cap_net_admin=eip /opt/zeek/bin/capstats 5 | runuser zeek -c '/opt/zeek/bin/zeekctl deploy' 6 | sleep infinity 7 | --------------------------------------------------------------------------------
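Note: a minimal sketch of running the so-zeek image built from the Dockerfile above, mirroring the pattern shown in the so-suricata README; the image tag and host paths are assumptions:

    sudo docker run --privileged=true \
      -v /opt/so/conf/zeek/:/opt/zeek/etc/:rw \
      -v /opt/so/log/zeek/:/nsm/zeek/logs/:rw \
      -v /nsm/zeek/spool/:/nsm/zeek/spool/:rw \
      --net=host --name=so-zeek -d so-zeek:test

zeek.sh calls setcap on the zeek and capstats binaries before running zeekctl deploy, so the container needs elevated privileges (e.g. --privileged) plus host networking to see the monitored interface defined in the mounted zeekctl node.cfg.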