├── VERSION ├── scripts ├── System_Scripts │ ├── .gitkeep │ ├── listenNamedPipe.sh │ ├── GenCert.sh │ ├── sendmail.sh │ ├── wipe_part.sh │ ├── box4s_service.sh │ └── wait-for-healthy-container.sh ├── Automation │ ├── diskspacealert.sh │ ├── agent-upgrade.py │ └── versions.py ├── 1stLevelRepair │ ├── repair_restart.sh │ ├── repair_createSnapshot.sh │ ├── repair_snapshot.sh │ ├── repair_format.sh │ └── repair_reset.sh └── Development │ ├── healthy.sh │ └── backup4srepos.sh ├── docker ├── suricata │ ├── .gitignore │ ├── etc │ │ ├── update.yaml │ │ └── reference.config │ ├── scripts │ │ ├── update.sh │ │ └── ingest_testdata.sh │ └── Dockerfile ├── dnsmasq │ ├── resolv.personal │ ├── Dockerfile │ └── dnsmasq.conf ├── logstash │ ├── .env.ls │ └── etc │ │ ├── conf.d │ │ ├── openvas │ │ │ ├── 10_openvas_input.conf │ │ │ ├── 25-openvas-special.conf │ │ │ └── 100_openvas_output_pipe.conf │ │ ├── heartbeat │ │ │ ├── 10_heartbeat_input.conf │ │ │ └── 99_output.conf │ │ ├── suricata │ │ │ ├── 10_input_suricata.conf │ │ │ └── 99_output_pipe.conf │ │ ├── metricbeat │ │ │ ├── 10_metricbeat_input.conf │ │ │ └── 100_output.conf │ │ ├── estransfer │ │ │ ├── metricbeat_es_transfer.conf │ │ │ ├── heartbeat_es_transfer.conf │ │ │ ├── openvas_es_transfer.conf │ │ │ └── suricata_es_transfer.conf │ │ └── inputs │ │ │ ├── input_pipelines.conf │ │ │ └── basic_input.conf │ │ ├── pipelines.yml │ │ ├── startup.options │ │ ├── jvm.options │ │ └── BOX4s │ │ ├── icmp-codes.yaml │ │ ├── icmp-type.yaml │ │ ├── icmpv6-type.yaml │ │ └── suricata-template.json ├── web │ ├── source │ │ ├── static │ │ │ ├── wizard.js │ │ │ ├── faq │ │ │ │ ├── nav.png │ │ │ │ ├── pinfilter.jpg │ │ │ │ ├── filtersource.jpg │ │ │ │ ├── appliedfilters.jpg │ │ │ │ ├── filtercategory.jpg │ │ │ │ ├── filtersignature.jpg │ │ │ │ └── SuppressAlarms.mp4 │ │ │ ├── favicon.ico │ │ │ ├── Box4S_Logo.png │ │ │ ├── external │ │ │ │ ├── icons.woff │ │ │ │ ├── fonts │ │ │ │ │ ├── S6uyw4BMUTPHjx4wXg.woff2 │ │ │ │ │ ├── S6u8w4BMUTPHjxsAXC-q.woff2 │ │ │ │ │ ├── S6uyw4BMUTPHjxAwXjeu.woff2 │ │ │ │ │ ├── S6u8w4BMUTPHjxsAUi-qJCY.woff2 │ │ │ │ │ ├── S6u9w4BMUTPHh6UVSwaPGR_p.woff2 │ │ │ │ │ ├── S6u9w4BMUTPHh6UVSwiPGQ.woff2 │ │ │ │ │ ├── S6u_w4BMUTPHjxsI5wq_Gwft.woff2 │ │ │ │ │ └── S6u_w4BMUTPHjxsI5wq_FQft1dw.woff2 │ │ │ │ └── themes │ │ │ │ │ └── default │ │ │ │ │ └── assets │ │ │ │ │ ├── fonts │ │ │ │ │ ├── icons.eot │ │ │ │ │ ├── icons.otf │ │ │ │ │ ├── icons.ttf │ │ │ │ │ ├── icons.woff │ │ │ │ │ ├── icons.woff2 │ │ │ │ │ ├── brand-icons.eot │ │ │ │ │ ├── brand-icons.ttf │ │ │ │ │ ├── brand-icons.woff │ │ │ │ │ ├── brand-icons.woff2 │ │ │ │ │ ├── outline-icons.eot │ │ │ │ │ ├── outline-icons.ttf │ │ │ │ │ ├── outline-icons.woff │ │ │ │ │ └── outline-icons.woff2 │ │ │ │ │ └── images │ │ │ │ │ └── flags.png │ │ │ └── box4s.css │ │ ├── wizard │ │ │ ├── __init__.py │ │ │ ├── templates │ │ │ │ ├── logstash │ │ │ │ │ ├── drop.jinja2 │ │ │ │ │ ├── netplan.yaml.jinja2 │ │ │ │ │ ├── BOX4s-special.conf.jinja2 │ │ │ │ │ ├── system.jinja2 │ │ │ │ │ └── network.jinja2 │ │ │ │ └── wizard │ │ │ │ │ ├── verify_progress.html │ │ │ │ │ └── index.html │ │ │ ├── forms.py │ │ │ └── schemas.py │ │ ├── templates │ │ │ ├── user │ │ │ │ ├── _authorized_base.html │ │ │ │ ├── emails │ │ │ │ │ ├── base_subject.txt │ │ │ │ │ ├── invite_user_subject.txt │ │ │ │ │ ├── confirm_email_subject.txt │ │ │ │ │ ├── reset_password_subject.txt │ │ │ │ │ ├── password_changed_subject.txt │ │ │ │ │ ├── username_changed_subject.txt │ │ │ │ │ ├── registered_subject.txt │ │ │ │ │ ├── base_message.txt │ │ │ │ │ ├── 
base_message.html │ │ │ │ │ ├── username_changed_message.html │ │ │ │ │ ├── username_changed_message.txt │ │ │ │ │ ├── password_changed_message.html │ │ │ │ │ ├── reset_password_message.txt │ │ │ │ │ ├── password_changed_message.txt │ │ │ │ │ ├── confirm_email_message.txt │ │ │ │ │ ├── reset_password_message.html │ │ │ │ │ ├── confirm_email_message.html │ │ │ │ │ ├── invite_user_message.txt │ │ │ │ │ ├── registered_message.txt │ │ │ │ │ ├── registered_message.html │ │ │ │ │ └── invite_user_message.html │ │ │ │ ├── invite_user.html │ │ │ │ ├── change_username.html │ │ │ │ ├── resend_confirm_email.html │ │ │ │ ├── forgot_password.html │ │ │ │ ├── edit_user_profile.html │ │ │ │ ├── manage_emails.html │ │ │ │ ├── reset_password.html │ │ │ │ ├── _macros.html │ │ │ │ ├── login_or_register.html │ │ │ │ └── change_password.html │ │ │ ├── application │ │ │ │ ├── elastalert_smtp.yaml.j2 │ │ │ │ ├── smtp.conf.j2 │ │ │ │ ├── alert_email_conf.yaml.j2 │ │ │ │ ├── alert_frequency.yaml.j2 │ │ │ │ ├── msmtprc.j2 │ │ │ │ ├── alert_spike.yaml.j2 │ │ │ │ ├── alert_base.yaml.j2 │ │ │ │ ├── quick_alert_netuse.yaml.j2 │ │ │ │ ├── quick_alert_ids.yaml.j2 │ │ │ │ ├── quick_alert_vuln.yaml.j2 │ │ │ │ └── quick_alert_malware.yaml.j2 │ │ │ ├── spiderfoot.html │ │ │ ├── dashboard.html │ │ │ ├── suricata_suppress.bpf.j2 │ │ │ ├── user_base.html │ │ │ ├── errors │ │ │ │ ├── error_base.html │ │ │ │ └── 403.html │ │ │ ├── docs.html │ │ │ ├── 15_logstash_suppress.conf.j2 │ │ │ └── system.html │ │ ├── extensions.py │ │ ├── __init__.py │ │ ├── forms.py │ │ ├── error.py │ │ └── helpers.py │ ├── tests │ │ ├── updatestatus.json │ │ ├── validBPF.json │ │ └── validLSR.json │ ├── web.env │ ├── main.py │ ├── migrations │ │ ├── script.py.mako │ │ ├── versions │ │ │ ├── d995a93c3a9c_box4s_dhcp_col.py │ │ │ ├── 6845bca64bc8_.py │ │ │ ├── 9f79000ab53d_add_alerts.py │ │ │ ├── c2bdbad3c958_network_system_rel.py │ │ │ ├── 96cfbddbc495_add_role_config.py │ │ │ ├── 045ed1db87f6_.py │ │ │ ├── 1d03ea9e33bd_.py │ │ │ ├── ea1ce32ce8fd_box4s_model.py │ │ │ ├── 532110801da9_.py │ │ │ ├── 56e9b3f51ec8_.py │ │ │ ├── 031dd699edaa_add_wizard_state.py │ │ │ ├── 2bcd96b138e4_.py │ │ │ ├── 9a02836f6117_system_and_types.py │ │ │ ├── b1685fc5f49c_create_types.py │ │ │ ├── 5aadb38f6936_network_and_types.py │ │ │ └── a59fffda1b70_box4security_table.py │ │ └── alembic.ini │ ├── requirements.txt │ └── Dockerfile ├── elasticsearch │ ├── .env.es │ ├── Dockerfile │ └── etc │ │ └── elasticsearch.yml ├── elastalert │ ├── etc │ │ ├── elastalert │ │ │ ├── smtp_auth_file.yaml │ │ │ └── config.yaml │ │ └── elastalert-server │ │ │ ├── config.json │ │ │ ├── elastalert.yaml │ │ │ └── elastalert-test.yaml │ └── Dockerfile ├── openvas │ ├── scripts │ │ ├── vulnwhisp.sh │ │ ├── update.sh │ │ ├── insertconfig.sh │ │ ├── config.py │ │ └── start.sh │ ├── etc │ │ └── vuln_openvas.ini │ └── Dockerfile ├── heartbeat │ ├── Dockerfile │ └── etc │ │ ├── monitors.d │ │ └── box4s.yml │ │ └── heartbeat.yml ├── core4s │ ├── scripts │ │ └── Automation │ │ │ ├── score_calculation │ │ │ ├── res │ │ │ │ ├── insert_template.json │ │ │ │ ├── index_mapping.json │ │ │ │ ├── social_media.query.json │ │ │ │ ├── index_settings.json │ │ │ │ ├── cvss_buckets.query.json │ │ │ │ ├── alerts_buckets.query.json │ │ │ │ └── vuln_score.json │ │ │ ├── install_index.sh │ │ │ └── calculate_scores.sh │ │ │ ├── resourceupdate.sh │ │ │ ├── download_wazuh_clients.sh │ │ │ └── ASN_update.sh │ ├── healthcheck.py │ ├── curator │ │ └── curator.yml │ ├── Dockerfile │ └── core4s.crontab ├── metricbeat │ ├── Dockerfile 
│ └── etc │ │ ├── modules.d │ │ ├── docker.yml │ │ └── system.yml │ │ └── metricbeat.yml ├── filebeat │ ├── Dockerfile │ └── etc │ │ └── filebeat.yml ├── db │ ├── sql │ │ ├── ASN.sql │ │ ├── uniquevulns.sql │ │ └── filters.sql │ └── Dockerfile ├── wazuh │ ├── Dockerfile │ ├── wazuh.yml │ └── config │ │ └── entrypoint.sh ├── nginx │ └── Dockerfile ├── kibana │ ├── entry.sh │ ├── etc │ │ └── kibana.yml │ └── Dockerfile ├── wiki │ ├── Dockerfile │ └── config.ru └── spiderfoot │ └── Dockerfile ├── config ├── etc │ ├── etc_files │ │ ├── hostname │ │ ├── mail.rc │ │ └── environment │ ├── modules.conf │ ├── logstash │ │ ├── beat_add_clientIP.conf │ │ ├── 20-preBOX4s.conf │ │ ├── BOX4s-special.conf │ │ ├── output.conf │ │ └── dns_resolv.conf │ ├── systemd │ │ └── box4security.service │ └── network │ │ └── interfaces ├── secrets │ ├── wazuh.conf │ ├── openvas.conf │ ├── web.conf │ ├── db.conf │ ├── secrets.conf │ ├── smtp.conf │ └── msmtprc ├── crontab │ └── amadmin.crontab ├── ssl │ └── box4security-ssl.conf └── dashboards │ └── Patterns │ └── scores.ndjson ├── .gitattributes ├── docs └── box4security.png ├── .gitignore ├── Makefile └── CHANGELOG.md /VERSION: -------------------------------------------------------------------------------- 1 | VERSION=0.0.5 2 | -------------------------------------------------------------------------------- /scripts/System_Scripts/.gitkeep: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /docker/suricata/.gitignore: -------------------------------------------------------------------------------- 1 | .env 2 | -------------------------------------------------------------------------------- /config/etc/etc_files/hostname: -------------------------------------------------------------------------------- 1 | box4security 2 | -------------------------------------------------------------------------------- /docker/dnsmasq/resolv.personal: -------------------------------------------------------------------------------- 1 | nameserver 8.8.8.8 2 | -------------------------------------------------------------------------------- /docker/logstash/.env.ls: -------------------------------------------------------------------------------- 1 | ES_JAVA_OPTS=-Xms4g -Xmx4g 2 | -------------------------------------------------------------------------------- /docker/web/source/static/wizard.js: -------------------------------------------------------------------------------- 1 | // Networks 2 | 3 | -------------------------------------------------------------------------------- /config/etc/modules.conf: -------------------------------------------------------------------------------- 1 | BOX4s_WAZUH=false 2 | BOX4s_INCMAN=false -------------------------------------------------------------------------------- /docker/elasticsearch/.env.es: -------------------------------------------------------------------------------- 1 | ES_JAVA_OPTS=-Xms8g -Xmx8g 2 | -------------------------------------------------------------------------------- /docker/web/tests/updatestatus.json: -------------------------------------------------------------------------------- 1 | {"status":"running"} 2 | -------------------------------------------------------------------------------- /config/etc/etc_files/mail.rc: -------------------------------------------------------------------------------- 1 | set sendmail="/usr/bin/msmtp -t" 2 | 
-------------------------------------------------------------------------------- /config/secrets/wazuh.conf: -------------------------------------------------------------------------------- 1 | WAZUH_USER=box4s 2 | WAZUH_PASS=wa3hz0hPW -------------------------------------------------------------------------------- /docker/web/source/wizard/__init__.py: -------------------------------------------------------------------------------- 1 | from .views import bpWizard 2 | -------------------------------------------------------------------------------- /.gitattributes: -------------------------------------------------------------------------------- 1 | SuppressAlarms.mp4 filter=lfs diff=lfs merge=lfs -text 2 | -------------------------------------------------------------------------------- /config/secrets/openvas.conf: -------------------------------------------------------------------------------- 1 | OPENVAS_USER=amadmin 2 | OPENVAS_PASS=CHANGEME -------------------------------------------------------------------------------- /docker/suricata/etc/update.yaml: -------------------------------------------------------------------------------- 1 | reload-command: suricatasc -c ruleset-reload-nonblocking 2 | -------------------------------------------------------------------------------- /docker/elastalert/etc/elastalert/smtp_auth_file.yaml: -------------------------------------------------------------------------------- 1 | user: "box@4sconsult.de" 2 | password: "CHANGEME" -------------------------------------------------------------------------------- /docker/web/source/templates/user/_authorized_base.html: -------------------------------------------------------------------------------- 1 | {% extends 'flask_user/_common_base.html' %} 2 | -------------------------------------------------------------------------------- /docker/web/web.env: -------------------------------------------------------------------------------- 1 | FLASK_APP=source/__init__.py 2 | FLASK_ENV=production 3 | APP_FOLDER=/home/app/web -------------------------------------------------------------------------------- /docs/box4security.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/4sConsult/box4security/HEAD/docs/box4security.png -------------------------------------------------------------------------------- /docker/logstash/etc/conf.d/openvas/10_openvas_input.conf: -------------------------------------------------------------------------------- 1 | input { pipeline { address => openvas_pipe } } 2 | -------------------------------------------------------------------------------- /docker/web/source/templates/user/emails/base_subject.txt: -------------------------------------------------------------------------------- 1 | {{ app_name }} - {% block subject %}{% endblock %} -------------------------------------------------------------------------------- /docker/logstash/etc/conf.d/heartbeat/10_heartbeat_input.conf: -------------------------------------------------------------------------------- 1 | input { pipeline { address => heartbeat_pipe } } 2 | -------------------------------------------------------------------------------- /docker/logstash/etc/conf.d/suricata/10_input_suricata.conf: -------------------------------------------------------------------------------- 1 | input { pipeline { address => suricata_pipe } } 2 | -------------------------------------------------------------------------------- 
/docker/logstash/etc/conf.d/metricbeat/10_metricbeat_input.conf: -------------------------------------------------------------------------------- 1 | input { pipeline { address => metricbeat_pipe } } 2 | -------------------------------------------------------------------------------- /config/secrets/web.conf: -------------------------------------------------------------------------------- 1 | # Flask 2 | SECRET_KEY=CHANGEME 3 | DATABASE_URL=postgresql://postgres:CHANGEME@db:5432/box4S_db -------------------------------------------------------------------------------- /docker/logstash/etc/conf.d/suricata/99_output_pipe.conf: -------------------------------------------------------------------------------- 1 | output { 2 | pipeline { send_to => [suricata_esoutput] } 3 | } 4 | -------------------------------------------------------------------------------- /docker/openvas/scripts/vulnwhisp.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | vuln_whisperer -c /etc/vulnwhisperer/vulnwhisperer.ini -s openvas -F 3 | -------------------------------------------------------------------------------- /docker/web/source/static/faq/nav.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/4sConsult/box4security/HEAD/docker/web/source/static/faq/nav.png -------------------------------------------------------------------------------- /docker/web/source/static/favicon.ico: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/4sConsult/box4security/HEAD/docker/web/source/static/favicon.ico -------------------------------------------------------------------------------- /docker/web/source/templates/application/elastalert_smtp.yaml.j2: -------------------------------------------------------------------------------- 1 | user: "{{ smtp['username'] }}" 2 | password: "{{ smtp['password'] }}" -------------------------------------------------------------------------------- /config/etc/logstash/beat_add_clientIP.conf: -------------------------------------------------------------------------------- 1 | filter 2 | { mutate { copy => {"[@metadata][ip_address]" => "[client][ip]"}} 3 | 4 | } 5 | -------------------------------------------------------------------------------- /docker/web/source/static/Box4S_Logo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/4sConsult/box4security/HEAD/docker/web/source/static/Box4S_Logo.png -------------------------------------------------------------------------------- /docker/web/source/static/faq/pinfilter.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/4sConsult/box4security/HEAD/docker/web/source/static/faq/pinfilter.jpg -------------------------------------------------------------------------------- /docker/web/source/static/external/icons.woff: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/4sConsult/box4security/HEAD/docker/web/source/static/external/icons.woff -------------------------------------------------------------------------------- /docker/web/source/static/faq/filtersource.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/4sConsult/box4security/HEAD/docker/web/source/static/faq/filtersource.jpg 
-------------------------------------------------------------------------------- /config/secrets/db.conf: -------------------------------------------------------------------------------- 1 | POSTGRES_USER=postgres 2 | POSTGRES_PASSWORD=CHANGEME 3 | POSTGRES_DB=box4S_db 4 | POSTGRES_HOST=localhost 5 | POSTGRES_PORT=5432 -------------------------------------------------------------------------------- /docker/heartbeat/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM docker.elastic.co/beats/heartbeat:7.9.0 2 | 3 | COPY --chown=heartbeat:heartbeat etc/* /usr/share/heartbeat/config/ 4 | -------------------------------------------------------------------------------- /docker/web/source/static/faq/appliedfilters.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/4sConsult/box4security/HEAD/docker/web/source/static/faq/appliedfilters.jpg -------------------------------------------------------------------------------- /docker/web/source/static/faq/filtercategory.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/4sConsult/box4security/HEAD/docker/web/source/static/faq/filtercategory.jpg -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | /docker/web/source/wazuh/*.deb 2 | /docker/web/source/wazuh/*.pkg 3 | /docker/web/source/wazuh/*.rpm 4 | /docker/web/source/wazuh/*.msi 5 | .vscode/ -------------------------------------------------------------------------------- /docker/core4s/scripts/Automation/score_calculation/res/insert_template.json: -------------------------------------------------------------------------------- 1 | { 2 | "score_type": "%1", 3 | "value": %2, 4 | "timestamp": %3 5 | } 6 | -------------------------------------------------------------------------------- /docker/web/source/static/faq/filtersignature.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/4sConsult/box4security/HEAD/docker/web/source/static/faq/filtersignature.jpg -------------------------------------------------------------------------------- /docker/web/main.py: -------------------------------------------------------------------------------- 1 | from flask.cli import FlaskGroup 2 | from source import app 3 | 4 | cli = FlaskGroup(app) 5 | 6 | if __name__ == '__main__': 7 | cli() 8 | -------------------------------------------------------------------------------- /docker/web/source/templates/user/emails/invite_user_subject.txt: -------------------------------------------------------------------------------- 1 | {% extends 'user/emails/base_subject.txt' %} 2 | 3 | {% block subject %}Account{% endblock %} 4 | -------------------------------------------------------------------------------- /docker/metricbeat/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM docker.elastic.co/beats/metricbeat:7.9.0 2 | 3 | ADD etc/ /tmp/etc/ 4 | USER root 5 | RUN cp -R /tmp/etc/* /usr/share/metricbeat/ 6 | -------------------------------------------------------------------------------- /docker/logstash/etc/conf.d/heartbeat/99_output.conf: -------------------------------------------------------------------------------- 1 | output { 2 | pipeline { send_to => [heartbeat_esoutput] } 3 | #pipeline { send_to => 
[heartbeat_sqloutput] } 4 | } 5 | -------------------------------------------------------------------------------- /docker/logstash/etc/conf.d/metricbeat/100_output.conf: -------------------------------------------------------------------------------- 1 | output { 2 | pipeline { send_to => [metricbeat_esoutput] } 3 | #pipeline { send_to => [heartbeat_sqloutput] } 4 | } 5 | -------------------------------------------------------------------------------- /docker/web/source/templates/user/emails/confirm_email_subject.txt: -------------------------------------------------------------------------------- 1 | {% extends 'user/emails/base_subject.txt' %} 2 | 3 | {% block subject %}E-Mail Bestätigung{% endblock %} 4 | -------------------------------------------------------------------------------- /docker/web/source/templates/user/emails/reset_password_subject.txt: -------------------------------------------------------------------------------- 1 | {% extends 'user/emails/base_subject.txt' %} 2 | 3 | {% block subject %}Passwort zurücksetzen{% endblock %} -------------------------------------------------------------------------------- /docker/web/source/templates/user/emails/password_changed_subject.txt: -------------------------------------------------------------------------------- 1 | {% extends 'user/emails/base_subject.txt' %} 2 | 3 | {% block subject %}Ihr Passwort wurde geändert{% endblock %} -------------------------------------------------------------------------------- /docker/web/source/static/external/fonts/S6uyw4BMUTPHjx4wXg.woff2: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/4sConsult/box4security/HEAD/docker/web/source/static/external/fonts/S6uyw4BMUTPHjx4wXg.woff2 -------------------------------------------------------------------------------- /docker/web/source/templates/user/emails/username_changed_subject.txt: -------------------------------------------------------------------------------- 1 | {% extends 'user/emails/base_subject.txt' %} 2 | 3 | {% block subject %}Ihr Benutzername wurde geändert{% endblock %} -------------------------------------------------------------------------------- /config/crontab/amadmin.crontab: -------------------------------------------------------------------------------- 1 | MAILTO=box@4sconsult.de 2 | # m h dom mon dow command 3 | 4 | # Check Disk Space Daily 5 | @daily $BASEDIR/$GITDIR/scripts/Automation/diskspacealert.sh 6 | -------------------------------------------------------------------------------- /docker/web/source/static/external/fonts/S6u8w4BMUTPHjxsAXC-q.woff2: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/4sConsult/box4security/HEAD/docker/web/source/static/external/fonts/S6u8w4BMUTPHjxsAXC-q.woff2 -------------------------------------------------------------------------------- /docker/web/source/static/external/fonts/S6uyw4BMUTPHjxAwXjeu.woff2: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/4sConsult/box4security/HEAD/docker/web/source/static/external/fonts/S6uyw4BMUTPHjxAwXjeu.woff2 -------------------------------------------------------------------------------- /docker/web/source/static/faq/SuppressAlarms.mp4: -------------------------------------------------------------------------------- 1 | version https://git-lfs.github.com/spec/v1 2 | oid sha256:fd9177e28bb5479f0443575d8b6cdbd635e0ffc6a3401b29913a0702586136fa 3 | size 
52070607 4 | -------------------------------------------------------------------------------- /docker/web/tests/validBPF.json: -------------------------------------------------------------------------------- 1 | { 2 | "src_port": 80, 3 | "src_ip": "127.0.0.1", 4 | "dst_ip": "0.0.0.0", 5 | "dst_port": 0, 6 | "proto": "" 7 | } 8 | -------------------------------------------------------------------------------- /config/secrets/secrets.conf: -------------------------------------------------------------------------------- 1 | # IP2LOCATION Token, get yours at https://ip2location.com 2 | IP2TOKEN="GET_ME_FROM_IP2LOCATION.COM" 3 | # The username on the host machine 4 | HOST_USER="amadmin" 5 | -------------------------------------------------------------------------------- /docker/filebeat/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM docker.elastic.co/beats/filebeat:7.9.0 2 | USER root 3 | COPY --chown=root:root etc/* /usr/share/filebeat/ 4 | RUN chmod go-w /usr/share/filebeat/filebeat.yml 5 | -------------------------------------------------------------------------------- /docker/web/source/static/external/fonts/S6u8w4BMUTPHjxsAUi-qJCY.woff2: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/4sConsult/box4security/HEAD/docker/web/source/static/external/fonts/S6u8w4BMUTPHjxsAUi-qJCY.woff2 -------------------------------------------------------------------------------- /docker/web/source/static/external/fonts/S6u9w4BMUTPHh6UVSwaPGR_p.woff2: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/4sConsult/box4security/HEAD/docker/web/source/static/external/fonts/S6u9w4BMUTPHh6UVSwaPGR_p.woff2 -------------------------------------------------------------------------------- /docker/web/source/static/external/fonts/S6u9w4BMUTPHh6UVSwiPGQ.woff2: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/4sConsult/box4security/HEAD/docker/web/source/static/external/fonts/S6u9w4BMUTPHh6UVSwiPGQ.woff2 -------------------------------------------------------------------------------- /docker/web/source/static/external/fonts/S6u_w4BMUTPHjxsI5wq_Gwft.woff2: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/4sConsult/box4security/HEAD/docker/web/source/static/external/fonts/S6u_w4BMUTPHjxsI5wq_Gwft.woff2 -------------------------------------------------------------------------------- /docker/web/source/static/external/themes/default/assets/fonts/icons.eot: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/4sConsult/box4security/HEAD/docker/web/source/static/external/themes/default/assets/fonts/icons.eot -------------------------------------------------------------------------------- /docker/web/source/static/external/themes/default/assets/fonts/icons.otf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/4sConsult/box4security/HEAD/docker/web/source/static/external/themes/default/assets/fonts/icons.otf -------------------------------------------------------------------------------- /docker/web/source/static/external/themes/default/assets/fonts/icons.ttf: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/4sConsult/box4security/HEAD/docker/web/source/static/external/themes/default/assets/fonts/icons.ttf -------------------------------------------------------------------------------- /docker/web/source/static/external/themes/default/assets/fonts/icons.woff: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/4sConsult/box4security/HEAD/docker/web/source/static/external/themes/default/assets/fonts/icons.woff -------------------------------------------------------------------------------- /docker/web/source/static/external/themes/default/assets/images/flags.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/4sConsult/box4security/HEAD/docker/web/source/static/external/themes/default/assets/images/flags.png -------------------------------------------------------------------------------- /config/secrets/smtp.conf: -------------------------------------------------------------------------------- 1 | # MAIL 2 | MAIL_SERVER=smtp.office365.com 3 | MAIL_PORT=587 4 | MAIL_USE_TLS=True 5 | MAIL_USERNAME=box@4sconsult.de 6 | MAIL_PASSWORD=CHANGEME 7 | MAIL_DEFAULT_SENDER=box@4sconsult.de -------------------------------------------------------------------------------- /docker/web/source/static/external/fonts/S6u_w4BMUTPHjxsI5wq_FQft1dw.woff2: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/4sConsult/box4security/HEAD/docker/web/source/static/external/fonts/S6u_w4BMUTPHjxsI5wq_FQft1dw.woff2 -------------------------------------------------------------------------------- /docker/web/source/static/external/themes/default/assets/fonts/icons.woff2: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/4sConsult/box4security/HEAD/docker/web/source/static/external/themes/default/assets/fonts/icons.woff2 -------------------------------------------------------------------------------- /docker/web/source/static/external/themes/default/assets/fonts/brand-icons.eot: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/4sConsult/box4security/HEAD/docker/web/source/static/external/themes/default/assets/fonts/brand-icons.eot -------------------------------------------------------------------------------- /docker/web/source/static/external/themes/default/assets/fonts/brand-icons.ttf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/4sConsult/box4security/HEAD/docker/web/source/static/external/themes/default/assets/fonts/brand-icons.ttf -------------------------------------------------------------------------------- /docker/web/source/wizard/templates/logstash/drop.jinja2: -------------------------------------------------------------------------------- 1 | if [source][ip] in {{iplist}} 2 | { 3 | drop { } 4 | } 5 | if [destination][ip] in {{iplist}} 6 | { 7 | drop { } 8 | } 9 | -------------------------------------------------------------------------------- /docker/db/sql/ASN.sql: -------------------------------------------------------------------------------- 1 | CREATE table asn (range_start INET,range_end INET, AS_number VARCHAR(10) ,country_code VARCHAR(7),AS_description VARCHAR(250)); 2 | COPY asn FROM '/tmp/ip2asn-combined.tsv' DELIMITER E'\t'; 3 | 
-------------------------------------------------------------------------------- /docker/web/source/static/external/themes/default/assets/fonts/brand-icons.woff: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/4sConsult/box4security/HEAD/docker/web/source/static/external/themes/default/assets/fonts/brand-icons.woff -------------------------------------------------------------------------------- /docker/web/source/static/external/themes/default/assets/fonts/brand-icons.woff2: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/4sConsult/box4security/HEAD/docker/web/source/static/external/themes/default/assets/fonts/brand-icons.woff2 -------------------------------------------------------------------------------- /docker/web/source/static/external/themes/default/assets/fonts/outline-icons.eot: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/4sConsult/box4security/HEAD/docker/web/source/static/external/themes/default/assets/fonts/outline-icons.eot -------------------------------------------------------------------------------- /docker/web/source/static/external/themes/default/assets/fonts/outline-icons.ttf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/4sConsult/box4security/HEAD/docker/web/source/static/external/themes/default/assets/fonts/outline-icons.ttf -------------------------------------------------------------------------------- /docker/web/source/static/external/themes/default/assets/fonts/outline-icons.woff: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/4sConsult/box4security/HEAD/docker/web/source/static/external/themes/default/assets/fonts/outline-icons.woff -------------------------------------------------------------------------------- /docker/web/source/static/external/themes/default/assets/fonts/outline-icons.woff2: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/4sConsult/box4security/HEAD/docker/web/source/static/external/themes/default/assets/fonts/outline-icons.woff2 -------------------------------------------------------------------------------- /docker/openvas/etc/vuln_openvas.ini: -------------------------------------------------------------------------------- 1 | [openvas] 2 | enabled=true 3 | hostname=localhost 4 | port=9392 5 | write_path=/var/lib/logstash/openvas/ 6 | db_path=/var/lib/logstash/openvas/database 7 | verbose=true 8 | username= 9 | password= -------------------------------------------------------------------------------- /docker/logstash/etc/conf.d/openvas/25-openvas-special.conf: -------------------------------------------------------------------------------- 1 | filter { 2 | mutate { 3 | #rename => {"report_date" => "timestamp"} 4 | #rename => {"timestamp" => "timestamp_found"} 5 | #remove_field => ["date", "epoch"] 6 | } 7 | } 8 | -------------------------------------------------------------------------------- /docker/core4s/healthcheck.py: -------------------------------------------------------------------------------- 1 | from flask import Flask 2 | 3 | app = Flask(__name__) 4 | 5 | 6 | @app.route('/') 7 | def hello_world(): 8 | return 'UP' 9 | 10 | 11 | if __name__ == '__main__': 12 | app.run(host='0.0.0.0', port=2981) 13 | 
-------------------------------------------------------------------------------- /docker/wazuh/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM wazuh/wazuh:3.13.1_7.8.0 2 | COPY config/entrypoint.sh /entrypoint.sh 3 | RUN chmod 755 /entrypoint.sh 4 | HEALTHCHECK --retries=10 CMD curl -k -u ${WAZUH_USER}:${WAZUH_PASS} -XGET https://localhost:55000 && exit 0 || exit 1 5 | -------------------------------------------------------------------------------- /docker/nginx/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM nginx:latest 2 | RUN apt update && \ 3 | apt install -y curl && \ 4 | rm -rf /var/lib/apt/lists/* && \ 5 | rm /etc/nginx/conf.d/default.conf 6 | 7 | HEALTHCHECK --retries=10 CMD curl -sk -XGET https://localhost 8 | COPY nginx.conf /etc/nginx/conf.d 9 | -------------------------------------------------------------------------------- /docker/openvas/scripts/update.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | echo "Updating OpenVAS Feed ..." 3 | greenbone-scapdata-sync --verbose 4 | greenbone-certdata-sync --verbose 5 | greenbone-nvt-sync --verbose 6 | openvas-feed-update --verbose 7 | openvasmd --update --verbose 8 | openvasmd --rebuild 9 | -------------------------------------------------------------------------------- /scripts/Automation/diskspacealert.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | CURRENT=$(df /data | grep /data | awk '{ print $5}' | sed 's/%//g') 3 | THRESHOLD=66 4 | 5 | if [ "$CURRENT" -gt "$THRESHOLD" ] ; then 6 | echo -e "BOX4s Festplattenspeicher bei Kunde: $KUNDE \n/data ist mit $CURRENT% belegt." 7 | fi 8 | -------------------------------------------------------------------------------- /docker/dnsmasq/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM alpine:latest 2 | 3 | ADD dnsmasq.conf /tmp/dnsmasq.conf 4 | 5 | RUN apk --no-cache add dnsmasq && \ 6 | cp /tmp/dnsmasq.conf /etc/dnsmasq.conf 7 | 8 | EXPOSE 53/tcp 53/udp 9 | 10 | RUN dnsmasq --test 11 | USER dnsmasq 12 | CMD ["dnsmasq", "-k"] 13 | -------------------------------------------------------------------------------- /docker/logstash/etc/conf.d/estransfer/metricbeat_es_transfer.conf: -------------------------------------------------------------------------------- 1 | input { pipeline { address => metricbeat_esoutput } } 2 | output { 3 | elasticsearch { 4 | hosts => [ "elasticsearch:9200" ] 5 | index => "%{[@metadata][beat]}-%{[@metadata][version]}-%{+YYYY.MM.dd}" 6 | }} 7 | -------------------------------------------------------------------------------- /docker/web/source/templates/application/smtp.conf.j2: -------------------------------------------------------------------------------- 1 | # MAIL 2 | MAIL_SERVER={{ smtp['host'] }} 3 | MAIL_PORT={{ smtp['port'] }} 4 | MAIL_USE_TLS={{ smtp['tls'] }} 5 | MAIL_USERNAME={{ smtp['username'] }} 6 | MAIL_PASSWORD={{ smtp['password'] }} 7 | MAIL_DEFAULT_SENDER={{ smtp['senderMail'] }} 8 | -------------------------------------------------------------------------------- /config/etc/logstash/20-preBOX4s.conf: -------------------------------------------------------------------------------- 1 | filter { 2 | if [src_ip] 3 | { 4 | mutate { 5 | copy => { "[src_ip]" => "[source][ip]" } 6 | } 7 | } 8 | if [dest_ip] 9 | { 10 | mutate { 11 | copy => { "[dest_ip]" => "[destination][ip]" } 12 | } 13 
| } 14 | } 15 | -------------------------------------------------------------------------------- /docker/logstash/etc/conf.d/estransfer/heartbeat_es_transfer.conf: -------------------------------------------------------------------------------- 1 | input { pipeline { address => heartbeat_esoutput } } 2 | output { 3 | elasticsearch { 4 | hosts => [ "elasticsearch:9200" ] 5 | index => "logstash-%{[@metadata][beat]}-%{[@metadata][version]}-%{+YYYY.MM.dd}" 6 | }} 7 | -------------------------------------------------------------------------------- /docker/heartbeat/etc/monitors.d/box4s.yml: -------------------------------------------------------------------------------- 1 | # - type: http 2 | # urls: ["https://box4security:9392"] 3 | # # ssl: 4 | # #certificate_authorities: ['/etc/ca.crt'] 5 | # #supported_protocols: ["TLSv1.0", "TLSv1.1", "TLSv1.2"] 6 | # schedule: '@every 1m' 7 | # id: "Greenbone Security Manager" 8 | -------------------------------------------------------------------------------- /docker/web/source/templates/user/emails/registered_subject.txt: -------------------------------------------------------------------------------- 1 | {% extends 'user/emails/base_subject.txt' %} 2 | 3 | {% block subject %}{% if user_manager.enable_confirm_email and not user.confirmed_at %}Bestätigen Sie Ihre E-Mail-Adresse{% else %}Vielen Dank für Ihre Registrierung{% endif %}{% endblock %} -------------------------------------------------------------------------------- /docker/web/tests/validLSR.json: -------------------------------------------------------------------------------- 1 | { 2 | "src_port": 80, 3 | "src_ip": "127.0.0.1", 4 | "dst_ip": "0.0.0.0", 5 | "dst_port": 0, 6 | "proto": "", 7 | "signature_id":133769, 8 | "signature":"+++ Bayern verhängt Ausgangsbeschränkungen +++" 9 | } 10 | -------------------------------------------------------------------------------- /docker/kibana/entry.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | #insert wazuh startup 4 | echo -e "hosts:\n - default:\n url: https://${INT_IP}\n port: 55000\n user: ${WAZUH_USER}\n password: ${WAZUH_PASS}\n" > /usr/share/kibana/optimize/wazuh/config/wazuh.yml 5 | #make sure container does not restart 6 | exec "$@" 7 | -------------------------------------------------------------------------------- /docker/web/source/templates/user/emails/base_message.txt: -------------------------------------------------------------------------------- 1 | {% if user.first_name and user.last_name -%} 2 | Sehr geehrte*r {{user.first_name}} {{user.last_name}}, 3 | {% else -%} 4 | Guten Tag, 5 | {% endif %} 6 | {% block message %} 7 | {% endblock %} 8 | 9 | Mit freundlichen Grüßen, 10 | 4sConsult GmbH 11 | -------------------------------------------------------------------------------- /config/secrets/msmtprc: -------------------------------------------------------------------------------- 1 | defaults 2 | tls on 3 | auth on 4 | tls_trust_file /etc/ssl/certs/ca-certificates.crt 5 | aliases /etc/aliases 6 | account box@4sconsult.de 7 | port 587 8 | host smtp.office365.com 9 | from box@4sconsult.de 10 | user box@4sconsult.de 11 | password CHANGEME 12 | account default : box@4sconsult.de 13 | -------------------------------------------------------------------------------- /docker/web/source/templates/user/emails/base_message.html: -------------------------------------------------------------------------------- 1 | {% if user.first_name and user.last_name -%} 2 |
Sehr geehrte*r {{user.first_name}} {{user.last_name}},
3 | {% else -%}
4 | Guten Tag,
5 | {% endif %}
6 |
7 | {% block message %}
8 | {% endblock %}
9 |
10 | Mit freundlichen Grüßen,
11 | 4sConsult GmbH
12 | -------------------------------------------------------------------------------- /docker/web/source/extensions.py: -------------------------------------------------------------------------------- 1 | """The module holding and initializing all Flask extensions.""" 2 | from flask_sqlalchemy import SQLAlchemy 3 | from flask_marshmallow import Marshmallow 4 | from flask_mail import Mail 5 | from flask_migrate import Migrate 6 | 7 | db = SQLAlchemy() 8 | ma = Marshmallow() 9 | mail = Mail() 10 | migrate = Migrate() 11 | -------------------------------------------------------------------------------- /scripts/System_Scripts/listenNamedPipe.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/bash 2 | pipe=/var/lib/box4s/web.pipe 3 | [ -p "$pipe" ] || mkfifo -m 0600 "$pipe" || exit 1 4 | while :; do 5 | while read -r cmd; do 6 | if [ "$cmd" ]; then 7 | printf 'From web container got: %s ...\n' "$cmd" 8 | bash -c "$cmd" sh 9 | fi 10 | done <"$pipe" 11 | done -------------------------------------------------------------------------------- /docker/web/source/templates/application/alert_email_conf.yaml.j2: -------------------------------------------------------------------------------- 1 | alert: 2 | - email 3 | email: 4 | - "{{ target }}" 5 | from_addr: "{{ smtp['senderMail'] }}" 6 | smtp_host: "{{ smtp['host'] }}" 7 | smtp_port: "{{ smtp['port'] }}" 8 | smtp_ssl: {{ smtp['tls'] }} 9 | smtp_auth_file: "/opt/elastalert/smtp_auth_file.yaml" 10 | ca_certs: "/etc/ssl/certs/ca-certificates.crt" -------------------------------------------------------------------------------- /docker/web/source/templates/application/alert_frequency.yaml.j2: -------------------------------------------------------------------------------- 1 | {% extends ./alert_base.yaml.j2 %} 2 | 3 | # (Required, frequency specific) 4 | # Alert when this many documents matching the query occur within a timeframe 5 | num_events: 50 6 | 7 | # (Required, frequency specific) 8 | # num_events must occur within this amount of time to trigger an alert 9 | timeframe: 10 | hours: 4 11 | -------------------------------------------------------------------------------- /docker/wiki/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM ruby:latest 2 | RUN apt-get clean && \ 3 | apt-get -y update && \ 4 | apt-get -y install libicu-dev cmake && \ 5 | rm -rf /var/lib/apt/lists/* && \ 6 | gem install github-linguist && \ 7 | gem install rack execjs therubyracer && \ 8 | gem install gollum:5.0.1 && \ 9 | gem install org-ruby 10 | 11 | WORKDIR /wiki 12 | CMD ["rackup"] 13 | EXPOSE 80 14 | -------------------------------------------------------------------------------- /docker/logstash/etc/conf.d/estransfer/openvas_es_transfer.conf: -------------------------------------------------------------------------------- 1 | input { pipeline { address => openvas_esoutput } } 2 | output { 3 | # stdout { codec => rubydebug } 4 | elasticsearch { 5 | hosts => [ "elasticsearch:9200" ] 6 | index => "logstash-vulnwhisperer-%{+YYYY.MM}" 7 | template => "/etc/logstash/BOX4s/logstash-vulnwhisperer-template_elk7.json" 8 | template_overwrite => "true" 9 | }} 10 | -------------------------------------------------------------------------------- /docker/web/source/templates/user/emails/username_changed_message.html: -------------------------------------------------------------------------------- 1 | {% extends 'user/emails/base_message.html' %} 2 | 3 | {% block message %} 4 |
Ihr Benutzername wurde geändert.
5 | Wenn Sie die Änderung Ihres Benutzernamens nicht initiiert haben, bitte melden Sie sich an (mit Ihrer E-Mail-Adresse) und ändern Sie Ihr Passwort.
6 | {% endblock %} 7 | -------------------------------------------------------------------------------- /docker/web/source/templates/user/emails/username_changed_message.txt: -------------------------------------------------------------------------------- 1 | {% extends 'user/emails/base_message.txt' %} 2 | 3 | {% block message %} 4 | Ihr Benutzername wurde geändert. 5 | 6 | Wenn Sie die Änderung Ihres Benutzernamens nicht initiiert haben, bitte melden Sie sich an (mit Ihrer E-Mail-Adresse) und ändern Sie Ihr Passwort. 7 | {{ url_for('user.login', _external=True) }} 8 | {% endblock %} 9 | 10 | 11 | -------------------------------------------------------------------------------- /docker/web/source/wizard/templates/logstash/netplan.yaml.jinja2: -------------------------------------------------------------------------------- 1 | network: 2 | version: 2 3 | renderer: networkd 4 | ethernets: 5 | ens160: 6 | addresses: 7 | - {{BOX4s.ip_address}}/{{BOX4s.network.cidr}} 8 | dhcp4: {% if BOX4s.dhcp_enabled %}yes{% else %}no{% endif %} 9 | gateway4: {{BOX4s.gateway.ip_address}} 10 | nameservers: 11 | addresses: [{{BOX4s.dns.ip_address}}] 12 | -------------------------------------------------------------------------------- /docker/web/source/templates/user/emails/password_changed_message.html: -------------------------------------------------------------------------------- 1 | {% extends 'user/emails/base_message.html' %} 2 | 3 | {% block message %} 4 |
Ihr Passwort wurde geändert.
5 | {% if user_manager.USER_ENABLE_FORGOT_PASSWORD %}
6 | Falls Sie die Änderung Ihres Passworts nicht initiiert haben, klicken Sie hier um dies zurückzusetzen.
7 | {% endif %} 8 | {% endblock %} -------------------------------------------------------------------------------- /config/etc/etc_files/environment: -------------------------------------------------------------------------------- 1 | BASEDIR=/home/amadmin/ 2 | GITDIR=/box4s/ 3 | KUNDE="NEWSYSTEM" 4 | 5 | PATH="/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin" 6 | BOX4SPATH="$BASEDIR$GITDIR" 7 | PYTHONPATH="/home/amadmin/.local/lib/python2.7/site-packages/" 8 | ES_TMPDIR=/tmp/elasticsearch 9 | 10 | IPADRESS=$(landscape-sysinfo --sysinfo-plugins=Network | grep -oE "\b([0-9]{1,3}\.){3}[0-9]{1,3}\b") 11 | 12 | COMPANY_NAME=$KUNDE 13 | -------------------------------------------------------------------------------- /docker/elasticsearch/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM docker.elastic.co/elasticsearch/elasticsearch:7.9.0 2 | 3 | RUN mkdir -p /var/log/elasticsearch/ && \ 4 | mkdir -p /data && \ 5 | touch /var/log/elasticsearch/gc.log && \ 6 | chown elasticsearch:elasticsearch -R /var/log/elasticsearch/ 7 | COPY --chown=elasticsearch:elasticsearch etc/* /usr/share/elasticsearch/config/ 8 | 9 | HEALTHCHECK --retries=10 CMD curl -s -XGET 'http://127.0.0.1:9200/_cat/health' 10 | -------------------------------------------------------------------------------- /docker/web/source/templates/user/emails/reset_password_message.txt: -------------------------------------------------------------------------------- 1 | {% extends 'user/emails/base_message.txt' %} 2 | 3 | {% block message %} 4 | Wir haben Ihre Anfrage zur Passwortzurücksetzung erhalten. 5 | 6 | Wenn Sie diese Anfrage initiiert haben, dann klicken Sie auf den nachstehenden Link: 7 | {{ reset_password_link }} 8 | 9 | Falls Sie diese Registrierung nicht initiiert haben, können Sie diese E-Mail ignorieren. 10 | 11 | {% endblock %} -------------------------------------------------------------------------------- /config/etc/logstash/BOX4s-special.conf: -------------------------------------------------------------------------------- 1 | filter 2 | { 3 | if [event][subtype] != "stats" { 4 | # dont do this for suricata stats 5 | 6 | mutate { 7 | add_field => { "company" => "${KUNDE}" } 8 | } 9 | 10 | # Drop if no track 11 | # {! PLACEHOLDER DROP !} 12 | 13 | 14 | # Classify assets by subnet 15 | # {! PLACEHOLDER CIDR !} 16 | 17 | # Classify asset by ip to type 18 | # {! PLACEHOLDER IP !} 19 | } 20 | } 21 | -------------------------------------------------------------------------------- /config/etc/logstash/output.conf: -------------------------------------------------------------------------------- 1 | output { 2 | 3 | # if "nmap" in [tags] { 4 | # elasticsearch { 5 | # # Nmap data usually isn't too bad, so monthly rotation should be fine 6 | # index => "logstash-nmap-%{+YYYY.MM}" 7 | # } 8 | #}else { 9 | 10 | elasticsearch { 11 | hosts => [ "elasticsearch:9200" ] 12 | index => "logstash-%{[@metadata][beat]}-%{[@metadata][version]}-%{+YYYY.MM.dd}" 13 | } 14 | #} 15 | } 16 | -------------------------------------------------------------------------------- /docker/openvas/scripts/insertconfig.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | CONFIG=/etc/openvas/4s-OpenVAS.xml 3 | LOCK=/data/imported_4sConsult_Config 4 | if [ ! 
-f "$LOCK" ]; then 5 | python3 -m venv .venv-openvas 6 | source .venv-openvas/bin/activate 7 | pip install python-gvm 8 | python3 /root/config.py 9 | deactivate 10 | echo "OpenVAS Config Full and Fast without Default Account Check and Bruteforce imported." 11 | rm -r .venv-openvas 12 | touch $LOCK 13 | fi 14 | -------------------------------------------------------------------------------- /docker/web/source/templates/user/emails/password_changed_message.txt: -------------------------------------------------------------------------------- 1 | {% extends 'user/emails/base_message.txt' %} 2 | 3 | {% block message %} 4 | Ihr Passwort wurde geändert. 5 | 6 | {% if user_manager.USER_ENABLE_FORGOT_PASSWORD -%} 7 | Falls Sie die Änderung Ihres Passworts nicht initiiert haben, klicken Sie auf den nachstehenden Link um dies zurückzusetzen. 8 | {{ url_for('user.forgot_password', _external=True) }} 9 | {% endif -%} 10 | {% endblock %} 11 | -------------------------------------------------------------------------------- /docker/web/source/templates/user/emails/confirm_email_message.txt: -------------------------------------------------------------------------------- 1 | {% extends 'user/emails/base_message.txt' %} 2 | 3 | {% block message %} 4 | Vor der Nutzung der {{ app_name }} muss Ihre E-Mail-Adresse bestätigt werden. 5 | 6 | Wenn Sie diese Bestätigung initiiert haben, dann klicken Sie auf den nachstehenden Link: 7 | {{ confirm_email_link }} 8 | 9 | Falls Sie diese Bestätigung nicht initiiert haben, können Sie diese E-Mail ignorieren. 10 | {% endblock %} 11 | -------------------------------------------------------------------------------- /docker/web/source/wizard/templates/logstash/BOX4s-special.conf.jinja2: -------------------------------------------------------------------------------- 1 | filter 2 | { 3 | if [event][subtype] != "stats" { 4 | # dont do this for suricata stats 5 | 6 | mutate { 7 | add_field => { "company" => "${KUNDE}" } 8 | } 9 | 10 | # Drop if no track 11 | {{templateDrop}} 12 | 13 | 14 | # Classify assets by subnet 15 | {{templateNetworks}} 16 | 17 | # Classify asset by ip to type 18 | {{templateSystems}} 19 | } 20 | } 21 | -------------------------------------------------------------------------------- /scripts/Automation/agent-upgrade.py: -------------------------------------------------------------------------------- 1 | import json 2 | import requests 3 | import os 4 | 5 | # TODO: Updatescript for wazuh agents 6 | # https://documentation.wazuh.com/3.12/user-manual/agents/remote-upgrading/upgrading-agent.html 7 | url = "http://wazuh:55000/agents/outdated?pretty" 8 | headers = {'content-type': 'application/json', 'Accept-Charset': 'UTF-8'} 9 | r = requests.get(url, headers=headers, auth=(os.getenv('WAZUH_USER'), os.getenv('WAZUH_PASS'))) 10 | r.json() 11 | -------------------------------------------------------------------------------- /docker/core4s/curator/curator.yml: -------------------------------------------------------------------------------- 1 | client: 2 | hosts: 3 | - elasticsearch 4 | port: 9200 5 | url_prefix: 6 | use_ssl: False 7 | certificate: 8 | client_cert: 9 | client_key: 10 | ssl_no_validate: False 11 | http_auth: 12 | timeout: 30 13 | master_only: False 14 | logging: 15 | loglevel: INFO 16 | logfile: 17 | logformat: default 18 | blacklist: 19 | -------------------------------------------------------------------------------- /docker/web/source/templates/user/invite_user.html: 
--------------------------------------------------------------------------------
1 | {% extends 'flask_user/_authorized_base.html' %}
2 |
3 | {% block content %}
4 | {% from "flask_user/_macros.html" import render_field, render_submit_field %}
5 | {%trans%}Invite User{%endtrans%}
6 |
7 |
8 | {{ form.hidden_tag() }}
9 | {{ render_field(form.email, tabindex=10) }}
10 | {{ render_submit_field(form.submit, tabindex=90) }}
11 |
12 | 13 | {% endblock %} 14 | -------------------------------------------------------------------------------- /scripts/System_Scripts/GenCert.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # Creates self signed certificate in a interactive session using 3 | # default values from box4security-ssl.conf 4 | # Key is without a passphrase 5 | openssl req -config ../../config/ssl/box4security-ssl.conf -new -x509 -sha256 -newkey rsa:4096 -nodes -keyout ../../config/secrets/box4security.key.pem -days 365 -out ../../config/ssl/box4security.cert.pem 6 | 7 | chmod 600 ../../config/secrets/box4security.key.pem 8 | chmod 644 ../../config/ssl/box4security.cert.pem 9 | -------------------------------------------------------------------------------- /docker/openvas/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM securecompliance/gvm:20.08-v1 2 | 3 | ADD etc/ /etc/openvas/ 4 | ADD scripts/ /root/ 5 | 6 | RUN apt-get update && \ 7 | apt-get install -y python3-venv && \ 8 | rm -rf /var/cache/apk/* && \ 9 | chmod +x /root/insertconfig.sh && \ 10 | chmod +x /root/start.sh && \ 11 | chmod +x /root/update.sh && \ 12 | chmod +x /root/vulnwhisp.sh 13 | 14 | EXPOSE 9392 15 | EXPOSE 9390 16 | HEALTHCHECK --retries=10 CMD curl -sLk -XGET 'https://127.0.0.1:9392' 17 | CMD /root/start.sh 18 | -------------------------------------------------------------------------------- /docker/elastalert/etc/elastalert-server/config.json: -------------------------------------------------------------------------------- 1 | { 2 | "appName": "elastalert-server", 3 | "port": 3030, 4 | "wsport": 3333, 5 | "elastalertPath": "/opt/elastalert", 6 | "verbose": false, 7 | "es_debug": false, 8 | "debug": false, 9 | "rulesPath": { 10 | "relative": true, 11 | "path": "/rules" 12 | }, 13 | "templatesPath": { 14 | "relative": true, 15 | "path": "/rule_templates" 16 | }, 17 | "es_host": "elasticsearch", 18 | "es_port": 9200, 19 | "writeback_index": "elastalert_status" 20 | } 21 | -------------------------------------------------------------------------------- /docker/web/source/templates/user/emails/reset_password_message.html: -------------------------------------------------------------------------------- 1 | {% extends 'user/emails/base_message.html' %} 2 | 3 | {% block message %} 4 | 5 |

Wir haben Ihre Anfrage zur Passwortzurücksetzung erhalten.

6 | 7 |

Wenn Sie diese Anfrage initiiert haben, dann klicken Sie auf den nachstehenden Link:
8 |     Passwort zurücksetzen.

9 | 10 |

Falls Sie diese Anfrage nicht initiiert haben, können Sie diese E-Mail ignorieren.

11 | 12 | {% endblock %} -------------------------------------------------------------------------------- /docker/web/source/templates/application/msmtprc.j2: -------------------------------------------------------------------------------- 1 | defaults 2 | tls {% if smtp['tls'] or smtp['port'] == 587 %}on{% else %}off{% endif %} 3 | {% if not smtp['tls'] and smtp['port'] == 587 %}tls_starttls on{% endif -%} 4 | auth on 5 | tls_trust_file /etc/ssl/certs/ca-certificates.crt 6 | aliases /etc/aliases 7 | account {{ smtp['senderMail'] }} 8 | port {{ smtp['port'] }} 9 | host {{ smtp['host'] }} 10 | from {{ smtp['senderMail'] }} 11 | user {{ smtp['username'] }} 12 | password {{ smtp['password'] }} 13 | account default : {{ smtp['senderMail'] }} 14 | -------------------------------------------------------------------------------- /config/etc/systemd/box4security.service: -------------------------------------------------------------------------------- 1 | [Unit] 2 | Description=box4security 3 | Requires=docker.service 4 | After=docker.service 5 | 6 | [Service] 7 | Restart=always 8 | RestartSec=5 9 | TimeoutSec=infinity 10 | # Pass environmentfile to service 11 | EnvironmentFile=/etc/environment 12 | EnvironmentFile=/etc/box4s/modules.conf 13 | 14 | # Compose Box4Security up 15 | ExecStart=/usr/bin/box4s/box4s_service.sh up 16 | 17 | # Compose Box4Security down 18 | ExecStop=/usr/bin/box4s/box4s_service.sh down 19 | 20 | [Install] 21 | WantedBy=multi-user.target 22 | -------------------------------------------------------------------------------- /docker/web/source/templates/user/emails/confirm_email_message.html: -------------------------------------------------------------------------------- 1 | {% extends 'user/emails/base_message.html' %} 2 | 3 | {% block message %} 4 |

Vor der Nutzung der {{ app_name }} muss Ihre E-Mail-Adresse bestätigt werden.

5 | 6 |

Wenn Sie diese Bestätigung initiiert haben, dann klicken Sie auf den nachstehenden Link:
7 |     E-Mail-Adresse bestätigen.

8 | 9 |

Falls Sie diese Bestätigung nicht initiiert haben, können Sie diese E-Mail ignorieren.

10 | 11 | {% endblock %} 12 | -------------------------------------------------------------------------------- /docker/web/source/templates/user/emails/invite_user_message.txt: -------------------------------------------------------------------------------- 1 | {% extends 'user/emails/base_message.html' %} 2 | 3 | {% block message %} 4 | Ihnen wurde ein Account für die BOX4security angelegt. 5 | 6 | Verwenden Sie zum Login bitte: 7 | E-Mail: {{ user.email }} 8 | Passwort: {{ user_pass }} 9 | 10 | Bitte beachten Sie, dass es sich bei dem Kennwort um ein automatisch generiertes Kennwort handelt. Sie sollten dieses so schnell wie möglich ändern. 11 | Eine Änderung ist hier möglich: {{ url_for('user.change_password',_external=True) }} 12 | {% endblock %} 13 | -------------------------------------------------------------------------------- /docker/openvas/scripts/config.py: -------------------------------------------------------------------------------- 1 | from gvm.connections import TLSConnection 2 | from gvm.protocols.gmp import Gmp 3 | from gvm.transforms import EtreeTransform 4 | from gvm.xml import pretty_print 5 | import os 6 | 7 | conn = TLSConnection() 8 | transform = EtreeTransform() 9 | 10 | with Gmp(conn, transform=transform) as gmp: 11 | # Login 12 | gmp.authenticate(os.getenv('OPENVAS_USER'), os.getenv('OPENVAS_PASS')) 13 | with open('/etc/openvas/4s-OpenVAS.xml', 'r') as fxml: 14 | xml_string = fxml.read() 15 | gmp.import_config(xml_string) 16 | -------------------------------------------------------------------------------- /docker/wazuh/wazuh.yml: -------------------------------------------------------------------------------- 1 | version: "2.3" 2 | services: 3 | wazuh: 4 | container_name: wazuh 5 | image: 4sconsult/wazuh:0.0.5 6 | build: . 7 | hostname: wazuh-manager 8 | restart: always 9 | ports: 10 | - "1514:1514/udp" 11 | - "1515:1515" 12 | - "514:514/udp" 13 | - "55000:55000" 14 | volumes: 15 | - /var/lib/box4s/wazuh-authd.pass:/wazuh-config-mount/etc/authd.pass 16 | env_file: 17 | - ../../config/secrets/wazuh.conf 18 | - /etc/box4s/modules.conf 19 | -------------------------------------------------------------------------------- /docker/web/source/templates/user/change_username.html: -------------------------------------------------------------------------------- 1 | {% extends 'flask_user/_authorized_base.html' %} 2 | 3 | {% block content %} 4 | {% from "flask_user/_macros.html" import render_field, render_submit_field %} 5 |

{%trans%}Change username{%endtrans%}

6 | 7 |
8 | {{ form.hidden_tag() }} 9 | {{ render_field(form.new_username, tabindex=10) }} 10 | {{ render_field(form.old_password, tabindex=20) }} 11 | {{ render_submit_field(form.submit, tabindex=90) }} 12 |
13 | 14 | {% endblock %} -------------------------------------------------------------------------------- /docker/web/source/templates/user/emails/registered_message.txt: -------------------------------------------------------------------------------- 1 | {% extends 'user/emails/base_message.txt' %} 2 | 3 | {% block message %} 4 | Vielen Dank für Ihre Registrierung für die {{ app_name }}. 5 | 6 | {% if confirm_email_link -%} 7 | Für die Nutzung ist die Bestätigung Ihrer E-Mail-Adresse erforderlich. 8 | 9 | Wenn Sie diese Registrierung initiiert haben, dann klicken Sie auf den nachstehenden Link: 10 | {{ confirm_email_link }} 11 | 12 | Falls Sie diese Registrierung nicht initiiert haben, können Sie diese E-Mail ignorieren. 13 | 14 | {%- endif %} 15 | {% endblock %} -------------------------------------------------------------------------------- /docker/core4s/scripts/Automation/score_calculation/res/index_mapping.json: -------------------------------------------------------------------------------- 1 | { 2 | "properties": { 3 | "score_type": { 4 | "type": "text" 5 | }, 6 | "value": { 7 | "type": "float" 8 | }, 9 | "timestamp": { 10 | "type": "date", 11 | "format": "epoch_millis" 12 | }, 13 | "rules": { 14 | "properties" : { 15 | "text":{ 16 | "type": "keyword" 17 | }, 18 | "weight":{ 19 | "type": "float" 20 | } 21 | } 22 | } 23 | } 24 | } 25 | -------------------------------------------------------------------------------- /docker/web/source/static/box4s.css: -------------------------------------------------------------------------------- 1 | .text-justify { 2 | text-align: justify; 3 | } 4 | video { 5 | width: 100% !important; 6 | height: auto !important; 7 | display: block; 8 | } 9 | 10 | .foursgrey { 11 | color: #36515A !important; 12 | 13 | } 14 | .foursgrey-bg { 15 | background-color: #36515A !important; 16 | color: white !important; 17 | } 18 | 19 | small.helper { 20 | color: #36515A !important; 21 | } 22 | 23 | .ui.action.input.cert input[type="file"] { 24 | display: none; 25 | } 26 | 27 | .foursfooter { 28 | border-bottom: 0 !important; 29 | } -------------------------------------------------------------------------------- /docker/db/Dockerfile: -------------------------------------------------------------------------------- 1 | # vim:set ft=dockerfile: 2 | FROM postgres:latest 3 | 4 | # Official Postgres docker image will run .sql scripts found in this folder 5 | COPY sql/* /docker-entrypoint-initdb.d/ 6 | 7 | # Initial IP2ASN setup 8 | RUN apt update && \ 9 | apt install -y curl gzip && \ 10 | rm -rf /var/lib/apt/lists/* && \ 11 | curl https://iptoasn.com/data/ip2asn-combined.tsv.gz -o /tmp/ip2asn-combined.tsv.gz && \ 12 | gunzip -f /tmp/ip2asn-combined.tsv.gz 13 | 14 | # From official Postgres Image 15 | ENTRYPOINT ["docker-entrypoint.sh"] 16 | 17 | EXPOSE 5432 18 | CMD ["postgres"] 19 | -------------------------------------------------------------------------------- /docker/web/source/templates/spiderfoot.html: -------------------------------------------------------------------------------- 1 | {% extends "base.html" %} 2 | {% block content %} 3 | {# Default content: render the page as iframe #} 4 | {# Overwrite this by including block content in a derived template, like in faq.html #} 5 | 6 | {% endblock %} 7 | {% block scripts %} 8 | 12 | {% endblock %} 13 | -------------------------------------------------------------------------------- /docker/web/source/templates/dashboard.html: -------------------------------------------------------------------------------- 1 | {% extends 
"base.html" %} 2 | {% block content %} 3 | {# Default content: render the page as iframe #} 4 | {# Overwrite this by including block content in a derived template, like in faq.html #} 5 | 6 | {% endblock %} 7 | {% block scripts %} 8 | 12 | {% endblock %} 13 | -------------------------------------------------------------------------------- /docker/web/source/templates/suricata_suppress.bpf.j2: -------------------------------------------------------------------------------- 1 | {# 2 | # Creates concatenated BPF rule set 3 | # for each rule from rules variable 4 | #} 5 | {% for rule in rules -%} 6 | not {% if rule.src_ip != '0.0.0.0' %}src host {{ rule.src_ip }} {% endif -%} 7 | {% if rule.src_port != 0 %}src port {{ rule.src_port }} {% endif -%} 8 | {% if rule.dst_ip != '0.0.0.0' %}dst host {{ rule.dst_ip }} {% endif -%} 9 | {% if rule.dst_port != 0 %}dst port {{ rule.dst_port }} {% endif -%} 10 | {% if rule.proto.strip() %}ip proto {{ rule.proto }} {% endif -%} 11 | {% if not loop.last %} && {% endif %} 12 | {% endfor %} 13 | -------------------------------------------------------------------------------- /scripts/System_Scripts/sendmail.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # $1 = Receipient 3 | # $2 = Subject 4 | # Body from stdin 5 | BODY="" 6 | while read LINE; do 7 | if [[ ! -z $LINE ]]; then 8 | BODY="$BODY\n$LINE" 9 | fi 10 | done < /dev/stdin 11 | if [[ -z $BODY ]]; then 12 | exit 13 | fi 14 | ctr=0 15 | while [ $ctr -lt 6 ]; do 16 | echo -e $BODY | sed "1 i\To:BOX4s \nSubject: [Kunde: $KUNDE] BOX4s $2\n\n" | msmtp $1 17 | retVal=$? 18 | if [ $retVal -eq 0 ]; then 19 | break 20 | fi 21 | ctr=$[$ctr+1] 22 | # sleep a bit 23 | sleep ${ctr}m 24 | done 25 | -------------------------------------------------------------------------------- /docker/kibana/etc/kibana.yml: -------------------------------------------------------------------------------- 1 | #server.port: 5601 2 | server.host: "0.0.0.0" 3 | #path.data needs to be commented out for 7.9.0 4 | #path.data: "/var/lib/kibana" 5 | server.basePath: "/kibana" 6 | server.rewriteBasePath: true 7 | server.name: "box4security" 8 | elasticsearch.hosts: "http://elasticsearch:9200" 9 | xpack.canvas.enabled: false 10 | #Canvas disabled for now since it crashes kibana with wazuh enabled 11 | kibana.index: ".kibana" 12 | #server.ssl.enabled: True 13 | logging.dest: /var/log/kibana/kibana.log 14 | logging.silent: false 15 | logging.quiet: false 16 | logging.verbose: false 17 | telemetry.enabled: false 18 | -------------------------------------------------------------------------------- /config/etc/logstash/dns_resolv.conf: -------------------------------------------------------------------------------- 1 | filter { 2 | mutate { copy => { "[client][ip]" => "[client][domain]" } } 3 | mutate { copy => { "[destination][ip]" => "[destination][domain]" } } 4 | if [destination][domain] { 5 | dns { 6 | reverse => ["[destination][domain]"] 7 | action => "replace" 8 | } 9 | } 10 | if [client][domain] { 11 | dns { 12 | reverse => ["[client][domain]"] 13 | action => "replace" 14 | } 15 | } 16 | 17 | } 18 | -------------------------------------------------------------------------------- /docker/web/source/__init__.py: -------------------------------------------------------------------------------- 1 | from flask import Flask 2 | from source.config import Config 3 | from source.extensions import db, ma, mail, migrate 4 | from source.models import User 5 | from source.creator import 
CreatorUserMan 6 | from source.wizard import bpWizard 7 | 8 | app = Flask(__name__) 9 | app.config.from_object(Config) 10 | db.init_app(app) 11 | ma.init_app(app) 12 | mail.init_app(app) 13 | migrate.init_app(app, db) 14 | userman = CreatorUserMan(app, db, User) 15 | app.register_blueprint(bpWizard, url_prefix="/wizard") 16 | 17 | from . import helpers # noqa 18 | from . import routes # noqa 19 | # disable pep8 checks for this one 20 | -------------------------------------------------------------------------------- /docker/openvas/scripts/start.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | echo "Setting Authentication" 3 | export USERNAME=$OPENVAS_USER 4 | export PASSWORD=$OPENVAS_PASS 5 | 6 | # Make sure the API listens to 0.0.0.0 and is thus accessible by other containers 7 | sed -i 's/su -c "gvmd --listen=127.0.0.1 --port=9390" gvm/su -c "gvmd --listen=0.0.0.0 --port=9390" gvm/g' /start.sh 8 | 9 | # Insert our config insertion before the end of start script.. 10 | sed -i "\$i echo 'Inserting 4sConsult config ...'" /start.sh 11 | sed -i "\$i chmod +x /root/insertconfig.sh" /start.sh 12 | sed -i "\$i /root/insertconfig.sh" /start.sh 13 | 14 | echo "Starting OpenVAS" 15 | /start.sh 16 | -------------------------------------------------------------------------------- /docker/elastalert/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM bitsensor/elastalert:3.0.0-beta.0 2 | 3 | USER root 4 | ADD etc/ /root/etc/ 5 | RUN cp /root/etc/elastalert-server/config.yaml /opt/elastalert-server/config/config.yaml && \ 6 | cp /root/etc/elastalert-server/config.yaml /opt/elastalert-server/config/elastalert-test.yaml && \ 7 | cp /root/etc/elastalert-server/config.yaml /opt/elastalert-server/config/elastalert.yaml && \ 8 | cp /root/etc/elastalert-server/config.json /opt/elastalert-server/config/config.json && \ 9 | cp /root/etc/elastalert/config.yaml /opt/elastalert/config.yaml && \ 10 | cp /root/etc/elastalert/smtp_auth_file.yaml /opt/elastalert/smtp_auth_file.yaml 11 | -------------------------------------------------------------------------------- /config/etc/network/interfaces: -------------------------------------------------------------------------------- 1 | # This file describes the network interfaces available on your system 2 | # and how to activate them. For more information, see interfaces(5). 3 | 4 | source /etc/network/interfaces.d/* 5 | 6 | # The loopback network interface 7 | auto lo 8 | iface lo inet loopback 9 | 10 | # The primary network interface 11 | auto ens160 12 | iface ens160 inet dhcp 13 | 14 | auto ens192 15 | iface ens192 inet manual 16 | up ifconfig ens192 promisc up 17 | down ifconfig ens192 promisc down 18 | 19 | auto ens224 20 | iface ens224 inet manual 21 | up ifconfig ens224 promisc up 22 | down ifconfig ens224 promisc down 23 | 24 | -------------------------------------------------------------------------------- /docker/web/source/templates/user/emails/registered_message.html: -------------------------------------------------------------------------------- 1 | {% extends 'user/emails/base_message.html' %} 2 | 3 | {% block message %} 4 | 5 |

Vielen Dank für Ihre Registrierung für die {{ app_name }}.

6 | 7 | {% if confirm_email_link -%} 8 |

Für die Nutzung ist die Bestätigung Ihrer E-Mail-Adresse notwendig.

9 | 10 |

Wenn Sie diese Registrierung initiiert haben, dann klicken Sie auf den nachstehenden Link:
11 |     E-Mail-Adresse bestätigen.

12 | 13 |

Falls Sie diese Registrierung nicht initiiert haben, können Sie diese E-Mail ignorieren.

14 | {%- endif %} 15 | 16 | {% endblock %} -------------------------------------------------------------------------------- /docker/core4s/scripts/Automation/score_calculation/res/social_media.query.json: -------------------------------------------------------------------------------- 1 | { 2 | "query": { 3 | "bool": { 4 | "must": [ 5 | { 6 | "term": { 7 | "alert.category.keyword": "Social Media Alerts by 4sConsult" 8 | } 9 | }, 10 | { 11 | "range": { 12 | "@timestamp": { 13 | "gte": "now-1d/d", 14 | "lte": "now" 15 | } 16 | } 17 | } 18 | ] 19 | } 20 | } 21 | } 22 | 23 | 24 | 25 | 26 | -------------------------------------------------------------------------------- /docker/web/migrations/script.py.mako: -------------------------------------------------------------------------------- 1 | """${message} 2 | 3 | Revision ID: ${up_revision} 4 | Revises: ${down_revision | comma,n} 5 | Create Date: ${create_date} 6 | 7 | """ 8 | from alembic import op 9 | import sqlalchemy as sa 10 | ${imports if imports else ""} 11 | 12 | # revision identifiers, used by Alembic. 13 | revision = ${repr(up_revision)} 14 | down_revision = ${repr(down_revision)} 15 | branch_labels = ${repr(branch_labels)} 16 | depends_on = ${repr(depends_on)} 17 | 18 | 19 | def upgrade(): 20 | """Upgrade to migration.""" 21 | ${upgrades if upgrades else "pass"} 22 | 23 | 24 | def downgrade(): 25 | """Downgrade to migration.""" 26 | ${downgrades if downgrades else "pass"} 27 | -------------------------------------------------------------------------------- /docker/web/source/templates/application/alert_spike.yaml.j2: -------------------------------------------------------------------------------- 1 | {% extends ./alert_base.yaml.j2 %} 2 | 3 | # (Required, spike specific) 4 | # The size of the window used to determine average event frequency 5 | # We use two sliding windows each of size timeframe 6 | # To measure the 'reference' rate and the current rate 7 | timeframe: 8 | hours: 2 9 | 10 | # (Required, spike specific) 11 | # The spike rule matches when the current window contains spike_height times more 12 | # events than the reference window 13 | spike_height: 3 14 | 15 | # (Required, spike specific) 16 | # The direction of the spike 17 | # 'up' matches only spikes, 'down' matches only troughs 18 | # 'both' matches both spikes and troughs 19 | spike_type: "up" 20 | 21 | -------------------------------------------------------------------------------- /docker/web/source/wizard/templates/logstash/system.jinja2: -------------------------------------------------------------------------------- 1 | {% for s in systems %} 2 | if [source][ip] == "{{s.ip_address}}" 3 | { 4 | mutate { 5 | add_field => {"[soure][host][name]" => "{{s.name}}" } 6 | {% for t in s.types %} 7 | add_field => {"[soure][host][type]" => "{{t.name}}" } 8 | {% endfor %} 9 | } 10 | } 11 | if [destination][ip] == "{{s.ip_address}}" 12 | { 13 | mutate { 14 | add_field => {"[destination][host][name]" => "{{s.name}}" } 15 | {% for t in s.types %} 16 | add_field => {"[destination][host][type]" => "{{t.name}}" } 17 | {% endfor %} 18 | } 19 | } 20 | {% endfor %} 21 | -------------------------------------------------------------------------------- /docker/suricata/scripts/update.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Move Own rules to correct folder - Only do if folder not empty 4 | if find /root/var_lib -mindepth 1 | read; then 5 | mv -f /root/var_lib/* /var/lib/suricata/rules 6 | fi 7 | 8 | suricata-update 
update-sources 9 | suricata-update enable-source et/open 10 | suricata-update enable-source oisf/trafficid 11 | suricata-update enable-source ptresearch/attackdetection 12 | suricata-update enable-source sslbl/ssl-fp-blacklist 13 | suricata-update enable-source etnetera/aggressive 14 | suricata-update enable-source tgreen/hunting 15 | suricata-update 16 | 17 | # If this is not during install, reload the rules 18 | if [ -z "$1" ]; then 19 | suricatasc -c ruleset-reload-nonblocking; 20 | fi 21 | -------------------------------------------------------------------------------- /docker/core4s/scripts/Automation/resourceupdate.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # Updating ASN 3 | /bin/bash /core4s/scripts/Automation/ASN_update.sh 4 | 5 | # Updating Geo-IP 6 | source /core4s/config/secrets/secrets.conf 7 | cd /tmp/ 8 | curl -sL "https://www.ip2location.com/download/?token=$IP2TOKEN&file=DB5LITEBIN" -o IP2LOCATION-LITE-DB5.BIN.zip 9 | curl -sL "https://www.ip2location.com/download/?token=$IP2TOKEN&file=DB5LITEBINIPV6" -o IP2LOCATION-LITE-DB5.IPV6.BIN.zip 10 | unzip -o IP2LOCATION-LITE-DB5.BIN.zip 11 | mv -f IP2LOCATION-LITE-DB5.BIN /core4s/workfolder/var/lib/box4s/IP2LOCATION-LITE-DB5.BIN 12 | unzip -o IP2LOCATION-LITE-DB5.IPV6.BIN.zip 13 | mv -f IP2LOCATION-LITE-DB5.IPV6.BIN /core4s/workfolder/var/lib/box4s/IP2LOCATION-LITE-DB5.IPV6.BIN 14 | -------------------------------------------------------------------------------- /docker/core4s/scripts/Automation/score_calculation/res/index_settings.json: -------------------------------------------------------------------------------- 1 | { 2 | "settings": { 3 | "number_of_shards": 1 4 | }, 5 | "mappings": { 6 | "properties": { 7 | "score_type": { 8 | "type": "text" 9 | }, 10 | "rules": { 11 | "properties" : { 12 | "text":{ 13 | "type": "keyword" 14 | }, 15 | "weight":{ 16 | "type": "float" 17 | } 18 | } 19 | }, 20 | "value": { 21 | "type": "float" 22 | }, 23 | "timestamp": { 24 | "type": "date", 25 | "format": "epoch_millis" 26 | } 27 | } 28 | } 29 | } 30 | -------------------------------------------------------------------------------- /docker/web/migrations/versions/d995a93c3a9c_box4s_dhcp_col.py: -------------------------------------------------------------------------------- 1 | """BOX4s: DHCP col 2 | 3 | Revision ID: d995a93c3a9c 4 | Revises: a59fffda1b70 5 | Create Date: 2020-10-30 07:24:16.155013 6 | 7 | """ 8 | from alembic import op 9 | import sqlalchemy as sa 10 | from sqlalchemy.dialects import postgresql 11 | 12 | # revision identifiers, used by Alembic. 
13 | revision = 'd995a93c3a9c' 14 | down_revision = 'a59fffda1b70' 15 | branch_labels = None 16 | depends_on = None 17 | 18 | 19 | def upgrade(): 20 | """Upgrade to migration.""" 21 | op.add_column('box4security', sa.Column('dhcp_enabled', sa.Boolean(), nullable=True)) 22 | 23 | 24 | def downgrade(): 25 | """Downgrade to migration.""" 26 | op.drop_column('box4security', 'dhcp_enabled') 27 | -------------------------------------------------------------------------------- /docker/db/sql/uniquevulns.sql: -------------------------------------------------------------------------------- 1 | CREATE SEQUENCE public.uniquevulns_vul_id_seq 2 | INCREMENT 1 3 | START 27275 4 | MINVALUE 1 5 | MAXVALUE 2147483647 6 | CACHE 1; 7 | ALTER SEQUENCE public.uniquevulns_vul_id_seq 8 | OWNER TO postgres; 9 | CREATE TABLE public.uniquevulns 10 | ( 11 | vul_id integer NOT NULL DEFAULT nextval('uniquevulns_vul_id_seq'::regclass), 12 | uniqueidentifier character varying(50) COLLATE pg_catalog."default" NOT NULL, 13 | CONSTRAINT uniquevulns_pkey PRIMARY KEY (vul_id), 14 | CONSTRAINT uniquevulns_uniqueidentifier_key UNIQUE (uniqueidentifier) 15 | 16 | ) 17 | WITH ( 18 | OIDS = FALSE 19 | ) 20 | TABLESPACE pg_default; 21 | 22 | ALTER TABLE public.uniquevulns 23 | OWNER to postgres; 24 | -------------------------------------------------------------------------------- /docker/metricbeat/etc/modules.d/docker.yml: -------------------------------------------------------------------------------- 1 | # Module: docker 2 | # Docs: https://www.elastic.co/guide/en/beats/metricbeat/7.6/metricbeat-module-docker.html 3 | 4 | - module: docker 5 | metricsets: 6 | - container 7 | - cpu 8 | - diskio 9 | - event 10 | - healthcheck 11 | - info 12 | - memory 13 | - network 14 | period: 15s 15 | hosts: ["unix:///var/run/docker.sock"] 16 | 17 | # If set to true, replace dots in labels with `_`. 18 | #labels.dedot: false 19 | 20 | # To connect to Docker over TLS you must specify a client and CA certificate. 21 | #ssl: 22 | #certificate_authority: "/etc/pki/root/ca.pem" 23 | #certificate: "/etc/pki/client/cert.pem" 24 | #key: "/etc/pki/client/cert.key" -------------------------------------------------------------------------------- /docker/web/source/wizard/templates/wizard/verify_progress.html: -------------------------------------------------------------------------------- 1 | {% extends "wizard/base.html" %} 2 | {% block content %} 3 |
4 |
5 |
6 |
7 |
8 |
Gleich ist es so weit...
9 |
Sie haben Ihre Eingaben bestätigt. Die BOX4security wird nun eingerichtet. Bitte haben Sie einen Moment Geduld.
10 |
11 |
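{#
  The scripts block below presumably polls the backend while the wizard finishes
  provisioning and redirects once it is done. A status endpoint for that kind of
  polling could look roughly like this sketch; the route, the marker file and the
  endpoint name are illustrative assumptions, not the shipped implementation
  (`bpWizard` is the wizard blueprint registered in source/__init__.py).

      import os
      from source.wizard import bpWizard

      @bpWizard.route('/setup_status')                          # hypothetical endpoint
      def setup_status():
          # report whether provisioning has finished, e.g. via a marker file
          done = os.path.exists('/var/lib/box4s/wizard.done')   # hypothetical marker file
          return {'done': done}
#}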
12 | {% endblock %} 13 | {% block scripts %} 14 | 19 | {% endblock %} 20 | -------------------------------------------------------------------------------- /scripts/1stLevelRepair/repair_restart.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | set -e 3 | # Log file to use 4 | # Create path if allowed or do NOP 5 | mkdir -p /var/log/box4s/1stLevelRepair || : 6 | LOG_DIR="/var/log/box4s/1stLevelRepair" 7 | if [[ ! -w $LOG_DIR ]]; then 8 | LOG_DIR="$HOME" 9 | fi 10 | 11 | LOG=$LOG_DIR/restart_service.log 12 | 13 | # Do not use interactive debian frontend. 14 | export DEBIAN_FRONTEND=noninteractive 15 | 16 | # Forward fd2 to the console 17 | # exec 2>&1 18 | # Forward fd1 to $LOG 19 | exec 2>&1 1>>${LOG} 20 | echo -n "Stopping BOX4security Service.. " 1>&2 21 | sudo systemctl stop box4security.service 22 | echo "[ DONE ]" 1>&2 23 | echo -n "Starting BOX4security Service.. " 1>&2 24 | sudo systemctl start box4security.service 25 | echo "[ DONE ]" 1>&2 26 | -------------------------------------------------------------------------------- /docker/logstash/etc/conf.d/inputs/input_pipelines.conf: -------------------------------------------------------------------------------- 1 | input { 2 | # Localhost Beats-Interface 3 | beats { 4 | id => "input_beats" 5 | client_inactivity_timeout => 180 6 | host => "127.0.0.1" 7 | port => "5044" 8 | # we dismiss ssl for transport on same machine 9 | #ssl => true 10 | #ssl_certificate => "/etc/logstash/LogstashNode.crt" 11 | #ssl_key => "/etc/logstash/LogstashNode.key" 12 | } 13 | } 14 | # Beats-Interface für andere Hosts 15 | #beats { 16 | # host => 17 | #port => "5046" 18 | # we dismiss ssl for transport on same machine 19 | #ssl => true 20 | #ssl_certificate => "/etc/logstash/LogstashNode.crt" 21 | #ssl_key => "/etc/logstash/LogstashNode.key" 22 | # 23 | output { 24 | if [id] == "input_beats" { 25 | pipeline { send_to => beats_pipe } 26 | } 27 | } 28 | -------------------------------------------------------------------------------- /docker/wazuh/config/entrypoint.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # Wazuh Docker Copyright (C) 2020 Wazuh Inc. (License GPLv2) 3 | 4 | # It will run every .sh script located in entrypoint-scripts folder in lexicographical order 5 | for script in `ls /entrypoint-scripts/*.sh | sort -n`; do 6 | bash "$script" 7 | 8 | done 9 | 10 | ############################################################################## 11 | # Start Wazuh Server. 12 | ############################################################################## 13 | # use agent password 14 | sed -i 's/no<\/use_password>/yes<\/use_password>/g' /var/ossec/etc/ossec.conf || : 15 | #set new password 16 | cd /var/ossec/api/configuration/auth/ 17 | node htpasswd -b -c user ${WAZUH_USER} ${WAZUH_PASS} 18 | service wazuh-api restart 19 | 20 | /sbin/my_init -------------------------------------------------------------------------------- /docker/web/source/forms.py: -------------------------------------------------------------------------------- 1 | from wtforms_alchemy import ModelForm 2 | from flask_wtf import FlaskForm 3 | from wtforms import TextField, BooleanField, SelectMultipleField 4 | from source.models import User 5 | 6 | 7 | class AddUserForm(ModelForm, FlaskForm): 8 | """Add User Form.""" 9 | 10 | class Meta: 11 | """Build form from User Model. 12 | 13 | Exclude internal stuff and password. 
14 | Don't set validators so the three name fields are not required. 15 | """ 16 | 17 | model = User 18 | exclude = ['active', 'email_confirmed_at', 'password'] 19 | first_name = TextField(validators=[]) 20 | last_name = TextField(validators=[]) 21 | email_copy = BooleanField() 22 | roles = SelectMultipleField( 23 | 'Rollen', 24 | coerce=int 25 | ) 26 | -------------------------------------------------------------------------------- /docker/web/source/templates/user/emails/invite_user_message.html: -------------------------------------------------------------------------------- 1 | {% extends 'user/emails/base_message.html' %} 2 | {% block message %} 3 | Ihnen wurde ein Account für die BOX4security angelegt. 4 |
5 | Verwenden Sie zum Einloggen bitte:
6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 |
E-Mail:{{ user.email }}
Passwort: {{ user_pass }}
16 |
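{#
  `user_pass` is the automatically generated initial password referred to in the
  paragraph below. A plausible way for the inviting view to produce it is the
  repo's own generate_password() helper from docker/web/source/helpers.py; the
  call site shown here is an assumption, only the helper itself is part of this
  code base.

      from source.helpers import generate_password

      user_pass = generate_password()   # 10 alphanumeric chars, mixed case, >= 3 digits
      # render_template('user/emails/invite_user_message.html',
      #                 user=user, user_pass=user_pass)
#}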
17 | 18 | Bitte beachten Sie, dass sich bei dem Kennwort um ein automatisch generiertes Kennwort handelt. Sie sollten dieses so schnell wie möglich ändern. 19 | Eine Änderung ist hier möglich: BOX4security: Passwort ändern 20 | {% endblock %} 21 | -------------------------------------------------------------------------------- /docker/web/migrations/versions/6845bca64bc8_.py: -------------------------------------------------------------------------------- 1 | """Wiki 2 | 3 | Revision ID: 6845bca64bc8 4 | Revises: 1d03ea9e33bd 5 | Create Date: 2020-05-18 10:22:11.938319 6 | 7 | """ 8 | from alembic import op 9 | import sqlalchemy as sa 10 | from sqlalchemy.dialects import postgresql 11 | from source.models import Role 12 | 13 | # revision identifiers, used by Alembic. 14 | revision = '6845bca64bc8' 15 | down_revision = '1d03ea9e33bd' 16 | branch_labels = None 17 | depends_on = None 18 | 19 | 20 | def upgrade(): 21 | """Upgrade to migration.""" 22 | op.bulk_insert(Role.__table__, 23 | [ 24 | {'id': 11, 'name': 'Wiki', 'description': 'Freigabe für die Dokumentation'}, 25 | ]) 26 | 27 | 28 | def downgrade(): 29 | """Downgrade to migration.""" 30 | op.execute('DELETE FROM "role" WHERE id=11') 31 | -------------------------------------------------------------------------------- /docker/core4s/scripts/Automation/score_calculation/res/cvss_buckets.query.json: -------------------------------------------------------------------------------- 1 | { 2 | "query": { 3 | "range": { 4 | "@timestamp": { 5 | "gte": "now-7d/d", 6 | "lt": "now" 7 | } 8 | } 9 | }, 10 | "size": 0, 11 | "aggs": 12 | { 13 | "cvss": { 14 | "range": { 15 | "field": "cvss", 16 | "ranges": [ 17 | { "key": "critical" , "from": 7.5 }, 18 | { "key": "high" , "from": 5, "to": 7.5 }, 19 | { "key": "medium" , "from": 2.5, "to": 5 }, 20 | { "key": "low" , "to": 2.5 } 21 | ] 22 | }, 23 | "aggs": { 24 | "cvssUniqueVul": { 25 | "cardinality": {"field": "uniqueVul.keyword" } 26 | } 27 | } 28 | } 29 | } 30 | } 31 | -------------------------------------------------------------------------------- /docker/web/migrations/versions/9f79000ab53d_add_alerts.py: -------------------------------------------------------------------------------- 1 | """Add Alerts 2 | 3 | Revision ID: 9f79000ab53d 4 | Revises: 6845bca64bc8 5 | Create Date: 2020-06-19 14:51:24.689902 6 | 7 | """ 8 | from alembic import op 9 | import sqlalchemy as sa 10 | from sqlalchemy.dialects import postgresql 11 | from source.models import Role 12 | 13 | # revision identifiers, used by Alembic. 14 | revision = '9f79000ab53d' 15 | down_revision = '6845bca64bc8' 16 | branch_labels = None 17 | depends_on = None 18 | 19 | 20 | def upgrade(): 21 | """Upgrade to migration.""" 22 | op.bulk_insert(Role.__table__, 23 | [ 24 | {'id': 12, 'name': 'Alerts', 'description': 'Kontrolle der Alarmierungen'}, 25 | ]) 26 | 27 | 28 | def downgrade(): 29 | """Downgrade to migration.""" 30 | op.execute('DELETE FROM "role" WHERE id=12') 31 | -------------------------------------------------------------------------------- /docker/web/source/templates/user_base.html: -------------------------------------------------------------------------------- 1 | {% extends "base.html" %} 2 | {% block content %} 3 |
4 | {# One-time system messages called Flash messages #} 5 | {% block flash_messages %} 6 | {%- with messages = get_flashed_messages(with_categories=true) -%} 7 | {% if messages %} 8 | {% for category, message in messages %} 9 |
{{ message|safe }}
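{#
  These alerts are fed by Flask's message flashing API: a view pushes a message
  together with a category, get_flashed_messages(with_categories=true) hands both
  to this loop, and one alert is rendered per message. A minimal example of the
  producing side (route and wording are illustrative only):

      from flask import flash, redirect, url_for

      @app.route('/settings/save', methods=['POST'])      # hypothetical route
      def save_settings():
          # ... persist the settings ...
          flash('Einstellungen gespeichert.', 'success')   # 'success' is the category
          return redirect(url_for('index'))                # hypothetical endpoint name
#}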
10 | {% endfor %} 11 | {% endif %} 12 | {%- endwith %} 13 | {% endblock %} 14 | {% block user %}{% endblock %} 15 | {% endblock %} 16 |
17 | {% block scripts %} 18 | 22 | {% endblock %} 23 | -------------------------------------------------------------------------------- /docker/web/migrations/versions/c2bdbad3c958_network_system_rel.py: -------------------------------------------------------------------------------- 1 | """Relation Network <-> Systems 2 | 3 | Revision ID: c2bdbad3c958 4 | Revises: b1685fc5f49c 5 | Create Date: 2020-10-27 08:26:08.007801 6 | 7 | """ 8 | from alembic import op 9 | import sqlalchemy as sa 10 | from sqlalchemy.dialects import postgresql 11 | 12 | # revision identifiers, used by Alembic. 13 | revision = 'c2bdbad3c958' 14 | down_revision = 'b1685fc5f49c' 15 | branch_labels = None 16 | depends_on = None 17 | 18 | 19 | def upgrade(): 20 | """Upgrade to migration.""" 21 | op.add_column('system', sa.Column('network_id', sa.Integer(), nullable=True)) 22 | op.create_foreign_key(None, 'system', 'network', ['network_id'], ['id']) 23 | 24 | 25 | def downgrade(): 26 | """Downgrade to migration.""" 27 | op.drop_constraint(None, 'system', type_='foreignkey') 28 | op.drop_column('system', 'network_id') 29 | -------------------------------------------------------------------------------- /docker/metricbeat/etc/modules.d/system.yml: -------------------------------------------------------------------------------- 1 | # Module: system 2 | # Docs: https://www.elastic.co/guide/en/beats/metricbeat/6.4/metricbeat-module-system.html 3 | 4 | - module: system 5 | period: 30s 6 | metricsets: 7 | - cpu 8 | - load 9 | - memory 10 | - network 11 | - process 12 | - process_summary 13 | - socket_summary 14 | - core 15 | - diskio 16 | - socket 17 | cpu.metrics: [normalized_percentages] 18 | process.include_top_n: 19 | by_cpu: 5 # include top 5 processes by CPU 20 | by_memory: 5 # include top 5 processes by memory 21 | 22 | - module: system 23 | period: 1m 24 | metricsets: 25 | - filesystem 26 | - fsstat 27 | processors: 28 | - drop_event.when.regexp: 29 | system.filesystem.mount_point: '^/(sys|cgroup|proc|dev|etc|host|lib)($|/)' 30 | 31 | - module: system 32 | period: 5m 33 | metricsets: 34 | - uptime 35 | -------------------------------------------------------------------------------- /docker/web/migrations/versions/96cfbddbc495_add_role_config.py: -------------------------------------------------------------------------------- 1 | """Add Role 'Config' 2 | 3 | Revision ID: 96cfbddbc495 4 | Revises: 9f79000ab53d 5 | Create Date: 2020-07-17 12:23:24.189549 6 | 7 | """ 8 | from alembic import op 9 | import sqlalchemy as sa 10 | from sqlalchemy.dialects import postgresql 11 | from source.models import Role 12 | 13 | # revision identifiers, used by Alembic. 
14 | revision = '96cfbddbc495' 15 | down_revision = '9f79000ab53d' 16 | branch_labels = None 17 | depends_on = None 18 | 19 | 20 | def upgrade(): 21 | """Upgrade to migration.""" 22 | op.bulk_insert(Role.__table__, 23 | [ 24 | {'id': 13, 'name': 'Config', 'description': 'Einsicht und Bearbeiten der BOX4s-Konfiguration'}, 25 | ]) 26 | 27 | 28 | def downgrade(): 29 | """Downgrade to migration.""" 30 | op.execute('DELETE FROM "role" WHERE id=13') 31 | -------------------------------------------------------------------------------- /docker/web/migrations/versions/045ed1db87f6_.py: -------------------------------------------------------------------------------- 1 | """empty message 2 | 3 | Revision ID: 045ed1db87f6 4 | Revises: 56e9b3f51ec8 5 | Create Date: 2020-04-22 07:57:41.568929 6 | 7 | """ 8 | from alembic import op 9 | import sqlalchemy as sa 10 | from sqlalchemy.dialects import postgresql 11 | 12 | # revision identifiers, used by Alembic. 13 | revision = '045ed1db87f6' 14 | down_revision = '56e9b3f51ec8' 15 | branch_labels = None 16 | depends_on = None 17 | 18 | 19 | def upgrade(): 20 | """Upgrade to migration.""" 21 | # ### commands auto generated by Alembic - please adjust! ### 22 | op.add_column('role', sa.Column('description', sa.String(length=255), nullable=True)) 23 | 24 | 25 | def downgrade(): 26 | """Downgrade to migration.""" 27 | op.drop_column('role', 'description') 28 | # ### commands auto generated by Alembic - please adjust! ### 29 | 30 | # ### end Alembic commands ### 31 | -------------------------------------------------------------------------------- /docker/web/source/templates/application/alert_base.yaml.j2: -------------------------------------------------------------------------------- 1 | # Elasticsearch host 2 | es_host: elasticsearch 3 | 4 | # Elasticsearch port 5 | es_port: 9200 6 | 7 | # Rule name, must be unique 8 | name: {{alert.name}} 9 | 10 | # Type of rule 11 | type: {{alert.type}} 12 | 13 | # Index to search, wildcard supported 14 | index: {{alert.index}} 15 | 16 | # A list of Elasticsearch filters used for find events 17 | # These filters are joined with AND and nested in a filtered query 18 | # For more info: http://www.elasticsearch.org/guide/en/elasticsearch/reference/current/query-dsl.html 19 | filter: 20 | {% for query in alert.queries %} 21 | - query: 22 | query_string: 23 | query: "{{query.field}}: {{query.value}}" 24 | {% endfor %} 25 | 26 | 27 | # The alert is used when a match is found 28 | alert: 29 | - "email" 30 | 31 | # a list of email addresses to send alerts to 32 | email: 33 | - "box@4sconsult.de" 34 | -------------------------------------------------------------------------------- /docker/core4s/scripts/Automation/score_calculation/install_index.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | DIR=$(echo "/core4s/scripts/Automation/score_calculation") 4 | 5 | echo "Install the 'scores' index" 6 | # Delete an old index, which might exist, so there is no conflict 7 | curl -s -X DELETE http://elasticsearch:9200/scores > /dev/null 8 | # Create a new index called 'scores' with specific settings configured in index_settings.json. 
9 | # Also apply a specific mapping, so everything stays the same all the time 10 | curl -s -H "Content-type: application/json" -X PUT http://elasticsearch:9200/scores --data-binary @$DIR/res/index_settings.json 11 | curl -s -H "Content-type: application/json" -X PUT http://elasticsearch:9200/scores/_mapping --data-binary @$DIR/res/index_mapping.json 12 | 13 | # Create an suricata index of the current month. score calculation will fail without an existing index. 14 | curl -sLkX PUT elasticsearch:9200/suricata-$(date +%Y.%m) > /dev/null 15 | -------------------------------------------------------------------------------- /docker/logstash/etc/conf.d/inputs/basic_input.conf: -------------------------------------------------------------------------------- 1 | input { 2 | # Localhost Beats-Interface 3 | beats { 4 | id => "input_beats" 5 | client_inactivity_timeout => 180 6 | host => "127.0.0.1" 7 | port => "5044" 8 | # we dismiss ssl for transport on same machine 9 | #ssl => true 10 | #ssl_certificate => "/etc/logstash/LogstashNode.crt" 11 | #ssl_key => "/etc/logstash/LogstashNode.key" 12 | } 13 | } 14 | # Beats-Interface für andere Hosts 15 | #beats { 16 | # host => 17 | #port => "5046" 18 | # we dismiss ssl for transport on same machine 19 | #ssl => true 20 | #ssl_certificate => "/etc/logstash/LogstashNode.crt" 21 | #ssl_key => "/etc/logstash/LogstashNode.key" 22 | # 23 | #output { 24 | #[ if [event][type] == "suricata" { 25 | # pipeline { send_to => suricata_pipe } 26 | #}else 27 | # pipeline { send_to => beats_pipe } 28 | #} 29 | # 30 | 31 | # Seti Logstash 6.2 32 | filter { 33 | mutate { 34 | remove_field => [ "host" ] 35 | } 36 | } 37 | -------------------------------------------------------------------------------- /docker/logstash/etc/conf.d/openvas/100_openvas_output_pipe.conf: -------------------------------------------------------------------------------- 1 | output{ 2 | #Schreibe isUnique als output in die Postgres 3 | if [isUnique] { 4 | jdbc { 5 | # jdbc_driver_library => "/etc/logstash/BOX4s/mysql-connector-java-8.0.17.jar" 6 | # jdbc_driver_class => "com.mysql.jdbc.Driver" 7 | # jdbc_connection_string => "jdbc:mysql://127.0.0.1:3306/Box4_db" 8 | # jdbc_user => "Box4S" 9 | # jdbc_password => "zgJnwauCAsHrR6JB*" 10 | 11 | driver_jar_path => "/usr/share/logstash/logstash-core/lib/jars/postgresql-42.2.8.jar" 12 | max_pool_size =>"1" 13 | #driver_class => "org.postgresql.Driver" 14 | connection_string => "jdbc:postgresql://db:5432/${POSTGRES_DB}" 15 | username => "${POSTGRES_USER}" 16 | password => "${POSTGRES_PASSWORD}" 17 | statement => ["INSERT INTO uniquevulns(uniqueidentifier) VALUES(?)","%{uniqueVul}"] 18 | } #jdbc 19 | } #if 20 | pipeline { send_to => ["openvas_esoutput"] } 21 | } #output 22 | -------------------------------------------------------------------------------- /docker/web/source/wizard/templates/logstash/network.jinja2: -------------------------------------------------------------------------------- 1 | {% for n in networks %} 2 | cidr { 3 | address => [ "%{[source][ip]}" ] 4 | network => [ "{{n.ip_address}}/{{n.cidr}}" ] 5 | add_field => {"[source][network][name]" => "{{n.name}}"} 6 | add_field => {"[source][network][vlan]" => "{{n.vlan}}"} 7 | {% for t in n.types %} 8 | add_field => {"[source][network][types]" => "{{t.name}}" } 9 | {% endfor %} 10 | } 11 | cidr { 12 | address => [ "%{[destination][ip]}" ] 13 | network => [ "{{n.ip_address}}/{{n.cidr}}" ] 14 | add_field => {"[destination][network][name]" => "{{n.name}}"} 15 | add_field => 
{"[destination][network][vlan]" => "{{n.vlan}}"} 16 | {% for t in n.types %} 17 | add_field => {"[destination][network][types]" => "{{t.name}}" } 18 | {% endfor %} 19 | } 20 | {% endfor %} 21 | -------------------------------------------------------------------------------- /scripts/System_Scripts/wipe_part.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | DEVICE="/dev/sdc1" #actually partition not DEVICE 3 | 4 | if [[ $EUID -ne 0 ]]; then 5 | # root check 6 | echo "Wipe-Prozess erfordert Root-Privilegien." 1>&2 7 | exit 1 8 | fi 9 | 10 | read -p "Are you sure to wipe the elasticsearch data on partition $DEVICE? Press [y] to continue." -n 1 -r 11 | echo 12 | 13 | if [[ $REPLY =~ ^[Yy]$ ]] 14 | #sanity check 15 | then 16 | 17 | PASS=$(tr -cd '[:alnum:]' < /dev/urandom | head -c128) 18 | # generate random key 19 | openssl enc -aes-256-ctr -pass pass:"$PASS" -nosalt ') 11 | def forbidden(e): 12 | """Handle 403 Forbidden Error.""" 13 | userRoleURLs = [] 14 | # loop over all roles 15 | for r in current_user.roles: 16 | for d in RoleURLs: 17 | if d['name'] == r.name: 18 | # and create a copy of the Role URL configuration 19 | d_copy = d.copy() 20 | # add the text description of role to the Role URL element 21 | d_copy['description'] = r.description 22 | userRoleURLs.append(d_copy) 23 | # render the 403 navigation page with the user roles, their description and URL 24 | return render_template('errors/403.html', roleURLs=userRoleURLs), 403 25 | -------------------------------------------------------------------------------- /docker/web/source/templates/errors/error_base.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | {% block head %} 6 | 7 | 8 | BOX4Security 9 | {% endblock %} 10 | 11 | 12 | {% block content %} 13 | {% endblock %} 14 | 15 | 16 | {% block scripts %}{% endblock %} 17 | 18 | 19 | -------------------------------------------------------------------------------- /docker/web/source/templates/docs.html: -------------------------------------------------------------------------------- 1 | {% extends "base.html" %} 2 | {% block content %} 3 | {# One-time system messages called Flash messages #} 4 | {% block flash_messages %} 5 | {%- with messages = get_flashed_messages(with_categories=true) -%} 6 | {% if messages %} 7 | {% for category, message in messages %} 8 |
{{ message|safe }}
9 | {% endfor %} 10 | {% endif %} 11 | {%- endwith %} 12 | {% endblock %} 13 | {# Default content: render the page as iframe #} 14 | {# Overwrite this by including block content in a derived template, like in faq.html #} 15 | 16 | {% endblock %} 17 | {% block scripts %} 18 | 22 | {% endblock %} 23 | -------------------------------------------------------------------------------- /docker/web/requirements.txt: -------------------------------------------------------------------------------- 1 | alembic==1.4.2 2 | aniso8601==8.0.0 3 | bcrypt==3.1.7 4 | blinker==1.4 5 | certifi==2020.6.20 6 | cffi==1.14.0 7 | chardet==3.0.4 8 | click==7.1.2 9 | cryptography==2.9.2 10 | decorator==4.4.2 11 | dnspython==1.16.0 12 | email-validator==1.1.1 13 | Flask==1.1.2 14 | Flask-Login==0.5.0 15 | Flask-Mail==0.9.1 16 | flask-marshmallow==0.13.0 17 | Flask-Migrate==2.5.3 18 | Flask-RESTful==0.3.8 19 | Flask-SQLAlchemy==2.4.3 20 | Flask-User==1.0.2.2 21 | Flask-WTF==0.14.3 22 | gunicorn==20.0.4 23 | idna==2.9 24 | infinity==1.5 25 | intervals==0.8.1 26 | itsdangerous==1.1.0 27 | Jinja2==2.11.2 28 | Mako==1.1.3 29 | MarkupSafe==1.1.1 30 | marshmallow==3.6.1 31 | passlib==1.7.2 32 | psycopg2-binary==2.8.5 33 | pycparser==2.20 34 | python-dateutil==2.8.1 35 | python-editor==1.0.4 36 | pytz==2020.1 37 | requests==2.24.0 38 | six==1.15.0 39 | SQLAlchemy==1.3.18 40 | SQLAlchemy-Utils==0.36.8 41 | urllib3==1.25.9 42 | validators==0.15.0 43 | Werkzeug==1.0.1 44 | WTForms==2.3.1 45 | WTForms-Alchemy==0.17.0 46 | WTForms-Components==0.10.4 47 | docker -------------------------------------------------------------------------------- /docker/web/source/helpers.py: -------------------------------------------------------------------------------- 1 | from . import app 2 | import os 3 | import string 4 | import secrets 5 | 6 | 7 | @app.template_filter() 8 | def custom_getenv(default, var): 9 | """Get an environment variable in jinja2 template. 10 | Return content of `default` if the variable does not exist. 11 | """ 12 | try: 13 | return os.getenv(var, default) 14 | except Exception: 15 | return default 16 | 17 | 18 | def generate_password(): 19 | """Generate a ten-character alphanumeric password. 
20 | 21 | with at least one lowercase character, 22 | at least one uppercase character, 23 | and at least three digits 24 | See: https://docs.python.org/3/library/secrets.html#recipes-and-best-practices 25 | """ 26 | alphabet = string.ascii_letters + string.digits 27 | while True: 28 | password = ''.join(secrets.choice(alphabet) for i in range(10)) 29 | if (any(c.islower() for c in password) and any(c.isupper() for c in password) and sum(c.isdigit() for c in password) >= 3): 30 | break 31 | return password 32 | -------------------------------------------------------------------------------- /scripts/System_Scripts/box4s_service.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | source /etc/box4s/modules.conf 3 | # Construct set of compose files, depending on enabled modules 4 | COMPOSE_FILES="-f $BOX4s_INSTALL_DIR/docker/box4security.yml" 5 | if [ $BOX4s_WAZUH == "true" ]; then 6 | COMPOSE_FILES="$COMPOSE_FILES -f $BOX4s_INSTALL_DIR/docker/wazuh/wazuh.yml" 7 | fi 8 | if [ $1 == "up" ] 9 | then 10 | 11 | # perform commands to set the service up 12 | # Stop and remove old container 13 | /usr/local/bin/docker-compose $COMPOSE_FILES down -v 14 | /usr/local/bin/docker-compose $COMPOSE_FILES rm -v 15 | /usr/local/bin/docker-compose $COMPOSE_FILES up --no-color --no-build --remove-orphans 16 | # Listen to the web named pipe. 17 | /bin/bash $BOX4s_INSTALL_DIR/scripts/System_Scripts/listenNamedPipe.sh & 18 | elif [ $1 == "down" ] 19 | then 20 | # perform commands to set the service down 21 | /usr/local/bin/docker-compose $COMPOSE_FILES down -v 22 | else 23 | echo "You have to submit up/down as the first parameter to the BOX4s service script." 24 | exit 1 25 | fi 26 | -------------------------------------------------------------------------------- /docker/web/migrations/versions/1d03ea9e33bd_.py: -------------------------------------------------------------------------------- 1 | """empty message 2 | 3 | Revision ID: 1d03ea9e33bd 4 | Revises: 2bcd96b138e4 5 | Create Date: 2020-04-23 13:20:52.008628 6 | 7 | """ 8 | from alembic import op 9 | import sqlalchemy as sa 10 | from sqlalchemy.dialects import postgresql 11 | from source.models import Role 12 | 13 | # revision identifiers, used by Alembic. 14 | revision = '1d03ea9e33bd' 15 | down_revision = '2bcd96b138e4' 16 | branch_labels = None 17 | depends_on = None 18 | 19 | 20 | def upgrade(): 21 | """Upgrade to migration.""" 22 | # ### commands auto generated by Alembic - please adjust! ### 23 | op.bulk_insert(Role.__table__, 24 | [ 25 | {'id': 10, 'name': 'Startseite', 'description': 'Freigabe für die Startseite'}, 26 | ]) 27 | # ### end Alembic commands ### 28 | 29 | 30 | def downgrade(): 31 | """Downgrade to migration.""" 32 | op.execute('DELETE FROM "role" WHERE id=10') 33 | # ### commands auto generated by Alembic - please adjust! 
### 34 | # ### end Alembic commands ### 35 | -------------------------------------------------------------------------------- /docker/suricata/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM ubuntu:latest 2 | 3 | ENV DEBIAN_FRONTEND=noninteractive 4 | 5 | ADD scripts/ /root/scripts/ 6 | ADD etc/ /root/etc/ 7 | ADD var_lib/ /root/var_lib/ 8 | 9 | RUN apt update && \ 10 | apt install -y apt-transport-https software-properties-common && \ 11 | add-apt-repository ppa:oisf/suricata-stable && \ 12 | apt update && \ 13 | apt install -y curl wget sudo suricata && \ 14 | rm -rf /var/lib/apt/lists/* && \ 15 | mkdir -p /var/lib/suricata/scripts/ && \ 16 | mkdir -p /var/lib/suricata/rules/ && \ 17 | mkdir -p /var/log/suricata && \ 18 | mkdir -p /var/run/suricata && \ 19 | mkdir -p /etc/suricata/ && \ 20 | mkdir -p /var/lib/box4s && \ 21 | touch /var/lib/box4s/suricata_suppress.bpf && \ 22 | mv /root/etc/* /etc/suricata/ && \ 23 | chmod 777 /var/lib/suricata/rules -R && \ 24 | chmod +x /root/scripts/update.sh && \ 25 | chmod +x /root/scripts/ingest_testdata.sh 26 | 27 | USER root 28 | CMD /usr/bin/suricata -vvv -c /etc/suricata/suricata.yaml -F /var/lib/box4s/suricata_suppress.bpf -i $SURI_INTERFACE 29 | -------------------------------------------------------------------------------- /docker/core4s/scripts/Automation/download_wazuh_clients.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | if [ $# -eq 0 ] 3 | then 4 | WAZUH_VERSION=3.12.1 5 | else 6 | WAZUH_VERSION=$1 7 | fi 8 | echo "Downloadindg Wazuh Client Files of Version" $WAZUH_VERSION 9 | workdir=/core4s/workfolder/wazuh_files 10 | if [ ! -d "$workdir" ];then 11 | mkdir $workdir 12 | fi 13 | cd $workdir 14 | #download redhat/centos 15 | redhat_download=https://packages.wazuh.com/3.x/yum/wazuh-agent-$WAZUH_VERSION-1.x86_64.rpm 16 | wget $redhat_download -q -O redhat_centos-wazuh-agent.rpm 17 | #download debian/ubuntu 18 | debian_download=https://packages.wazuh.com/3.x/apt/pool/main/w/wazuh-agent/wazuh-agent_$WAZUH_VERSION-1_amd64.deb 19 | wget $debian_download -q -O debian_ubuntu-wazuh-agent.deb 20 | #download windows 21 | windows_doanload=https://packages.wazuh.com/3.x/windows/wazuh-agent-$WAZUH_VERSION-1.msi 22 | wget $windows_doanload -q -O windows-wazuh-agent.msi 23 | #download macos 24 | macos_download=https://packages.wazuh.com/3.x/osx/wazuh-agent-$WAZUH_VERSION-1.pkg 25 | wget $macos_download -q -O macos-wazuh-agent.pkg 26 | 27 | echo "Done" 28 | -------------------------------------------------------------------------------- /docker/web/migrations/versions/ea1ce32ce8fd_box4s_model.py: -------------------------------------------------------------------------------- 1 | """BOX4security model and relation to system 2 | 3 | Revision ID: ea1ce32ce8fd 4 | Revises: c2bdbad3c958 5 | Create Date: 2020-10-27 09:18:08.417000 6 | 7 | """ 8 | from alembic import op 9 | import sqlalchemy as sa 10 | from sqlalchemy.dialects import postgresql 11 | 12 | # revision identifiers, used by Alembic. 
13 | revision = 'ea1ce32ce8fd' 14 | down_revision = 'c2bdbad3c958' 15 | branch_labels = None 16 | depends_on = None 17 | 18 | 19 | def upgrade(): 20 | """Upgrade to migration.""" 21 | op.add_column('system', sa.Column('dns_id', sa.Integer())) 22 | op.add_column('system', sa.Column('gateway_id', sa.Integer())) 23 | op.create_foreign_key(None, 'system', 'system', ['gateway_id'], ['id']) 24 | op.create_foreign_key(None, 'system', 'system', ['dns_id'], ['id']) 25 | 26 | 27 | def downgrade(): 28 | """Downgrade to migration.""" 29 | op.drop_constraint(None, 'system', type_='foreignkey') 30 | op.drop_constraint(None, 'system', type_='foreignkey') 31 | op.drop_column('system', 'gateway_id') 32 | op.drop_column('system', 'dns_id') 33 | -------------------------------------------------------------------------------- /scripts/Development/healthy.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # Must be run as root. 3 | 4 | CONTAINERS_WITH_HEALTHCHECK=(core4s elasticsearch nginx db spiderfoot logstash kibana web wazuh openvas) 5 | 6 | # Define the time how long containers must stay healthy (after being healthy for the first time) to accept state. 7 | SAFETY_TIME=300 8 | 9 | stay_healthy() { 10 | 11 | i=0 12 | while [ $i -lt $SAFETY_TIME ]; do 13 | for name in "${CONTAINERS_WITH_HEALTHCHECK[@]}" 14 | do 15 | state=$(docker inspect -f '{{ .State.Health.Status }}' $name 2>/dev/null) 16 | if [[ "${state}" != "healthy" ]]; then 17 | echo "ERROR: $name went from healthy to $state." 18 | exit 1 19 | fi 20 | done 21 | sleep 1 22 | ((i++)) 23 | done 24 | } 25 | 26 | for name in "${CONTAINERS_WITH_HEALTHCHECK[@]}" 27 | do 28 | # first let all get healthy, then check every second if one got unhealthy! 29 | /home/amadmin/box4security/scripts/System_Scripts/wait-for-healthy-container.sh $name 30 | done 31 | stay_healthy 32 | echo "All containers staid healthy for $SAFETY_TIME seconds." 33 | -------------------------------------------------------------------------------- /docker/web/source/templates/user/resend_confirm_email.html: -------------------------------------------------------------------------------- 1 | {% extends 'user_base.html' %} 2 | {% block user %} 3 | {% from "user/_macros.html" import render_field, render_checkbox_field, render_submit_field %} 4 |
5 |
6 |

7 |
8 | E-Mail-Bestätigung anfordern 9 |
10 |

11 |
12 | {{ form.hidden_tag() }} 13 |
14 |
15 |
16 | 17 | 18 |
19 |
20 | {% if request.method == "POST" %} 21 |

Ihre Anfrage wird bearbeitet.

22 | {% endif %} 23 | 24 |
25 |
26 |
27 |
28 | {% endblock %} 29 | -------------------------------------------------------------------------------- /docker/core4s/scripts/Automation/score_calculation/res/alerts_buckets.query.json: -------------------------------------------------------------------------------- 1 | { 2 | "query": { 3 | "bool": { 4 | "must": [ 5 | { 6 | "exists": { 7 | "field": "alert.severity" 8 | } 9 | }, 10 | { 11 | "range": { 12 | "@timestamp": { 13 | "gte": "now-16m/m", 14 | "lte": "now" 15 | } 16 | } 17 | } 18 | ] 19 | } 20 | }, 21 | "size": 0, 22 | "aggs": 23 | { 24 | "severity": { 25 | "range": { 26 | "field": "alert.severity", 27 | "ranges": [ 28 | { "key": "critical" , "from": 4.1 }, 29 | { "key": "high" , "from": 3.1, "to": 4.1 }, 30 | { "key": "medium" , "from": 2.1, "to": 3.1 }, 31 | { "key": "low" , "from": 1.1, "to": 2.1 }, 32 | { "key": "info", "to": 1.1} 33 | ] 34 | } 35 | } 36 | } 37 | } 38 | -------------------------------------------------------------------------------- /docker/core4s/scripts/Automation/ASN_update.sh: -------------------------------------------------------------------------------- 1 | source /core4s/config/secrets/db.conf 2 | cd /tmp/ 3 | curl -O -s https://iptoasn.com/data/ip2asn-combined.tsv.gz 4 | gunzip -f ip2asn-combined.tsv.gz 5 | docker cp ip2asn-combined.tsv db:/tmp/ip2asn-combined.tsv 6 | if true; then 7 | # assume db exists.. because we create it from docker build.. 8 | echo "DROP table asn; CREATE table asn (range_start INET,range_end INET, AS_number VARCHAR(10) ,country_code VARCHAR(7),AS_description VARCHAR(250)); COPY asn FROM '/tmp/ip2asn-combined.tsv' DELIMITER E'\t';" | PGPASSWORD=$POSTGRES_PASSWORD PGUSER=$POSTGRES_USER psql postgres://db/box4S_db 9 | else 10 | echo "box4S_db Database does not exist, creating it!" 11 | echo "CREATE DATABASE \"box4S_db\" OWNER postgres;" |sudo -u postgres psql 12 | echo "CREATE table asn (range_start INET,range_end INET, AS_number VARCHAR(10) ,country_code VARCHAR(7),AS_description VARCHAR(250)); COPY asn FROM '/tmp/ip2asn-combined.tsv' DELIMITER E'\t';" | PGPASSWORD=$POSTGRES_PASSWORD PGUSER=$POSTGRES_USER psql postgres://db/box4S_db 13 | fi 14 | echo "ASN Daten aktualisiert." 15 | rm ip2asn-combined.tsv 16 | docker exec db /bin/bash -c "rm /tmp/ip2asn-combined.tsv*" 17 | -------------------------------------------------------------------------------- /docker/web/source/templates/15_logstash_suppress.conf.j2: -------------------------------------------------------------------------------- 1 | filter { 2 | {# 3 | # Creates a concatenated valid logstash filter rule 4 | # for each rule from rules variable 5 | # it has to start with 1 == 1 because then we can just chain with and 6 | # THIS assumes that at least one field is not empty 7 | # and I think that makes sense, empty filter shouldn't be allowed to create. 
8 | #} 9 | {% for rule in rules %} 10 | if 1 == 1 {% if rule.src_ip != '0.0.0.0' -%} 11 | and [source][ip] == "{{ rule.src_ip }}" {% endif -%} 12 | {% if rule.src_port != 0 -%} 13 | and [source][port][number] == "{{ rule.src_port }}" {% endif -%} 14 | {% if rule.dst_ip != '0.0.0.0' -%} 15 | and [destination][ip] == "{{ rule.dst_ip }}" {% endif -%} 16 | {% if rule.dst_port != 0 -%} 17 | and [destination][port][number] == "{{ rule.dst_port }}" {% endif -%} 18 | {% if rule.proto != '' -%} 19 | and [network][transport] == "{{ rule.proto }}" {% endif -%} 20 | {% if rule.signature_id != '' -%} 21 | and [alert][signature_id] == "{{ rule.signature_id }}" 22 | {% endif -%} 23 | { 24 | drop { } 25 | } 26 | {% endfor %} 27 | } 28 | -------------------------------------------------------------------------------- /docker/web/source/templates/application/quick_alert_netuse.yaml.j2: -------------------------------------------------------------------------------- 1 | name: Network Usage Spike 2 | # (Required) 3 | # Type of alert. 4 | # the frequency rule type alerts when num_events events occur with timeframe time 5 | # The any rule will match everything. Every hit that the query returns will generate an alert. 6 | type: spike_aggregation 7 | 8 | # (Required) 9 | # Index to search, wildcard supported 10 | index: suricata-* 11 | 12 | filter: 13 | - term: 14 | event.subtype: "flow" 15 | 16 | # calculated once per 3h 17 | run_every: 18 | hours: 3 19 | 20 | # This is the name of the field over which the metric value will be calculated. 21 | metric_agg_key: "network.bytes" 22 | metric_agg_type: "sum" 23 | # The ratio of the metric value in the last timeframe to the previous timeframe that when hit will trigger an alert. 24 | spike_height: 5 25 | spike_type: "up" 26 | 27 | # Spike over the last day 28 | timeframe: 29 | days: 1 30 | 31 | {% include "application/alert_email_conf.yaml.j2" %} 32 | {% raw %} 33 | alert_subject: "BOX4s Network Usage Spike" 34 | {% endraw %} 35 | alert_text: "The current network usage is 5x higher than in the last 24 hours." 36 | alert_text_type: alert_text_only 37 | -------------------------------------------------------------------------------- /docker/web/source/templates/user/forgot_password.html: -------------------------------------------------------------------------------- 1 | {% extends 'user_base.html' %} 2 | {% block user %} 3 | {% from "user/_macros.html" import render_field, render_checkbox_field, render_submit_field %} 4 |
5 |
6 |

7 |
8 | Passwort zurücksetzen 9 |
10 |

11 |
12 | {{ form.hidden_tag() }} 13 |
14 |
15 |
16 | 17 | 18 |
19 |
20 | {% if request.method == "POST" %} 21 |

Ihre Anfrage wird bearbeitet.

22 | {% endif %} 23 | 24 |
25 |
26 |
27 |
28 | {% endblock %} 29 | -------------------------------------------------------------------------------- /docker/web/source/templates/system.html: -------------------------------------------------------------------------------- 1 | {% extends "base.html" %} 2 | {% block content %} 3 |
4 | {% block flash_messages %} 5 | {%- with messages = get_flashed_messages(with_categories=true) -%} 6 | {% if messages %} 7 | {% for category, message in messages %} 8 |
{{ message|safe }}
9 | {% endfor %} 10 | {% endif %} 11 | {%- endwith %} 12 | {% endblock %} 13 |

BOX4security - System
Monitoring & Verwaltung

14 | 15 | 16 | 17 | 18 | 19 | 20 | {% endblock %} 21 | {% block scripts %} 22 | 26 | {% endblock %} -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | SHELL=/bin/bash 2 | PYTHON = python3 3 | COMPOSE = docker-compose 4 | COMPOSE_F = ./docker/box4security.yml 5 | 6 | images = `arg="$(filter-out $@,$(MAKECMDGOALS))" && echo $${arg:-${1}}` 7 | 8 | 9 | help: 10 | @echo "---------------HELP-----------------" 11 | @echo "Using this make file may require root. Commands:" 12 | @echo "make build # builds the container, e.g. make build web" 13 | @echo "make push # pushes the container, e.g. make push web" 14 | @echo "make recreate # recreates the container, e.g. make recreate web" 15 | @echo "make nocache # builds the container without cache, e.g. make nocache web" 16 | @echo "make logs # tails the log of a container or all containers, e.g. make logs web" 17 | @echo "------------------------------------" 18 | 19 | build: 20 | sudo ${COMPOSE} -f ${COMPOSE_F} build $(call images) 21 | 22 | push: 23 | sudo ${COMPOSE} -f ${COMPOSE_F} push $(call images) 24 | 25 | nocache: 26 | sudo ${COMPOSE} -f ${COMPOSE_F} build --no-cache $(call images) 27 | 28 | recreate: 29 | sudo ${COMPOSE} -f ${COMPOSE_F} up -d --force-recreate $(call images) 30 | 31 | logs: 32 | sudo ${COMPOSE} -f ${COMPOSE_F} logs -f $(call images) 33 | 34 | %: 35 | @: -------------------------------------------------------------------------------- /docker/web/source/templates/user/edit_user_profile.html: -------------------------------------------------------------------------------- 1 | {% extends 'flask_user/_authorized_base.html' %} 2 | 3 | {% block content %} 4 | {% from "flask_user/_macros.html" import render_field, render_checkbox_field, render_submit_field %} 5 |

{%trans%}User profile{%endtrans%}

6 | 7 |
8 | {{ form.hidden_tag() }} 9 | {% for field in form %} 10 | {% if not field.flags.hidden %} 11 | {% if field.type=='SubmitField' %} 12 | {{ render_submit_field(field, tabindex=loop.index*10) }} 13 | {% else %} 14 | {{ render_field(field, tabindex=loop.index*10) }} 15 | {% endif %} 16 | {% endif %} 17 | {% endfor %} 18 |
19 |
20 | 21 | {% if not user_manager.USER_ENABLE_AUTH0 %} 22 | {% if user_manager.USER_ENABLE_CHANGE_USERNAME %} 23 |

{%trans%}Change username{%endtrans%}

24 | {% endif %} 25 | {% if user_manager.USER_ENABLE_CHANGE_PASSWORD %} 26 |

{%trans%}Change password{%endtrans%}

27 | {% endif %} 28 | {% endif %} 29 | 30 | {% endblock %} -------------------------------------------------------------------------------- /docker/core4s/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM ubuntu:latest 2 | 3 | # Install all programs 4 | RUN apt-get update && apt-get install -y \ 5 | bc\ 6 | python3\ 7 | python3-venv\ 8 | python3-pip\ 9 | curl\ 10 | jq\ 11 | msmtp\ 12 | cron\ 13 | postgresql-client\ 14 | unzip\ 15 | wget 16 | 17 | # Install docker 18 | RUN curl -sSL https://get.docker.com/ | sh 19 | 20 | # Install Pyip dependencies 21 | RUN pip3 install Flask untangle python-gvm 22 | # TODO: REMOVE TEMPORARY FIX: https://github.com/pypa/pip/issues/9108#issuecomment-723198497 23 | RUN pip3 install boto3==1.16.12 urllib3==1.24.3 botocore==1.19.12 24 | RUN pip3 install elasticsearch-curator==5.8.1 25 | 26 | # Add files from git 27 | ADD scripts /core4s/scripts 28 | ADD curator /core4s/curator 29 | ADD core4s.crontab /core4s/core4s.crontab 30 | ADD healthcheck.py /healthcheck.py 31 | ADD exporter.py /core4s/openvas/exporter.py 32 | 33 | # Add Crontab 34 | RUN crontab /core4s/core4s.crontab 35 | 36 | # Create cronchecker directory 37 | RUN mkdir -p /var/log/cronchecker/ 38 | 39 | #Check health 40 | HEALTHCHECK CMD curl --fail http://localhost:2981/ || exit 1 41 | 42 | # Run the command on container startup 43 | CMD cron &&\ 44 | python3 healthcheck.py 45 | -------------------------------------------------------------------------------- /docker/logstash/etc/conf.d/estransfer/suricata_es_transfer.conf: -------------------------------------------------------------------------------- 1 | input { pipeline { address => suricata_esoutput } } 2 | 3 | output { 4 | if [event][type] == "suricata" { 5 | if [event][subtype] == "stats" { 6 | elasticsearch { 7 | id => "output_elasticsearch_stats" 8 | hosts => [ "elasticsearch:9200" ] 9 | index => "suricata_stats-%{+YYYY.MM.dd}" 10 | # template => "${SYNLITE_SURICATA_TEMPLATE_PATH:/etc/logstash/synlite_suricata/templates}/synlite_suricata_stats.template.json" 11 | # template_name => "synlite-suricata_stats-1.1.0" 12 | # template_overwrite => "true" 13 | } } else { 14 | elasticsearch { 15 | id => "output_elasticsearch" 16 | hosts => [ "elasticsearch:9200" ] 17 | index => "suricata-%{+YYYY.MM.dd}" 18 | template => "/etc/logstash/BOX4s/suricata-template.json" 19 | template_name => "suricata-4s" 20 | template_overwrite => "true" 21 | }}}} 22 | -------------------------------------------------------------------------------- /docker/wiki/config.ru: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env ruby 2 | require 'rubygems' 3 | require 'gollum/app' 4 | 5 | gollum_path = File.expand_path('/wiki') 6 | wiki_options = {:universal_toc => false, index_page: "BOX4security", page_file_dir: "BOX4security"} 7 | Precious::App.set(:gollum_path, gollum_path) 8 | Precious::App.set(:default_markup, :markdown) 9 | Precious::App.set(:wiki_options, wiki_options) 10 | 11 | require 'rack' 12 | 13 | # set author 14 | class Precious::App 15 | before do 16 | session['gollum.author'] = { 17 | :name => env['HTTP_X_AUTH_USERNAME'], 18 | :email => "box@4sconsult.de", 19 | } 20 | end 21 | end 22 | 23 | class MapGollum 24 | def initialize base_path 25 | @mg = Rack::Builder.new do 26 | map '/' do 27 | run Proc.new { [302, { 'Location' => "/#{base_path}" }, []] } 28 | end 29 | map "/#{base_path}" do 30 | run Precious::App 31 | end 32 | end 33 | end 34 | 35 | def call(env) 36 | 
@mg.call(env) 37 | end 38 | end 39 | 40 | # Rack::Handler does not work with Ctrl + C. Use Rack::Server instead. 41 | Rack::Server.new(:app => MapGollum.new("wiki"), :Port => 80, :Host => '0.0.0.0').start 42 | 43 | -------------------------------------------------------------------------------- /docker/db/sql/filters.sql: -------------------------------------------------------------------------------- 1 | CREATE TABLE blocks_by_bpffilter 2 | ( 3 | id SERIAL primary key, 4 | src_ip inet, 5 | src_port integer, 6 | dst_ip inet, 7 | dst_port integer, 8 | proto varchar(4) 9 | ) 10 | WITH ( 11 | OIDS = FALSE 12 | ) 13 | TABLESPACE pg_default; 14 | ALTER TABLE blocks_by_bpffilter 15 | OWNER to postgres; 16 | INSERT INTO blocks_by_bpffilter (src_ip, src_port, dst_ip, dst_port, proto) VALUES ('127.0.0.1',0,'0.0.0.0',0,''); 17 | INSERT INTO blocks_by_bpffilter (src_ip, src_port, dst_ip, dst_port, proto) VALUES ('0.0.0.0',0,'127.0.0.1',0,''); 18 | CREATE TABLE blocks_by_logstashfilter 19 | ( 20 | id SERIAL primary key, 21 | src_ip inet, 22 | src_port integer, 23 | dst_ip inet, 24 | dst_port integer, 25 | proto varchar(4), 26 | signature_id varchar(10), 27 | signature varchar(256) 28 | ) 29 | WITH ( 30 | OIDS = FALSE 31 | ) 32 | TABLESPACE pg_default; 33 | ALTER TABLE blocks_by_logstashfilter 34 | OWNER to postgres; 35 | -------------------------------------------------------------------------------- /docker/web/source/templates/application/quick_alert_ids.yaml.j2: -------------------------------------------------------------------------------- 1 | name: Critical Intrusion Detection 2 | # (Required) 3 | # Type of alert. 4 | # the frequency rule type alerts when num_events events occur with timeframe time 5 | # The any rule will match everything. Every hit that the query returns will generate an alert. 6 | type: any 7 | 8 | {# realert: 9 | hours: 4 #} 10 | 11 | # take the whole day into account, vulns have the timestamp of the scan 12 | timeframe: 13 | minutes: 1 14 | 15 | # (Required) 16 | # Index to search, wildcard supported 17 | index: suricata-* 18 | 19 | filter: 20 | # Ignore 4s Alerts for now (gid=2) 21 | - query: 22 | query_string: 23 | query: "NOT alert.gid:2" 24 | - range: 25 | alert.severity: 26 | from: 4 27 | to: 5 28 | 29 | {% include "application/alert_email_conf.yaml.j2" %} 30 | {% raw %} 31 | alert_subject: "BOX4s IDS: {0} - {1} <-> {2}" 32 | {% endraw %} 33 | alert_subject_args: 34 | - alert.signature 35 | - source.domain 36 | - destination.domain 37 | alert_text: "{4} ({7}/{5}) \nSource: {0}:{1} <-> Destination: {2}:{3}\n Payload:\n{6}" 38 | alert_text_type: alert_text_only 39 | alert_text_args: ["source.domain", "source.port.number", "destination.domain", "destination.port.name", "alert.signature","log.severity", "payload_printable", "alert.severity"] 40 | -------------------------------------------------------------------------------- /docker/web/migrations/versions/532110801da9_.py: -------------------------------------------------------------------------------- 1 | """empty message 2 | 3 | Revision ID: 532110801da9 4 | Revises: 5 | Create Date: 2020-04-16 14:31:23.045988 6 | 7 | """ 8 | from alembic import op 9 | import sqlalchemy as sa 10 | from sqlalchemy.dialects import postgresql 11 | 12 | # revision identifiers, used by Alembic. 
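# (Alembic orders migrations via these identifiers; down_revision = None below marks this as the first migration in the chain.)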
13 | revision = '532110801da9' 14 | down_revision = None 15 | branch_labels = None 16 | depends_on = None 17 | 18 | 19 | def upgrade(): 20 | """Create Users table.""" 21 | op.create_table( 22 | 'user', 23 | sa.Column('id', sa.Integer(), nullable=False), 24 | sa.Column('is_active', sa.Boolean(), server_default='1', nullable=False), 25 | sa.Column('email', sa.String(length=255), nullable=False), 26 | sa.Column('email_confirmed_at', sa.DateTime(), nullable=True), 27 | sa.Column('password', sa.String(length=255), server_default='', nullable=False), 28 | sa.Column('first_name', sa.String(length=100), server_default='', nullable=False), 29 | sa.Column('last_name', sa.String(length=100), server_default='', nullable=False), 30 | sa.PrimaryKeyConstraint('id'), 31 | sa.UniqueConstraint('email'), 32 | ) 33 | 34 | 35 | def downgrade(): 36 | """Drop Users table.""" 37 | op.drop_table('user') 38 | # ### end Alembic commands ### 39 | -------------------------------------------------------------------------------- /scripts/Automation/versions.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python3 2 | """Fetches and returns all versions greater than installed one.""" 3 | import requests 4 | import semver 5 | import urllib3 6 | urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) 7 | API_VER = requests.get('http://localhost/api/ver/', verify=False).json() 8 | CURRVER = str(API_VER['version']) 9 | ENV = str(API_VER['env']) 10 | tags = requests.get('http://localhost/api/ver/releases/', verify=False).json() 11 | VERSIONS = [] 12 | # Source: https://stackoverflow.com/questions/11887762/how-do-i-compare-version-numbers-in-python 13 | for t in tags: 14 | # now compare the versions 15 | # discard all lower and equal versions 16 | if semver.compare(CURRVER, str(t['version'])) < 0: 17 | # semver.compare returns -1 if second argument is newer 18 | if not semver.parse(t['version'])['prerelease']: 19 | # Hide prereleases from VERSIONS 20 | VERSIONS.append(t['version']) 21 | 22 | # For development systems: 23 | if ENV == "dev": 24 | # add the latest tag if it is not in VERSIONS yet 25 | # so it is a prerelease actually 26 | latest = tags[0]['version'] 27 | if latest not in VERSIONS: 28 | VERSIONS.insert(0, latest) 29 | 30 | # !! Script Output!! 31 | # All Versions greater than installed one 32 | # Latest Release last 33 | for t in reversed(VERSIONS): 34 | print(t) 35 | -------------------------------------------------------------------------------- /scripts/System_Scripts/wait-for-healthy-container.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | container_name=$1 3 | shift 4 | 5 | RETURN_HEALTHY=0 6 | RETURN_STARTING=1 7 | RETURN_UNHEALTHY=2 8 | RETURN_UNKNOWN=3 9 | RETURN_ERROR=99 10 | 11 | function usage() { 12 | echo " 13 | Usage: wait-for-healthy-container.sh 14 | " 15 | return 16 | } 17 | 18 | function get_health_state { 19 | state=$(docker inspect -f '{{ .State.Health.Status }}' ${container_name} 2>/dev/null) 20 | return_code=$? 21 | if [[ "${state}" == "healthy" ]]; then 22 | return ${RETURN_HEALTHY} 23 | elif [[ "${state}" == "unhealthy" ]]; then 24 | return ${RETURN_UNHEALTHY} 25 | elif [[ "${state}" == "starting" ]]; then 26 | return ${RETURN_STARTING} 27 | else 28 | # Return unknown also in case of error, because we can retry 29 | return ${RETURN_UNKNOWN} 30 | fi 31 | } 32 | 33 | function wait_for() { 34 | echo "Wait for container '$container_name' to be healthy.." 
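# Poll the health state once per second until the container reports healthy; note that this loop has no timeout and waits indefinitely.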
35 | i=0 36 | while true; do 37 | get_health_state 38 | state=$? 39 | if [ ${state} -eq 0 ]; then 40 | echo "Container '$container_name' is healthy after ${i} seconds." 41 | exit 0 42 | fi 43 | sleep 1 44 | ((i++)) 45 | done 46 | } 47 | 48 | if [ -z ${container_name} ]; then 49 | usage 50 | exit 1 51 | else 52 | wait_for 53 | fi 54 | -------------------------------------------------------------------------------- /docker/elasticsearch/etc/elasticsearch.yml: -------------------------------------------------------------------------------- 1 | # ======================== Elasticsearch Configuration ========================= 2 | # Please consult the documentation for further information on configuration options: 3 | # https://www.elastic.co/guide/en/elasticsearch/reference/index.html 4 | # ---------------------------------- Cluster ----------------------------------- 5 | cluster.name: box4scluster 6 | # ------------------------------------ Node ------------------------------------ 7 | node.name: box4security 8 | # ----------------------------------- Paths ------------------------------------ 9 | path.data: /data/elasticsearch 10 | path.logs: /var/log/elasticsearch 11 | # Snapshot Repo 12 | path.repo: /data/elasticsearch_backup/Snapshots 13 | # ----------------------------------- Memory ----------------------------------- 14 | # ---------------------------------- Network ----------------------------------- 15 | network.host: 0.0.0.0 16 | discovery.type: single-node 17 | # --------------------------------- Discovery ---------------------------------- 18 | discovery.zen.ping.unicast.hosts: ["localhost"] 19 | discovery.zen.minimum_master_nodes: 1 20 | # ---------------------------------- Gateway ----------------------------------- 21 | gateway.recover_after_nodes: 1 22 | # ---------------------------------- Various ----------------------------------- 23 | xpack.ml.enabled: false 24 | xpack.security.enabled: false 25 | xpack.security.transport.ssl.enabled: false 26 | -------------------------------------------------------------------------------- /docker/web/source/wizard/templates/wizard/index.html: -------------------------------------------------------------------------------- 1 | {% extends "wizard/base.html" %} 2 | {% block content %} 3 | {% from "user/_macros.html" import render_field, render_checkbox_field, render_submit_field %} 4 |
5 |
6 |
7 |
8 | Initiale Einrichtung der BOX4security 9 |
10 |

11 | Zur Benutzung der BOX4security ist eine initiale Einrichtung erforderlich.
12 | Die Einrichtung erfolgt über eine Reihe von Formularen, die Sie nacheinander bearbeiten.
13 | Nach Bestätigung jedes Schrittes werden die hinterlegten Daten gespeichert.
14 | Sie haben jederzeit die Möglichkeit innerhalb der Schritte zurückzuspringen, um eine Einstellung zu korrigieren.
15 | Nachdem Sie in Schritt 6 die Daten abschließend geprüft haben, werden die Änderungen wirksam. 16 |

17 |
18 | Einrichtung starten 19 |
20 |
21 | {% endblock %} 22 | {% block scripts %} 23 | 27 | {% endblock %} 28 | -------------------------------------------------------------------------------- /docker/web/source/templates/application/quick_alert_vuln.yaml.j2: -------------------------------------------------------------------------------- 1 | name: Critical Vulnerability 2 | # (Required) 3 | # Type of alert. 4 | # the frequency rule type alerts when num_events events occur with timeframe time 5 | # The any rule will match everything. Every hit that the query returns will generate an alert. 6 | type: new_term 7 | 8 | {# realert: 9 | hours: 4 #} 10 | 11 | # Monitor the field uniqueVul 12 | fields: 13 | - "uniqueVul" 14 | 15 | # take the whole day into account, as vulns have the timestamp of the scan 16 | {# timeframe: 17 | days: 1 #} 18 | 19 | # run this alert every 4h 20 | run_every: 21 | minutes: 5 22 | 23 | # This means that we will query 90 days worth of data when ElastAlert starts to find which values of uniqueVul already exist 24 | terms_window_size: 25 | days: 90 26 | 27 | # (Required) 28 | # Index to search, wildcard supported 29 | index: logstash-vulnwhisperer-* 30 | 31 | filter: 32 | - range: 33 | risk_score: 34 | from: 7.5 35 | to: 10 36 | 37 | {% include "application/alert_email_conf.yaml.j2" %} 38 | {% raw %} 39 | alert_subject: "BOX4s Critical Vulnerability: {0}: {1} ({2})" 40 | {% endraw %} 41 | alert_subject_args: 42 | - client.domain 43 | - plugin_name 44 | - risk_score 45 | alert_text: "Client: {0} (Task: {6})\n{1} ({3} - {4})\n{2}\n\nSolution: {5}" 46 | alert_text_type: alert_text_only 47 | alert_text_args: ["client.domain","plugin_name","plugin_output", "risk_score", "risk_score_name", "solution", "task_name"] 48 | -------------------------------------------------------------------------------- /docker/suricata/etc/reference.config: -------------------------------------------------------------------------------- 1 | # config reference: system URL 2 | 3 | config reference: bugtraq http://www.securityfocus.com/bid/ 4 | config reference: bid http://www.securityfocus.com/bid/ 5 | config reference: cve http://cve.mitre.org/cgi-bin/cvename.cgi?name= 6 | #config reference: cve http://cvedetails.com/cve/ 7 | config reference: secunia http://www.secunia.com/advisories/ 8 | 9 | #whitehats is unfortunately gone 10 | config reference: arachNIDS http://www.whitehats.com/info/IDS 11 | 12 | config reference: McAfee http://vil.nai.com/vil/content/v_ 13 | config reference: nessus http://cgi.nessus.org/plugins/dump.php3?id= 14 | config reference: url http:// 15 | config reference: et http://doc.emergingthreats.net/ 16 | config reference: etpro http://doc.emergingthreatspro.com/ 17 | config reference: telus http:// 18 | config reference: osvdb http://osvdb.org/show/osvdb/ 19 | config reference: threatexpert http://www.threatexpert.com/report.aspx?md5= 20 | config reference: md5 http://www.threatexpert.com/report.aspx?md5= 21 | config reference: exploitdb http://www.exploit-db.com/exploits/ 22 | config reference: openpacket https://www.openpacket.org/capture/grab/ 23 | config reference: securitytracker http://securitytracker.com/id? 
24 | config reference: secunia http://secunia.com/advisories/ 25 | config reference: xforce http://xforce.iss.net/xforce/xfdb/ 26 | config reference: msft http://technet.microsoft.com/security/bulletin/ 27 | -------------------------------------------------------------------------------- /docker/dnsmasq/dnsmasq.conf: -------------------------------------------------------------------------------- 1 | port=5353 2 | resolv-file=/var/lib/box4s/resolv.personal 3 | # By default, dnsmasq will send queries to any of the upstream 4 | # servers it knows about and tries to favour servers to are known 5 | # to be up. Uncommenting this forces dnsmasq to try each query 6 | # with each server strictly in the order they appear in 7 | # /etc/resolv.conf 8 | #strict-order 9 | 10 | # Queue länge für das Logfile festlegen (5-100 - Standard 5). 11 | log-async = 10 12 | 13 | # On systems which support it, dnsmasq binds the wildcard address, 14 | # even when it is listening on only some interfaces. It then discards 15 | # requests that it shouldn't reply to. This has the advantage of 16 | # working even when interfaces come and go and change address. If you 17 | # want dnsmasq to really bind only the interfaces it is listening on, 18 | # uncomment this option. About the only time you may need this is when 19 | # running another nameserver on the same machine. 20 | #bind-interfaces 21 | 22 | # Set the cachesize here. 23 | cache-size=15000 24 | 25 | # Normally responses which come from /etc/hosts and the DHCP lease 26 | # file have Time-To-Live set as zero, which conventionally means 27 | # do not cache further. If you are happy to trade lower load on the 28 | # server for potentially stale date, you can set a time-to-live (in 29 | # seconds) here. 30 | local-ttl=36000 31 | 32 | # For debugging purposes, log each DNS query as it passes through 33 | # dnsmasq. 
34 | #log-queries 35 | -------------------------------------------------------------------------------- /docker/kibana/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM docker.elastic.co/kibana/kibana:7.9.0 2 | 3 | ARG ELASTIC_VERSION=7.9.0 4 | ARG WAZUH_VERSION=3.13.1 5 | ARG WAZUH_APP_VERSION="${WAZUH_VERSION}_${ELASTIC_VERSION}" 6 | 7 | COPY --chown=kibana:kibana etc/* /usr/share/kibana/config/ 8 | COPY --chown=root:root ./entry.sh /entrypoint.sh 9 | 10 | USER root 11 | RUN mkdir -p /usr/share/kibana/optimize/wazuh/config && \ 12 | mkdir -p /var/log/kibana/ && \ 13 | mkdir -p /var/lib/kibana && \ 14 | touch /var/log/kibana/kibana.log && \ 15 | chown -f kibana:kibana /usr/share/kibana/optimize/wazuh/config && \ 16 | chown kibana:kibana -R /var/log/kibana/ && \ 17 | chown kibana:kibana -R /var/lib/kibana/ && \ 18 | chown -R kibana:kibana /opt/kibana/optimize && \ 19 | chmod g+s /opt/kibana/optimize && \ 20 | chmod +x /entrypoint.sh 21 | 22 | RUN echo 'NODE_OPTIONS="--max-old-space-size=2048"' >> /etc/default/kibana 23 | 24 | USER kibana 25 | WORKDIR /usr/share/kibana 26 | RUN ./bin/kibana-plugin install https://packages.wazuh.com/wazuhapp/wazuhapp-${WAZUH_APP_VERSION}.zip && \ 27 | echo -e "hosts:\n - default:\n url: https://${INT_IP}\n port: 55000\n user: ${WAZUH_USER}\n password: ${WAZUH_PASS}\n" > /usr/share/kibana/optimize/wazuh/config/wazuh.yml 28 | 29 | HEALTHCHECK --retries=50 CMD curl -s -XGET 'http://127.0.0.1:5601/kibana/api/status' | grep -v "Kibana server is not ready yet" || exit 1 30 | USER kibana:kibana 31 | ENTRYPOINT ["/entrypoint.sh"] 32 | CMD ["/usr/share/kibana/bin/kibana"] 33 | -------------------------------------------------------------------------------- /docker/web/source/templates/user/manage_emails.html: -------------------------------------------------------------------------------- 1 | {% extends 'flask_user/_authorized_base.html' %} 2 | 3 | {% block content %} 4 | {% from "flask_user/_macros.html" import render_field, render_submit_field %} 5 |

{%trans%}Manage Emails{%endtrans%}

6 | 7 | 8 | 9 | {% for user_email in user_emails %} 10 | 11 | 12 | 19 | 29 | 30 | {% endfor %} 31 |
EmailStatusActions
{{ user_email.email }} 13 | {% if user_email.email_confirmed_at %} 14 | Confirmed 15 | {% else %} 16 | Confirm Email 17 | {% endif %} 18 | 20 | {% if user_email.is_primary %} 21 | Primary email 22 | {% else %} 23 | {% if user_email.email_confirmed_at %} 24 | Make primary | 25 | {% endif %} 26 | Delete 27 | {% endif %} 28 |
32 | 33 |
34 | {{ form.hidden_tag() }} 35 | {{ render_field(form.email) }} 36 | {{ render_submit_field(form.submit) }} 37 |
38 | 39 | {% endblock %} -------------------------------------------------------------------------------- /scripts/1stLevelRepair/repair_createSnapshot.sh: -------------------------------------------------------------------------------- 1 | # Called by web application - creates snapshot at /var/lib/box4s/snapshots/ 2 | # Create dir if not present 3 | timestamp=$(date +%d-%m-%Y_%H-%M-%S) 4 | #location to save snapshot to in the end 5 | snaplocation="/var/lib/box4s/snapshots" 6 | #location where snapshot first gets assembled - cannot contain copy targets 7 | templocation="/tmp" 8 | name="Snapshot-$timestamp" 9 | folder="$templocation/$name" 10 | #Create folder to store files temporarily 11 | mkdir -p $folder 12 | 13 | #helper to delete files 14 | function delete_If_Exists(){ 15 | # Helper to delete files and directories if they exist 16 | if [ -d $1 ]; then 17 | # Directory to remove 18 | sudo rm $1 -r 19 | fi 20 | if [ -f $1 ]; then 21 | # File to remove 22 | sudo rm $1 23 | fi 24 | } 25 | 26 | function copyFolder(){ 27 | #Copy input folder to temporary folder 28 | outFolder=$folder/$1 29 | mkdir $outFolder -p 30 | cp -r $1 $folder/$1 31 | } 32 | 33 | #COPY FILES 34 | #copy version to check if snapshot can be copied 35 | cp /var/lib/box4s/VERSION $folder 36 | copyFolder /etc/box4s 37 | copyFolder /var/lib/box4s 38 | delete_If_Exists $folder/var/lib/box4s/snapshots 39 | copyFolder /var/lib/postgresql 40 | copyFolder /var/lib/box4s_suricata_rules 41 | copyFolder /var/lib/logstash 42 | copyFolder /var/lib/elastalert 43 | copyFolder /var/lib/box4s_docs 44 | 45 | 46 | #create zip and remove snap_folder 47 | cd $templocation 48 | sudo zip -r $name.zip $name/ 49 | #move file to snaplocation and remove folder in temp location 50 | sudo mv $name.zip $snaplocation 51 | rm $folder -R 52 | -------------------------------------------------------------------------------- /scripts/1stLevelRepair/repair_snapshot.sh: -------------------------------------------------------------------------------- 1 | #$1 contains name of the snapshot to restore 2 | snaplocation="/var/lib/box4s/snapshots" 3 | #check if snap has .zip ending or not 4 | snap="$snaplocation/$1" 5 | directory="${1%.*}" 6 | tempDir="/tmp" 7 | snapDir="$tempDir/$directory" 8 | #Unzip snapshot 9 | sudo unzip $snap -d $tempDir 10 | #check version for equality 11 | if ! 
cmp -s /var/lib/box4s/VERSION $snapDir/VERSION 12 | then 13 | #versions not equal, exit 14 | exit 1 15 | fi 16 | 17 | #move saved files and change permissions 18 | sudo cp -rf $snapDir/etc / 19 | sudo cp -rf $snapDir/var / 20 | 21 | #### /etc/box4s #### 22 | sudo chown root:root /etc/box4s/ 23 | sudo chmod -R 777 /etc/box4s/ 24 | sudo chown -R root:44269 /etc/box4s/logstash 25 | sudo chmod 760 -R /etc/box4s/logstash 26 | 27 | #### /var/lib/box4s #### 28 | sudo chown root:root /var/lib/box4s 29 | sudo chmod -R 777 /var/lib/box4s 30 | 31 | #### /var/lib/postgresql #### 32 | sudo chown -R root:44269 /var/lib/postgresql/data 33 | sudo chmod 760 -R /var/lib/postgresql/data 34 | 35 | #### /var/lib/box4s_suricata_rules #### 36 | sudo chown root:root /var/lib/box4s_suricata_rules/ 37 | sudo chmod -R 777 /var/lib/box4s_suricata_rules/ 38 | 39 | #### /var/lib/logstash #### 40 | sudo chown root:root /var/lib/logstash 41 | sudo chmod -R 777 /var/lib/logstash 42 | 43 | #### /var/lib/elastalert #### 44 | sudo chown root:root /var/lib/elastalert/rules 45 | sudo chmod -R 777 /var/lib/elastalert/rules 46 | 47 | #### /var/lib/box4s_docs #### 48 | sudo chown root:root /var/lib/box4s_docs 49 | sudo chmod -R 777 /var/lib/box4s_docs 50 | 51 | sudo rm $snapDir -r 52 | -------------------------------------------------------------------------------- /docker/web/source/wizard/forms.py: -------------------------------------------------------------------------------- 1 | from flask_wtf import FlaskForm 2 | from wtforms_alchemy import ModelForm 3 | from .models import Network, NetworkType, System, SystemType, BOX4security 4 | from wtforms import SelectMultipleField, SelectField 5 | 6 | 7 | class NetworkForm(ModelForm, FlaskForm): 8 | """Form for Network model.""" 9 | class Meta: 10 | model = Network 11 | types = SelectMultipleField( 12 | 'Netz-Typ', 13 | coerce=int 14 | ) 15 | scancategory_id = SelectField( 16 | 'Scan-Kategorie', 17 | coerce=int 18 | ) 19 | 20 | 21 | class NetworkTypeForm(ModelForm, FlaskForm): 22 | """Form for NetworkType model.""" 23 | class Meta: 24 | model = NetworkType 25 | 26 | 27 | class SystemForm(ModelForm, FlaskForm): 28 | """Form for NetworkType model.""" 29 | class Meta: 30 | model = System 31 | types = SelectMultipleField( 32 | 'System-Typ', 33 | coerce=int 34 | ) 35 | network_id = SelectField( 36 | 'Netz', 37 | coerce=int 38 | ) 39 | 40 | 41 | class BOX4sForm(ModelForm, FlaskForm): 42 | """Form for BOX4s.""" 43 | class Meta: 44 | model = BOX4security 45 | dns_id = SelectField( 46 | 'DNS-Server', 47 | coerce=int 48 | ) 49 | gateway_id = SelectField( 50 | 'Gateway', 51 | coerce=int 52 | ) 53 | network_id = SelectField( 54 | 'Netz', 55 | coerce=int 56 | ) 57 | 58 | 59 | class SystemTypeForm(ModelForm, FlaskForm): 60 | """Form for SystemType model.""" 61 | class Meta: 62 | model = SystemType 63 | -------------------------------------------------------------------------------- /docker/suricata/scripts/ingest_testdata.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Location of the suricata conf 4 | CONF_FILE=/etc/suricata/suricata.yaml 5 | 6 | # Directory of where all the PCAP files are 7 | # Example of files to be downloaded with the wget command 8 | # wget -r -np -k https://archive.wrccdc.org/pcaps/2018/ 9 | mkdir -p /data/suricata/pcap/ 10 | chmod 777 -R /data/suricata/pcap/ 11 | PCAP_DIR=/data/suricata/pcap/ 12 | 13 | # Download just a few pcaps 14 | wget -P /data/suricata/pcap/ 
https://archive.wrccdc.org/pcaps/2018/wrccdc.2018-03-23.010356000000000.pcap.gz 15 | wget -P /data/suricata/pcap/ https://archive.wrccdc.org/pcaps/2018/wrccdc.2018-03-23.011834000000000.pcap.gz 16 | wget -P /data/suricata/pcap/ https://archive.wrccdc.org/pcaps/2018/wrccdc.2018-03-23.013421000000000.pcap.gz 17 | wget -P /data/suricata/pcap/ https://archive.wrccdc.org/pcaps/2018/wrccdc.2018-03-23.020844000000000.pcap.gz 18 | wget -P /data/suricata/pcap/ https://archive.wrccdc.org/pcaps/2018/wrccdc.2018-03-23.021720000000000.pcap.gz 19 | 20 | # Get all files in a list 21 | file_list=() 22 | while IFS= read -d $'\0' -r file ; do 23 | file_list=("${file_list[@]}" "$file") 24 | done < <(find "${PCAP_DIR}" -name *.pcap.gz -print0) 25 | 26 | # Simple for loop over all the files 27 | for i in "${!file_list[@]}" 28 | do 29 | PCAP_FILE=${file_list[$i]} 30 | echo "$i/${#file_list[@]} Processing file: $PCAP_FILE" 31 | gunzip -c "$PCAP_FILE" > /tmp/temp.pcap 32 | sudo suricata -v -c $CONF_FILE -r "/tmp/temp.pcap" --set unix-command.enabled=false 33 | done 34 | 35 | # Delete the pcaps, to save some storage 36 | sudo rm -r /data/suricata/pcap/* 37 | sudo rm /tmp/temp.pcap -------------------------------------------------------------------------------- /docker/web/Dockerfile: -------------------------------------------------------------------------------- 1 | from python:3.8.2-slim-buster as builder 2 | 3 | # Prevents Python from writing pyc files to disc 4 | ENV PYTHONDONTWRITEBYTECODE 1 5 | # Prevents Python from buffering stdout and stderr 6 | ENV PYTHONUNBUFFERED 1 7 | 8 | WORKDIR /usr/src/app 9 | ADD requirements.txt /usr/src/app/requirements.txt 10 | RUN pip install --upgrade pip && \ 11 | pip install flake8 && \ 12 | pip wheel --no-cache-dir --no-deps --wheel-dir /usr/src/app/wheels -r requirements.txt 13 | 14 | COPY . /usr/src/app 15 | RUN flake8 --ignore=E501,F401 . 16 | 17 | 18 | # Final 19 | FROM python:3.8.2-slim-buster 20 | ENV HOME=/home/app 21 | ENV APP_HOME=/home/app/web 22 | 23 | RUN mkdir -p /home/app && \ 24 | addgroup app && \ 25 | addgroup --gid 44269 boxforsecurity && \ 26 | useradd -g app -G boxforsecurity app && \ 27 | mkdir $APP_HOME 28 | WORKDIR $APP_HOME 29 | 30 | 31 | # Install curl 32 | RUN apt update && \ 33 | apt install -y curl && \ 34 | rm -rf /var/lib/apt/lists/* 35 | 36 | 37 | # copy and install from builder 38 | COPY --from=builder /usr/src/app/wheels /wheels 39 | COPY --from=builder /usr/src/app/requirements.txt . 40 | RUN pip install --upgrade pip && \ 41 | pip install --no-cache /wheels/* 42 | 43 | # copy source 44 | COPY . 
$APP_HOME 45 | RUN chown -R app:app $HOME && \ 46 | mkdir -p /var/log/box4s/ && \ 47 | chown -R app:app /var/log/box4s/ && \ 48 | mkdir -p /var/lib/box4s && \ 49 | chown -R app:app /var/lib/box4s/ && \ 50 | mkdir -p /etc/box4s && \ 51 | chown -R app:app /etc/box4s && \ 52 | mkdir -p /etc/nginx/certs && \ 53 | chown -R root:44269 /etc/nginx/certs 54 | 55 | # change to the app user 56 | USER app 57 | -------------------------------------------------------------------------------- /docker/core4s/scripts/Automation/score_calculation/calculate_scores.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Make the commands a little easier to read by putting the much used path into a variable 4 | DIR=$(echo "/core4s/scripts/Automation/score_calculation") 5 | 6 | # Get the data for the alert score 7 | curl -s -H "Content-type: application/json" -X POST http://elasticsearch:9200/suricata*/_search --data-binary @$DIR/res/alerts_buckets.query.json > $DIR/alerts_buckets.json 8 | # Social Media count of last 24hr: 9 | curl -s -H "Content-type: application/json" -X POST http://elasticsearch:9200/suricata*/_count --data-binary @$DIR/res/social_media.query.json > $DIR/social_media_count.json 10 | 11 | # Get the data for the vuln score 12 | curl -s -H "Content-type: application/json" -X POST http://elasticsearch:9200/logstash-vulnwhisperer-*/_search --data-binary @$DIR/res/cvss_buckets.query.json > $DIR/cvss_buckets.json 13 | 14 | # Calulate, echo and post value for ... 15 | # ... the alertscore 16 | ALERTSCORE=$(python3 $DIR/calculate_alert_score.py) 17 | curl -s -H "Content-type: application/json" -X POST http://elasticsearch:9200/scores/_doc --data-binary @/tmp/alerts.scores.json > /dev/null 18 | echo "Alertscore: $ALERTSCORE" 19 | 20 | # ... the vulnscore 21 | VULNSCORE=$(python3 $DIR/calculate_vuln_score.py) 22 | curl -s -H "Content-type: application/json" -X POST http://elasticsearch:9200/scores/_doc --data-binary @/tmp/vuln.scores.json > /dev/null 23 | echo "Vulnscore: $VULNSCORE" 24 | 25 | # Delete all temp files to keep the directory clean 26 | rm $DIR/cvss_buckets.json 27 | rm $DIR/alerts_buckets.json 28 | rm $DIR/social_media_count.json 29 | rm /tmp/alerts.scores.json 30 | rm /tmp/vuln.scores.json 31 | -------------------------------------------------------------------------------- /docker/web/migrations/versions/56e9b3f51ec8_.py: -------------------------------------------------------------------------------- 1 | """empty message 2 | 3 | Revision ID: 56e9b3f51ec8 4 | Revises: 532110801da9 5 | Create Date: 2020-04-22 07:36:12.800596 6 | 7 | """ 8 | from alembic import op 9 | import sqlalchemy as sa 10 | from sqlalchemy.dialects import postgresql 11 | 12 | # revision identifiers, used by Alembic. 13 | revision = '56e9b3f51ec8' 14 | down_revision = '532110801da9' 15 | branch_labels = None 16 | depends_on = None 17 | 18 | 19 | def upgrade(): 20 | """Upgrade to migration.""" 21 | # ### commands auto generated by Alembic - please adjust! 
### 22 | op.create_table('role', 23 | sa.Column('id', sa.Integer(), nullable=False), 24 | sa.Column('name', sa.String(length=50), nullable=True), 25 | sa.PrimaryKeyConstraint('id'), 26 | sa.UniqueConstraint('name') 27 | ) 28 | op.create_table('user_role', 29 | sa.Column('id', sa.Integer(), nullable=False), 30 | sa.Column('user_id', sa.Integer(), nullable=True), 31 | sa.Column('role_id', sa.Integer(), nullable=True), 32 | sa.ForeignKeyConstraint(['role_id'], ['role.id'], ondelete='CASCADE'), 33 | sa.ForeignKeyConstraint(['user_id'], ['user.id'], ondelete='CASCADE'), 34 | sa.PrimaryKeyConstraint('id') 35 | ) 36 | # ### end Alembic commands ### 37 | 38 | 39 | def downgrade(): 40 | """Downgrade to migration.""" 41 | # ### commands auto generated by Alembic - please adjust! ### 42 | op.drop_table('user_role') 43 | op.drop_table('role') 44 | # ### end Alembic commands ### 45 | -------------------------------------------------------------------------------- /docker/elastalert/etc/elastalert/config.yaml: -------------------------------------------------------------------------------- 1 | # The elasticsearch hostname for metadata writeback 2 | # Note that every rule can have its own elasticsearch host 3 | es_host: elasticsearch 4 | 5 | # The elasticsearch port 6 | es_port: 9200 7 | 8 | # This is the folder that contains the rule yaml files 9 | # Any .yaml file will be loaded as a rule 10 | rules_folder: rules 11 | 12 | # How often ElastAlert will query elasticsearch 13 | # The unit can be anything from weeks to seconds 14 | run_every: 15 | minutes: 1 16 | 17 | # ElastAlert will buffer results from the most recent 18 | # period of time, in case some log sources are not in real time 19 | buffer_time: 20 | minutes: 15 21 | 22 | # Optional URL prefix for elasticsearch 23 | #es_url_prefix: elasticsearch 24 | 25 | # Connect with TLS to elasticsearch 26 | #use_ssl: True 27 | 28 | # Verify TLS certificates 29 | #verify_certs: True 30 | 31 | # GET request with body is the default option for Elasticsearch. 32 | # If it fails for some reason, you can pass 'GET', 'POST' or 'source'. 33 | # See http://elasticsearch-py.readthedocs.io/en/master/connection.html?highlight=send_get_body_as#transport 34 | # for details 35 | #es_send_get_body_as: GET 36 | 37 | # Option basic-auth username and password for elasticsearch 38 | #es_username: someusername 39 | #es_password: somepassword 40 | 41 | # The index on es_host which is used for metadata storage 42 | # This can be a unmapped index, but it is recommended that you run 43 | # elastalert-create-index to set a mapping 44 | writeback_index: elastalert_status 45 | 46 | # If an alert fails for some reason, ElastAlert will retry 47 | # sending the alert until this time period has elapsed 48 | alert_time_limit: 49 | days: 2 50 | -------------------------------------------------------------------------------- /docker/web/source/templates/application/quick_alert_malware.yaml.j2: -------------------------------------------------------------------------------- 1 | name: Malware Network Traffic 2 | 3 | # (Required) 4 | # Type of alert. 5 | # the frequency rule type alerts when num_events events occur with timeframe time 6 | # The any rule will match everything. Every hit that the query returns will generate an alert. 
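# (For comparison, hypothetical values: a 'frequency' rule would instead set type: frequency, num_events: 50 and a timeframe over which those events must occur.)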
7 | type: any 8 | 9 | {# realert: 10 | hours: 4 #} 11 | 12 | timeframe: 13 | minutes: 1 14 | 15 | # (Required) 16 | # Index to search, wildcard supported 17 | index: suricata-* 18 | 19 | # (Required, new_term specific) 20 | # Monitor the field ip_address 21 | #fields: 22 | # - "source.ip" 23 | 24 | filter: 25 | - query: 26 | query_string: 27 | query: "(alert.signature:\"ET TROJAN\" OR alert.signature:\"ET CURRENT EVENTS\" OR alert.signature:\"ET MALWARE\" OR alert.signature:\"ET MOBILE MALWARE\" OR alert.signature:\"ET CNC\") AND (alert.signature:*ransom* OR alert.signature:*crypto* OR alert.signature:\"check-in\" OR alert.signature:checkin OR alert.signature:download* OR alert.signature:DL OR alert.signature:macro OR alert.signature:cnc OR alert.signature:dropper OR alert.signature:beacon OR alert.signature:backdoor OR alert.signature:c2 OR alert.signature:m2)" 28 | 29 | {% include "application/alert_email_conf.yaml.j2" %} 30 | {% raw %} 31 | alert_subject: "BOX4s Malware: {0} - {1} <-> {2}" 32 | {% endraw %} 33 | alert_subject_args: 34 | - alert.signature 35 | - source.domain 36 | - destination.domain 37 | alert_text: "{4} ({5}) \nSource: {0}:{1} <-> Destination: {2}:{3}\n Payload:\n{6}" 38 | alert_text_type: alert_text_only 39 | alert_text_args: ["source.domain", "source.port.number", "destination.domain", "destination.port.name", "alert.signature","log.severity", "payload_printable"] 40 | -------------------------------------------------------------------------------- /config/ssl/box4security-ssl.conf: -------------------------------------------------------------------------------- 1 | [ req ] 2 | default_bits = 4096 3 | default_keyfile = key.pem 4 | distinguished_name = subject 5 | req_extensions = req_ext 6 | x509_extensions = x509_ext 7 | string_mask = utf8only 8 | 9 | [ subject ] 10 | countryName = Country Name (2 letter code) 11 | countryName_default = DE 12 | 13 | stateOrProvinceName = State or Province Name (full name) 14 | stateOrProvinceName_default = NRW 15 | 16 | localityName = Locality Name (eg, city) 17 | localityName_default = Dortmund 18 | 19 | organizationName = Organization Name (eg, company) 20 | organizationName_default = 4sConsult GmbH 21 | 22 | commonName = Common Name (e.g. 
server FQDN or YOUR name) 23 | commonName_default = BOX4security 24 | 25 | emailAddress = Email Address 26 | emailAddress_default = box@4sconsult.de 27 | 28 | [ x509_ext ] 29 | 30 | subjectKeyIdentifier = hash 31 | authorityKeyIdentifier = keyid,issuer 32 | 33 | basicConstraints = CA:FALSE 34 | keyUsage = digitalSignature, keyEncipherment 35 | subjectAltName = @alternate_names 36 | nsComment = "BOX4security - Selbstsigniertes Zertifikat" 37 | 38 | # extendedKeyUsage = serverAuth, clientAuth 39 | 40 | [ req_ext ] 41 | 42 | subjectKeyIdentifier = hash 43 | 44 | basicConstraints = CA:FALSE 45 | keyUsage = digitalSignature, keyEncipherment 46 | subjectAltName = @alternate_names 47 | nsComment = "BOX4security - Selbstsigniertes Zertifikat" 48 | 49 | [ alternate_names ] 50 | 51 | DNS.1 = localhost 52 | DNS.2 = box4security 53 | DNS.3 = 127.0.0.1 54 | DNS.4 = ::1 55 | -------------------------------------------------------------------------------- /docker/elastalert/etc/elastalert-server/elastalert.yaml: -------------------------------------------------------------------------------- 1 | # The elasticsearch hostname for metadata writeback 2 | # Note that every rule can have its own elasticsearch host 3 | es_host: elasticsearch 4 | 5 | # The elasticsearch port 6 | es_port: 9200 7 | 8 | # This is the folder that contains the rule yaml files 9 | # Any .yaml file will be loaded as a rule 10 | rules_folder: rules 11 | 12 | # How often ElastAlert will query elasticsearch 13 | # The unit can be anything from weeks to seconds 14 | run_every: 15 | seconds: 5 16 | 17 | # ElastAlert will buffer results from the most recent 18 | # period of time, in case some log sources are not in real time 19 | buffer_time: 20 | minutes: 1 21 | 22 | # Optional URL prefix for elasticsearch 23 | #es_url_prefix: elasticsearch 24 | 25 | # Connect with TLS to elasticsearch 26 | #use_ssl: True 27 | 28 | # Verify TLS certificates 29 | #verify_certs: True 30 | 31 | # GET request with body is the default option for Elasticsearch. 32 | # If it fails for some reason, you can pass 'GET', 'POST' or 'source'. 33 | # See http://elasticsearch-py.readthedocs.io/en/master/connection.html?highlight=send_get_body_as#transport 34 | # for details 35 | #es_send_get_body_as: GET 36 | 37 | # Option basic-auth username and password for elasticsearch 38 | #es_username: someusername 39 | #es_password: somepassword 40 | 41 | # The index on es_host which is used for metadata storage 42 | # This can be a unmapped index, but it is recommended that you run 43 | # elastalert-create-index to set a mapping 44 | writeback_index: elastalert_status 45 | 46 | # If an alert fails for some reason, ElastAlert will retry 47 | # sending the alert until this time period has elapsed 48 | alert_time_limit: 49 | days: 2 50 | -------------------------------------------------------------------------------- /docker/web/migrations/versions/031dd699edaa_add_wizard_state.py: -------------------------------------------------------------------------------- 1 | """Add Wizard State 2 | 3 | Revision ID: 031dd699edaa 4 | Revises: d995a93c3a9c 5 | Create Date: 2020-11-04 10:34:08.622945 6 | 7 | """ 8 | from alembic import op 9 | import sqlalchemy as sa 10 | from sqlalchemy.dialects import postgresql 11 | from source.wizard.models import WizardState, WizardStateNames 12 | # revision identifiers, used by Alembic. 
13 | revision = '031dd699edaa' 14 | down_revision = 'd995a93c3a9c' 15 | branch_labels = None 16 | depends_on = None 17 | 18 | 19 | def upgrade(): 20 | """Upgrade to migration.""" 21 | # ### commands auto generated by Alembic - please adjust! ### 22 | op.create_table( 23 | 'wizardstatenames', 24 | sa.Column('id', sa.Integer(), nullable=False), 25 | sa.Column('name', sa.String(), nullable=True), 26 | sa.PrimaryKeyConstraint('id') 27 | ) 28 | op.create_table( 29 | 'wizardstate', 30 | sa.Column('id', sa.Integer(), nullable=False), 31 | sa.Column('state_id', sa.Integer(), nullable=False), 32 | sa.ForeignKeyConstraint(['state_id'], ['wizardstatenames.id'], ), 33 | sa.PrimaryKeyConstraint('id') 34 | ) 35 | op.bulk_insert(WizardStateNames.__table__, [ 36 | {'id': 1, 'name': 'Force Disable'}, 37 | {'id': 2, 'name': 'Enabled'}, 38 | {'id': 3, 'name': 'Completed'}, 39 | ]) 40 | op.bulk_insert(WizardState.__table__, [ 41 | {'state_id': 2}, 42 | ]) 43 | # ### end Alembic commands ### 44 | 45 | 46 | def downgrade(): 47 | """Downgrade to migration.""" 48 | # ### commands auto generated by Alembic - please adjust! ### 49 | op.drop_table('wizardstate') 50 | op.drop_table('wizardstatenames') 51 | # ### end Alembic commands ### 52 | -------------------------------------------------------------------------------- /docker/core4s/core4s.crontab: -------------------------------------------------------------------------------- 1 | # m h dom mon dow command 2 | 3 | # Execute Curator Daily 4 | @daily /usr/local/bin/curator --config /core4s/curator/curator.yml /core4s/curator/actions.yml && sh /core4s/scripts/Automation/croncheck.sh curator SUCCESS || sh /core4s/scripts/Automation/croncheck.sh curator FAILURE 5 | 6 | # Suricata update 7 | @daily docker exec suricata /root/scripts/update.sh > /dev/null 2>&1 && sh /core4s/scripts/Automation/croncheck.sh suricata_update SUCCESS || sh /core4s/scripts/Automation/croncheck.sh suricata_update FAILURE 8 | 9 | # OpenVAS update 10 | @daily docker exec openvas /root/update.sh > /dev/null 2>&1 && sh /core4s/scripts/Automation/croncheck.sh openvas_update SUCCESS || sh /core4s/scripts/Automation/croncheck.sh openvas_update FAILURE 11 | 12 | # Collect OpenVAS Reports 13 | 50 * * * * python3 /core4s/openvas/exporter.py > /var/log/cronchecker/vulnwhisp.log && sh /core4s/scripts/Automation/croncheck.sh vulnwhisperer SUCCESS || sh /core4s/scripts/Automation/croncheck.sh vulnwhisperer FAILURE 14 | 15 | # Leere eve.json 16 | 0 */6 * * * echo "" > /core4s/workfolder/suricata/eve.json && sh /core4s/scripts/Automation/croncheck.sh delete_suricata_log SUCCESS || sh /core4s/scripts/Automation/croncheck.sh delete_suricata_log FAILURE 17 | 18 | # Update Resources on Sunday at 9 am 19 | 0 9 * * 0 /bin/bash /core4s/scripts/Automation/resourceupdate.sh && sh /core4s/scripts/Automation/croncheck.sh resourceupdate_sunday SUCCESS || sh /core4s/scripts/Automation/croncheck.sh resourceupdate_sunday FAILURE 20 | 21 | # Score calculation 22 | */15 * * * * /bin/bash /core4s/scripts/Automation/score_calculation/calculate_scores.sh > /dev/null 2>&1 && sh /core4s/scripts/Automation/croncheck.sh score_calc SUCCESS || sh /core4s/scripts/Automation/croncheck.sh score_calc FAILURE 23 | -------------------------------------------------------------------------------- /docker/elastalert/etc/elastalert-server/elastalert-test.yaml: -------------------------------------------------------------------------------- 1 | # NOTE: This config is used when testing a rule 2 | 3 | # The elasticsearch hostname for metadata 
writeback 4 | # Note that every rule can have its own elasticsearch host 5 | es_host: elasticsearch 6 | 7 | # The elasticsearch port 8 | es_port: 9200 9 | 10 | # This is the folder that contains the rule yaml files 11 | # Any .yaml file will be loaded as a rule 12 | rules_folder: rules 13 | 14 | # How often ElastAlert will query elasticsearch 15 | # The unit can be anything from weeks to seconds 16 | run_every: 17 | seconds: 5 18 | 19 | # ElastAlert will buffer results from the most recent 20 | # period of time, in case some log sources are not in real time 21 | buffer_time: 22 | minutes: 1 23 | 24 | # Optional URL prefix for elasticsearch 25 | #es_url_prefix: elasticsearch 26 | 27 | # Connect with TLS to elasticsearch 28 | #use_ssl: True 29 | 30 | # Verify TLS certificates 31 | #verify_certs: True 32 | 33 | # GET request with body is the default option for Elasticsearch. 34 | # If it fails for some reason, you can pass 'GET', 'POST' or 'source'. 35 | # See http://elasticsearch-py.readthedocs.io/en/master/connection.html?highlight=send_get_body_as#transport 36 | # for details 37 | #es_send_get_body_as: GET 38 | 39 | # Option basic-auth username and password for elasticsearch 40 | #es_username: someusername 41 | #es_password: somepassword 42 | 43 | # The index on es_host which is used for metadata storage 44 | # This can be a unmapped index, but it is recommended that you run 45 | # elastalert-create-index to set a mapping 46 | writeback_index: elastalert_status 47 | 48 | # If an alert fails for some reason, ElastAlert will retry 49 | # sending the alert until this time period has elapsed 50 | alert_time_limit: 51 | days: 2 52 | -------------------------------------------------------------------------------- /docker/heartbeat/etc/heartbeat.yml: -------------------------------------------------------------------------------- 1 | ################### Heartbeat Configuration Example ######################### 2 | # You can find the full configuration reference here: 3 | # https://www.elastic.co/guide/en/beats/heartbeat/index.html 4 | ############################# Heartbeat ###################################### 5 | # Configure monitors 6 | heartbeat.config.monitors: 7 | path: /etc/heartbeat/monitors.d/*.yml 8 | reload.enabled: true 9 | reload.period: 10s 10 | #==================== Elasticsearch template setting ========================== 11 | setup.template.settings: 12 | index.number_of_shards: 1 13 | index.codec: best_compression 14 | #_source.enabled: false 15 | #================================ General ===================================== 16 | #============================== Dashboards ===================================== 17 | #============================== Kibana ===================================== 18 | #============================= Elastic Cloud ================================== 19 | #================================ Outputs ===================================== 20 | #-------------------------- Elasticsearch output ------------------------------ 21 | #----------------------------- Logstash output -------------------------------- 22 | output.logstash: 23 | hosts: ["logstash:5044"] 24 | #================================ Logging ===================================== 25 | # Available log levels are: error, warning, info, debug 26 | logging.level: info 27 | logging.to_files: true 28 | logging.files: 29 | path: /var/log/heartbeat 30 | name: heartbeat 31 | keepfiles: 7 32 | permissions: 0644 33 | #============================== Xpack Monitoring =============================== 34 | 
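# Ships Heartbeat's own monitoring metrics directly to Elasticsearch, separate from the event output sent to Logstash above.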
xpack.monitoring.enabled: true 35 | xpack.monitoring.elasticsearch: 36 | hosts: ["elasticsearch:9200"] 37 | -------------------------------------------------------------------------------- /scripts/1stLevelRepair/repair_format.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | set -e 3 | # Log file to use 4 | # Create path if allowed or do NOP 5 | mkdir -p /var/log/box4s/1stLevelRepair || : 6 | LOG_DIR="/var/log/box4s/1stLevelRepair" 7 | if [[ ! -w $LOG_DIR ]]; then 8 | LOG_DIR="$HOME" 9 | fi 10 | 11 | LOG=$LOG_DIR/format_drive.log 12 | 13 | # Do not use interactive debian frontend. 14 | export DEBIAN_FRONTEND=noninteractive 15 | 16 | # Forward fd2 to the console 17 | # exec 2>&1 18 | # Forward fd1 to $LOG 19 | exec 2>&1 1>>${LOG} 20 | 21 | # 22 | #flag: empty - when used the /data structure is not recreated 23 | # 24 | echo -n "Starting to wipe Data.. " 1>&2 25 | sudo srm -zr /data 26 | echo "[ DONE ]" 1>&2 27 | 28 | 29 | echo -n "Recreating file structure to allow new Data.. " 1>&2 30 | sudo mkdir -p /data 31 | sudo chown root:root /data 32 | sudo chmod 777 /data 33 | sudo mkdir -p /data/suricata/ 34 | sudo touch /data/suricata/eve.json 35 | 36 | #Recreate Docker Volumes 37 | #Recreate Data 38 | sudo docker volume create --driver local --opt type=none --opt device=/data --opt o=bind data 39 | sudo chown -R root:44269 /data 40 | sudo chmod 760 -R /data 41 | 42 | 43 | #Recreate Postgresql 44 | sudo mkdir -p /var/lib/postgresql/data 45 | sudo docker volume create --driver local --opt type=none --opt device=/var/lib/postgresql/data --opt o=bind varlib_postgresql 46 | sudo chown -R root:44269 /var/lib/postgresql/data 47 | sudo chmod 760 -R /var/lib/postgresql/data 48 | 49 | # Recreate Elastic Volume 50 | sudo mkdir /data/elasticsearch -p 51 | sudo mkdir /data/elasticsearch_backup/Snapshots -p 52 | # Elasticsearch is somewhat special... 53 | sudo chown -R 1000:0 /data/elasticsearch 54 | sudo chown -R 1000:0 /data/elasticsearch_backup 55 | sudo chmod 760 -R /data/elasticsearch 56 | sudo chmod 760 -R /data/elasticsearch_backup 57 | echo "[ DONE ]" 1>&2 58 | -------------------------------------------------------------------------------- /docker/web/source/templates/user/reset_password.html: -------------------------------------------------------------------------------- 1 | {% extends 'user_base.html' %} 2 | {% block user %} 3 | {% from "user/_macros.html" import render_field, render_checkbox_field, render_submit_field %} 4 | 5 | 6 |
7 |
8 |

9 |
10 |

Passwort zurücksetzen

11 |
12 |

13 |
14 | {{ form.hidden_tag() }} 15 |
16 |
17 |
18 | 19 | 20 |
21 |
22 | {% if form.new_password.errors %} 23 | {% for e in form.new_password.errors %} 24 |

{{ e }}

25 | {% endfor %} 26 | {% endif %} 27 | {% if user_manager.USER_REQUIRE_RETYPE_PASSWORD %} 28 |
29 |
30 | 31 | 32 |
33 |
34 | {% if form.retype_password.errors %} 35 | {% for e in form.retype_password.errors %} 36 |

{{ e }}

37 | {% endfor %} 38 | {% endif %} 39 | {% endif %} 40 | 41 |
42 |
43 | 44 | {% endblock %} 45 | -------------------------------------------------------------------------------- /docker/metricbeat/etc/metricbeat.yml: -------------------------------------------------------------------------------- 1 | ###################### Metricbeat Configuration Example ####################### 2 | # You can find the full configuration reference here: 3 | # https://www.elastic.co/guide/en/beats/metricbeat/index.html 4 | #========================== Modules configuration ============================ 5 | metricbeat.config.modules: 6 | path: ${path.config}/modules.d/*.yml 7 | reload.enabled: true 8 | #==================== Elasticsearch template setting ========================== 9 | setup.template.settings: 10 | index.number_of_shards: 1 11 | index.codec: best_compression 12 | #_source.enabled: false 13 | #================================ General ===================================== 14 | #============================== Dashboards ===================================== 15 | #============================== Kibana ===================================== 16 | setup.kibana: 17 | #============================= Elastic Cloud ================================== 18 | #================================ Outputs ===================================== 19 | #-------------------------- Elasticsearch output ------------------------------ 20 | output.elasticsearch: 21 | hosts: ["elasticsearch:9200"] 22 | #----------------------------- Logstash output -------------------------------- 23 | #================================ Processors ===================================== 24 | processors: 25 | - add_host_metadata: ~ 26 | #================================ Logging ===================================== 27 | logging.level: info 28 | logging.to_files: true 29 | logging.files: 30 | path: /var/log/metricbeat 31 | name: metricbeat 32 | keepfiles: 7 33 | permissions: 0644 34 | #============================== Xpack Monitoring =============================== 35 | monitoring.enabled: true 36 | #================================= Migration ================================== 37 | -------------------------------------------------------------------------------- /docker/web/migrations/versions/2bcd96b138e4_.py: -------------------------------------------------------------------------------- 1 | """empty message 2 | 3 | Revision ID: 2bcd96b138e4 4 | Revises: 045ed1db87f6 5 | Create Date: 2020-04-22 08:22:55.727836 6 | 7 | """ 8 | from alembic import op 9 | import sqlalchemy as sa 10 | from sqlalchemy.dialects import postgresql 11 | from source.models import Role 12 | 13 | # revision identifiers, used by Alembic. 
14 | revision = '2bcd96b138e4' 15 | down_revision = '045ed1db87f6' 16 | branch_labels = None 17 | depends_on = None 18 | 19 | 20 | def upgrade(): 21 | """Upgrade to migration.""" 22 | op.bulk_insert(Role.__table__, 23 | [ 24 | {'id': 1, 'name': 'Super Admin', 'description': 'Super Admin'}, 25 | {'id': 2, 'name': 'Filter', 'description': 'Sehen und Anlegen von Filtern'}, 26 | {'id': 3, 'name': 'Updates', 'description': 'Ansehen und Starten von Updates'}, 27 | {'id': 4, 'name': 'User-Management', 'description': 'Bearbeiten und Anlegen von Benutzern'}, 28 | {'id': 5, 'name': 'FAQ', 'description': 'Ansicht FAQ und Benutzung 4sConsult Kontaktformular'}, 29 | {'id': 6, 'name': 'Dashboards-Master', 'description': 'Freigabe für alle Dashboards'}, 30 | {'id': 7, 'name': 'SIEM', 'description': 'Freigabe für SIEM-Dashboards'}, 31 | {'id': 8, 'name': 'Schwachstellen', 'description': 'Freigabe für Schwachstellen-Dashboards'}, 32 | {'id': 9, 'name': 'Netzwerk', 'description': 'Freigabe für Netzwerk-Dashboards'}, 33 | ]) 34 | # ### end Alembic commands ### 35 | 36 | 37 | def downgrade(): 38 | """Downgrade to migration.""" 39 | # ### commands auto generated by Alembic - please adjust! ### 40 | op.execute('DELETE FROM "role" WHERE id<10') 41 | # ### end Alembic commands ### 42 | -------------------------------------------------------------------------------- /docker/core4s/scripts/Automation/score_calculation/res/vuln_score.json: -------------------------------------------------------------------------------- 1 | { 2 | "query": { 3 | "range": { 4 | "@timestamp": { 5 | "gte": "now-7d/d", 6 | "lt": "now" 7 | } 8 | } 9 | }, 10 | "size": 0, 11 | "aggs": { 12 | "uniqueVul": { 13 | "terms": { 14 | "field": "uniqueVul", 15 | "size": 5000 16 | }, 17 | "aggs": { 18 | "cvssUniqueVul": { 19 | "term": { 20 | "field": "cvss" 21 | } 22 | }, 23 | "topUniqueVul": { 24 | "top_hits": { 25 | "sort": [ 26 | { 27 | "@timestamp": "asc" 28 | } 29 | ], 30 | "_source": ["uniqueVul", "client.domain", "cvss", "@timestamp"], 31 | "size": 1 32 | } 33 | } 34 | } 35 | } 36 | } 37 | } 38 | -------------------------------------------------------------------------------- /docker/logstash/etc/pipelines.yml: -------------------------------------------------------------------------------- 1 | - pipeline.id: estransfer_openvas_output 2 | path.config: "/etc/logstash/conf.d/estransfer/openvas_es_transfer.conf" 3 | - pipeline.id: estransfer_suricata_output 4 | path.config: "/etc/logstash/conf.d/estransfer/suricata_es_transfer.conf" 5 | - pipeline.id: estransfer_heartbeat_output 6 | path.config: "/etc/logstash/conf.d/estransfer/heartbeat_es_transfer.conf" 7 | - pipeline.id: estransfer_metricbeat_output 8 | path.config: "/etc/logstash/conf.d/estransfer/metricbeat_es_transfer.conf" 9 | 10 | - pipeline.id: openvas_filter 11 | path.config: "/etc/logstash/conf.d/openvas/*.conf" 12 | pipeline.workers: 1 13 | - pipeline.id: suricata_filter 14 | queue.type: memory 15 | pipeline.batch.delay: 150 16 | queue.checkpoint.interval: 500 17 | path.config: "/etc/logstash/conf.d/suricata/*.conf" 18 | - pipeline.id: heartbeat_filter 19 | path.config: "/etc/logstash/conf.d/heartbeat/*.conf" 20 | - pipeline.id: metricbeat_filter 21 | path.config: "/etc/logstash/conf.d/metricbeat/*.conf" 22 | 23 | - pipeline.id: beats_input 24 | queue.type: memory 25 | queue.checkpoint.interval: 500 26 | config.string: | 27 | input { beats { 28 | id => "input_beats" 29 | client_inactivity_timeout => 180 30 | host => "0.0.0.0" 31 | port => "5044" 32 | }} 33 | output { 34 | if 
[@metadata][beat] == "heartbeat" { 35 | pipeline { send_to => ["heartbeat_pipe"] } 36 | } 37 | else if [fields][event][type] == "suricata" { 38 | pipeline { send_to => ["suricata_pipe"] } 39 | } 40 | else if [@metadata][beat] == "metricbeat" { 41 | pipeline { send_to => ["metricbeat_pipe"] } 42 | } 43 | else if [fields][event][type] == "openvas" { 44 | pipeline { send_to => ["openvas_pipe"] } 45 | } 46 | 47 | else { file { path => "/var/log/logstash/logstash_debug" } } 48 | } 49 | 50 | -------------------------------------------------------------------------------- /docker/logstash/etc/startup.options: -------------------------------------------------------------------------------- 1 | ################################################################################ 2 | # These settings are ONLY used by $LS_HOME/bin/system-install to create a custom 3 | # startup script for Logstash and is not used by Logstash itself. It should 4 | # automagically use the init system (systemd, upstart, sysv, etc.) that your 5 | # Linux distribution uses. 6 | # 7 | # After changing anything here, you need to re-run $LS_HOME/bin/system-install 8 | # as root to push the changes to the init script. 9 | ################################################################################ 10 | 11 | # Override Java location 12 | #JAVACMD=/usr/bin/java 13 | 14 | # Set a home directory 15 | LS_HOME=/usr/share/logstash 16 | 17 | # logstash settings directory, the path which contains logstash.yml 18 | LS_SETTINGS_DIR=/etc/logstash 19 | 20 | # Arguments to pass to logstash 21 | LS_OPTS="--path.settings ${LS_SETTINGS_DIR}" 22 | 23 | # Arguments to pass to java 24 | LS_JAVA_OPTS="" 25 | 26 | # pidfiles aren't used the same way for upstart and systemd; this is for sysv users. 27 | LS_PIDFILE=/var/run/logstash.pid 28 | 29 | # user and group id to be invoked as 30 | LS_USER=logstash 31 | LS_GROUP=logstash 32 | 33 | # Enable GC logging by uncommenting the appropriate lines in the GC logging 34 | # section in jvm.options 35 | LS_GC_LOG_FILE=/var/log/logstash/gc.log 36 | 37 | # Open file limit 38 | LS_OPEN_FILES=16384 39 | 40 | # Nice level 41 | LS_NICE=19 42 | 43 | # Change these to have the init script named and described differently 44 | # This is useful when running multiple instances of Logstash on the same 45 | # physical box or vm 46 | SERVICE_NAME="logstash" 47 | SERVICE_DESCRIPTION="logstash" 48 | 49 | # If you need to run a command or script before launching Logstash, put it 50 | # between the lines beginning with `read` and `EOM`, and uncomment those lines. 51 | ### 52 | ## read -r -d '' PRESTART << EOM 53 | ## EOM 54 | -------------------------------------------------------------------------------- /docker/web/source/templates/user/_macros.html: -------------------------------------------------------------------------------- 1 | {% macro render_field(field, label=None, label_visible=true, right_url=None, right_label=None) -%} 2 |
3 | {% if field.type != 'HiddenField' and label_visible %} 4 | {% if not label %}{% set label=field.label.text %}{% endif %} 5 | 6 | {% endif %} 7 | {{ field(class_='field', **kwargs) }} 8 | {% if field.errors %} 9 | {% for e in field.errors %} 10 |

{{ e }}

11 | {% endfor %} 12 | {% endif %} 13 |
14 | {%- endmacro %} 15 | 16 | {% macro render_checkbox_field(field, label=None) -%} 17 | {% if not label %}{% set label=field.label.text %}{% endif %} 18 |
19 | 22 |
23 | {%- endmacro %} 24 | 25 | {% macro render_radio_field(field) -%} 26 | {% for value, label, checked in field.iter_choices() %} 27 |
28 | 32 |
33 | {% endfor %} 34 | {%- endmacro %} 35 | 36 | {% macro render_submit_field(field, label=None, tabindex=None) -%} 37 | {% if not label %}{% set label=field.label.text %}{% endif %} 38 | {##} 39 | 42 | {%- endmacro %} 43 | -------------------------------------------------------------------------------- /config/dashboards/Patterns/scores.ndjson: -------------------------------------------------------------------------------- 1 | {"attributes":{"fields":"[{\"name\":\"_id\",\"type\":\"string\",\"esTypes\":[\"_id\"],\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":false},{\"name\":\"_index\",\"type\":\"string\",\"esTypes\":[\"_index\"],\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":false},{\"name\":\"_score\",\"type\":\"number\",\"count\":0,\"scripted\":false,\"searchable\":false,\"aggregatable\":false,\"readFromDocValues\":false},{\"name\":\"_source\",\"type\":\"_source\",\"esTypes\":[\"_source\"],\"count\":0,\"scripted\":false,\"searchable\":false,\"aggregatable\":false,\"readFromDocValues\":false},{\"name\":\"_type\",\"type\":\"string\",\"esTypes\":[\"_type\"],\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":false},{\"name\":\"rules.text\",\"type\":\"string\",\"esTypes\":[\"keyword\"],\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":true},{\"name\":\"rules.weight\",\"type\":\"number\",\"esTypes\":[\"float\"],\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":true},{\"name\":\"score_type\",\"type\":\"string\",\"esTypes\":[\"text\"],\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":false,\"readFromDocValues\":false},{\"name\":\"timestamp\",\"type\":\"date\",\"esTypes\":[\"date\"],\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":true},{\"name\":\"value\",\"type\":\"number\",\"esTypes\":[\"float\"],\"count\":0,\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":true}]","timeFieldName":"timestamp","title":"scores*"},"id":"8d0c6650-426d-11ea-bbd4-bb7e0278945f","migrationVersion":{"index-pattern":"7.6.0"},"references":[],"type":"index-pattern","updated_at":"2020-08-28T12:32:25.904Z","version":"WzY2NSwyMzRd"} 2 | {"exportedCount":1,"missingRefCount":0,"missingReferences":[]} -------------------------------------------------------------------------------- /docker/web/migrations/versions/9a02836f6117_system_and_types.py: -------------------------------------------------------------------------------- 1 | """system and types 2 | 3 | Revision ID: 9a02836f6117 4 | Revises: 5aadb38f6936 5 | Create Date: 2020-10-21 09:40:39.988041 6 | 7 | """ 8 | from alembic import op 9 | import sqlalchemy as sa 10 | from sqlalchemy.dialects import postgresql 11 | 12 | # revision identifiers, used by Alembic. 
13 | revision = '9a02836f6117' 14 | down_revision = '5aadb38f6936' 15 | branch_labels = None 16 | depends_on = None 17 | 18 | 19 | def upgrade(): 20 | """Upgrade to migration.""" 21 | op.create_table( 22 | 'system', 23 | sa.Column('id', sa.Integer(), nullable=False), 24 | sa.Column('name', sa.String(length=100), nullable=True), 25 | sa.Column('ip_address', sa.String(length=24), nullable=True), 26 | sa.Column('location', sa.String(length=255), nullable=True), 27 | sa.Column('scan_enabled', sa.Boolean(), nullable=True), 28 | sa.Column('ids_enabled', sa.Boolean(), nullable=True), 29 | sa.PrimaryKeyConstraint('id'), 30 | sa.UniqueConstraint('name') 31 | ) 32 | op.create_table( 33 | 'systemtype', 34 | sa.Column('id', sa.Integer(), nullable=False), 35 | sa.Column('name', sa.String(length=100), nullable=True), 36 | sa.PrimaryKeyConstraint('id') 37 | ) 38 | op.create_table( 39 | 'system_systemtype', 40 | sa.Column('id', sa.Integer(), nullable=False), 41 | sa.Column('system_id', sa.Integer(), nullable=True), 42 | sa.Column('systemtype_id', sa.Integer(), nullable=True), 43 | sa.ForeignKeyConstraint(['system_id'], ['system.id'], ondelete='CASCADE'), 44 | sa.ForeignKeyConstraint(['systemtype_id'], ['systemtype.id'], ondelete='CASCADE'), 45 | sa.PrimaryKeyConstraint('id') 46 | ) 47 | 48 | 49 | def downgrade(): 50 | """Downgrade to migration.""" 51 | op.drop_table('system_systemtype') 52 | op.drop_table('systemtype') 53 | op.drop_table('system') 54 | -------------------------------------------------------------------------------- /docker/filebeat/etc/filebeat.yml: -------------------------------------------------------------------------------- 1 | # You can find the full configuration reference here: 2 | # https://www.elastic.co/guide/en/beats/filebeat/index.html 3 | #=========================== Filebeat inputs ============================= 4 | filebeat.inputs: 5 | - type: log 6 | enabled: false 7 | - type: log 8 | enabled: true 9 | #harvester_limit: 1 10 | close_eof: true 11 | json.keys_under_root: true 12 | json.add_error_key: true 13 | json.message_key: log 14 | scan.sort: filename 15 | paths: 16 | - /var/lib/logstash/openvas/*.json 17 | fields: 18 | event.type: openvas 19 | 20 | - type: log 21 | enabled: true 22 | paths: 23 | - /data/suricata/eve.json 24 | fields: 25 | event.type: suricata 26 | #============================= Filebeat modules =============================== 27 | filebeat.config.modules: 28 | path: ${path.config}/modules.d/*.yml 29 | reload.enabled: false 30 | #==================== Elasticsearch template setting ========================== 31 | #================================ General ===================================== 32 | #============================== Dashboards ===================================== 33 | #============================== Kibana ===================================== 34 | #============================= Elastic Cloud ================================== 35 | #================================ Outputs ===================================== 36 | #-------------------------- Elasticsearch output ------------------------------ 37 | #----------------------------- Logstash output -------------------------------- 38 | output.logstash: 39 | hosts: ["logstash:5044"] 40 | #================================ Logging ===================================== 41 | logging.level: info 42 | logging.to_files: true 43 | logging.files: 44 | path: /var/log/filebeat 45 | name: filebeat 46 | keepfiles: 7 47 | permissions: 0644 48 | #============================== Xpack Monitoring 
=============================== 49 | #xpack.monitoring.enabled: false 50 | -------------------------------------------------------------------------------- /scripts/Development/backup4srepos.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | REPOS=( 3 | ssh://git@gitlab.com/4sconsult/box4s-license-server.git 4 | ssh://git@gitlab.com/4sconsult/elastic-standard.git 5 | ssh://git@gitlab.com/4sconsult/azure.git 6 | ssh://git@gitlab.com/4sconsult/encryptpitsa.git 7 | ssh://git@gitlab.com/4sconsult/docs.git 8 | ssh://git@gitlab.com/4sconsult/scrum-guide.git 9 | ssh://git@gitlab.com/4sconsult/box4s.git 10 | ) 11 | DATE=$(date +%d.%m.%Y) 12 | echo "Creating encrypted 4sConsult repo backups for $DATE." 13 | echo -n "Recreating working folders.. " 14 | rm -rf /tmp/backup4srepos 15 | mkdir -p /tmp/backup4srepos 16 | echo -n "[ /tmp/backup4srepos " 17 | rm -rf /tmp/backup4sbundles 18 | mkdir -p /tmp/backup4sbundles 19 | echo " /tmp/backup4sbundles ]" 20 | cd /tmp/backup4srepos 21 | echo -n "Mirroring repos.. [ " 22 | for r in "${REPOS[@]}"; do 23 | echo -n "${r##*/} " 24 | git clone --mirror $r >/dev/null 2>&1 25 | done 26 | echo " ]" 27 | echo "Bundling repos.." 28 | for D in `find /tmp/backup4srepos -mindepth 1 -maxdepth 1 -type d` 29 | do 30 | cd $D 31 | repo=${PWD##*/} 32 | echo -n "- $repo " 33 | git bundle create $repo.bundle --all >/dev/null 2>&1 34 | echo "[OK]" 35 | # No such file may occur here if the repository is empty. 36 | # No bundles are created for emtpy repositories. 37 | cp $repo.bundle /tmp/backup4sbundles 38 | done 39 | 40 | echo -n "Archiving the bundles.. " 41 | cd /tmp/backup4sbundles 42 | tar cfz 4srepos-$DATE.tar.gz * 43 | echo "[OK]" 44 | echo "Archive contents: " 45 | tar -ztvf 4srepos-$DATE.tar.gz 46 | 47 | echo -n "Encrypting archive for developers' keys.. " 48 | gpg --encrypt \ 49 | --recipient christoph.meyer@4sconsult.de \ 50 | --recipient constantin.tillmann@4sconsult.de \ 51 | --recipient jan.guenther@4sconsult.de \ 52 | 4srepos-$DATE.tar.gz 53 | echo "[OK]" 54 | cp 4srepos-$DATE.tar.gz.gpg /tmp/4srepos-$DATE.tar.gz.gpg 55 | rm -rf /tmp/backup4srepos 56 | rm -rf /tmp/backup4sbundles 57 | echo "Success! Find your encrypted archive file at /tmp/4srepos-$DATE.tar.gz.gpg" 58 | -------------------------------------------------------------------------------- /docker/logstash/etc/jvm.options: -------------------------------------------------------------------------------- 1 | ## JVM configuration 2 | 3 | ################################################################ 4 | ## Expert settings 5 | ################################################################ 6 | ## 7 | ## All settings below this section are considered 8 | ## expert settings. Don't tamper with them unless 9 | ## you understand what you are doing 10 | ## 11 | ################################################################ 12 | 13 | ## GC configuration 14 | -XX:+UseConcMarkSweepGC 15 | -XX:CMSInitiatingOccupancyFraction=75 16 | -XX:+UseCMSInitiatingOccupancyOnly 17 | 18 | ## Locale 19 | # Set the locale language 20 | #-Duser.language=en 21 | 22 | # Set the locale country 23 | #-Duser.country=US 24 | 25 | # Set the locale variant, if any 26 | #-Duser.variant= 27 | 28 | ## basic 29 | 30 | # set the I/O temp directory 31 | #-Djava.io.tmpdir=$HOME 32 | 33 | # set to headless, just in case 34 | -Djava.awt.headless=true 35 | 36 | # ensure UTF-8 encoding by default (e.g. 
filenames) 37 | -Dfile.encoding=UTF-8 38 | 39 | # use our provided JNA always versus the system one 40 | #-Djna.nosys=true 41 | 42 | # Turn on JRuby invokedynamic 43 | -Djruby.compile.invokedynamic=true 44 | # Force Compilation 45 | -Djruby.jit.threshold=0 46 | # Make sure joni regexp interruptability is enabled 47 | -Djruby.regexp.interruptible=true 48 | 49 | ## heap dumps 50 | 51 | # generate a heap dump when an allocation from the Java heap fails 52 | # heap dumps are created in the working directory of the JVM 53 | -XX:+HeapDumpOnOutOfMemoryError 54 | 55 | # specify an alternative path for heap dumps 56 | # ensure the directory exists and has sufficient space 57 | #-XX:HeapDumpPath=${LOGSTASH_HOME}/heapdump.hprof 58 | 59 | ## GC logging 60 | #-XX:+PrintGCDetails 61 | #-XX:+PrintGCTimeStamps 62 | #-XX:+PrintGCDateStamps 63 | #-XX:+PrintClassHistogram 64 | #-XX:+PrintTenuringDistribution 65 | #-XX:+PrintGCApplicationStoppedTime 66 | 67 | # log GC status to a file with time stamps 68 | # ensure the directory exists 69 | #-Xloggc:${LS_GC_LOG_FILE} 70 | 71 | # Entropy source for randomness 72 | -Djava.security.egd=file:/dev/urandom 73 | 74 | # Copy the logging context from parent threads to children 75 | -Dlog4j2.isThreadContextMapInheritable=true 76 | -------------------------------------------------------------------------------- /docker/spiderfoot/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM alpine:latest 2 | # source: https://github.com/telekom-security/tpotce/blob/master/docker/spiderfoot/Dockerfile 3 | # Get and install dependencies & packages 4 | RUN sed -i 's/dl-cdn/dl-2/g' /etc/apk/repositories && \ 5 | apk -U --no-cache add \ 6 | build-base \ 7 | curl \ 8 | git \ 9 | jpeg-dev \ 10 | libffi-dev \ 11 | libxml2 \ 12 | libxml2-dev \ 13 | libxslt \ 14 | libxslt-dev \ 15 | musl \ 16 | musl-dev \ 17 | openjpeg-dev \ 18 | openssl \ 19 | openssl-dev \ 20 | python3 \ 21 | python3-dev \ 22 | py-cffi \ 23 | py-pillow \ 24 | py-future \ 25 | py3-pip \ 26 | swig \ 27 | tinyxml \ 28 | tinyxml-dev \ 29 | zlib-dev && \ 30 | # 31 | # Setup user 32 | addgroup -g 2000 spiderfoot && \ 33 | adduser -S -s /bin/ash -u 2000 -D -g 2000 spiderfoot && \ 34 | # 35 | # Install spiderfoot 36 | git clone --depth=1 -b v3.1 https://github.com/smicallef/spiderfoot /home/spiderfoot && \ 37 | cd /home/spiderfoot && \ 38 | pip3 install --no-cache-dir wheel && \ 39 | pip3 install --no-cache-dir -r requirements.txt && \ 40 | chown -R spiderfoot:spiderfoot /home/spiderfoot && \ 41 | sed -i "s#'__docroot': ''#'__docroot': '\/spiderfoot'#" /home/spiderfoot/sf.py && \ 42 | sed -i 's#raise cherrypy.HTTPRedirect("\/")#raise cherrypy.HTTPRedirect("\/spiderfoot")#' /home/spiderfoot/sfwebui.py && \ 43 | # 44 | # Clean up 45 | apk del --purge build-base \ 46 | gcc \ 47 | git \ 48 | libffi-dev \ 49 | libxml2-dev \ 50 | libxslt-dev \ 51 | musl-dev \ 52 | openssl-dev \ 53 | python3-dev \ 54 | swig \ 55 | tinyxml-dev && \ 56 | rm -rf /var/cache/apk/* 57 | # 58 | # Healthcheck 59 | #HEALTHCHECK --retries=10 CMD curl -s -XGET 'http://127.0.0.1:8080' 60 | HEALTHCHECK --retries=10 CMD curl -s -XGET 'http://127.0.0.1:8080/spiderfoot/' 61 | # 62 | # Set user, workdir and start spiderfoot 63 | USER spiderfoot:spiderfoot 64 | WORKDIR /home/spiderfoot 65 | CMD ["/usr/bin/python3.8", "sf.py","-l", "0.0.0.0:8080"] -------------------------------------------------------------------------------- /docker/logstash/etc/BOX4s/icmp-codes.yaml: 
-------------------------------------------------------------------------------- 1 | "16.0" : "No Code" # 2 | "18.0" : "No Code" # 3 | "40.0" : "Bad SPI" # 4 | "40.1" : "Authentication Failed" # 5 | "40.2" : "Decompression Failed" # 6 | "40.3" : "Decryption Failed" # 7 | "40.4" : "Need Authentication" # 8 | "40.5" : "Need Authorization" # 9 | "10.0" : "No Code" # 10 | "15.0" : "No Code" # 11 | "4.0" : "No Code" # 12 | "3.0" : "Net Unreachable" #[RFC792] 13 | "3.1" : "Host Unreachable" #[RFC792] 14 | "3.2" : "Protocol Unreachable" #[RFC792] 15 | "3.3" : "Port Unreachable" #[RFC792] 16 | "3.4" : "Fragmentation Needed and Don't Fragment was Set" #[RFC792] 17 | "3.5" : "Source Route Failed" #[RFC792] 18 | "3.6" : "Destination Network Unknown" #[RFC1122] 19 | "3.7" : "Destination Host Unknown" #[RFC1122] 20 | "3.8" : "Source Host Isolated" #[RFC1122] 21 | "3.9" : "Communication with Destination Network is Administratively Prohibited" #[RFC1122] 22 | "3.10" : "Communication with Destination Host is Administratively Prohibited" #[RFC1122] 23 | "3.11" : "Destination Network Unreachable for Type of Service" #[RFC1122] 24 | "3.12" : "Destination Host Unreachable for Type of Service" #[RFC1122] 25 | "3.13" : "Communication Administratively Prohibited" #[RFC1812] 26 | "3.14" : "Host Precedence Violation" #[RFC1812] 27 | "3.15" : "Precedence cutoff in effect" #[RFC1812] 28 | "17.0" : "No Code" # 29 | "11.0" : "Time to Live exceeded in Transit" # 30 | "11.1" : "Fragment Reassembly Time Exceeded" # 31 | "12.0" : "Pointer indicates the error" # 32 | "12.1" : "Missing a Required Option" #[RFC1108] 33 | "12.2" : "Bad Length" # 34 | "5.0" : "Redirect Datagram for the Network (or subnet)" # 35 | "5.1" : "Redirect Datagram for the Host" # 36 | "5.2" : "Redirect Datagram for the Type of Service and Network" # 37 | "5.3" : "Redirect Datagram for the Type of Service and Host" # 38 | "9.0" : "Normal router advertisement" #[RFC3344] 39 | "9.16" : "Does not route common traffic" #[RFC3344] 40 | "42.0" : "No Error" #[RFC8335] 41 | "13.0" : "No Code" # 42 | "8.0" : "No Code" # 43 | "14.0" : "No Code" # 44 | "43.0" : "No Error" #[RFC8335] 45 | "43.1" : "Malformed Query" #[RFC8335] 46 | "43.2" : "No Such Interface" #[RFC8335] 47 | "43.3" : "No Such Table Entry" #[RFC8335] 48 | "43.4" : "Multiple Interfaces Satisfy Query" #[RFC8335] 49 | "0.0" : "No Code" # 50 | "6.0" : "Alternate Address for Host" # 51 | -------------------------------------------------------------------------------- /docker/web/source/templates/user/login_or_register.html: -------------------------------------------------------------------------------- 1 | {% extends 'flask_user/_public_base.html' %} 2 | 3 | {% block content %} 4 | {% from "flask_user/_macros.html" import render_field, render_checkbox_field, render_submit_field %} 5 | 6 |
7 |
8 | 9 |

{%trans%}Sign in{%endtrans%}

10 | 11 | {# ** Login form ** #} 12 |
13 | {{ login_form.hidden_tag() }} 14 | 15 | {# Username or Email #} 16 | {% set field = login_form.username if user_manager.USER_ENABLE_USERNAME else login_form.email %} 17 | {{ render_field(field, tabindex=110) }} 18 | 19 | {# Password #} 20 | {{ render_field(login_form.password, tabindex=120) }} 21 | 22 | {# Remember me #} 23 | {% if user_manager.USER_ENABLE_REMEMBER_ME %} 24 | {{ render_checkbox_field(login_form.remember_me, tabindex=130) }} 25 | {% endif %} 26 | 27 | {# Submit button #} 28 | {{ render_submit_field(login_form.submit, tabindex=180) }} 29 |
30 | {% if user_manager.USER_ENABLE_FORGOT_PASSWORD %} 31 |

32 |
33 | 34 | {%trans%}Forgot your Password?{%endtrans%} 35 |

36 | {% endif %} 37 | 38 |
39 |
40 | 41 |

{%trans%}Register{%endtrans%}

42 | 43 | {# ** Register form ** #} 44 |
45 | {{ register_form.hidden_tag() }} 46 | 47 | {# Username or Email #} 48 | {% set field = register_form.username if user_manager.USER_ENABLE_USERNAME else register_form.email %} 49 | {{ render_field(field, tabindex=210) }} 50 | 51 | {% if user_manager.USER_ENABLE_EMAIL and user_manager.USER_ENABLE_USERNAME %} 52 | {{ render_field(register_form.email, tabindex=220) }} 53 | {% endif %} 54 | 55 | {{ render_field(register_form.password, tabindex=230) }} 56 | 57 | {% if user_manager.USER_REQUIRE_RETYPE_PASSWORD %} 58 | {{ render_field(register_form.retype_password, tabindex=240) }} 59 | {% endif %} 60 | 61 | {{ render_submit_field(register_form.submit, tabindex=280) }} 62 |
63 | 64 |
65 |
66 | {% endblock %} -------------------------------------------------------------------------------- /docker/web/source/templates/user/change_password.html: -------------------------------------------------------------------------------- 1 | 2 | {% extends 'user_base.html' %} 3 | {% block user %} 4 | {% from "user/_macros.html" import render_field, render_checkbox_field, render_submit_field %} 5 | 6 | 7 |
8 |
9 |

10 |
11 |

Passwort ändern

12 |
13 |

14 |
15 | {{ form.hidden_tag() }} 16 |
17 |
18 |
19 | 20 | 21 |
22 |
23 | {% if form.old_password.errors %} 24 | {% for e in form.old_password.errors %} 25 |

{{ e }}

26 | {% endfor %} 27 | {% endif %} 28 |
29 |
30 |
31 | 32 | 33 |
34 |
35 | {% if form.new_password.errors %} 36 | {% for e in form.new_password.errors %} 37 |

{{ e }}

38 | {% endfor %} 39 | {% endif %} 40 | {% if user_manager.USER_REQUIRE_RETYPE_PASSWORD %} 41 |
42 |
43 | 44 | 45 |
46 |
47 | {% if form.retype_password.errors %} 48 | {% for e in form.retype_password.errors %} 49 |

{{ e }}

50 | {% endfor %} 51 | {% endif %} 52 | {% endif %} 53 | 54 |
55 |
56 | {% endblock %} 57 | -------------------------------------------------------------------------------- /docker/logstash/etc/BOX4s/icmp-type.yaml: -------------------------------------------------------------------------------- 1 | "0" : "Echo Reply" #[RFC792] 2 | "3" : "Destination Unreachable" #[RFC792] 3 | "4" : "Source Quench (Deprecated)" #[RFC792][RFC6633] 4 | "5" : "Redirect" #[RFC792] 5 | "6" : "Alternate Host Address (Deprecated)" #[RFC6918] 6 | "8" : "Echo" #[RFC792] 7 | "9" : "Router Advertisement" #[RFC1256] 8 | "10" : "Router Solicitation" #[RFC1256] 9 | "11" : "Time Exceeded" #[RFC792] 10 | "12" : "Parameter Problem" #[RFC792] 11 | "13" : "Timestamp" #[RFC792] 12 | "14" : "Timestamp Reply" #[RFC792] 13 | "15" : "Information Request (Deprecated)" #[RFC792][RFC6918] 14 | "16" : "Information Reply (Deprecated)" #[RFC792][RFC6918] 15 | "17" : "Address Mask Request (Deprecated)" #[RFC950][RFC6918] 16 | "18" : "Address Mask Reply (Deprecated)" #[RFC950][RFC6918] 17 | "19" : "Reserved (for Security)" #[Solo] 18 | "20" : "Reserved (for Robustness Experiment)" #[ZSu] 19 | "21" : "Reserved (for Robustness Experiment)" #[ZSu] 20 | "22" : "Reserved (for Robustness Experiment)" #[ZSu] 21 | "23" : "Reserved (for Robustness Experiment)" #[ZSu] 22 | "24" : "Reserved (for Robustness Experiment)" #[ZSu] 23 | "25" : "Reserved (for Robustness Experiment)" #[ZSu] 24 | "26" : "Reserved (for Robustness Experiment)" #[ZSu] 25 | "27" : "Reserved (for Robustness Experiment)" #[ZSu] 26 | "28" : "Reserved (for Robustness Experiment)" #[ZSu] 27 | "29" : "Reserved (for Robustness Experiment)" #[ZSu] 28 | "30" : "Traceroute (Deprecated)" #[RFC1393][RFC6918] 29 | "31" : "Datagram Conversion Error (Deprecated)" #[RFC1475][RFC6918] 30 | "32" : "Mobile Host Redirect (Deprecated)" #[David_Johnson][RFC6918] 31 | "33" : "IPv6 Where-Are-You (Deprecated)" #[Simpson][RFC6918] 32 | "34" : "IPv6 I-Am-Here (Deprecated)" #[Simpson][RFC6918] 33 | "35" : "Mobile Registration Request (Deprecated)" #[Simpson][RFC6918] 34 | "36" : "Mobile Registration Reply (Deprecated)" #[Simpson][RFC6918] 35 | "37" : "Domain Name Request (Deprecated)" #[RFC1788][RFC6918] 36 | "38" : "Domain Name Reply (Deprecated)" #[RFC1788][RFC6918] 37 | "39" : "SKIP (Deprecated)" #[Markson][RFC6918] 38 | "40" : "Photuris" #[RFC2521] 39 | "41" : "ICMP messages utilized by experimental mobility protocols such as Seamoby" #[RFC4065] 40 | "42" : "Extended Echo Request" #[RFC8335] 41 | "43" : "Extended Echo Reply" #[RFC8335] 42 | "253" : "RFC3692-style Experiment 1" #[RFC4727] 43 | "254" : "RFC3692-style Experiment 2" #[RFC4727] 44 | "255" : "Reserved" #[JBP] 45 | -------------------------------------------------------------------------------- /docker/logstash/etc/BOX4s/icmpv6-type.yaml: -------------------------------------------------------------------------------- 1 | "0" : "Reserved" # 2 | "1" : "Destination Unreachable" #[RFC4443] 3 | "2" : "Packet Too Big" #[RFC4443] 4 | "3" : "Time Exceeded" #[RFC4443] 5 | "4" : "Parameter Problem" #[RFC4443] 6 | "100" : "Private experimentation" #[RFC4443] 7 | "101" : "Private experimentation" #[RFC4443] 8 | "127" : "Reserved for expansion of ICMPv6 error messages" #[RFC4443] 9 | "128" : "Echo Request" #[RFC4443] 10 | "129" : "Echo Reply" #[RFC4443] 11 | "130" : "Multicast Listener Query" #[RFC2710] 12 | "131" : "Multicast Listener Report" #[RFC2710] 13 | "132" : "Multicast Listener Done" #[RFC2710] 14 | "133" : "Router Solicitation" #[RFC4861] 15 | "134" : "Router Advertisement" #[RFC4861] 16 | "135" : "Neighbor 
Solicitation" #[RFC4861] 17 | "136" : "Neighbor Advertisement" #[RFC4861] 18 | "137" : "Redirect Message" #[RFC4861] 19 | "138" : "Router Renumbering" #[RFC2894] 20 | "139" : "ICMP Node Information Query" #[RFC4620] 21 | "140" : "ICMP Node Information Response" #[RFC4620] 22 | "141" : "Inverse Neighbor Discovery Solicitation Message" #[RFC3122] 23 | "142" : "Inverse Neighbor Discovery Advertisement Message" #[RFC3122] 24 | "143" : "Version 2 Multicast Listener Report" #[RFC3810] 25 | "144" : "Home Agent Address Discovery Request Message" #[RFC6275] 26 | "145" : "Home Agent Address Discovery Reply Message" #[RFC6275] 27 | "146" : "Mobile Prefix Solicitation" #[RFC6275] 28 | "147" : "Mobile Prefix Advertisement" #[RFC6275] 29 | "148" : "Certification Path Solicitation Message" #[RFC3971] 30 | "149" : "Certification Path Advertisement Message" #[RFC3971] 31 | "150" : "ICMP messages utilized by experimentalmobility protocols such as Seamoby" #[RFC4065] 32 | "151" : "Multicast Router Advertisement" #[RFC4286] 33 | "152" : "Multicast Router Solicitation" #[RFC4286] 34 | "153" : "Multicast Router Termination" #[RFC4286] 35 | "154" : "FMIPv6 Messages" #[RFC5568] 36 | "155" : "RPL Control Message" #[RFC6550] 37 | "156" : "ILNPv6 Locator Update Message" #[RFC6743] 38 | "157" : "Duplicate Address Request" #[RFC6775] 39 | "158" : "Duplicate Address Confirmation" #[RFC6775] 40 | "159" : "MPL Control Message" #[RFC7731] 41 | "160" : "Extended Echo Request" #[RFC8335] 42 | "161" : "Extended Echo Reply" #[RFC8335] 43 | "200" : "Private experimentation" #[RFC4443] 44 | "201" : "Private experimentation" #[RFC4443] 45 | "255" : "Reserved for expansion of ICMPv6 informational messages" #[RFC4443] 46 | -------------------------------------------------------------------------------- /docker/web/source/templates/errors/403.html: -------------------------------------------------------------------------------- 1 | {% extends "errors/error_base.html" %} 2 | {% block content %} 3 |
4 |
5 |
6 |
7 | 8 |
9 |

Zugriff verboten

10 |

Sie sind nicht berechtigt, diese Seite aufzurufen.

11 | {% if current_user and current_user.roles%} 12 |

Ihre Berechtigungen:

13 | 27 | {% endif %} 28 |
29 |
30 | 34 | {% if current_user %} 35 | 39 | {% endif %} 40 |
41 | 42 |
43 |
44 |
45 |
46 | {% endblock %} 47 | {% block scripts %} 48 | 51 | {% if current_user %} 52 | 55 | {% else %} 56 | 59 | {% endif %} 60 | 61 | {% endblock %} 62 | -------------------------------------------------------------------------------- /docker/web/migrations/versions/b1685fc5f49c_create_types.py: -------------------------------------------------------------------------------- 1 | """Create NetworkTypes and SystemTypes 2 | 3 | Revision ID: b1685fc5f49c 4 | Revises: 9a02836f6117 5 | Create Date: 2020-10-21 10:01:06.180863 6 | 7 | """ 8 | from alembic import op 9 | import sqlalchemy as sa 10 | from sqlalchemy.dialects import postgresql 11 | from source.wizard.models import SystemType, NetworkType, ScanCategory 12 | # revision identifiers, used by Alembic. 13 | revision = 'b1685fc5f49c' 14 | down_revision = '9a02836f6117' 15 | branch_labels = None 16 | depends_on = None 17 | 18 | 19 | def upgrade(): 20 | """Upgrade to migration.""" 21 | op.bulk_insert(SystemType.__table__, [ 22 | {'id': 1, 'name': 'BOX4security'}, 23 | {'id': 2, 'name': 'DNS-Server'}, 24 | {'id': 3, 'name': 'Gateway'}, 25 | {'id': 4, 'name': 'Firewall'}, 26 | {'id': 5, 'name': 'IoT'}, 27 | {'id': 6, 'name': 'Industrielle IT'}, 28 | ]) 29 | op.bulk_insert(NetworkType.__table__, [ 30 | {'name': 'Client'}, 31 | {'name': 'Server'}, 32 | {'name': 'Gast'}, 33 | ]) 34 | op.bulk_insert(ScanCategory.__table__, [ 35 | {'id': 1, 'name': 'Keine Restriktionen bei den Scans'}, 36 | {'id': 2, 'name': 'Scans ausschließlich zu Randzeiten oder am Wochenende'}, 37 | {'id': 3, 'name': 'Scans ausschließlich bei Einsatzbereitschaft von 4sConsult und Präsenz der Netzwerkadministration'}, 38 | ]) 39 | 40 | 41 | def downgrade(): 42 | """Downgrade to migration.""" 43 | op.execute(f'DELETE FROM "{SystemType.__table__}" WHERE name=\'BOX4security\'') 44 | op.execute(f'DELETE FROM "{SystemType.__table__}" WHERE name=\'DNS-Server\'') 45 | op.execute(f'DELETE FROM "{SystemType.__table__}" WHERE name=\'Gateway\'') 46 | op.execute(f'DELETE FROM "{SystemType.__table__}" WHERE name=\'Firewall\'') 47 | op.execute(f'DELETE FROM "{SystemType.__table__}" WHERE name=\'IoT\'') 48 | op.execute(f'DELETE FROM "{SystemType.__table__}" WHERE name=\'Industrielle IT\'') 49 | op.execute(f'DELETE FROM "{NetworkType.__table__}" WHERE name=\'Client\'') 50 | op.execute(f'DELETE FROM "{NetworkType.__table__}" WHERE name=\'Server\'') 51 | op.execute(f'DELETE FROM "{NetworkType.__table__}" WHERE name=\'Gast\'') 52 | 53 | op.execute(f'DELETE FROM "{ScanCategory.__table__}" WHERE id=1') 54 | op.execute(f'DELETE FROM "{ScanCategory.__table__}" WHERE id=2') 55 | op.execute(f'DELETE FROM "{ScanCategory.__table__}" WHERE id=3') 56 | -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | # Changelog 2 | 3 | All notable changes to this project will be documented in this file. 4 | 5 | The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), 6 | and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). 7 | 8 | ## [0.0.5] - 2020-12-17 9 | ### Changed 10 | * Use NVD Vulnerability Severity Ratings (CVSS) ranges. Previously, custom vulnerability ranges were used. 11 | * `VERSION` file is now stored under fixed path `/var/lib/box4s`. Only modify it if you know what you're doing (`BOX4s_ENV=dev` toggles the development mode). 12 | * When using insecure default secrets (i.e.
unchanged prior to installation), these are replaced by randomly generated secrets that are also printed at the end of the installation script. Save them if you need them. 13 | ### Fixed 14 | * Fixed parts of the update script that did not yet use the changed directories. 15 | 16 | 17 | ## [0.0.4] - 2020-12-16 18 | Separate installation and configuration from the cloned repository. 19 | 20 | ### Added 21 | * New environment variables `$BOX4s_INSTALL_DIR` and `$BOX4s_CONFIG_DIR` are available, resolving to the installation and configuration directories, respectively. 22 | 23 | ### Changed 24 | * Installation is now performed to `/opt/box4s` by default. The config files (secrets!) are copied by default from the local repo folder to `/etc/box4s` during installation. **Change secrets before running the install script!** 25 | * Installation no longer restarts the OpenVAS container. This became obsolete with recent OpenVAS image updates. Updates are performed on every start, including the first. 26 | 27 | 28 | ## [0.0.3] - 2020-12-15 29 | First release that can be installed publicly from GitHub. 30 | 31 | ### Fixed 32 | - Fix errors in installation script `install.sh` that came up when installing from the public GitHub repository. 33 | 34 | 35 | 36 | ## [0.0.2] - 2020-12-15 37 | Refactor container-to-host communication. 38 | 39 | ### Changed 40 | - Use the Docker API and a named UNIX pipe to communicate from the web container to the host and to other containers. 41 | 42 | 43 | ## [0.0.1] - 2020-12-10 44 | First Open Source release to the public domain. 45 | 46 | ### Changed 47 | - Installation no longer requires a deploy secret. 48 | - Installation now references Docker images stored in public Docker-Hub repositories. 49 | - Secret files are no longer encrypted under a PGP key; instead they are set to a default value of `CHANGEME` that **must** be changed during installation. 50 | -------------------------------------------------------------------------------- /docker/web/migrations/versions/5aadb38f6936_network_and_types.py: -------------------------------------------------------------------------------- 1 | """network and types 2 | 3 | Revision ID: 5aadb38f6936 4 | Revises: 96cfbddbc495 5 | Create Date: 2020-10-21 08:27:58.262814 6 | 7 | """ 8 | from alembic import op 9 | import sqlalchemy as sa 10 | from sqlalchemy.dialects import postgresql 11 | 12 | # revision identifiers, used by Alembic. 13 | revision = '5aadb38f6936' 14 | down_revision = '96cfbddbc495' 15 | branch_labels = None 16 | depends_on = None 17 | 18 | 19 | def upgrade(): 20 | """Upgrade to migration.""" 21 | # ### commands auto generated by Alembic - please adjust!
### 22 | op.create_table( 23 | 'networktype', 24 | sa.Column('id', sa.Integer(), nullable=False), 25 | sa.Column('name', sa.String(length=100), nullable=True), 26 | sa.PrimaryKeyConstraint('id') 27 | ) 28 | op.create_table( 29 | 'scancategory', 30 | sa.Column('id', sa.Integer(), nullable=False), 31 | sa.Column('name', sa.String(), nullable=True), 32 | sa.PrimaryKeyConstraint('id') 33 | ) 34 | op.create_table( 35 | 'network', 36 | sa.Column('id', sa.Integer(), nullable=False), 37 | sa.Column('name', sa.String(length=100), nullable=True), 38 | sa.Column('ip_address', sa.String(length=24), nullable=False), 39 | sa.Column('cidr', sa.Integer(), nullable=False), 40 | sa.Column('vlan', sa.String(length=50), nullable=True), 41 | sa.Column('scancategory_id', sa.Integer(), nullable=True), 42 | sa.Column('scan_weekday', sa.String(length=24), nullable=True), 43 | sa.Column('scan_time', sa.Time(), nullable=True), 44 | sa.ForeignKeyConstraint(['scancategory_id'], ['scancategory.id'], ), 45 | sa.PrimaryKeyConstraint('id') 46 | ) 47 | op.create_table( 48 | 'network_networktype', 49 | sa.Column('id', sa.Integer(), nullable=False), 50 | sa.Column('network_id', sa.Integer(), nullable=True), 51 | sa.Column('networktype_id', sa.Integer(), nullable=True), 52 | sa.ForeignKeyConstraint(['network_id'], ['network.id'], ondelete='CASCADE'), 53 | sa.ForeignKeyConstraint(['networktype_id'], ['networktype.id'], ondelete='CASCADE'), 54 | sa.PrimaryKeyConstraint('id') 55 | ) 56 | 57 | 58 | def downgrade(): 59 | """Downgrade to migration.""" 60 | op.drop_table('network_networktype') 61 | op.drop_table('network') 62 | op.drop_table('scancategory') 63 | op.drop_table('networktype') 64 | -------------------------------------------------------------------------------- /docker/web/source/wizard/schemas.py: -------------------------------------------------------------------------------- 1 | from source.extensions import ma 2 | from marshmallow import fields 3 | 4 | 5 | class NetworkTypeSchema(ma.Schema): 6 | """Role Schema for API representation.""" 7 | 8 | class Meta: 9 | """Define fields which will be available.""" 10 | 11 | fields = ('id', 'name') 12 | 13 | 14 | class ScanCategorySchema(ma.Schema): 15 | 16 | class Meta: 17 | fields = ( 18 | 'id', 19 | 'name', 20 | ) 21 | 22 | 23 | class NetworkSchema(ma.Schema): 24 | 25 | types = fields.Nested(NetworkTypeSchema, many=True) 26 | scancategory = fields.Nested(ScanCategorySchema) 27 | 28 | class Meta: 29 | fields = ( 30 | 'id', 31 | 'name', 32 | 'ip_address', 33 | 'cidr', 34 | 'vlan', 35 | 'types', 36 | 'scancategory_id', 37 | 'scan_weekday', 38 | 'scan_time', 39 | ) 40 | 41 | 42 | class SystemTypeSchema(ma.Schema): 43 | """Role Schema for API representation.""" 44 | 45 | class Meta: 46 | """Define fields which will be available.""" 47 | 48 | fields = ('id', 'name') 49 | 50 | 51 | class SystemSchema(ma.Schema): 52 | 53 | types = fields.Nested(SystemTypeSchema, many=True) 54 | scancategory = fields.Nested(ScanCategorySchema) 55 | network = fields.Nested(NetworkSchema) 56 | 57 | class Meta: 58 | fields = ( 59 | 'id', 60 | 'name', 61 | 'types', 62 | 'network', 63 | 'ip_address', 64 | 'location', 65 | 'scan_enabled', 66 | 'ids_enabled', 67 | ) 68 | 69 | 70 | class BOX4securitySchema(ma.Schema): 71 | 72 | types = fields.Nested(SystemTypeSchema, many=True) 73 | scancategory = fields.Nested(ScanCategorySchema) 74 | network = fields.Nested(NetworkSchema) 75 | dns = fields.Nested(SystemSchema) 76 | gateway = fields.Nested(SystemSchema) 77 | 78 | class Meta: 79 | fields = ( 80 | 'id', 81 
| 'name', 82 | 'types', 83 | 'network', 84 | 'ip_address', 85 | 'location', 86 | 'scan_enabled', 87 | 'ids_enabled', 88 | 'dhcp_enabled', 89 | 'dns', 90 | 'gateway', 91 | ) 92 | 93 | 94 | SYS = SystemSchema() 95 | SYSs = SystemSchema(many=True) 96 | BOX4sSchema = BOX4securitySchema() 97 | NET = NetworkSchema() 98 | NETs = NetworkSchema(many=True) 99 | -------------------------------------------------------------------------------- /scripts/1stLevelRepair/repair_reset.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | set -e 3 | # Log file to use 4 | # Create path if allowed or do NOP 5 | mkdir -p /var/log/box4s/1stLevelRepair || : 6 | LOG_DIR="/var/log/box4s/1stLevelRepair" 7 | if [[ ! -w $LOG_DIR ]]; then 8 | LOG_DIR="$HOME" 9 | fi 10 | 11 | LOG=$LOG_DIR/reset.log 12 | 13 | # Do not use interactive debian frontend. 14 | export DEBIAN_FRONTEND=noninteractive 15 | 16 | # Forward fd2 to the console 17 | # exec 2>&1 18 | # Forward fd1 to $LOG 19 | exec 2>&1 1>>${LOG} 20 | 21 | function delete_If_Exists(){ 22 | # Helper to delete files and directories if they exist 23 | if [ -d $1 ]; then 24 | # Directory to remove 25 | sudo rm $1 -r 26 | fi 27 | if [ -f $1 ]; then 28 | # File to remove 29 | sudo rm $1 30 | fi 31 | } 32 | function testNet() { 33 | # Returns 0 for successful internet connection and dns resolution, 1 else 34 | ping -q -c 1 -W 1 $1 >/dev/null; 35 | return $? 36 | } 37 | function waitForNet() { 38 | # use argument or default value of google.com 39 | HOST=${1:-"google.com"} 40 | while ! testNet $HOST; do 41 | # while testNet returns non zero value 42 | echo "No internet connectivity or dns resolution of $HOST, sleeping for 15s" 1>&2 43 | sleep 15s 44 | grep -q 'nameserver' /etc/resolv.conf || { echo "nameserver 8.8.8.8" > /etc/resolv.conf && echo "Empty /etc/resolv.conf -> inserting 8.8.8.8" 1>&2; } 45 | done 46 | } 47 | # 48 | #Flags: 49 | # no-recreate: Does not create an empty BOX4security after deleting the current one 50 | # 51 | 52 | echo -n "Stopping BOX4security Service.. " 1>&2 53 | 54 | if [[ $(systemctl list-units --all -t service --full --no-legend "box4security.service" | cut -f1 -d' ') == "box4security.service" ]]; then 55 | sudo systemctl stop box4security.service 56 | #Remove all Docker containers and Volumes 57 | sudo docker rm -f $(docker ps -a -q) >/dev/null || : 58 | sudo docker volume rm $(docker volume ls -q) >/dev/null || : 59 | fi 60 | echo "[ DONE ]" 1>&2 61 | 62 | echo -n "Removing Data.. " 1>&2 63 | #Securely delete /data 64 | if [ -d /data ]; then 65 | # Directory to remove 66 | sudo srm -zr /data 67 | fi 68 | delete_If_Exists /var/lib/box4s 69 | delete_If_Exists /var/lib/postgresql 70 | delete_If_Exists /var/lib/box4s_openvas 71 | delete_If_Exists /var/lib/box4s_suricata_rules 72 | delete_If_Exists /var/lib/box4s_docs 73 | delete_If_Exists /var/lib/elastalert 74 | delete_If_Exists /var/lib/logstash 75 | delete_If_Exists /etc/box4s 76 | delete_If_Exists /tmp/box4s 77 | waitForNet 78 | echo "[ DONE ]" 1>&2 79 | 80 | echo -n "Installing new BOX4security.. 
" 1>&2 81 | curl -sL https://gitlab.com/snippets/1982942/raw | sudo bash 82 | echo "[ DONE ]" 1>&2 83 | -------------------------------------------------------------------------------- /docker/logstash/etc/BOX4s/suricata-template.json: -------------------------------------------------------------------------------- 1 | { 2 | "index_patterns": "suricata-*", 3 | "settings": { 4 | "index": { 5 | "number_of_shards": 3, 6 | "number_of_replicas": 1, 7 | "refresh_interval": "10s", 8 | "codec": "best_compression", 9 | "mapping": { 10 | "total_fields": { "limit": 10000} 11 | } 12 | } 13 | }, 14 | "mappings": { 15 | "properties": { 16 | "link_suppress" : { 17 | "type": "text" 18 | }, 19 | "destination" : { 20 | "type": "object", 21 | "properties": { 22 | "geo": { 23 | "type" : "object", 24 | "properties": { 25 | "location" : { 26 | "type": "geo_point" 27 | } 28 | } 29 | } 30 | } 31 | }, 32 | "source" : { 33 | "type": "object", 34 | "properties": { 35 | "geo": { 36 | "type" : "object", 37 | "properties": { 38 | "location" : { 39 | "type": "geo_point" 40 | } 41 | } 42 | } 43 | } 44 | }, 45 | "server" : { 46 | "type": "object", 47 | "properties": { 48 | "geo": { 49 | "type" : "object", 50 | "properties": { 51 | "location" : { 52 | "type": "geo_point" 53 | } 54 | } 55 | } 56 | } 57 | }, 58 | "client" : { 59 | "type": "object", 60 | "properties": { 61 | "geo": { 62 | "type" : "object", 63 | "properties": { 64 | "location" : { 65 | "type": "geo_point" 66 | } 67 | } 68 | } 69 | } 70 | }, 71 | "http" : { 72 | "type" : "object", 73 | "properties" : { 74 | "request" : { 75 | "type" : "object", 76 | "properties" : { 77 | "body" : { 78 | "type": "object", 79 | "properties" : { 80 | "bytes" : { 81 | "type" : "long" 82 | } 83 | } 84 | } 85 | } 86 | } 87 | } 88 | } 89 | } 90 | } 91 | } 92 | -------------------------------------------------------------------------------- /docker/web/migrations/versions/a59fffda1b70_box4security_table.py: -------------------------------------------------------------------------------- 1 | """BOX4security table 2 | 3 | Revision ID: a59fffda1b70 4 | Revises: ea1ce32ce8fd 5 | Create Date: 2020-10-29 08:20:06.251992 6 | 7 | """ 8 | from alembic import op 9 | import sqlalchemy as sa 10 | from sqlalchemy.dialects import postgresql 11 | 12 | # revision identifiers, used by Alembic. 
13 | revision = 'a59fffda1b70' 14 | down_revision = 'ea1ce32ce8fd' 15 | branch_labels = None 16 | depends_on = None 17 | 18 | 19 | def upgrade(): 20 | """Upgrade to migration.""" 21 | op.create_table( 22 | 'box4security', 23 | sa.Column('id', sa.Integer(), nullable=False), 24 | sa.Column('name', sa.String(length=100), nullable=True), 25 | sa.Column('ip_address', sa.String(length=24), nullable=True), 26 | sa.Column('location', sa.String(length=255), nullable=True), 27 | sa.Column('scan_enabled', sa.Boolean(), nullable=True), 28 | sa.Column('ids_enabled', sa.Boolean(), nullable=True), 29 | sa.Column('network_id', sa.Integer(), nullable=True), 30 | sa.Column('dns_id', sa.Integer(), nullable=True), 31 | sa.Column('gateway_id', sa.Integer(), nullable=True), 32 | sa.ForeignKeyConstraint(['dns_id'], ['system.id'], ), 33 | sa.ForeignKeyConstraint(['gateway_id'], ['system.id'], ), 34 | sa.ForeignKeyConstraint(['network_id'], ['network.id'], ), 35 | sa.PrimaryKeyConstraint('id'), 36 | sa.UniqueConstraint('name') 37 | ) 38 | op.create_table( 39 | 'box4security_systemtype', 40 | sa.Column('id', sa.Integer(), nullable=False), 41 | sa.Column('box4security', sa.Integer(), nullable=True), 42 | sa.Column('systemtype_id', sa.Integer(), nullable=True), 43 | sa.ForeignKeyConstraint(['box4security'], ['box4security.id'], ondelete='CASCADE'), 44 | sa.ForeignKeyConstraint(['systemtype_id'], ['systemtype.id'], ondelete='CASCADE'), 45 | sa.PrimaryKeyConstraint('id') 46 | ) 47 | op.drop_constraint('system_gateway_id_fkey', 'system', type_='foreignkey') 48 | op.drop_constraint('system_dns_id_fkey', 'system', type_='foreignkey') 49 | op.drop_column('system', 'dns_id') 50 | op.drop_column('system', 'gateway_id') 51 | 52 | 53 | def downgrade(): 54 | """Downgrade to migration.""" 55 | op.add_column('system', sa.Column('gateway_id', sa.INTEGER(), autoincrement=False, nullable=True)) 56 | op.add_column('system', sa.Column('dns_id', sa.INTEGER(), autoincrement=False, nullable=True)) 57 | op.create_foreign_key('system_dns_id_fkey', 'system', 'system', ['dns_id'], ['id']) 58 | op.create_foreign_key('system_gateway_id_fkey', 'system', 'system', ['gateway_id'], ['id']) 59 | op.drop_table('box4security_systemtype') 60 | op.drop_table('box4security') 61 | --------------------------------------------------------------------------------
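The Alembic migration files above (for example 9a02836f6117, 5aadb38f6936, b1685fc5f49c and a59fffda1b70) all follow the same pattern: module-level revision identifiers that chain the revisions together, plus paired upgrade() and downgrade() functions that must undo each other. As a rough sketch of that pattern (the revision ID, table and rows below are illustrative placeholders, not objects from this repository), a minimal seed-data migration could look like the following; declaring a lightweight table stub keeps the migration independent of the application models:

"""seed example rows (illustrative sketch only)

Revision ID: 000000000000
Revises: None
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.sql import column, table

# revision identifiers, used by Alembic.
revision = '000000000000'
down_revision = None
branch_labels = None
depends_on = None

# Lightweight table stub; avoids importing application models inside a migration.
example = table(
    'example',
    column('id', sa.Integer),
    column('name', sa.String),
)


def upgrade():
    """Insert the seed rows."""
    op.bulk_insert(example, [
        {'id': 1, 'name': 'first'},
        {'id': 2, 'name': 'second'},
    ])


def downgrade():
    """Remove the seed rows again."""
    op.execute('DELETE FROM example WHERE id IN (1, 2)')

Running `alembic upgrade head` and `alembic downgrade -1` (or their Flask-Migrate wrappers, if that is what drives these migrations) exercises both directions, which is why every row inserted in upgrade() needs a matching delete in downgrade().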