├── .dockerignore ├── .github ├── ISSUE_TEMPLATE │ ├── bug_report.yaml │ ├── config.yml │ └── feature_request.yaml ├── buildkit.toml ├── codecov.yml ├── generate-codecov-yml.sh ├── governance.yml ├── release-drafter.yml └── workflows │ ├── .yamllint │ ├── bats-hub.yml │ ├── bats-mysql.yml │ ├── bats-postgres.yml │ ├── bats-sqlite-coverage.yml │ ├── bats.yml │ ├── cache-cleanup.yaml │ ├── ci-windows-build-msi.yml │ ├── ci_release-drafter.yml │ ├── codeql-analysis.yml │ ├── docker-tests.yml │ ├── go-tests-windows.yml │ ├── go-tests.yml │ ├── governance-bot.yaml │ ├── publish-docker-master.yml │ ├── publish-docker-release.yml │ ├── publish-docker.yml │ ├── publish-tarball-release.yml │ └── update_docker_hub_doc.yml ├── .gitignore ├── .gitmodules ├── .golangci.yml ├── .yamllint ├── CONTRIBUTING.md ├── Dockerfile ├── Dockerfile.debian ├── LICENSE ├── Makefile ├── README.md ├── SECURITY.md ├── azure-pipelines.yml ├── cmd ├── crowdsec-cli │ ├── Makefile │ ├── args │ │ └── args.go │ ├── ask │ │ └── ask.go │ ├── clialert │ │ ├── alerts.go │ │ ├── sanitize.go │ │ └── table.go │ ├── cliallowlists │ │ └── allowlists.go │ ├── clibouncer │ │ ├── add.go │ │ ├── bouncers.go │ │ ├── delete.go │ │ ├── inspect.go │ │ ├── list.go │ │ └── prune.go │ ├── clicapi │ │ └── capi.go │ ├── cliconfig │ │ ├── backup.go │ │ ├── config.go │ │ ├── feature_flags.go │ │ ├── restore.go │ │ ├── show.go │ │ └── showyaml.go │ ├── cliconsole │ │ ├── console.go │ │ └── console_table.go │ ├── clidecision │ │ ├── decisions.go │ │ ├── import.go │ │ └── table.go │ ├── clientinfo │ │ └── clientinfo.go │ ├── cliexplain │ │ └── explain.go │ ├── clihub │ │ ├── hub.go │ │ ├── items.go │ │ └── utils_table.go │ ├── clihubtest │ │ ├── clean.go │ │ ├── coverage.go │ │ ├── create.go │ │ ├── eval.go │ │ ├── explain.go │ │ ├── hubtest.go │ │ ├── info.go │ │ ├── list.go │ │ ├── run.go │ │ └── table.go │ ├── cliitem │ │ ├── cmdinspect.go │ │ ├── cmdinstall.go │ │ ├── cmdremove.go │ │ ├── cmdupgrade.go │ │ ├── hubappsec.go │ │ ├── hubcollection.go │ │ ├── hubcontext.go │ │ ├── hubparser.go │ │ ├── hubpostoverflow.go │ │ ├── hubscenario.go │ │ ├── item.go │ │ ├── metrics.go │ │ └── metrics_table.go │ ├── clilapi │ │ ├── context.go │ │ ├── lapi.go │ │ ├── register.go │ │ ├── status.go │ │ ├── status_test.go │ │ └── utils.go │ ├── climachine │ │ ├── add.go │ │ ├── delete.go │ │ ├── flag.go │ │ ├── inspect.go │ │ ├── list.go │ │ ├── machines.go │ │ ├── prune.go │ │ └── validate.go │ ├── climetrics │ │ ├── list.go │ │ ├── metrics.go │ │ ├── number.go │ │ ├── show.go │ │ ├── statacquis.go │ │ ├── statalert.go │ │ ├── statappsecengine.go │ │ ├── statappsecrule.go │ │ ├── statbouncer.go │ │ ├── statbucket.go │ │ ├── statdecision.go │ │ ├── statlapi.go │ │ ├── statlapibouncer.go │ │ ├── statlapidecision.go │ │ ├── statlapimachine.go │ │ ├── statparser.go │ │ ├── statstash.go │ │ ├── statwhitelist.go │ │ ├── store.go │ │ └── table.go │ ├── clinotifications │ │ ├── notifications.go │ │ └── notifications_table.go │ ├── clipapi │ │ └── papi.go │ ├── clisetup │ │ └── setup.go │ ├── clisimulation │ │ └── simulation.go │ ├── clisupport │ │ └── support.go │ ├── completion.go │ ├── cstable │ │ └── cstable.go │ ├── dashboard.go │ ├── dashboard_unsupported.go │ ├── doc.go │ ├── idgen │ │ ├── machineid.go │ │ └── password.go │ ├── main.go │ ├── reload │ │ ├── message.go │ │ ├── message_freebsd.go │ │ ├── message_linux.go │ │ ├── message_windows.go │ │ └── reload.go │ ├── require │ │ ├── branch.go │ │ └── require.go │ ├── setup.go │ ├── setup_stub.go │ └── version.go 
├── crowdsec │ ├── Makefile │ ├── api.go │ ├── appsec.go │ ├── appsec_stub.go │ ├── crowdsec.go │ ├── dump.go │ ├── event_log_hook_windows.go │ ├── fatalhook.go │ ├── lpmetrics.go │ ├── main.go │ ├── metrics.go │ ├── output.go │ ├── parse.go │ ├── pour.go │ ├── run_in_svc.go │ ├── run_in_svc_windows.go │ ├── serve.go │ ├── win_service.go │ ├── win_service_install.go │ └── win_service_manage.go ├── notification-dummy │ ├── Makefile │ ├── dummy.yaml │ └── main.go ├── notification-email │ ├── Makefile │ ├── email.yaml │ └── main.go ├── notification-file │ ├── Makefile │ ├── file.yaml │ └── main.go ├── notification-http │ ├── Makefile │ ├── http.yaml │ └── main.go ├── notification-sentinel │ ├── Makefile │ ├── main.go │ └── sentinel.yaml ├── notification-slack │ ├── Makefile │ ├── main.go │ └── slack.yaml └── notification-splunk │ ├── Makefile │ ├── main.go │ └── splunk.yaml ├── config ├── acquis.yaml ├── acquis_win.yaml ├── config.yaml ├── config_win.yaml ├── config_win_no_lapi.yaml ├── console.yaml ├── context.yaml ├── crowdsec.cron.daily ├── crowdsec.service ├── detect.yaml ├── dev.yaml ├── local_api_credentials.yaml ├── online_api_credentials.yaml ├── patterns │ ├── aws │ ├── bacula │ ├── bro │ ├── cowrie_honeypot │ ├── exim │ ├── firewalls │ ├── haproxy │ ├── java │ ├── junos │ ├── linux-syslog │ ├── mcollective │ ├── modsecurity │ ├── mongodb │ ├── mysql │ ├── nagios │ ├── nginx │ ├── paths │ ├── postgresql │ ├── rails │ ├── redis │ ├── ruby │ ├── smb │ ├── ssh │ └── tcpdump ├── profiles.yaml ├── simulation.yaml └── user.yaml ├── debian ├── .gitignore ├── README.md ├── changelog ├── compat ├── control ├── crowdsec.cron.daily ├── crowdsec.service ├── install ├── migrate-hub.sh ├── patches │ ├── config_plugins │ └── series ├── postinst ├── postrm ├── prerm ├── rules └── templates ├── docker ├── README.md ├── config.yaml ├── docker_start.sh ├── preload-hub-items └── test │ ├── .python-version │ ├── README.md │ ├── default.env │ ├── pyproject.toml │ ├── pytest-debug.ini │ ├── pytest.ini │ ├── tests │ ├── __init__.py │ ├── conftest.py │ ├── test_agent.py │ ├── test_agent_only.py │ ├── test_bouncer.py │ ├── test_capi.py │ ├── test_capi_whitelists.py │ ├── test_cold_logs.py │ ├── test_flavors.py │ ├── test_hello.py │ ├── test_hub.py │ ├── test_hub_collections.py │ ├── test_hub_parsers.py │ ├── test_hub_postoverflows.py │ ├── test_hub_scenarios.py │ ├── test_local_api_url.py │ ├── test_local_item.py │ ├── test_metrics.py │ ├── test_nolapi.py │ ├── test_simple.py │ ├── test_tls.py │ ├── test_version.py │ └── test_wal.py │ └── uv.lock ├── go.mod ├── go.sum ├── make_chocolatey.ps1 ├── make_installer.ps1 ├── mk ├── __gmsl ├── gmsl ├── gmsl.html ├── help.mk ├── platform.mk └── platform │ ├── freebsd.mk │ ├── linux.mk │ ├── openbsd.mk │ ├── unix_common.mk │ └── windows.mk ├── pkg ├── acquisition │ ├── acquisition.go │ ├── acquisition_test.go │ ├── appsec.go │ ├── cloudwatch.go │ ├── configuration │ │ └── configuration.go │ ├── docker.go │ ├── file.go │ ├── http.go │ ├── journalctl.go │ ├── k8s.go │ ├── kafka.go │ ├── kinesis.go │ ├── loki.go │ ├── modules │ │ ├── appsec │ │ │ ├── appsec.go │ │ │ ├── appsec_hooks_test.go │ │ │ ├── appsec_lnx_test.go │ │ │ ├── appsec_remediation_test.go │ │ │ ├── appsec_rules_test.go │ │ │ ├── appsec_runner.go │ │ │ ├── appsec_runner_test.go │ │ │ ├── appsec_test.go │ │ │ ├── appsec_win_test.go │ │ │ ├── bodyprocessors │ │ │ │ └── raw.go │ │ │ ├── metrics.go │ │ │ ├── rx_operator.go │ │ │ └── utils.go │ │ ├── cloudwatch │ │ │ ├── cloudwatch.go │ │ │ └── 
cloudwatch_test.go │ │ ├── docker │ │ │ ├── docker.go │ │ │ ├── docker_test.go │ │ │ └── utils.go │ │ ├── file │ │ │ ├── file.go │ │ │ ├── file_test.go │ │ │ ├── tailline.go │ │ │ ├── tailline_windows.go │ │ │ └── testdata │ │ │ │ ├── bad.gz │ │ │ │ ├── test.log │ │ │ │ └── test.log.gz │ │ ├── http │ │ │ ├── http.go │ │ │ ├── http_test.go │ │ │ └── testdata │ │ │ │ ├── ca.crt │ │ │ │ ├── client.crt │ │ │ │ ├── client.key │ │ │ │ ├── server.crt │ │ │ │ └── server.key │ │ ├── journalctl │ │ │ ├── journalctl.go │ │ │ ├── journalctl_test.go │ │ │ └── testdata │ │ │ │ └── journalctl │ │ ├── kafka │ │ │ ├── kafka.go │ │ │ ├── kafka_test.go │ │ │ └── testdata │ │ │ │ ├── kafkaClient.certificate.pem │ │ │ │ ├── kafkaClient.key │ │ │ │ └── snakeoil-ca-1.crt │ │ ├── kinesis │ │ │ ├── kinesis.go │ │ │ └── kinesis_test.go │ │ ├── kubernetesaudit │ │ │ ├── k8s_audit.go │ │ │ └── k8s_audit_test.go │ │ ├── loki │ │ │ ├── entry.go │ │ │ ├── internal │ │ │ │ └── lokiclient │ │ │ │ │ ├── loki_client.go │ │ │ │ │ └── types.go │ │ │ ├── loki.go │ │ │ ├── loki_test.go │ │ │ ├── timestamp.go │ │ │ └── timestamp_test.go │ │ ├── s3 │ │ │ ├── s3.go │ │ │ └── s3_test.go │ │ ├── syslog │ │ │ ├── internal │ │ │ │ ├── parser │ │ │ │ │ ├── rfc3164 │ │ │ │ │ │ ├── parse.go │ │ │ │ │ │ ├── parse_test.go │ │ │ │ │ │ └── perf_test.go │ │ │ │ │ ├── rfc5424 │ │ │ │ │ │ ├── parse.go │ │ │ │ │ │ ├── parse_test.go │ │ │ │ │ │ └── perf_test.go │ │ │ │ │ └── utils │ │ │ │ │ │ └── utils.go │ │ │ │ └── server │ │ │ │ │ └── syslogserver.go │ │ │ ├── syslog.go │ │ │ └── syslog_test.go │ │ ├── victorialogs │ │ │ ├── internal │ │ │ │ └── vlclient │ │ │ │ │ ├── types.go │ │ │ │ │ └── vl_client.go │ │ │ ├── victorialogs.go │ │ │ └── victorialogs_test.go │ │ └── wineventlog │ │ │ ├── testdata │ │ │ └── Setup.evtx │ │ │ ├── wineventlog.go │ │ │ ├── wineventlog_windows.go │ │ │ └── wineventlog_windows_test.go │ ├── s3.go │ ├── syslog.go │ ├── testdata │ │ ├── backward_compat.yaml │ │ ├── bad_filetype.yaml │ │ ├── bad_source.yaml │ │ ├── badyaml.yaml │ │ ├── basic_filemode.yaml │ │ ├── emptyitem.yaml │ │ ├── env.yaml │ │ └── missing_labels.yaml │ ├── victorialogs.go │ └── wineventlog.go ├── alertcontext │ ├── alertcontext.go │ ├── alertcontext_test.go │ └── config.go ├── apiclient │ ├── alerts_service.go │ ├── alerts_service_test.go │ ├── allowlists_service.go │ ├── auth_jwt.go │ ├── auth_key.go │ ├── auth_key_test.go │ ├── auth_retry.go │ ├── auth_service.go │ ├── auth_service_test.go │ ├── client.go │ ├── client_http.go │ ├── client_http_test.go │ ├── client_test.go │ ├── clone.go │ ├── config.go │ ├── decisions_service.go │ ├── decisions_service_test.go │ ├── decisions_sync_service.go │ ├── heartbeat.go │ ├── metrics.go │ ├── resperr.go │ ├── retry_config.go │ ├── signal.go │ ├── usagemetrics.go │ └── useragent │ │ └── useragent.go ├── apiserver │ ├── alerts_test.go │ ├── allowlists_test.go │ ├── api_key_test.go │ ├── apic.go │ ├── apic_metrics.go │ ├── apic_metrics_test.go │ ├── apic_test.go │ ├── apiserver.go │ ├── apiserver_test.go │ ├── controllers │ │ ├── controller.go │ │ └── v1 │ │ │ ├── alerts.go │ │ │ ├── allowlist.go │ │ │ ├── controller.go │ │ │ ├── decisions.go │ │ │ ├── errors.go │ │ │ ├── errors_test.go │ │ │ ├── heartbeat.go │ │ │ ├── machines.go │ │ │ ├── metrics.go │ │ │ ├── usagemetrics.go │ │ │ └── utils.go │ ├── decisions_test.go │ ├── heartbeat_test.go │ ├── jwt_test.go │ ├── machines_test.go │ ├── middlewares │ │ └── v1 │ │ │ ├── api_key.go │ │ │ ├── cache.go │ │ │ ├── crl.go │ │ │ ├── jwt.go │ │ │ ├── middlewares.go 
│ │ │ ├── ocsp.go │ │ │ └── tls_auth.go │ ├── papi.go │ ├── papi_cmd.go │ ├── tests │ │ ├── alertWithInvalidMachineID_sample.json │ │ ├── alert_allowlisted.json │ │ ├── alert_allowlisted_expired.json │ │ ├── alert_bulk.json │ │ ├── alert_duplicate.json │ │ ├── alert_minibulk+simul.json │ │ ├── alert_minibulk.json │ │ ├── alert_sample.json │ │ ├── alert_ssh-bf.json │ │ ├── alert_stream_fixture.json │ │ ├── invalidAlert_sample.json │ │ └── profiles.yaml │ └── usage_metrics_test.go ├── appsec │ ├── allowlists │ │ ├── allowlists.go │ │ └── allowlists_test.go │ ├── appsec.go │ ├── appsec_rule │ │ ├── appsec_rule.go │ │ ├── modsec_rule_test.go │ │ ├── modsecurity.go │ │ └── types.go │ ├── appsec_rules_collection.go │ ├── coraza_logger.go │ ├── ja4h │ │ ├── ja4h.go │ │ └── ja4h_test.go │ ├── loader.go │ ├── request.go │ ├── request_test.go │ ├── tx.go │ └── waf_helpers.go ├── cache │ ├── cache.go │ └── cache_test.go ├── csconfig │ ├── api.go │ ├── api_test.go │ ├── common.go │ ├── config.go │ ├── config_paths.go │ ├── config_test.go │ ├── console.go │ ├── crowdsec_service.go │ ├── crowdsec_service_test.go │ ├── cscli.go │ ├── cscli_test.go │ ├── database.go │ ├── database_test.go │ ├── fflag.go │ ├── hub.go │ ├── hub_test.go │ ├── plugin_config.go │ ├── profiles.go │ ├── prometheus.go │ ├── simulation.go │ ├── simulation_test.go │ ├── testdata │ │ ├── acquis.yaml │ │ ├── acquis │ │ │ └── acquis.yaml │ │ ├── bad_lapi-secrets.yaml │ │ ├── bad_online-api-secrets.yaml │ │ ├── config.yaml │ │ ├── context.yaml │ │ ├── lapi-secrets.yaml │ │ ├── online-api-secrets.yaml │ │ ├── profiles.yaml │ │ └── simulation.yaml │ └── tls.go ├── csnet │ └── socket.go ├── csplugin │ ├── broker.go │ ├── broker_suite_test.go │ ├── broker_test.go │ ├── broker_win_test.go │ ├── hclog_adapter.go │ ├── helpers.go │ ├── listfiles.go │ ├── listfiles_test.go │ ├── notifier.go │ ├── testdata │ │ └── dummy.yaml │ ├── utils.go │ ├── utils_js.go │ ├── utils_test.go │ ├── utils_windows.go │ ├── utils_windows_test.go │ ├── watcher.go │ └── watcher_test.go ├── csprofiles │ ├── csprofiles.go │ └── csprofiles_test.go ├── cticlient │ ├── client.go │ ├── client_test.go │ ├── cti_test.go │ ├── example │ │ └── fire.go │ ├── pagination.go │ ├── tests │ │ ├── fire-page1.json │ │ └── fire-page2.json │ ├── types.go │ └── types_test.go ├── cwhub │ ├── cwhub.go │ ├── cwhub_test.go │ ├── doc.go │ ├── download.go │ ├── download_test.go │ ├── fetch.go │ ├── hub.go │ ├── hub_test.go │ ├── item.go │ ├── item_test.go │ ├── iteminstall_test.go │ ├── itemupgrade_test.go │ ├── pathseparator_unix.go │ ├── pathseparator_windows.go │ ├── relativepath.go │ ├── relativepath_test.go │ ├── state.go │ ├── state_test.go │ ├── sync.go │ └── testdata │ │ ├── collection_v1.yaml │ │ ├── collection_v2.yaml │ │ ├── foobar_parser.yaml │ │ ├── index1.json │ │ └── index2.json ├── cwversion │ ├── component │ │ └── component.go │ ├── constraint │ │ └── constraint.go │ ├── version.go │ └── version_test.go ├── database │ ├── alertfilter.go │ ├── alerts.go │ ├── allowlists.go │ ├── allowlists_test.go │ ├── bouncers.go │ ├── config.go │ ├── database.go │ ├── decisions.go │ ├── ent │ │ ├── alert.go │ │ ├── alert │ │ │ ├── alert.go │ │ │ └── where.go │ │ ├── alert_create.go │ │ ├── alert_delete.go │ │ ├── alert_query.go │ │ ├── alert_update.go │ │ ├── allowlist.go │ │ ├── allowlist │ │ │ ├── allowlist.go │ │ │ └── where.go │ │ ├── allowlist_create.go │ │ ├── allowlist_delete.go │ │ ├── allowlist_query.go │ │ ├── allowlist_update.go │ │ ├── allowlistitem.go │ │ ├── allowlistitem │ │ 
│ ├── allowlistitem.go │ │ │ └── where.go │ │ ├── allowlistitem_create.go │ │ ├── allowlistitem_delete.go │ │ ├── allowlistitem_query.go │ │ ├── allowlistitem_update.go │ │ ├── bouncer.go │ │ ├── bouncer │ │ │ ├── bouncer.go │ │ │ └── where.go │ │ ├── bouncer_create.go │ │ ├── bouncer_delete.go │ │ ├── bouncer_query.go │ │ ├── bouncer_update.go │ │ ├── client.go │ │ ├── configitem.go │ │ ├── configitem │ │ │ ├── configitem.go │ │ │ └── where.go │ │ ├── configitem_create.go │ │ ├── configitem_delete.go │ │ ├── configitem_query.go │ │ ├── configitem_update.go │ │ ├── decision.go │ │ ├── decision │ │ │ ├── decision.go │ │ │ └── where.go │ │ ├── decision_create.go │ │ ├── decision_delete.go │ │ ├── decision_query.go │ │ ├── decision_update.go │ │ ├── ent.go │ │ ├── enttest │ │ │ └── enttest.go │ │ ├── event.go │ │ ├── event │ │ │ ├── event.go │ │ │ └── where.go │ │ ├── event_create.go │ │ ├── event_delete.go │ │ ├── event_query.go │ │ ├── event_update.go │ │ ├── generate.go │ │ ├── helpers.go │ │ ├── hook │ │ │ └── hook.go │ │ ├── lock.go │ │ ├── lock │ │ │ ├── lock.go │ │ │ └── where.go │ │ ├── lock_create.go │ │ ├── lock_delete.go │ │ ├── lock_query.go │ │ ├── lock_update.go │ │ ├── machine.go │ │ ├── machine │ │ │ ├── machine.go │ │ │ └── where.go │ │ ├── machine_create.go │ │ ├── machine_delete.go │ │ ├── machine_query.go │ │ ├── machine_update.go │ │ ├── meta.go │ │ ├── meta │ │ │ ├── meta.go │ │ │ └── where.go │ │ ├── meta_create.go │ │ ├── meta_delete.go │ │ ├── meta_query.go │ │ ├── meta_update.go │ │ ├── metric.go │ │ ├── metric │ │ │ ├── metric.go │ │ │ └── where.go │ │ ├── metric_create.go │ │ ├── metric_delete.go │ │ ├── metric_query.go │ │ ├── metric_update.go │ │ ├── migrate │ │ │ ├── migrate.go │ │ │ └── schema.go │ │ ├── mutation.go │ │ ├── predicate │ │ │ └── predicate.go │ │ ├── runtime.go │ │ ├── runtime │ │ │ └── runtime.go │ │ ├── schema │ │ │ ├── alert.go │ │ │ ├── allowlist.go │ │ │ ├── allowlist_item.go │ │ │ ├── bouncer.go │ │ │ ├── config.go │ │ │ ├── decision.go │ │ │ ├── event.go │ │ │ ├── lock.go │ │ │ ├── machine.go │ │ │ ├── meta.go │ │ │ └── metric.go │ │ └── tx.go │ ├── errors.go │ ├── file_utils.go │ ├── file_utils_windows.go │ ├── flush.go │ ├── lock.go │ ├── machines.go │ ├── metrics.go │ └── utils.go ├── dumps │ ├── bucket_dump.go │ └── parser_dump.go ├── emoji │ └── emoji.go ├── exprhelpers │ ├── crowdsec_cti.go │ ├── crowdsec_cti_test.go │ ├── debugger.go │ ├── debugger_test.go │ ├── debuggerstub_test.go │ ├── expr_lib.go │ ├── exprlib_test.go │ ├── geoip.go │ ├── helpers.go │ ├── jsonextract.go │ ├── jsonextract_test.go │ ├── libinjection.go │ ├── libinjection_test.go │ ├── strings.go │ ├── tests │ │ ├── test_data.txt │ │ ├── test_data_no_type.txt │ │ ├── test_data_re.txt │ │ └── test_empty_line.txt │ ├── waf.go │ ├── waf_test.go │ ├── xml.go │ └── xml_test.go ├── fflag │ ├── crowdsec.go │ ├── features.go │ └── features_test.go ├── hubops │ ├── colorize.go │ ├── datarefresh.go │ ├── disable.go │ ├── doc.go │ ├── download.go │ ├── enable.go │ ├── plan.go │ └── purge.go ├── hubtest │ ├── appsecrule.go │ ├── coverage.go │ ├── helpers.go │ ├── hubtest.go │ ├── hubtest_item.go │ ├── nucleirunner.go │ ├── parser.go │ ├── parser_assert.go │ ├── postoverflow.go │ ├── regexp.go │ ├── scenario.go │ ├── scenario_assert.go │ ├── utils.go │ └── utils_test.go ├── leakybucket │ ├── README.md │ ├── bayesian.go │ ├── blackhole.go │ ├── bucket.go │ ├── buckets.go │ ├── buckets_test.go │ ├── conditional.go │ ├── manager_load.go │ ├── manager_load_test.go │ ├── 
manager_run.go │ ├── manager_run_test.go │ ├── overflow_filter.go │ ├── overflows.go │ ├── processor.go │ ├── reset_filter.go │ ├── tests │ │ ├── conditional-bucket │ │ │ ├── bucket.yaml │ │ │ ├── scenarios.yaml │ │ │ └── test.json │ │ ├── guillotine-bayesian-bucket │ │ │ ├── bucket.yaml │ │ │ ├── scenarios.yaml │ │ │ └── test.json │ │ ├── hub │ │ │ └── index.json │ │ ├── leaky-fixedqueue │ │ │ ├── bucket.yaml │ │ │ ├── scenarios.yaml │ │ │ └── test.json │ │ ├── leaky-scope-range-expression │ │ │ ├── bucket.yaml │ │ │ ├── scenarios.yaml │ │ │ └── test.json │ │ ├── multiple-bayesian-bucket │ │ │ ├── bucket.yaml │ │ │ ├── scenarios.yaml │ │ │ └── test.json │ │ ├── overflow-with-meta-and-information │ │ │ ├── bucket.yaml │ │ │ ├── scenarios.yaml │ │ │ └── test.json │ │ ├── overflow-with-meta │ │ │ ├── bucket.yaml │ │ │ ├── scenarios.yaml │ │ │ └── test.json │ │ ├── simple-bayesian-bucket │ │ │ ├── bucket.yaml │ │ │ ├── scenarios.yaml │ │ │ └── test.json │ │ ├── simple-counter-bh │ │ │ ├── bucket.yaml │ │ │ ├── scenarios.yaml │ │ │ └── test.json │ │ ├── simple-counter-timeout │ │ │ ├── bucket.yaml │ │ │ ├── scenarios.yaml │ │ │ └── test.json │ │ ├── simple-counter │ │ │ ├── bucket.yaml │ │ │ ├── scenarios.yaml │ │ │ └── test.json │ │ ├── simple-leaky-blackhole │ │ │ ├── bucket.yaml │ │ │ ├── scenarios.yaml │ │ │ └── test.json │ │ ├── simple-leaky-cancel_on │ │ │ ├── bucket.yaml │ │ │ ├── scenarios.yaml │ │ │ └── test.json │ │ ├── simple-leaky-overflow │ │ │ ├── bucket.yaml │ │ │ ├── scenarios.yaml │ │ │ └── test.json │ │ ├── simple-leaky-ovflwfilter │ │ │ ├── bucket.yaml │ │ │ ├── scenarios.yaml │ │ │ └── test.json │ │ ├── simple-leaky-underflow │ │ │ ├── bucket.yaml │ │ │ ├── scenarios.yaml │ │ │ └── test.json │ │ ├── simple-leaky-uniq-cachesize │ │ │ ├── bucket.yaml │ │ │ ├── scenarios.yaml │ │ │ └── test.json │ │ ├── simple-leaky-uniq-w-buckets_state │ │ │ ├── bucket.yaml │ │ │ ├── in-buckets_state.json │ │ │ ├── scenarios.yaml │ │ │ └── test.json │ │ ├── simple-leaky-uniq │ │ │ ├── bucket.yaml │ │ │ ├── scenarios.yaml │ │ │ └── test.json │ │ ├── simple-trigger-external-data │ │ │ ├── bucket.yaml │ │ │ ├── scenarios.yaml │ │ │ ├── simple_patterns.txt │ │ │ └── test.json │ │ ├── simple-trigger-reprocess │ │ │ ├── bucket.yaml │ │ │ ├── reprocess.yaml │ │ │ ├── scenarios.yaml │ │ │ └── test.json │ │ └── simple-trigger │ │ │ ├── bucket.yaml │ │ │ ├── scenarios.yaml │ │ │ └── test.json │ ├── timemachine.go │ ├── trigger.go │ └── uniq.go ├── longpollclient │ └── client.go ├── metabase │ ├── api.go │ ├── container.go │ ├── database.go │ └── metabase.go ├── models │ ├── add_alerts_request.go │ ├── add_alerts_response.go │ ├── add_signals_request.go │ ├── add_signals_request_item.go │ ├── add_signals_request_item_decisions.go │ ├── add_signals_request_item_decisions_item.go │ ├── add_signals_request_item_source.go │ ├── alert.go │ ├── all_metrics.go │ ├── allowlist_item.go │ ├── base_metrics.go │ ├── bulk_check_allowlist_request.go │ ├── bulk_check_allowlist_response.go │ ├── bulk_check_allowlist_result.go │ ├── check_allowlist_response.go │ ├── console_options.go │ ├── decision.go │ ├── decisions_delete_request.go │ ├── decisions_delete_request_item.go │ ├── decisions_stream_response.go │ ├── delete_alerts_response.go │ ├── delete_decision_response.go │ ├── detailed_metrics.go │ ├── error_response.go │ ├── event.go │ ├── flush_decision_response.go │ ├── generate.go │ ├── get_alerts_response.go │ ├── get_allowlist_response.go │ ├── get_allowlists_response.go │ ├── get_decisions_response.go │ ├── 
helpers.go │ ├── hub_item.go │ ├── hub_items.go │ ├── lapi_metrics.go │ ├── localapi_swagger.yaml │ ├── log_processors_metrics.go │ ├── meta.go │ ├── metrics.go │ ├── metrics_agent_info.go │ ├── metrics_bouncer_info.go │ ├── metrics_detail_item.go │ ├── metrics_labels.go │ ├── metrics_meta.go │ ├── o_sversion.go │ ├── remediation_components_metrics.go │ ├── source.go │ ├── success_response.go │ ├── topx_response.go │ ├── watcher_auth_request.go │ ├── watcher_auth_response.go │ └── watcher_registration_request.go ├── modelscapi │ ├── add_signals_request.go │ ├── add_signals_request_item.go │ ├── add_signals_request_item_decisions.go │ ├── add_signals_request_item_decisions_item.go │ ├── add_signals_request_item_source.go │ ├── allowlist_link.go │ ├── blocklist_link.go │ ├── centralapi_swagger.yaml │ ├── decisions_delete_request.go │ ├── decisions_delete_request_item.go │ ├── decisions_sync_request.go │ ├── decisions_sync_request_item.go │ ├── decisions_sync_request_item_decisions.go │ ├── decisions_sync_request_item_decisions_item.go │ ├── decisions_sync_request_item_source.go │ ├── enroll_request.go │ ├── error_response.go │ ├── generate.go │ ├── get_decisions_stream_response.go │ ├── get_decisions_stream_response_deleted.go │ ├── get_decisions_stream_response_deleted_item.go │ ├── get_decisions_stream_response_links.go │ ├── get_decisions_stream_response_new.go │ ├── get_decisions_stream_response_new_item.go │ ├── login_request.go │ ├── login_response.go │ ├── metrics_request.go │ ├── metrics_request_bouncers_item.go │ ├── metrics_request_machines_item.go │ ├── register_request.go │ ├── reset_password_request.go │ └── success_response.go ├── parser │ ├── README.md │ ├── enrich.go │ ├── enrich_date.go │ ├── enrich_date_test.go │ ├── enrich_dns.go │ ├── enrich_geoip.go │ ├── enrich_unmarshal.go │ ├── grok_pattern.go │ ├── node.go │ ├── node_test.go │ ├── parsing_test.go │ ├── runtime.go │ ├── stage.go │ ├── test_data │ │ ├── GeoLite2-ASN.mmdb │ │ └── GeoLite2-City.mmdb │ ├── tests │ │ ├── base-grok-expression │ │ │ ├── base-grok.yaml │ │ │ ├── parsers.yaml │ │ │ └── test.yaml │ │ ├── base-grok-external-data │ │ │ ├── base-grok.yaml │ │ │ ├── parsers.yaml │ │ │ └── test.yaml │ │ ├── base-grok-import │ │ │ ├── base-grok.yaml │ │ │ ├── parsers.yaml │ │ │ └── test.yaml │ │ ├── base-grok-no-subnode │ │ │ ├── base-grok.yaml │ │ │ ├── parsers.yaml │ │ │ └── test.yaml │ │ ├── base-grok-stash │ │ │ ├── base-grok-stash.yaml │ │ │ ├── parsers.yaml │ │ │ └── test.yaml │ │ ├── base-grok │ │ │ ├── base-grok.yaml │ │ │ ├── parsers.yaml │ │ │ └── test.yaml │ │ ├── base-json-extract │ │ │ ├── base-grok.yaml │ │ │ ├── base-grok2.yaml │ │ │ ├── parsers.yaml │ │ │ └── test.yaml │ │ ├── base-tree │ │ │ ├── base-grok.yaml │ │ │ ├── parsers.yaml │ │ │ └── test.yaml │ │ ├── dateparser-enrich │ │ │ ├── base-grok.yaml │ │ │ ├── parsers.yaml │ │ │ └── test.yaml │ │ ├── geoip-enrich │ │ │ ├── base-grok.yaml │ │ │ ├── parsers.yaml │ │ │ └── test.yaml │ │ ├── json-unmarshal │ │ │ ├── base-parser.yaml │ │ │ ├── parsers.yaml │ │ │ └── test.yaml │ │ ├── multi-stage-grok │ │ │ ├── base-grok-s00.yaml │ │ │ ├── base-grok-s01.yaml │ │ │ ├── parsers.yaml │ │ │ └── test.yaml │ │ ├── reverse-dns-enrich │ │ │ ├── base-grok.yaml │ │ │ ├── parsers.yaml │ │ │ └── test.yaml │ │ ├── sample_strings.txt │ │ └── whitelist-base │ │ │ ├── base-grok.yaml │ │ │ ├── parsers.yaml │ │ │ └── test.yaml │ ├── unix_parser.go │ ├── whitelist.go │ └── whitelist_test.go ├── protobufs │ ├── README.md │ ├── generate.go │ ├── notifier.pb.go │ ├── 
notifier.proto │ └── notifier_grpc.pb.go ├── setup │ ├── README.md │ ├── detect.go │ ├── detect_test.go │ ├── export_test.go │ ├── install.go │ ├── units.go │ └── units_test.go ├── time │ ├── AUTHORS │ ├── CONTRIBUTING.md │ ├── CONTRIBUTORS │ ├── LICENSE │ ├── PATENTS │ ├── README.md │ └── rate │ │ ├── rate.go │ │ └── rate_test.go └── types │ ├── appsec_event.go │ ├── constants.go │ ├── datasource.go │ ├── event.go │ ├── event_test.go │ ├── getfstype.go │ ├── getfstype_freebsd.go │ ├── getfstype_openbsd.go │ ├── getfstype_windows.go │ ├── ip.go │ ├── ip_test.go │ ├── line.go │ ├── queue.go │ └── utils.go ├── rpm ├── SOURCES │ ├── 80-crowdsec.preset │ └── user.patch └── SPECS │ └── crowdsec.spec ├── scripts ├── test_env.ps1 ├── test_env.sh └── test_wizard_upgrade.sh ├── test ├── .gitignore ├── README.md ├── ansible │ ├── .gitignore │ ├── README.md │ ├── ansible.cfg │ ├── debug_tools.yml │ ├── env │ │ ├── example.sh │ │ ├── pkg-sqlite.sh │ │ ├── source-mysql.sh │ │ ├── source-pgx.sh │ │ ├── source-postgres.sh │ │ └── source-sqlite.sh │ ├── install_binary_package.yml │ ├── prepare-run │ ├── prepare_tests.yml │ ├── provision_dependencies.yml │ ├── provision_test_suite.yml │ ├── requirements.yml │ ├── roles │ │ ├── make_fixture │ │ │ ├── tasks │ │ │ │ └── main.yml │ │ │ └── vars │ │ │ │ └── main.yml │ │ └── run_func_tests │ │ │ ├── tasks │ │ │ └── main.yml │ │ │ └── vars │ │ │ └── main.yml │ ├── run_all.yml │ ├── run_tests.yml │ ├── run_wizard_tests.yml │ ├── vagrant │ │ ├── alma-8 │ │ │ └── Vagrantfile │ │ ├── alma-9 │ │ │ └── Vagrantfile │ │ ├── centos-7 │ │ │ ├── Vagrantfile │ │ │ └── skip │ │ ├── centos-8 │ │ │ └── Vagrantfile │ │ ├── centos-9 │ │ │ └── Vagrantfile │ │ ├── common │ │ ├── debian-10-buster │ │ │ └── Vagrantfile │ │ ├── debian-11-bullseye │ │ │ └── Vagrantfile │ │ ├── debian-12-bookworm │ │ │ └── Vagrantfile │ │ ├── debian-9-stretch │ │ │ ├── Vagrantfile │ │ │ └── skip │ │ ├── debian-testing │ │ │ └── Vagrantfile │ │ ├── experimental │ │ │ ├── alpine-3.16 │ │ │ │ ├── Vagrantfile │ │ │ │ ├── bootstrap │ │ │ │ └── skip │ │ │ ├── amazon-linux-2 │ │ │ │ ├── Vagrantfile │ │ │ │ └── issues.txt │ │ │ ├── arch │ │ │ │ └── Vagrantfile │ │ │ ├── devuan-3 │ │ │ │ ├── Vagrantfile │ │ │ │ └── skip │ │ │ ├── dragonflybsd-6 │ │ │ │ └── Vagrantfile │ │ │ ├── gentoo │ │ │ │ ├── Vagrantfile │ │ │ │ └── bootstrap │ │ │ ├── hardenedbsd-13 │ │ │ │ ├── Vagrantfile │ │ │ │ └── skip │ │ │ ├── netbsd-9 │ │ │ │ └── Vagrantfile │ │ │ ├── openbsd-6 │ │ │ │ ├── Vagrantfile │ │ │ │ └── skip │ │ │ ├── openbsd-7 │ │ │ │ ├── Vagrantfile │ │ │ │ └── skip │ │ │ ├── opensuse-15.6 │ │ │ │ ├── Vagrantfile │ │ │ │ └── bootstrap │ │ │ └── ubuntu-14.04-trusty │ │ │ │ └── Vagrantfile │ │ ├── fedora-35 │ │ │ ├── Vagrantfile │ │ │ └── skip │ │ ├── fedora-36 │ │ │ ├── Vagrantfile │ │ │ └── skip │ │ ├── fedora-37 │ │ │ ├── Vagrantfile │ │ │ └── skip │ │ ├── fedora-38 │ │ │ ├── Vagrantfile │ │ │ └── skip │ │ ├── fedora-39 │ │ │ ├── Vagrantfile │ │ │ └── skip │ │ ├── fedora-40 │ │ │ ├── Vagrantfile │ │ │ └── skip │ │ ├── fedora-41 │ │ │ ├── Vagrantfile │ │ │ └── skip │ │ ├── freebsd-12 │ │ │ ├── Vagrantfile │ │ │ └── skip │ │ ├── freebsd-13 │ │ │ ├── Vagrantfile │ │ │ └── skip │ │ ├── opensuse-leap-15 │ │ │ ├── Vagrantfile │ │ │ └── skip │ │ ├── oracle-7 │ │ │ ├── Vagrantfile │ │ │ └── skip │ │ ├── oracle-8 │ │ │ └── Vagrantfile │ │ ├── oracle-9 │ │ │ └── Vagrantfile │ │ ├── rocky-8 │ │ │ └── Vagrantfile │ │ ├── rocky-9 │ │ │ └── Vagrantfile │ │ ├── ubuntu-16.04-xenial │ │ │ ├── Vagrantfile │ │ │ └── skip │ │ ├── 
ubuntu-18.04-bionic │ │ │ └── Vagrantfile │ │ ├── ubuntu-20.04-focal │ │ │ └── Vagrantfile │ │ ├── ubuntu-22.04-jammy │ │ │ └── Vagrantfile │ │ ├── ubuntu-22.10-kinetic │ │ │ └── Vagrantfile │ │ ├── ubuntu-23.04-lunar │ │ │ └── Vagrantfile │ │ ├── ubuntu-24-04-noble │ │ │ └── Vagrantfile │ │ └── wizard │ │ │ ├── centos-8 │ │ │ └── Vagrantfile │ │ │ ├── common │ │ │ ├── debian-10-buster │ │ │ └── Vagrantfile │ │ │ ├── debian-11-bullseye │ │ │ └── Vagrantfile │ │ │ ├── debian-12-bookworm │ │ │ └── Vagrantfile │ │ │ ├── fedora-36 │ │ │ └── Vagrantfile │ │ │ ├── ubuntu-22.04-jammy │ │ │ └── Vagrantfile │ │ │ └── ubuntu-22.10-kinetic │ │ │ └── Vagrantfile │ └── vars │ │ ├── go.yml │ │ ├── mysql.yml │ │ ├── postgres.yml │ │ └── python.yml ├── bats-detect │ ├── WARNING.md │ ├── apache2-deb.bats │ ├── apache2-rpm.bats │ ├── asterisk-deb.bats │ ├── asterisk-rpm.bats │ ├── caddy-deb.bats │ ├── caddy-rpm.bats │ ├── dovecot-deb.bats │ ├── dovecot-rpm.bats │ ├── emby-deb.bats │ ├── emby-rpm.bats │ ├── endlessh-deb.bats │ ├── endlessh-rpm.bats │ ├── gitea.bats │ ├── haproxy-deb.bats │ ├── haproxy-rpm.bats │ ├── lemonldap-deb.bats │ ├── lemonldap-rpm.bats │ ├── lib │ │ └── setup_file_detect.sh │ ├── litespeed.bats │ ├── mariadb-deb.bats │ ├── mariadb-rpm.bats │ ├── mysql-deb.bats │ ├── mysql-rpm.bats │ ├── nginx-deb.bats │ ├── nginx-rpm.bats │ ├── odoo-deb.bats │ ├── odoo-rpm.bats │ ├── ombi-deb.bats │ ├── openresty-deb.bats │ ├── openresty-rpm.bats │ ├── pgsql-deb.bats │ ├── pgsql-rpm.bats │ ├── postfix-deb.bats │ ├── postfix-rpm.bats │ ├── proftpd-deb.bats │ ├── proftpd-rpm.bats │ ├── proxmox-deb.bats │ ├── pureftpd-deb.bats │ ├── pureftpd-rpm.bats │ ├── smb-deb.bats │ ├── smb-rpm.bats │ ├── sshd-deb.bats │ ├── sshd-rpm.bats │ ├── suricata-deb.bats │ ├── suricata-rpm.bats │ ├── testdata │ │ └── enable_lst_debian_repo.sh │ ├── vsftpd-deb.bats │ └── vsftpd-rpm.bats ├── bats.mk ├── bats │ ├── 00_wait_for.bats │ ├── 01_crowdsec.bats │ ├── 01_crowdsec_lapi.bats │ ├── 01_cscli.bats │ ├── 01_cscli_lapi.bats │ ├── 02_nolapi.bats │ ├── 03_noagent.bats │ ├── 04_capi.bats │ ├── 04_nocapi.bats │ ├── 05_config_yaml_local.bats │ ├── 07_setup.bats │ ├── 08_metrics.bats │ ├── 08_metrics_bouncer.bats │ ├── 08_metrics_machines.bats │ ├── 09_console.bats │ ├── 09_context.bats │ ├── 09_socket.bats │ ├── 10_bouncers.bats │ ├── 11_bouncers_tls.bats │ ├── 12_notifications.bats │ ├── 13_capi_whitelists.bats │ ├── 20_hub.bats │ ├── 20_hub_collections_dep.bats │ ├── 20_hub_items.bats │ ├── 30_machines.bats │ ├── 30_machines_tls.bats │ ├── 40_cold-logs.bats │ ├── 40_live-ban.bats │ ├── 50_simulation.bats │ ├── 70_plugin_http.bats │ ├── 71_plugin_dummy.bats │ ├── 72_plugin_badconfig.bats │ ├── 73_plugin_formatting.bats │ ├── 80_alerts.bats │ ├── 81_alert_context.bats │ ├── 90_decisions.bats │ ├── 97_ipv4_single.bats │ ├── 97_ipv6_single.bats │ ├── 98_ipv4_range.bats │ ├── 98_ipv6_range.bats │ ├── 99_lapi-stream-mode-scenario.bats │ ├── 99_lapi-stream-mode-scopes.bats │ ├── 99_lapi-stream-mode.bats │ ├── appsec.bats │ ├── crowdsec-acquisition.bats │ ├── cscli-allowlists.bats │ ├── cscli-hubtype-inspect.bats │ ├── cscli-hubtype-install.bats │ ├── cscli-hubtype-list.bats │ ├── cscli-hubtype-remove.bats │ ├── cscli-hubtype-upgrade.bats │ ├── cscli-parsers.bats │ ├── cscli-postoverflows.bats │ ├── hub-index.bats │ ├── reformat │ ├── sql.bats │ └── testdata │ │ ├── 07_setup │ │ └── detect.yaml │ │ ├── 90_decisions │ │ ├── csv_decisions │ │ ├── decisions.csv │ │ ├── decisions.json │ │ └── json_decisions │ │ ├── cfssl │ │ ├── agent.json 
│ │ ├── agent_invalid.json │ │ ├── bouncer.json │ │ ├── bouncer_invalid.json │ │ ├── ca_intermediate.json │ │ ├── ca_root.json │ │ ├── profiles.json │ │ └── server.json │ │ └── explain │ │ └── explain-log.txt ├── bin │ ├── assert-crowdsec-not-running │ ├── check-requirements │ ├── collect-hub-coverage │ ├── decode-jwt │ ├── mock-http.py │ ├── preload-hub-items │ ├── remove-all-hub-items │ ├── wait-for │ └── wait-for-port ├── coverage │ └── .do-not-remove ├── disable-capi ├── enable-capi ├── instance-crowdsec ├── instance-data ├── instance-db ├── instance-mock-http ├── lib │ ├── color-formatter │ ├── config │ │ ├── config-global │ │ └── config-local │ ├── db │ │ ├── instance-mysql │ │ ├── instance-pgx │ │ ├── instance-postgres │ │ └── instance-sqlite │ ├── init │ │ ├── crowdsec-daemon │ │ └── crowdsec-systemd │ ├── setup.sh │ ├── setup_file.sh │ └── teardown_file.sh ├── localstack │ ├── docker-compose.yml │ └── scripts │ │ └── init_script.sh ├── run-tests └── tools │ └── .do-not-remove ├── windows ├── Chocolatey │ └── crowdsec │ │ ├── ReadMe.md │ │ ├── crowdsec.nuspec │ │ └── tools │ │ ├── LICENSE.txt │ │ ├── VERIFICATION.txt │ │ ├── chocolateybeforemodify.ps1 │ │ ├── chocolateyinstall.ps1 │ │ └── chocolateyuninstall.ps1 ├── README.md ├── install_dev_windows.ps1 ├── install_installer_windows.ps1 └── installer │ ├── WixUI_HK.wxs │ ├── crowdsec_icon.ico │ ├── crowdsec_msi_top_banner.bmp │ ├── installer_dialog.bmp │ └── product.wxs └── wizard.sh /.dockerignore: -------------------------------------------------------------------------------- 1 | # We include .git in the build context because excluding it would break the 2 | # "make release" target, which uses git to retrieve the build version and tag. 3 | #.git 4 | 5 | /tests 6 | /crowdsec-v* 7 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/config.yml: -------------------------------------------------------------------------------- 1 | contact_links: 2 | - name: Support Request 3 | url: https://discourse.crowdsec.net 4 | about: Support request or question relating to Crowdsec 5 | -------------------------------------------------------------------------------- /.github/buildkit.toml: -------------------------------------------------------------------------------- 1 | [worker.oci] 2 | # max-parallelism = 2 3 | -------------------------------------------------------------------------------- /.github/generate-codecov-yml.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Run this from the repository root: 4 | # 5 | # .github/generate-codecov-yml.sh > .github/codecov.yml 6 | 7 | cat < 72 { 19 | return errors.New("password too long (max 72 characters)") 20 | } 21 | 22 | *p = MachinePassword(v) 23 | 24 | return nil 25 | } 26 | 27 | func (p *MachinePassword) Type() string { 28 | return "string" 29 | } 30 | -------------------------------------------------------------------------------- /cmd/crowdsec-cli/climetrics/number.go: -------------------------------------------------------------------------------- 1 | package climetrics 2 | 3 | import ( 4 | "fmt" 5 | "math" 6 | "strconv" 7 | ) 8 | 9 | type unit struct { 10 | value int64 11 | symbol string 12 | } 13 | 14 | var ranges = []unit{ 15 | {value: 1e18, symbol: "E"}, 16 | {value: 1e15, symbol: "P"}, 17 | {value: 1e12, symbol: "T"}, 18 | {value: 1e9, symbol: "G"}, 19 | {value: 1e6, symbol: "M"}, 20 | {value: 1e3, symbol: "k"}, 21 | {value: 1, symbol: ""}, 22 | } 23 | 24 | func 
formatNumber(num int64, withUnit bool) string { 25 | if !withUnit { 26 | return strconv.FormatInt(num, 10) 27 | } 28 | 29 | goodUnit := ranges[len(ranges)-1] 30 | 31 | for _, u := range ranges { 32 | if num >= u.value { 33 | goodUnit = u 34 | break 35 | } 36 | } 37 | 38 | if goodUnit.value == 1 { 39 | return fmt.Sprintf("%d%s", num, goodUnit.symbol) 40 | } 41 | 42 | res := math.Round(float64(num)/float64(goodUnit.value)*100) / 100 43 | 44 | return fmt.Sprintf("%.2f%s", res, goodUnit.symbol) 45 | } 46 | -------------------------------------------------------------------------------- /cmd/crowdsec-cli/dashboard_unsupported.go: -------------------------------------------------------------------------------- 1 | //go:build !linux 2 | 3 | package main 4 | 5 | import ( 6 | "runtime" 7 | 8 | log "github.com/sirupsen/logrus" 9 | "github.com/spf13/cobra" 10 | ) 11 | 12 | type cliDashboard struct{ 13 | cfg configGetter 14 | } 15 | 16 | func NewCLIDashboard(cfg configGetter) *cliDashboard { 17 | return &cliDashboard{ 18 | cfg: cfg, 19 | } 20 | } 21 | 22 | func (cli cliDashboard) NewCommand() *cobra.Command { 23 | cmd := &cobra.Command{ 24 | Use: "dashboard", 25 | DisableAutoGenTag: true, 26 | Run: func(_ *cobra.Command, _ []string) { 27 | log.Infof("Dashboard command is disabled on %s", runtime.GOOS) 28 | }, 29 | } 30 | 31 | return cmd 32 | } 33 | -------------------------------------------------------------------------------- /cmd/crowdsec-cli/idgen/password.go: -------------------------------------------------------------------------------- 1 | package idgen 2 | 3 | import ( 4 | saferand "crypto/rand" 5 | "fmt" 6 | "math/big" 7 | ) 8 | 9 | const PasswordLength = 64 10 | 11 | func GeneratePassword(length int) (string, error) { 12 | upper := "ABCDEFGHIJKLMNOPQRSTUVWXY" 13 | lower := "abcdefghijklmnopqrstuvwxyz" 14 | digits := "0123456789" 15 | 16 | charset := upper + lower + digits 17 | charsetLength := len(charset) 18 | 19 | buf := make([]byte, length) 20 | 21 | for i := range length { 22 | rInt, err := saferand.Int(saferand.Reader, big.NewInt(int64(charsetLength))) 23 | if err != nil { 24 | return "", fmt.Errorf("prng failed to generate unique id or password: %w", err) 25 | } 26 | 27 | buf[i] = charset[rInt.Int64()] 28 | } 29 | 30 | return string(buf), nil 31 | } 32 | -------------------------------------------------------------------------------- /cmd/crowdsec-cli/reload/message.go: -------------------------------------------------------------------------------- 1 | //go:build !windows && !freebsd && !linux 2 | 3 | package reload 4 | 5 | // generic message since we don't know the platform 6 | const message = "Please reload the crowdsec process for the new configuration to be effective." 7 | -------------------------------------------------------------------------------- /cmd/crowdsec-cli/reload/message_freebsd.go: -------------------------------------------------------------------------------- 1 | package reload 2 | 3 | // actually sudo is not that popular on freebsd, but this will do 4 | const message = "Run 'sudo service crowdsec reload' for the new configuration to be effective." 5 | -------------------------------------------------------------------------------- /cmd/crowdsec-cli/reload/message_linux.go: -------------------------------------------------------------------------------- 1 | package reload 2 | 3 | // assume systemd, although gentoo and others may differ 4 | const message = "Run 'sudo systemctl reload crowdsec' for the new configuration to be effective." 
5 | -------------------------------------------------------------------------------- /cmd/crowdsec-cli/reload/message_windows.go: -------------------------------------------------------------------------------- 1 | package reload 2 | 3 | const message = "Please restart the crowdsec service for the new configuration to be effective." 4 | -------------------------------------------------------------------------------- /cmd/crowdsec-cli/reload/reload.go: -------------------------------------------------------------------------------- 1 | package reload 2 | 3 | import ( 4 | "os" 5 | 6 | "github.com/crowdsecurity/go-cs-lib/version" 7 | isatty "github.com/mattn/go-isatty" 8 | ) 9 | 10 | func UserMessage() string { 11 | if version.System == "docker" { 12 | if isatty.IsTerminal(os.Stdout.Fd()) || isatty.IsCygwinTerminal(os.Stdout.Fd()) { 13 | return "You may need to restart the container to apply the changes." 14 | } 15 | 16 | return "" 17 | } 18 | 19 | return message 20 | } 21 | -------------------------------------------------------------------------------- /cmd/crowdsec-cli/setup.go: -------------------------------------------------------------------------------- 1 | //go:build !no_cscli_setup 2 | 3 | package main 4 | 5 | import ( 6 | "github.com/spf13/cobra" 7 | 8 | "github.com/crowdsecurity/crowdsec/cmd/crowdsec-cli/clisetup" 9 | "github.com/crowdsecurity/crowdsec/pkg/cwversion/component" 10 | "github.com/crowdsecurity/crowdsec/pkg/fflag" 11 | ) 12 | 13 | func (cli *cliRoot) addSetup(cmd *cobra.Command) { 14 | if fflag.CscliSetup.IsEnabled() { 15 | cmd.AddCommand(clisetup.New(cli.cfg).NewCommand()) 16 | } 17 | 18 | component.Register("cscli_setup") 19 | } 20 | -------------------------------------------------------------------------------- /cmd/crowdsec-cli/setup_stub.go: -------------------------------------------------------------------------------- 1 | //go:build no_cscli_setup 2 | package main 3 | 4 | import ( 5 | "github.com/spf13/cobra" 6 | ) 7 | 8 | func (cli *cliRoot) addSetup(_ *cobra.Command) { 9 | } 10 | -------------------------------------------------------------------------------- /cmd/crowdsec-cli/version.go: -------------------------------------------------------------------------------- 1 | package main 2 | 3 | import ( 4 | "os" 5 | 6 | "github.com/spf13/cobra" 7 | 8 | "github.com/crowdsecurity/crowdsec/cmd/crowdsec-cli/args" 9 | "github.com/crowdsecurity/crowdsec/pkg/cwversion" 10 | ) 11 | 12 | type cliVersion struct{} 13 | 14 | func NewCLIVersion() *cliVersion { 15 | return &cliVersion{} 16 | } 17 | 18 | func (cliVersion) NewCommand() *cobra.Command { 19 | cmd := &cobra.Command{ 20 | Use: "version", 21 | Short: "Display version", 22 | Args: args.NoArgs, 23 | DisableAutoGenTag: true, 24 | Run: func(_ *cobra.Command, _ []string) { 25 | _, _ = os.Stdout.WriteString(cwversion.FullString()) 26 | }, 27 | } 28 | 29 | return cmd 30 | } 31 | -------------------------------------------------------------------------------- /cmd/crowdsec/Makefile: -------------------------------------------------------------------------------- 1 | ifeq ($(OS), Windows_NT) 2 | SHELL := pwsh.exe 3 | .SHELLFLAGS := -NoProfile -Command 4 | EXT = .exe 5 | endif 6 | 7 | GO = go 8 | GOBUILD = $(GO) build 9 | GOTEST = $(GO) test 10 | 11 | CROWDSEC_BIN = crowdsec$(EXT) 12 | # names longer than 15 chars break 'pgrep' 13 | 14 | .PHONY: all 15 | all: clean test build 16 | 17 | build: clean 18 | $(GOBUILD) $(LD_OPTS) -o $(CROWDSEC_BIN) 19 | 20 | test: 21 | $(GOTEST) $(LD_OPTS) -v ./... 
22 | 23 | clean: 24 | @$(RM) $(CROWDSEC_BIN) $(WIN_IGNORE_ERR) 25 | -------------------------------------------------------------------------------- /cmd/crowdsec/appsec.go: -------------------------------------------------------------------------------- 1 | //go:build !no_datasource_appsec 2 | 3 | package main 4 | 5 | import ( 6 | "fmt" 7 | 8 | "github.com/crowdsecurity/crowdsec/pkg/appsec" 9 | "github.com/crowdsecurity/crowdsec/pkg/cwhub" 10 | ) 11 | 12 | func LoadAppsecRules(hub *cwhub.Hub) error { 13 | if err := appsec.LoadAppsecRules(hub); err != nil { 14 | return fmt.Errorf("while loading appsec rules: %w", err) 15 | } 16 | 17 | return nil 18 | } 19 | -------------------------------------------------------------------------------- /cmd/crowdsec/appsec_stub.go: -------------------------------------------------------------------------------- 1 | //go:build no_datasource_appsec 2 | 3 | package main 4 | 5 | import ( 6 | "github.com/crowdsecurity/crowdsec/pkg/cwhub" 7 | ) 8 | 9 | func LoadAppsecRules(hub *cwhub.Hub) error { 10 | return nil 11 | } 12 | -------------------------------------------------------------------------------- /cmd/crowdsec/event_log_hook_windows.go: -------------------------------------------------------------------------------- 1 | package main 2 | 3 | import ( 4 | log "github.com/sirupsen/logrus" 5 | "golang.org/x/sys/windows/svc/eventlog" 6 | ) 7 | 8 | type EventLogHook struct { 9 | LogLevels []log.Level 10 | evtlog *eventlog.Log 11 | } 12 | 13 | func (e *EventLogHook) Fire(entry *log.Entry) error { 14 | line, err := entry.String() 15 | if err != nil { 16 | return err 17 | } 18 | switch entry.Level { 19 | case log.PanicLevel: 20 | return e.evtlog.Error(300, line) 21 | case log.FatalLevel: 22 | return e.evtlog.Error(301, line) 23 | case log.ErrorLevel: 24 | return e.evtlog.Error(302, line) 25 | case log.WarnLevel: 26 | return e.evtlog.Warning(303, line) 27 | case log.InfoLevel: 28 | return e.evtlog.Info(304, line) 29 | case log.DebugLevel: 30 | return e.evtlog.Info(305, line) 31 | case log.TraceLevel: 32 | return e.evtlog.Info(306, line) 33 | } 34 | return nil 35 | } 36 | 37 | func (e *EventLogHook) Levels() []log.Level { 38 | return e.LogLevels 39 | } 40 | -------------------------------------------------------------------------------- /cmd/notification-dummy/Makefile: -------------------------------------------------------------------------------- 1 | ifeq ($(OS), Windows_NT) 2 | SHELL := pwsh.exe 3 | .SHELLFLAGS := -NoProfile -Command 4 | EXT = .exe 5 | endif 6 | 7 | GO = go 8 | GOBUILD = $(GO) build 9 | 10 | BINARY_NAME = notification-dummy$(EXT) 11 | 12 | build: clean 13 | $(GOBUILD) $(LD_OPTS) -o $(BINARY_NAME) 14 | 15 | .PHONY: clean 16 | clean: 17 | @$(RM) $(BINARY_NAME) $(WIN_IGNORE_ERR) 18 | -------------------------------------------------------------------------------- /cmd/notification-email/Makefile: -------------------------------------------------------------------------------- 1 | ifeq ($(OS), Windows_NT) 2 | SHELL := pwsh.exe 3 | .SHELLFLAGS := -NoProfile -Command 4 | EXT = .exe 5 | endif 6 | 7 | GO = go 8 | GOBUILD = $(GO) build 9 | 10 | BINARY_NAME = notification-email$(EXT) 11 | 12 | build: clean 13 | $(GOBUILD) $(LD_OPTS) -o $(BINARY_NAME) 14 | 15 | .PHONY: clean 16 | clean: 17 | @$(RM) $(BINARY_NAME) $(WIN_IGNORE_ERR) 18 | -------------------------------------------------------------------------------- /cmd/notification-file/Makefile: -------------------------------------------------------------------------------- 1 | ifeq ($(OS), Windows_NT) 
2 | SHELL := pwsh.exe 3 | .SHELLFLAGS := -NoProfile -Command 4 | EXT = .exe 5 | endif 6 | 7 | GO = go 8 | GOBUILD = $(GO) build 9 | 10 | BINARY_NAME = notification-file$(EXT) 11 | 12 | build: clean 13 | $(GOBUILD) $(LD_OPTS) -o $(BINARY_NAME) 14 | 15 | .PHONY: clean 16 | clean: 17 | @$(RM) $(BINARY_NAME) $(WIN_IGNORE_ERR) 18 | -------------------------------------------------------------------------------- /cmd/notification-file/file.yaml: -------------------------------------------------------------------------------- 1 | # Don't change this 2 | type: file 3 | 4 | name: file_default # this must match the registered plugin in the profile 5 | log_level: info # Options include: trace, debug, info, warn, error, off 6 | 7 | # This template renders all events as ndjson 8 | format: | 9 | {{range . -}} 10 | { "time": "{{.StopAt}}", "program": "crowdsec", "alert": {{. | toJson }} } 11 | {{ end -}} 12 | 13 | # group_wait: # duration to wait collecting alerts before sending to this plugin, eg "30s" 14 | # group_threshold: # if the number of alerts exceeds this, the message is sent to the plugin. eg "10" 15 | 16 | # Use a full path, e.g. /tmp/crowdsec_alerts.json or %TEMP%\crowdsec_alerts.json 17 | log_path: "/tmp/crowdsec_alerts.json" 18 | rotate: 19 | enabled: true # Change to false if you want to handle log rotation at the system level 20 | max_size: 500 # in MB 21 | max_files: 5 22 | max_age: 5 23 | compress: true 24 | -------------------------------------------------------------------------------- /cmd/notification-http/Makefile: -------------------------------------------------------------------------------- 1 | ifeq ($(OS), Windows_NT) 2 | SHELL := pwsh.exe 3 | .SHELLFLAGS := -NoProfile -Command 4 | EXT = .exe 5 | endif 6 | 7 | GO = go 8 | GOBUILD = $(GO) build 9 | 10 | BINARY_NAME = notification-http$(EXT) 11 | 12 | build: clean 13 | $(GOBUILD) $(LD_OPTS) -o $(BINARY_NAME) 14 | 15 | .PHONY: clean 16 | clean: 17 | @$(RM) $(BINARY_NAME) $(WIN_IGNORE_ERR) 18 | -------------------------------------------------------------------------------- /cmd/notification-sentinel/Makefile: -------------------------------------------------------------------------------- 1 | ifeq ($(OS), Windows_NT) 2 | SHELL := pwsh.exe 3 | .SHELLFLAGS := -NoProfile -Command 4 | EXT = .exe 5 | endif 6 | 7 | GO = go 8 | GOBUILD = $(GO) build 9 | 10 | BINARY_NAME = notification-sentinel$(EXT) 11 | 12 | build: clean 13 | $(GOBUILD) $(LD_OPTS) -o $(BINARY_NAME) 14 | 15 | .PHONY: clean 16 | clean: 17 | @$(RM) $(BINARY_NAME) $(WIN_IGNORE_ERR) 18 | -------------------------------------------------------------------------------- /cmd/notification-sentinel/sentinel.yaml: -------------------------------------------------------------------------------- 1 | type: sentinel # Don't change 2 | name: sentinel_default # Must match the registered plugin in the profile 3 | 4 | # One of "trace", "debug", "info", "warn", "error", "off" 5 | log_level: info 6 | # group_wait: # Time to wait collecting alerts before relaying a message to this plugin, eg "30s" 7 | # group_threshold: # Amount of alerts that triggers a message before group_wait has expired, eg "10" 8 | # max_retry: # Number of attempts to relay messages to plugins in case of error 9 | # timeout: # Time to wait for a response from the plugin before considering the attempt a failure, eg "10s" 10 | 11 | #------------------------- 12 | # plugin-specific options 13 | 14 | # The following template receives a list of models.Alert objects 15 | # The output goes in the http request body 16 | format: | 17 | {{.|toJson}} 18 | 
19 | customer_id: XXX-XXX 20 | shared_key: XXXXXXX 21 | log_type: crowdsec -------------------------------------------------------------------------------- /cmd/notification-slack/Makefile: -------------------------------------------------------------------------------- 1 | ifeq ($(OS), Windows_NT) 2 | SHELL := pwsh.exe 3 | .SHELLFLAGS := -NoProfile -Command 4 | EXT = .exe 5 | endif 6 | 7 | GO = go 8 | GOBUILD = $(GO) build 9 | 10 | BINARY_NAME = notification-slack$(EXT) 11 | 12 | build: clean 13 | $(GOBUILD) $(LD_OPTS) -o $(BINARY_NAME) 14 | 15 | .PHONY: clean 16 | clean: 17 | @$(RM) $(BINARY_NAME) $(WIN_IGNORE_ERR) 18 | -------------------------------------------------------------------------------- /cmd/notification-splunk/Makefile: -------------------------------------------------------------------------------- 1 | ifeq ($(OS), Windows_NT) 2 | SHELL := pwsh.exe 3 | .SHELLFLAGS := -NoProfile -Command 4 | EXT = .exe 5 | endif 6 | 7 | GO = go 8 | GOBUILD = $(GO) build 9 | 10 | BINARY_NAME = notification-splunk$(EXT) 11 | 12 | build: clean 13 | $(GOBUILD) $(LD_OPTS) -o $(BINARY_NAME) 14 | 15 | .PHONY: clean 16 | clean: 17 | @$(RM) $(BINARY_NAME) $(WIN_IGNORE_ERR) 18 | -------------------------------------------------------------------------------- /config/acquis.yaml: -------------------------------------------------------------------------------- 1 | filenames: 2 | - /var/log/nginx/*.log 3 | - ./tests/nginx/nginx.log 4 | #this is not a syslog log, indicate which kind of logs it is 5 | labels: 6 | type: nginx 7 | --- 8 | filenames: 9 | - /var/log/auth.log 10 | - /var/log/syslog 11 | labels: 12 | type: syslog 13 | --- 14 | filename: /var/log/apache2/*.log 15 | labels: 16 | type: apache2 17 | -------------------------------------------------------------------------------- /config/acquis_win.yaml: -------------------------------------------------------------------------------- 1 | ##RDP 2 | source: wineventlog 3 | event_channel: Security 4 | event_ids: 5 | - 4625 6 | - 4623 7 | event_level: information 8 | labels: 9 | type: eventlog 10 | --- 11 | ##Firewall 12 | filenames: 13 | - C:\Windows\System32\LogFiles\Firewall\*.log 14 | labels: 15 | type: windows-firewall 16 | --- 17 | ##SQL Server 18 | source: wineventlog 19 | event_channel: Application 20 | event_ids: 21 | - 18456 22 | event_level: information 23 | labels: 24 | type: eventlog 25 | --- 26 | ##IIS 27 | use_time_machine: true 28 | filenames: 29 | - C:\inetpub\logs\LogFiles\*\*.log 30 | labels: 31 | type: iis 32 | -------------------------------------------------------------------------------- /config/config_win_no_lapi.yaml: -------------------------------------------------------------------------------- 1 | common: 2 | log_media: file 3 | log_level: info 4 | log_dir: C:\ProgramData\CrowdSec\log\ 5 | config_paths: 6 | config_dir: C:\ProgramData\CrowdSec\config\ 7 | data_dir: C:\ProgramData\CrowdSec\data\ 8 | simulation_path: C:\ProgramData\CrowdSec\config\simulation.yaml 9 | hub_dir: C:\ProgramData\CrowdSec\hub\ 10 | index_path: C:\ProgramData\CrowdSec\hub\.index.json 11 | plugin_dir: C:\ProgramData\CrowdSec\plugins\ 12 | notification_dir: C:\ProgramData\CrowdSec\config\notifications\ 13 | crowdsec_service: 14 | acquisition_path: C:\ProgramData\CrowdSec\config\acquis.yaml 15 | parser_routines: 1 16 | cscli: 17 | output: human 18 | api: 19 | client: 20 | insecure_skip_verify: false 21 | credentials_path: C:\ProgramData\CrowdSec\config\local_api_credentials.yaml 22 | prometheus: 23 | enabled: true 24 | level: full 25 | listen_addr: 
127.0.0.1 26 | listen_port: 6060 27 | -------------------------------------------------------------------------------- /config/console.yaml: -------------------------------------------------------------------------------- 1 | share_manual_decisions: false 2 | share_custom: true 3 | share_tainted: true 4 | share_context: false -------------------------------------------------------------------------------- /config/context.yaml: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/crowdsecurity/crowdsec/1cede239d8465b0549d10a3a44aab7b96a2cd743/config/context.yaml -------------------------------------------------------------------------------- /config/crowdsec.cron.daily: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | test -x /usr/bin/cscli || exit 0 4 | 5 | # splay hub upgrade and crowdsec reload 6 | sleep "$(seq 1 300 | shuf -n 1)" 7 | 8 | /usr/bin/cscli --error hub update >/dev/null 9 | 10 | upgraded=$(/usr/bin/cscli --error hub upgrade) 11 | if [ -n "$upgraded" ]; then 12 | systemctl reload crowdsec 13 | fi 14 | 15 | exit 0 16 | -------------------------------------------------------------------------------- /config/crowdsec.service: -------------------------------------------------------------------------------- 1 | [Unit] 2 | Description=Crowdsec agent 3 | After=syslog.target network.target remote-fs.target nss-lookup.target 4 | 5 | [Service] 6 | Type=notify 7 | Environment=LC_ALL=C LANG=C 8 | ExecStartPre=/usr/local/bin/crowdsec -c /etc/crowdsec/config.yaml -t -error 9 | ExecStart=/usr/local/bin/crowdsec -c /etc/crowdsec/config.yaml 10 | #ExecStartPost=/bin/sleep 0.1 11 | ExecReload=/usr/local/bin/crowdsec -c /etc/crowdsec/config.yaml -t -error 12 | ExecReload=/bin/kill -HUP $MAINPID 13 | Restart=always 14 | RestartSec=60 15 | 16 | [Install] 17 | WantedBy=multi-user.target 18 | -------------------------------------------------------------------------------- /config/local_api_credentials.yaml: -------------------------------------------------------------------------------- 1 | url: http://127.0.0.1:8080 -------------------------------------------------------------------------------- /config/online_api_credentials.yaml: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/crowdsecurity/crowdsec/1cede239d8465b0549d10a3a44aab7b96a2cd743/config/online_api_credentials.yaml -------------------------------------------------------------------------------- /config/patterns/cowrie_honeypot: -------------------------------------------------------------------------------- 1 | COWRIE_NEW_CO New connection: %{IPV4:source_ip}:[0-9]+ \(%{IPV4:dest_ip}:%{INT:dest_port}\) \[session: %{DATA:telnet_session}\]$ -------------------------------------------------------------------------------- /config/patterns/exim: -------------------------------------------------------------------------------- 1 | EXIM_MSGID [0-9A-Za-z]{6}-[0-9A-Za-z]{6}-[0-9A-Za-z]{2} 2 | EXIM_FLAGS (<=|[-=>*]>|[*]{2}|==) 3 | EXIM_DATE %{YEAR:exim_year}-%{MONTHNUM:exim_month}-%{MONTHDAY:exim_day} %{TIME:exim_time} 4 | EXIM_PID \[%{POSINT}\] 5 | EXIM_QT ((\d+y)?(\d+w)?(\d+d)?(\d+h)?(\d+m)?(\d+s)?) 
6 | EXIM_EXCLUDE_TERMS (Message is frozen|(Start|End) queue run| Warning: | retry time not reached | no (IP address|host name) found for (IP address|host) | unexpected disconnection while reading SMTP command | no immediate delivery: |another process is handling this message) 7 | EXIM_REMOTE_HOST (H=(%{NOTSPACE:remote_hostname} )?(\(%{NOTSPACE:remote_heloname}\) )?\[%{IP:remote_host}\]) 8 | EXIM_INTERFACE (I=\[%{IP:exim_interface}\](:%{NUMBER:exim_interface_port})) 9 | EXIM_PROTOCOL (P=%{NOTSPACE:protocol}) 10 | EXIM_MSG_SIZE (S=%{NUMBER:exim_msg_size}) 11 | EXIM_HEADER_ID (id=%{NOTSPACE:exim_header_id}) 12 | EXIM_SUBJECT (T=%{QS:exim_subject}) -------------------------------------------------------------------------------- /config/patterns/mcollective: -------------------------------------------------------------------------------- 1 | # Remember, these can be multi-line events. 2 | MCOLLECTIVE ., \[%{TIMESTAMP_ISO8601:timestamp} #%{POSINT:pid}\]%{SPACE}%{LOGLEVEL:event_level} 3 | 4 | MCOLLECTIVEAUDIT %{TIMESTAMP_ISO8601:timestamp}: -------------------------------------------------------------------------------- /config/patterns/mongodb: -------------------------------------------------------------------------------- 1 | MONGO_LOG %{SYSLOGTIMESTAMP:timestamp} \[%{WORD:component}\] %{GREEDYDATA:message} 2 | MONGO_QUERY \{ \{ .* \} ntoreturn: \} 3 | MONGO_WORDDASH \b[\w-]+\b 4 | MONGO_SLOWQUERY %{WORD} %{MONGO_WORDDASH:database}\.%{MONGO_WORDDASH:collection} %{WORD}: %{MONGO_QUERY:query} %{WORD}:%{NONNEGINT:ntoreturn} %{WORD}:%{NONNEGINT:ntoskip} %{WORD}:%{NONNEGINT:nscanned}.*nreturned:%{NONNEGINT:nreturned}..+ %{POSINT:duration}ms 5 | MONGO3_SEVERITY \w 6 | MONGO3_COMPONENT %{WORD}|- 7 | MONGO3_LOG %{TIMESTAMP_ISO8601:timestamp} %{MONGO3_SEVERITY:severity} %{MONGO3_COMPONENT:component}%{SPACE}(?:\[%{DATA:context}\])? 
%{GREEDYDATA:message} -------------------------------------------------------------------------------- /config/patterns/mysql: -------------------------------------------------------------------------------- 1 | MYSQL_AUTH_FAIL %{TIMESTAMP_ISO8601:time} %{NUMBER} \[Note\] Access denied for user '%{DATA:user}'@'%{IP:source_ip}' \(using password: %{WORD:using_password}\) 2 | -------------------------------------------------------------------------------- /config/patterns/postgresql: -------------------------------------------------------------------------------- 1 | # Default postgresql pg_log format pattern 2 | POSTGRESQL %{DATESTAMP:timestamp} %{TZ} %{DATA:user_id} %{GREEDYDATA:connection_id} %{POSINT:pid} -------------------------------------------------------------------------------- /config/patterns/ruby: -------------------------------------------------------------------------------- 1 | RUBY_LOGLEVEL DEBUG|FATAL|ERROR|WARN|INFO 2 | RUBY_LOGGER [DFEWI], \[%{TIMESTAMP_ISO8601:timestamp} #%{POSINT:pid}\] *%{RUBY_LOGLEVEL:loglevel} -- +%{DATA:progname}: %{GREEDYDATA:message} -------------------------------------------------------------------------------- /config/patterns/smb: -------------------------------------------------------------------------------- 1 | SMB_AUTH_FAIL Auth:%{GREEDYDATA} user \[%{DATA:smb_domain}\]\\\[%{DATA:user}\]%{GREEDYDATA} status \[NT_STATUS_NO_SUCH_USER\]%{GREEDYDATA} remote host \[ipv4:%{IP:ip_source} -------------------------------------------------------------------------------- /config/patterns/tcpdump: -------------------------------------------------------------------------------- 1 | TCPDUMP_OUTPUT %{GREEDYDATA:timestamp} IP %{IPORHOST:source_ip}\.%{INT:source_port} > %{IPORHOST:dest_ip}\.%{INT:dest_port}: Flags \[%{GREEDYDATA:tcpflags}\], seq 2 | -------------------------------------------------------------------------------- /config/simulation.yaml: -------------------------------------------------------------------------------- 1 | simulation: false 2 | # exclusions: 3 | # - crowdsecurity/ssh-bf 4 | -------------------------------------------------------------------------------- /debian/.gitignore: -------------------------------------------------------------------------------- 1 | # Generated during the build 2 | /crowdsec 3 | /files 4 | /*.substvars 5 | /*.log 6 | /*.debhelper 7 | /*-stamp 8 | -------------------------------------------------------------------------------- /debian/README.md: -------------------------------------------------------------------------------- 1 | 2 | # Building Debian/Ubuntu packages 3 | 4 | It is not recommended to build your own packages for production environments. 
5 | 6 | However, if you want to experiment and contribute: 7 | 8 | * Update the changelog (at least give it a correct version number) 9 | * Run "QUILT_PATCHES=debian/patches quilt push -a && quilt refresh" 10 | 11 | We do the above in the build pipeline, so you'll have to do it manually before running: 12 | 13 | * dpkg-buildpackage -uc -us -b 14 | 15 | -------------------------------------------------------------------------------- /debian/compat: -------------------------------------------------------------------------------- 1 | 11 2 | -------------------------------------------------------------------------------- /debian/control: -------------------------------------------------------------------------------- 1 | Source: crowdsec 2 | Maintainer: Crowdsec Team 3 | Build-Depends: debhelper, bash 4 | Section: admin 5 | Priority: optional 6 | 7 | Package: crowdsec 8 | Architecture: any 9 | Description: Crowdsec - An open-source, lightweight agent to detect and respond to bad behaviors. It also automatically benefits from our global community-wide IP reputation database 10 | Depends: coreutils 11 | Suggests: cron 12 | -------------------------------------------------------------------------------- /debian/crowdsec.cron.daily: -------------------------------------------------------------------------------- 1 | ../config/crowdsec.cron.daily -------------------------------------------------------------------------------- /debian/crowdsec.service: -------------------------------------------------------------------------------- 1 | [Unit] 2 | Description=Crowdsec agent 3 | After=syslog.target network.target remote-fs.target nss-lookup.target 4 | 5 | [Service] 6 | Type=notify 7 | Environment=LC_ALL=C LANG=C 8 | ExecStartPre=/usr/bin/crowdsec -c /etc/crowdsec/config.yaml -t -error 9 | ExecStart=/usr/bin/crowdsec -c /etc/crowdsec/config.yaml 10 | #ExecStartPost=/bin/sleep 0.1 11 | ExecReload=/usr/bin/crowdsec -c /etc/crowdsec/config.yaml -t -error 12 | ExecReload=/bin/kill -HUP $MAINPID 13 | Restart=always 14 | RestartSec=60 15 | 16 | [Install] 17 | WantedBy=multi-user.target 18 | -------------------------------------------------------------------------------- /debian/install: -------------------------------------------------------------------------------- 1 | config/config.yaml etc/crowdsec/ 2 | config/profiles.yaml etc/crowdsec/ 3 | config/simulation.yaml etc/crowdsec/ 4 | 5 | config/patterns/* etc/crowdsec/patterns 6 | 7 | # Referenced configs: 8 | cmd/notification-slack/slack.yaml etc/crowdsec/notifications/ 9 | cmd/notification-http/http.yaml etc/crowdsec/notifications/ 10 | cmd/notification-splunk/splunk.yaml etc/crowdsec/notifications/ 11 | cmd/notification-email/email.yaml etc/crowdsec/notifications/ 12 | cmd/notification-sentinel/sentinel.yaml etc/crowdsec/notifications/ 13 | cmd/notification-file/file.yaml etc/crowdsec/notifications/ 14 | -------------------------------------------------------------------------------- /debian/patches/config_plugins: -------------------------------------------------------------------------------- 1 | Index: crowdsec/config/config.yaml 2 | =================================================================== 3 | --- crowdsec.orig/config/config.yaml 4 | +++ crowdsec/config/config.yaml 5 | @@ -12,7 +12,7 @@ config_paths: 6 | hub_dir: /etc/crowdsec/hub/ 7 | index_path: /etc/crowdsec/hub/.index.json 8 | notification_dir: /etc/crowdsec/notifications/ 9 | - plugin_dir: /usr/local/lib/crowdsec/plugins/ 10 | + plugin_dir: /usr/lib/crowdsec/plugins/ 11 | 
crowdsec_service: 12 | acquisition_path: /etc/crowdsec/acquis.yaml 13 | parser_routines: 1 14 | -------------------------------------------------------------------------------- /debian/patches/series: -------------------------------------------------------------------------------- 1 | config_plugins 2 | -------------------------------------------------------------------------------- /debian/postrm: -------------------------------------------------------------------------------- 1 | if [ "$1" = "purge" ]; then 2 | find /etc/crowdsec -maxdepth 1 -mindepth 1 | grep -v "bouncer" | xargs rm -rf || echo "" 3 | rm -rf /var/lib/crowdsec 4 | fi 5 | -------------------------------------------------------------------------------- /debian/prerm: -------------------------------------------------------------------------------- 1 | if [ "$1" = "remove" ]; then 2 | systemctl stop crowdsec 3 | systemctl disable crowdsec 4 | fi 5 | 6 | if [ "$1" = "upgrade" ]; then 7 | systemctl stop crowdsec 8 | fi 9 | -------------------------------------------------------------------------------- /debian/templates: -------------------------------------------------------------------------------- 1 | Template: crowdsec/lapi 2 | Type: boolean 3 | Default: true 4 | Description: Do you want to run the local API server? 5 | A local API is required to run crowdsec, but another installation can be used. 6 | . 7 | If you don't know what to do, answer yes. 8 | 9 | Template: crowdsec/lapi_host 10 | Type: string 11 | Default: 127.0.0.1:8080 12 | Description: Address of the local API server 13 | A local API is required to run crowdsec, but another installation can be used. 14 | . 15 | Please provide the address of the local API server. 16 | 17 | Template: crowdsec/capi 18 | Type: boolean 19 | Default: true 20 | Description: Do you want to use the centralized remote API server? 21 | To share information with other CrowdSec installations, you can register with the centralized remote API server. 22 | . 23 | If you don't know what to do, answer yes. 24 | -------------------------------------------------------------------------------- /docker/preload-hub-items: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | set -eu 4 | 5 | # pre-download everything but don't install anything 6 | 7 | echo "Pre-downloading Hub content..." 8 | 9 | types=$(cscli hub types -o raw) 10 | 11 | for itemtype in $types; do 12 | ALL_ITEMS=$(cscli "$itemtype" list -a -o json | itemtype="$itemtype" yq '.[env(itemtype)][] | .name') 13 | if [[ -n "${ALL_ITEMS}" ]]; then 14 | #shellcheck disable=SC2086 15 | cscli "$itemtype" install \ 16 | $ALL_ITEMS \ 17 | --download-only \ 18 | --error 19 | fi 20 | done 21 | 22 | echo " done."
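# Illustrative walk-through of one loop iteration above (a sketch only; the item type and item
# names are assumptions, not output captured from a real hub). Assuming `cscli hub types -o raw`
# prints one item type per line (e.g. "parsers"), the iteration with itemtype=parsers expands
# roughly to:
#
#   ALL_ITEMS=$(cscli parsers list -a -o json | itemtype=parsers yq '.[env(itemtype)][] | .name')
#   # hypothetically: ALL_ITEMS="crowdsecurity/syslog-logs crowdsecurity/sshd-logs"
#   cscli parsers install crowdsecurity/syslog-logs crowdsecurity/sshd-logs --download-only --error
#
# so every available hub item of every type is fetched into the local cache (--download-only)
# without being enabled.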
-------------------------------------------------------------------------------- /docker/test/.python-version: -------------------------------------------------------------------------------- 1 | 3.12 2 | -------------------------------------------------------------------------------- /docker/test/README.md: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/crowdsecurity/crowdsec/1cede239d8465b0549d10a3a44aab7b96a2cd743/docker/test/README.md -------------------------------------------------------------------------------- /docker/test/default.env: -------------------------------------------------------------------------------- 1 | # The dev image is only built for the "full" flavor 2 | # CROWDSEC_TEST_VERSION="test" 3 | # CROWDSEC_TEST_VERSION="v1.5.0" 4 | CROWDSEC_TEST_VERSION="dev" 5 | 6 | # All of the following flavors will be tested when using the "flavor" fixture 7 | CROWDSEC_TEST_FLAVORS="full" 8 | # CROWDSEC_TEST_FLAVORS="full,slim,debian" 9 | # CROWDSEC_TEST_FLAVORS="full,slim,debian,debian-slim" 10 | 11 | # network to use 12 | CROWDSEC_TEST_NETWORK="net-test" 13 | 14 | # Timeout for each event to wait in tests: http, log.. (default 20) 15 | CROWDSEC_TEST_TIMEOUT="10" 16 | -------------------------------------------------------------------------------- /docker/test/pytest-debug.ini: -------------------------------------------------------------------------------- 1 | [pytest] 2 | # run all tests sequentially, drop to pdb on first failure 3 | addopts = -n 0 --no-header --pdb --pdbcls=IPython.terminal.debugger:Pdb 4 | env_files = 5 | .env 6 | default.env 7 | -------------------------------------------------------------------------------- /docker/test/pytest.ini: -------------------------------------------------------------------------------- 1 | [pytest] 2 | # run all tests in parallel, compact output 3 | addopts = -n 4 --no-header 4 | required_plugins = pytest-xdist 5 | env_files = 6 | .env 7 | default.env 8 | -------------------------------------------------------------------------------- /docker/test/tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/crowdsecurity/crowdsec/1cede239d8465b0549d10a3a44aab7b96a2cd743/docker/test/tests/__init__.py -------------------------------------------------------------------------------- /docker/test/tests/conftest.py: -------------------------------------------------------------------------------- 1 | from _pytest.config import Config 2 | 3 | pytest_plugins = ("cs",) 4 | 5 | 6 | def pytest_configure(config: Config) -> None: 7 | config.addinivalue_line("markers", "docker: mark tests for lone or manually orchestrated containers") 8 | config.addinivalue_line("markers", "compose: mark tests for docker compose projects") 9 | -------------------------------------------------------------------------------- /docker/test/tests/test_nolapi.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from pytest_cs import Status 3 | 4 | pytestmark = pytest.mark.docker 5 | 6 | 7 | def test_no_agent(crowdsec, flavor: str) -> None: 8 | """Test DISABLE_LOCAL_API=true (failing stand-alone container)""" 9 | env = { 10 | "DISABLE_LOCAL_API": "true", 11 | } 12 | 13 | # if an alternative lapi url is not defined, the container should exit 14 | 15 | with crowdsec(flavor=flavor, environment=env, wait_status=Status.EXITED) as cs: 16 | cs.wait_for_log("*dial tcp 0.0.0.0:8080: 
connect: connection refused*") 17 | -------------------------------------------------------------------------------- /docker/test/tests/test_simple.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | pytestmark = pytest.mark.docker 4 | 5 | 6 | # XXX this is redundant, already tested in pytest_cs 7 | def test_crowdsec(crowdsec, flavor: str) -> None: 8 | with crowdsec(flavor=flavor) as cs: 9 | for waiter in cs.log_waiters(): 10 | with waiter as matcher: 11 | matcher.fnmatch_lines(["*Starting processing data*"]) 12 | res = cs.cont.exec_run('sh -c "echo $CI_TESTING"') 13 | assert res.exit_code == 0 14 | assert res.output.decode().strip() == "true" 15 | -------------------------------------------------------------------------------- /docker/test/tests/test_version.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | pytestmark = pytest.mark.docker 4 | 5 | 6 | def test_version_docker_platform(crowdsec, flavor: str) -> None: 7 | with crowdsec(flavor=flavor) as cs: 8 | for waiter in cs.log_waiters(): 9 | with waiter as matcher: 10 | matcher.fnmatch_lines(["*Starting processing data*"]) 11 | res = cs.cont.exec_run("cscli version") 12 | assert res.exit_code == 0 13 | assert "Platform: docker" in res.output.decode() 14 | res = cs.cont.exec_run("crowdsec -version") 15 | assert res.exit_code == 0 16 | assert "Platform: docker" in res.output.decode() 17 | -------------------------------------------------------------------------------- /make_chocolatey.ps1: -------------------------------------------------------------------------------- 1 | param ( 2 | $version 3 | ) 4 | if ($version.StartsWith("v")) 5 | { 6 | $version = $version.Substring(1) 7 | } 8 | 9 | #Pre-releases will be like 1.4.0-rc1, remove everything after the dash as it does not conform to the MSI versioning scheme 10 | if ($version.Contains("-")) 11 | { 12 | $version = $version.Substring(0, $version.IndexOf("-")) 13 | } 14 | 15 | Set-Location .\windows\Chocolatey\crowdsec 16 | Copy-Item ..\..\..\crowdsec_$version.msi tools\crowdsec.msi 17 | 18 | choco pack --version $version 19 | 20 | Copy-Item crowdsec.$version.nupkg ..\..\..\ -------------------------------------------------------------------------------- /mk/__gmsl: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/crowdsecurity/crowdsec/1cede239d8465b0549d10a3a44aab7b96a2cd743/mk/__gmsl -------------------------------------------------------------------------------- /mk/help.mk: -------------------------------------------------------------------------------- 1 | .PHONY: help 2 | help: 3 | @grep -E '^[a-zA-Z0-9_-]+:.*?## .*$$' $(MAKEFILE_LIST) \ 4 | | sed -n 's/^.*:\(.*\): \(.*\)##\(.*\)/\1:\3/p' \ 5 | | awk 'BEGIN {FS = ":"; printf "\033[33m"} {printf "%-20s \033[32m %s\033[0m\n", $$1, $$2}' 6 | -------------------------------------------------------------------------------- /mk/platform.mk: -------------------------------------------------------------------------------- 1 | 2 | BUILD_CODENAME ?= alphaga 3 | GOARCH ?= $(shell go env GOARCH) 4 | BUILD_TAG ?= $(shell git rev-parse --short HEAD) 5 | 6 | ifeq ($(OS), Windows_NT) 7 | SHELL := pwsh.exe 8 | .SHELLFLAGS := -NoProfile -Command 9 | SYSTEM = windows 10 | EXT = .exe 11 | else 12 | SYSTEM ?= $(shell uname -s | tr '[A-Z]' '[a-z]') 13 | include mk/platform/unix_common.mk 14 | endif 15 | 16 | ifneq ("$(wildcard mk/platform/$(SYSTEM).mk)", "") 17 | include 
mk/platform/$(SYSTEM).mk 18 | else 19 | include mk/platform/linux.mk 20 | endif 21 | -------------------------------------------------------------------------------- /mk/platform/freebsd.mk: -------------------------------------------------------------------------------- 1 | # FreeBSD specific 2 | 3 | MAKE=gmake 4 | -------------------------------------------------------------------------------- /mk/platform/linux.mk: -------------------------------------------------------------------------------- 1 | # Linux specific 2 | 3 | MAKE=make 4 | -------------------------------------------------------------------------------- /mk/platform/openbsd.mk: -------------------------------------------------------------------------------- 1 | # OpenBSD specific 2 | 3 | MAKE=gmake 4 | -------------------------------------------------------------------------------- /mk/platform/unix_common.mk: -------------------------------------------------------------------------------- 1 | 2 | RM=rm -rf 3 | CP=cp 4 | CPR=cp -r 5 | MKDIR=mkdir -p 6 | 7 | # Go should not be required to run functional tests 8 | GOOS ?= $(shell go env GOOS) 9 | 10 | # Current versioning information from env 11 | # The $(or) is used to ignore an empty BUILD_VERSION when it's an envvar, 12 | # like inside a docker build: docker build --build-arg BUILD_VERSION=1.2.3 13 | # as opposed to a make parameter: make BUILD_VERSION=1.2.3 14 | BUILD_VERSION:=$(or $(BUILD_VERSION),$(shell git describe --tags --dirty)) 15 | 16 | BUILD_TIMESTAMP=$(shell date +%F"_"%T) 17 | DEFAULT_CONFIGDIR?=/etc/crowdsec 18 | DEFAULT_DATADIR?=/var/lib/crowdsec/data 19 | 20 | PKG_CONFIG:=$(shell command -v pkg-config 2>/dev/null) 21 | 22 | # See if we have libre2-dev installed for C++ optimizations. 23 | # In fedora and other distros, we need to tell where to find re2.pc 24 | RE2_CHECK := $(shell PKG_CONFIG_PATH=/usr/local/lib/pkgconfig:$(PKG_CONFIG_PATH) pkg-config --libs re2 2>/dev/null) 25 | -------------------------------------------------------------------------------- /mk/platform/windows.mk: -------------------------------------------------------------------------------- 1 | # Windows specific 2 | 3 | MAKE=make 4 | GOOS=windows 5 | PREFIX=$(shell $$env:TEMP) 6 | 7 | # Current versioning information from env 8 | # BUILD_VERSION?=$(shell (Invoke-WebRequest -UseBasicParsing -Uri https://api.github.com/repos/crowdsecurity/crowdsec/releases/latest).Content | jq -r '.tag_name') 9 | # hardcode it till I find a workaround 10 | BUILD_VERSION?=$(shell git describe --tags $$(git rev-list --tags --max-count=1)) 11 | BUILD_TIMESTAMP?=$(shell Get-Date -Format "yyyy-MM-dd_HH:mm:ss") 12 | DEFAULT_CONFIGDIR?=C:\\ProgramData\\CrowdSec\\config 13 | DEFAULT_DATADIR?=C:\\ProgramData\\CrowdSec\\data 14 | 15 | # Please tell me there is a better way to completely ignore errors when trying to delete a file...
16 | RM=Remove-Item -ErrorAction Ignore -Recurse 17 | CP=Copy-Item 18 | CPR=Copy-Item -Recurse 19 | MKDIR=New-Item -ItemType directory 20 | WIN_IGNORE_ERR=; exit 0 21 | 22 | PKG_CONFIG:=$(shell Get-Command pkg-config -ErrorAction SilentlyContinue) 23 | -------------------------------------------------------------------------------- /pkg/acquisition/appsec.go: -------------------------------------------------------------------------------- 1 | //go:build !no_datasource_appsec 2 | 3 | package acquisition 4 | 5 | import ( 6 | appsecacquisition "github.com/crowdsecurity/crowdsec/pkg/acquisition/modules/appsec" 7 | ) 8 | 9 | //nolint:gochecknoinits 10 | func init() { 11 | registerDataSource("appsec", func() DataSource { return &appsecacquisition.AppsecSource{} }) 12 | } 13 | -------------------------------------------------------------------------------- /pkg/acquisition/cloudwatch.go: -------------------------------------------------------------------------------- 1 | //go:build !no_datasource_cloudwatch 2 | 3 | package acquisition 4 | 5 | import ( 6 | cloudwatchacquisition "github.com/crowdsecurity/crowdsec/pkg/acquisition/modules/cloudwatch" 7 | ) 8 | 9 | //nolint:gochecknoinits 10 | func init() { 11 | registerDataSource("cloudwatch", func() DataSource { return &cloudwatchacquisition.CloudwatchSource{} }) 12 | } 13 | -------------------------------------------------------------------------------- /pkg/acquisition/docker.go: -------------------------------------------------------------------------------- 1 | //go:build !no_datasource_docker 2 | 3 | package acquisition 4 | 5 | import ( 6 | dockeracquisition "github.com/crowdsecurity/crowdsec/pkg/acquisition/modules/docker" 7 | ) 8 | 9 | //nolint:gochecknoinits 10 | func init() { 11 | registerDataSource("docker", func() DataSource { return &dockeracquisition.DockerSource{} }) 12 | } 13 | -------------------------------------------------------------------------------- /pkg/acquisition/file.go: -------------------------------------------------------------------------------- 1 | //go:build !no_datasource_file 2 | 3 | package acquisition 4 | 5 | import ( 6 | fileacquisition "github.com/crowdsecurity/crowdsec/pkg/acquisition/modules/file" 7 | ) 8 | 9 | //nolint:gochecknoinits 10 | func init() { 11 | registerDataSource("file", func() DataSource { return &fileacquisition.FileSource{} }) 12 | } 13 | -------------------------------------------------------------------------------- /pkg/acquisition/http.go: -------------------------------------------------------------------------------- 1 | //go:build !no_datasource_http 2 | 3 | package acquisition 4 | 5 | import ( 6 | httpacquisition "github.com/crowdsecurity/crowdsec/pkg/acquisition/modules/http" 7 | ) 8 | 9 | //nolint:gochecknoinits 10 | func init() { 11 | registerDataSource("http", func() DataSource { return &httpacquisition.HTTPSource{} }) 12 | } 13 | -------------------------------------------------------------------------------- /pkg/acquisition/journalctl.go: -------------------------------------------------------------------------------- 1 | //go:build !no_datasource_journalctl 2 | 3 | package acquisition 4 | 5 | import ( 6 | journalctlacquisition "github.com/crowdsecurity/crowdsec/pkg/acquisition/modules/journalctl" 7 | ) 8 | 9 | //nolint:gochecknoinits 10 | func init() { 11 | registerDataSource("journalctl", func() DataSource { return &journalctlacquisition.JournalCtlSource{} }) 12 | } 13 | -------------------------------------------------------------------------------- /pkg/acquisition/k8s.go: 
-------------------------------------------------------------------------------- 1 | //go:build !no_datasource_k8saudit 2 | 3 | package acquisition 4 | 5 | import ( 6 | k8sauditacquisition "github.com/crowdsecurity/crowdsec/pkg/acquisition/modules/kubernetesaudit" 7 | ) 8 | 9 | //nolint:gochecknoinits 10 | func init() { 11 | registerDataSource("k8s-audit", func() DataSource { return &k8sauditacquisition.KubernetesAuditSource{} }) 12 | } 13 | -------------------------------------------------------------------------------- /pkg/acquisition/kafka.go: -------------------------------------------------------------------------------- 1 | //go:build !no_datasource_kafka 2 | 3 | package acquisition 4 | 5 | import ( 6 | kafkaacquisition "github.com/crowdsecurity/crowdsec/pkg/acquisition/modules/kafka" 7 | ) 8 | 9 | //nolint:gochecknoinits 10 | func init() { 11 | registerDataSource("kafka", func() DataSource { return &kafkaacquisition.KafkaSource{} }) 12 | } 13 | -------------------------------------------------------------------------------- /pkg/acquisition/kinesis.go: -------------------------------------------------------------------------------- 1 | //go:build !no_datasource_kinesis 2 | 3 | package acquisition 4 | 5 | import ( 6 | kinesisacquisition "github.com/crowdsecurity/crowdsec/pkg/acquisition/modules/kinesis" 7 | ) 8 | 9 | //nolint:gochecknoinits 10 | func init() { 11 | registerDataSource("kinesis", func() DataSource { return &kinesisacquisition.KinesisSource{} }) 12 | } 13 | -------------------------------------------------------------------------------- /pkg/acquisition/loki.go: -------------------------------------------------------------------------------- 1 | //go:build !no_datasource_loki 2 | 3 | package acquisition 4 | 5 | import ( 6 | "github.com/crowdsecurity/crowdsec/pkg/acquisition/modules/loki" 7 | ) 8 | 9 | //nolint:gochecknoinits 10 | func init() { 11 | registerDataSource("loki", func() DataSource { return &loki.LokiSource{} }) 12 | } 13 | -------------------------------------------------------------------------------- /pkg/acquisition/modules/docker/utils.go: -------------------------------------------------------------------------------- 1 | package dockeracquisition 2 | 3 | import ( 4 | "strings" 5 | ) 6 | 7 | func parseLabels(labels map[string]string) map[string]interface{} { 8 | result := make(map[string]interface{}) 9 | for key, value := range labels { 10 | parseKeyToMap(result, key, value) 11 | } 12 | return result 13 | } 14 | 15 | func parseKeyToMap(m map[string]interface{}, key string, value string) { 16 | if !strings.HasPrefix(key, "crowdsec") { 17 | return 18 | } 19 | parts := strings.Split(key, ".") 20 | 21 | if len(parts) < 2 || parts[0] != "crowdsec" { 22 | return 23 | } 24 | 25 | for i := range parts { 26 | if parts[i] == "" { 27 | return 28 | } 29 | } 30 | 31 | for i := 1; i < len(parts)-1; i++ { 32 | if _, ok := m[parts[i]]; !ok { 33 | m[parts[i]] = make(map[string]interface{}) 34 | } 35 | m = m[parts[i]].(map[string]interface{}) 36 | } 37 | m[parts[len(parts)-1]] = value 38 | } 39 | -------------------------------------------------------------------------------- /pkg/acquisition/modules/file/tailline.go: -------------------------------------------------------------------------------- 1 | //go:build !windows 2 | 3 | package fileacquisition 4 | 5 | func trimLine(text string) string { 6 | return text 7 | } 8 | -------------------------------------------------------------------------------- /pkg/acquisition/modules/file/tailline_windows.go: 
-------------------------------------------------------------------------------- 1 | //go:build windows 2 | 3 | package fileacquisition 4 | 5 | import "strings" 6 | 7 | func trimLine(text string) string { 8 | return strings.TrimRight(text, "\r") 9 | } 10 | -------------------------------------------------------------------------------- /pkg/acquisition/modules/file/testdata/bad.gz: -------------------------------------------------------------------------------- 1 | 42 2 | -------------------------------------------------------------------------------- /pkg/acquisition/modules/file/testdata/test.log: -------------------------------------------------------------------------------- 1 | 1 2 | 2 3 | 3 4 | 4 5 | 5 6 | -------------------------------------------------------------------------------- /pkg/acquisition/modules/file/testdata/test.log.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/crowdsecurity/crowdsec/1cede239d8465b0549d10a3a44aab7b96a2cd743/pkg/acquisition/modules/file/testdata/test.log.gz -------------------------------------------------------------------------------- /pkg/acquisition/modules/loki/timestamp.go: -------------------------------------------------------------------------------- 1 | package loki 2 | 3 | import ( 4 | "fmt" 5 | "time" 6 | ) 7 | 8 | type timestamp time.Time 9 | 10 | func (t *timestamp) UnmarshalYAML(unmarshal func(interface{}) error) error { 11 | var tt time.Time 12 | err := unmarshal(&tt) 13 | if err == nil { 14 | *t = timestamp(tt) 15 | return nil 16 | } 17 | var d time.Duration 18 | err = unmarshal(&d) 19 | if err == nil { 20 | *t = timestamp(time.Now().Add(-d)) 21 | fmt.Println("t", time.Time(*t).Format(time.RFC3339)) 22 | return nil 23 | } 24 | return err 25 | } 26 | 27 | func (t *timestamp) IsZero() bool { 28 | return time.Time(*t).IsZero() 29 | } 30 | -------------------------------------------------------------------------------- /pkg/acquisition/modules/victorialogs/internal/vlclient/types.go: -------------------------------------------------------------------------------- 1 | package vlclient 2 | 3 | import ( 4 | "time" 5 | ) 6 | 7 | // Log represents a VictoriaLogs log line 8 | // See: https://docs.victoriametrics.com/victorialogs/querying/#querying-logs 9 | type Log struct { 10 | Message string `json:"_msg"` 11 | Time time.Time `json:"_time"` 12 | } 13 | -------------------------------------------------------------------------------- /pkg/acquisition/modules/wineventlog/testdata/Setup.evtx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/crowdsecurity/crowdsec/1cede239d8465b0549d10a3a44aab7b96a2cd743/pkg/acquisition/modules/wineventlog/testdata/Setup.evtx -------------------------------------------------------------------------------- /pkg/acquisition/s3.go: -------------------------------------------------------------------------------- 1 | //go:build !no_datasource_s3 2 | 3 | package acquisition 4 | 5 | import ( 6 | s3acquisition "github.com/crowdsecurity/crowdsec/pkg/acquisition/modules/s3" 7 | ) 8 | 9 | //nolint:gochecknoinits 10 | func init() { 11 | registerDataSource("s3", func() DataSource { return &s3acquisition.S3Source{} }) 12 | } 13 | -------------------------------------------------------------------------------- /pkg/acquisition/syslog.go: -------------------------------------------------------------------------------- 1 | //go:build !no_datasource_syslog 2 | 3 | package acquisition 4 | 5 | 
import ( 6 | syslogacquisition "github.com/crowdsecurity/crowdsec/pkg/acquisition/modules/syslog" 7 | ) 8 | 9 | //nolint:gochecknoinits 10 | func init() { 11 | registerDataSource("syslog", func() DataSource { return &syslogacquisition.SyslogSource{} }) 12 | } 13 | -------------------------------------------------------------------------------- /pkg/acquisition/testdata/backward_compat.yaml: -------------------------------------------------------------------------------- 1 | filename: /tmp/test.log 2 | labels: 3 | type: syslog 4 | --- 5 | filenames: 6 | - /tmp/test*.log 7 | labels: 8 | type: syslog 9 | --- 10 | # to be uncommented when we reimplement back journalctl 11 | # journalctl_filter: 12 | # - "_SYSTEMD_UNIT=ssh.service" 13 | # labels: 14 | # type: syslog 15 | --- 16 | -------------------------------------------------------------------------------- /pkg/acquisition/testdata/bad_filetype.yaml: -------------------------------------------------------------------------------- 1 | type: file 2 | filenames: /tmp/tltlt.log #it should be an array 3 | labels: 4 | type: syslog 5 | -------------------------------------------------------------------------------- /pkg/acquisition/testdata/bad_source.yaml: -------------------------------------------------------------------------------- 1 | source: does_not_exist 2 | labels: 3 | type: syslog 4 | foobar: toto 5 | -------------------------------------------------------------------------------- /pkg/acquisition/testdata/badyaml.yaml: -------------------------------------------------------------------------------- 1 | 0 && len(socket) > limit { 20 | return fmt.Errorf("%w (path length exceeds system limit: %d > %d)", err, len(socket), limit) 21 | } 22 | return err 23 | } 24 | -------------------------------------------------------------------------------- /pkg/csplugin/helpers.go: -------------------------------------------------------------------------------- 1 | package csplugin 2 | 3 | import ( 4 | "html" 5 | "os" 6 | "text/template" 7 | 8 | log "github.com/sirupsen/logrus" 9 | 10 | "github.com/crowdsecurity/crowdsec/pkg/exprhelpers" 11 | "github.com/crowdsecurity/crowdsec/pkg/models" 12 | ) 13 | 14 | var helpers = template.FuncMap{ 15 | "GetMeta": func(a *models.Alert, metaName string) []string { 16 | var metaValues []string 17 | for _, evt := range a.Events { 18 | for _, meta := range evt.Meta { 19 | if meta.Key == metaName { 20 | metaValues = append(metaValues, meta.Value) 21 | } 22 | } 23 | } 24 | return metaValues 25 | }, 26 | "CrowdsecCTI": func(x string) any { 27 | ret, err := exprhelpers.CrowdsecCTI(x) 28 | if err != nil { 29 | log.Warningf("error while calling CrowdsecCTI : %s", err) 30 | } 31 | return ret 32 | }, 33 | "Hostname": os.Hostname, 34 | "HTMLEscape": html.EscapeString, 35 | } 36 | 37 | func funcMap() template.FuncMap { 38 | return helpers 39 | } 40 | -------------------------------------------------------------------------------- /pkg/csplugin/listfiles.go: -------------------------------------------------------------------------------- 1 | package csplugin 2 | 3 | import ( 4 | "os" 5 | "path/filepath" 6 | ) 7 | 8 | // helper which gives paths to all files in the given directory non-recursively 9 | func listFilesAtPath(path string) ([]string, error) { 10 | filePaths := make([]string, 0) 11 | files, err := os.ReadDir(path) 12 | if err != nil { 13 | return nil, err 14 | } 15 | for _, file := range files { 16 | if !file.IsDir() { 17 | filePaths = append(filePaths, filepath.Join(path, file.Name())) 18 | } 19 | } 20 | return 
filePaths, nil 21 | } 22 | -------------------------------------------------------------------------------- /pkg/csplugin/testdata/dummy.yaml: -------------------------------------------------------------------------------- 1 | type: dummy # Don't change 2 | name: dummy_default # Must match the registered plugin in the profile 3 | 4 | # One of "trace", "debug", "info", "warn", "error", "off" 5 | log_level: info 6 | 7 | # group_wait: # Time to wait collecting alerts before relaying a message to this plugin, eg "30s" 8 | # group_threshold: # Amount of alerts that triggers a message before group_wait has expired, eg "10" 9 | # max_retry: # Number of attempts to relay messages to plugins in case of error 10 | # timeout: # Time to wait for response from the plugin before considering the attempt a failure, eg "10s" 11 | 12 | #------------------------- 13 | # plugin-specific options 14 | 15 | # The following template receives a list of models.Alert objects 16 | # The output goes in the logs and to a text file, if defined 17 | format: | 18 | {{.|toJson}} 19 | 20 | # 21 | output_file: ${OUTFILE} 22 | -------------------------------------------------------------------------------- /pkg/csplugin/utils_js.go: -------------------------------------------------------------------------------- 1 | package csplugin 2 | 3 | import "os/exec" 4 | 5 | //All functions are empty, just to make the code compile when targeting js/wasm 6 | 7 | func (pb *PluginBroker) CreateCmd(binaryPath string) (*exec.Cmd, error) { 8 | return nil, nil 9 | } 10 | 11 | func getPluginTypeAndSubtypeFromPath(path string) (string, string, error) { 12 | return "", "", nil 13 | } 14 | 15 | func pluginIsValid(path string) error { 16 | return nil 17 | } 18 | -------------------------------------------------------------------------------- /pkg/cticlient/pagination.go: -------------------------------------------------------------------------------- 1 | package cticlient 2 | 3 | type FirePaginator struct { 4 | client *CrowdsecCTIClient 5 | params FireParams 6 | currentPage int 7 | done bool 8 | } 9 | 10 | func (p *FirePaginator) Next() ([]FireItem, error) { 11 | if p.done { 12 | return nil, nil 13 | } 14 | p.params.Page = &p.currentPage 15 | resp, err := p.client.Fire(p.params) 16 | if err != nil { 17 | return nil, err 18 | } 19 | p.currentPage++ 20 | if resp.Links.Next == nil { 21 | p.done = true 22 | } 23 | return resp.Items, nil 24 | } 25 | 26 | func NewFirePaginator(client *CrowdsecCTIClient, params FireParams) *FirePaginator { 27 | startPage := 1 28 | if params.Page != nil { 29 | startPage = *params.Page 30 | } 31 | return &FirePaginator{ 32 | client: client, 33 | params: params, 34 | currentPage: startPage, 35 | } 36 | } 37 | -------------------------------------------------------------------------------- /pkg/cwhub/pathseparator_unix.go: -------------------------------------------------------------------------------- 1 | //go:build unix 2 | 3 | package cwhub 4 | 5 | import "strings" 6 | 7 | func hasPathSuffix(hubpath string, remotePath string) bool { 8 | return strings.HasSuffix(hubpath, remotePath) 9 | } 10 | -------------------------------------------------------------------------------- /pkg/cwhub/pathseparator_windows.go: -------------------------------------------------------------------------------- 1 | package cwhub 2 | 3 | import ( 4 | "path/filepath" 5 | "strings" 6 | ) 7 | 8 | func hasPathSuffix(hubpath string, remotePath string) bool { 9 | newPath := filepath.ToSlash(hubpath) 10 | return strings.HasSuffix(newPath, remotePath) 11 | }
12 | -------------------------------------------------------------------------------- /pkg/cwhub/relativepath.go: -------------------------------------------------------------------------------- 1 | package cwhub 2 | 3 | import ( 4 | "path/filepath" 5 | "strings" 6 | ) 7 | 8 | // relativePathComponents returns the list of path components after baseDir. 9 | // If path is not inside baseDir, it returns an empty slice. 10 | func relativePathComponents(path string, baseDir string) []string { 11 | absPath, err := filepath.Abs(path) 12 | if err != nil { 13 | return []string{} 14 | } 15 | 16 | absBaseDir, err := filepath.Abs(baseDir) 17 | if err != nil { 18 | return []string{} 19 | } 20 | 21 | // is path inside baseDir? 22 | relPath, err := filepath.Rel(absBaseDir, absPath) 23 | if err != nil || strings.HasPrefix(relPath, "..") || relPath == "." { 24 | return []string{} 25 | } 26 | 27 | return strings.Split(relPath, string(filepath.Separator)) 28 | } 29 | -------------------------------------------------------------------------------- /pkg/cwhub/testdata/collection_v1.yaml: -------------------------------------------------------------------------------- 1 | scenarios: 2 | - crowdsecurity/foobar_scenario -------------------------------------------------------------------------------- /pkg/cwhub/testdata/collection_v2.yaml: -------------------------------------------------------------------------------- 1 | scenarios: 2 | - crowdsecurity/foobar_scenario 3 | - crowdsecurity/barfoo_scenario -------------------------------------------------------------------------------- /pkg/cwhub/testdata/foobar_parser.yaml: -------------------------------------------------------------------------------- 1 | onsuccess: next_stage 2 | filter: evt.Parsed.program == 'foobar_parser' 3 | name: crowdsecurity/foobar_parser 4 | #debug: true 5 | description: A parser for foobar_parser WAF 6 | grok: 7 | name: foobar_parser 8 | apply_on: message -------------------------------------------------------------------------------- /pkg/cwversion/constraint/constraint.go: -------------------------------------------------------------------------------- 1 | package constraint 2 | 3 | import ( 4 | "fmt" 5 | 6 | goversion "github.com/hashicorp/go-version" 7 | ) 8 | 9 | const ( 10 | Parser = ">= 1.0, <= 3.0" 11 | Scenario = ">= 1.0, <= 3.0" 12 | API = "v1" 13 | Acquis = ">= 1.0, < 2.0" 14 | ) 15 | 16 | func Satisfies(strvers string, constraint string) (bool, error) { 17 | vers, err := goversion.NewVersion(strvers) 18 | if err != nil { 19 | return false, fmt.Errorf("failed to parse '%s': %w", strvers, err) 20 | } 21 | 22 | constraints, err := goversion.NewConstraint(constraint) 23 | if err != nil { 24 | return false, fmt.Errorf("failed to parse constraint '%s'", constraint) 25 | } 26 | 27 | if !constraints.Check(vers) { 28 | return false, nil 29 | } 30 | 31 | return true, nil 32 | } 33 | -------------------------------------------------------------------------------- /pkg/database/ent/generate.go: -------------------------------------------------------------------------------- 1 | package ent 2 | 3 | //go:generate go run -mod=mod entgo.io/ent/cmd/ent@v0.14.2 generate ./schema 4 | 5 | -------------------------------------------------------------------------------- /pkg/database/ent/helpers.go: -------------------------------------------------------------------------------- 1 | package ent 2 | 3 | func (m *Machine) GetOsname() string { 4 | return m.Osname 5 | } 6 | 7 | func (b *Bouncer) GetOsname() string { 8 | return b.Osname 9 | } 10 | 11 
| func (m *Machine) GetOsversion() string { 12 | return m.Osversion 13 | } 14 | 15 | func (b *Bouncer) GetOsversion() string { 16 | return b.Osversion 17 | } 18 | 19 | func (m *Machine) GetFeatureflags() string { 20 | return m.Featureflags 21 | } 22 | 23 | func (b *Bouncer) GetFeatureflags() string { 24 | return b.Featureflags 25 | } 26 | -------------------------------------------------------------------------------- /pkg/database/ent/runtime/runtime.go: -------------------------------------------------------------------------------- 1 | // Code generated by ent, DO NOT EDIT. 2 | 3 | package runtime 4 | 5 | // The schema-stitching logic is generated in github.com/crowdsecurity/crowdsec/pkg/database/ent/runtime.go 6 | 7 | const ( 8 | Version = "v0.14.2" // Version of ent codegen. 9 | Sum = "h1:ywld/j2Rx4EmnIKs8eZ29cbFA1zpB+DA9TLL5l3rlq0=" // Sum of ent codegen. 10 | ) 11 | -------------------------------------------------------------------------------- /pkg/database/ent/schema/lock.go: -------------------------------------------------------------------------------- 1 | package schema 2 | 3 | import ( 4 | "entgo.io/ent" 5 | "entgo.io/ent/schema/field" 6 | 7 | "github.com/crowdsecurity/crowdsec/pkg/types" 8 | ) 9 | 10 | type Lock struct { 11 | ent.Schema 12 | } 13 | 14 | func (Lock) Fields() []ent.Field { 15 | return []ent.Field{ 16 | field.String("name").Unique().Immutable().StructTag(`json:"name"`), 17 | field.Time("created_at").Default(types.UtcNow).StructTag(`json:"created_at"`).Immutable(), 18 | } 19 | } 20 | 21 | func (Lock) Edges() []ent.Edge { 22 | return nil 23 | } 24 | -------------------------------------------------------------------------------- /pkg/database/errors.go: -------------------------------------------------------------------------------- 1 | package database 2 | 3 | import "errors" 4 | 5 | var ( 6 | UserExists = errors.New("user already exist") 7 | UserNotExists = errors.New("user doesn't exist") 8 | HashError = errors.New("unable to hash") 9 | InsertFail = errors.New("unable to insert row") 10 | QueryFail = errors.New("unable to query") 11 | UpdateFail = errors.New("unable to update") 12 | DeleteFail = errors.New("unable to delete") 13 | ItemNotFound = errors.New("object not found") 14 | ParseTimeFail = errors.New("unable to parse time") 15 | ParseDurationFail = errors.New("unable to parse duration") 16 | MarshalFail = errors.New("unable to serialize") 17 | BulkError = errors.New("unable to insert bulk") 18 | ParseType = errors.New("unable to parse type") 19 | InvalidIPOrRange = errors.New("invalid ip address / range") 20 | InvalidFilter = errors.New("invalid filter") 21 | ) 22 | -------------------------------------------------------------------------------- /pkg/database/file_utils.go: -------------------------------------------------------------------------------- 1 | //go:build !windows 2 | 3 | package database 4 | 5 | import ( 6 | "io/fs" 7 | "os" 8 | ) 9 | 10 | func setFilePerm(path string, mode fs.FileMode) error { 11 | return os.Chmod(path, mode) 12 | } 13 | -------------------------------------------------------------------------------- /pkg/dumps/bucket_dump.go: -------------------------------------------------------------------------------- 1 | package dumps 2 | 3 | import ( 4 | "io" 5 | "os" 6 | 7 | "gopkg.in/yaml.v3" 8 | 9 | "github.com/crowdsecurity/crowdsec/pkg/types" 10 | ) 11 | 12 | type BucketPourInfo map[string][]types.Event 13 | 14 | func LoadBucketPourDump(filepath string) (*BucketPourInfo, error) { 15 | dumpData, err := os.Open(filepath) 16 | 
if err != nil { 17 | return nil, err 18 | } 19 | defer dumpData.Close() 20 | 21 | results, err := io.ReadAll(dumpData) 22 | if err != nil { 23 | return nil, err 24 | } 25 | 26 | var bucketDump BucketPourInfo 27 | 28 | if err := yaml.Unmarshal(results, &bucketDump); err != nil { 29 | return nil, err 30 | } 31 | 32 | return &bucketDump, nil 33 | } 34 | -------------------------------------------------------------------------------- /pkg/emoji/emoji.go: -------------------------------------------------------------------------------- 1 | package emoji 2 | 3 | const ( 4 | CheckMarkButton = "\u2705" // ✅ 5 | CheckMark = "\u2714\ufe0f" // ✔️ 6 | CrossMark = "\u274c" // ❌ 7 | GreenCircle = "\U0001f7e2" // 🟢 8 | House = "\U0001f3e0" // 🏠 9 | Package = "\U0001f4e6" // 📦 10 | Prohibited = "\U0001f6ab" // 🚫 11 | QuestionMark = "\u2753" // ❓ 12 | RedCircle = "\U0001f534" // 🔴 13 | Warning = "\u26a0\ufe0f" // ⚠️ 14 | InboxTray = "\U0001f4e5" // 📥 15 | DownArrow = "\u2b07" // ⬇️ 16 | Wastebasket = "\U0001f5d1" // 🗑 17 | Sync = "\U0001F504" // 🔄 official name is Anticlockwise Downwards and Upwards Open Circle Arrows and I'm not even joking 18 | ) 19 | -------------------------------------------------------------------------------- /pkg/exprhelpers/debuggerstub_test.go: -------------------------------------------------------------------------------- 1 | //go:build !expr_debug 2 | package exprhelpers 3 | 4 | import ( 5 | "testing" 6 | ) 7 | 8 | func TestFailWithoutExprDebug(t *testing.T) { 9 | t.Fatal("To test pkg/exprhelpers, you need the expr_debug build tag") 10 | } 11 | -------------------------------------------------------------------------------- /pkg/exprhelpers/libinjection.go: -------------------------------------------------------------------------------- 1 | package exprhelpers 2 | 3 | import "github.com/corazawaf/libinjection-go" 4 | 5 | func LibInjectionIsSQLI(params ...any) (any, error) { 6 | str := params[0].(string) 7 | 8 | ret, _ := libinjection.IsSQLi(str) 9 | return ret, nil 10 | } 11 | 12 | func LibInjectionIsXSS(params ...any) (any, error) { 13 | str := params[0].(string) 14 | 15 | ret := libinjection.IsXSS(str) 16 | return ret, nil 17 | } 18 | -------------------------------------------------------------------------------- /pkg/exprhelpers/tests/test_data.txt: -------------------------------------------------------------------------------- 1 | Crowdsec 2 | Crowdsecurity 3 | CrowdSec -------------------------------------------------------------------------------- /pkg/exprhelpers/tests/test_data_no_type.txt: -------------------------------------------------------------------------------- 1 | Crowdsec 2 | Crowdsecurity 3 | CrowdSec -------------------------------------------------------------------------------- /pkg/exprhelpers/tests/test_data_re.txt: -------------------------------------------------------------------------------- 1 | .*Crowdsec.* 2 | .*Crowd[sS]ec.* -------------------------------------------------------------------------------- /pkg/exprhelpers/tests/test_empty_line.txt: -------------------------------------------------------------------------------- 1 | foo 2 | 3 | #toto 4 | 5 | 6 | bar 7 | 8 | 9 | 10 | 11 | 12 | baz 13 | -------------------------------------------------------------------------------- /pkg/hubtest/helpers.go: -------------------------------------------------------------------------------- 1 | package hubtest 2 | 3 | import ( 4 | "path/filepath" 5 | ) 6 | 7 | func basename(params ...any) (any, error) { 8 | s := params[0].(string) 9 | return 
filepath.Base(s), nil 10 | } 11 | -------------------------------------------------------------------------------- /pkg/hubtest/regexp.go: -------------------------------------------------------------------------------- 1 | package hubtest 2 | 3 | import ( 4 | "regexp" 5 | ) 6 | 7 | var ( 8 | variableRE = regexp.MustCompile(`(?P[^ =]+) == .*`) 9 | parserResultRE = regexp.MustCompile(`^results\["[^"]+"\]\["(?P[^"]+)"\]\[[0-9]+\]\.Evt\..*`) 10 | scenarioResultRE = regexp.MustCompile(`^results\[[0-9]+\].Overflow.Alert.GetScenario\(\) == "(?P[^"]+)"`) 11 | ) 12 | -------------------------------------------------------------------------------- /pkg/hubtest/utils_test.go: -------------------------------------------------------------------------------- 1 | package hubtest 2 | 3 | import ( 4 | "testing" 5 | 6 | "github.com/stretchr/testify/require" 7 | ) 8 | 9 | func TestCheckPathNotContained(t *testing.T) { 10 | require.NoError(t, checkPathNotContained("/foo", "/bar")) 11 | require.NoError(t, checkPathNotContained("/foo/bar", "/foo")) 12 | require.NoError(t, checkPathNotContained("/foo/bar", "/")) 13 | require.NoError(t, checkPathNotContained("/path/to/somewhere", "/path/to/somewhere-else")) 14 | require.NoError(t, checkPathNotContained("~/.local/path/to/somewhere", "~/.local/path/to/somewhere-else")) 15 | require.Error(t, checkPathNotContained("/foo", "/foo/bar")) 16 | require.Error(t, checkPathNotContained("/", "/foo")) 17 | require.Error(t, checkPathNotContained("/", "/foo/bar/baz")) 18 | } 19 | -------------------------------------------------------------------------------- /pkg/leakybucket/buckets.go: -------------------------------------------------------------------------------- 1 | package leakybucket 2 | 3 | import ( 4 | "crypto/sha1" 5 | "fmt" 6 | "sync" 7 | ) 8 | 9 | // Buckets is the struct used to hold buckets in the context of 10 | // main.go the idea is to have one struct to rule them all 11 | type Buckets struct { 12 | wgDumpState *sync.WaitGroup 13 | wgPour *sync.WaitGroup 14 | Bucket_map *sync.Map 15 | } 16 | 17 | // NewBuckets create the Buckets struct 18 | func NewBuckets() *Buckets { 19 | return &Buckets{ 20 | wgDumpState: &sync.WaitGroup{}, 21 | wgPour: &sync.WaitGroup{}, 22 | Bucket_map: &sync.Map{}, 23 | } 24 | } 25 | 26 | func GetKey(bucketCfg BucketFactory, stackkey string) string { 27 | return fmt.Sprintf("%x", sha1.Sum([]byte(bucketCfg.Filter+stackkey+bucketCfg.Name))) 28 | } 29 | -------------------------------------------------------------------------------- /pkg/leakybucket/tests/conditional-bucket/bucket.yaml: -------------------------------------------------------------------------------- 1 | type: conditional 2 | name: test/conditional 3 | #debug: true 4 | description: "conditional bucket" 5 | filter: "evt.Meta.log_type == 'http_access-log'" 6 | groupby: evt.Meta.source_ip 7 | condition: any(queue.Queue, {.Meta.http_path == "/"}) and any(queue.Queue, {.Meta.http_path == "/foo"}) 8 | leakspeed: 1s 9 | capacity: -1 10 | labels: 11 | type: overflow_1 -------------------------------------------------------------------------------- /pkg/leakybucket/tests/conditional-bucket/scenarios.yaml: -------------------------------------------------------------------------------- 1 | - filename: {{.TestDirectory}}/bucket.yaml -------------------------------------------------------------------------------- /pkg/leakybucket/tests/guillotine-bayesian-bucket/bucket.yaml: -------------------------------------------------------------------------------- 1 | type: bayesian 2 | name: 
test/guillotine-bayesian 3 | debug: true 4 | description: "bayesian bucket" 5 | filter: "evt.Meta.log_type == 'http_access-log' || evt.Meta.log_type == 'ssh_access-log'" 6 | groupby: evt.Meta.source_ip 7 | bayesian_prior: 0.5 8 | bayesian_threshold: 0.8 9 | bayesian_conditions: 10 | - condition: evt.Meta.http_path == "/" 11 | prob_given_evil: 0.8 12 | prob_given_benign: 0.2 13 | guillotine : true 14 | - condition: evt.Meta.ssh_user == "admin" 15 | prob_given_evil: 0.9 16 | prob_given_benign: 0.5 17 | guillotine : true 18 | leakspeed: 30s 19 | capacity: -1 20 | labels: 21 | type: overflow_1 -------------------------------------------------------------------------------- /pkg/leakybucket/tests/guillotine-bayesian-bucket/scenarios.yaml: -------------------------------------------------------------------------------- 1 | - filename: {{.TestDirectory}}/bucket.yaml -------------------------------------------------------------------------------- /pkg/leakybucket/tests/hub/index.json: -------------------------------------------------------------------------------- 1 | {} 2 | -------------------------------------------------------------------------------- /pkg/leakybucket/tests/leaky-fixedqueue/bucket.yaml: -------------------------------------------------------------------------------- 1 | type: leaky 2 | debug: true 3 | name: test/simple-leaky 4 | description: "Simple leaky" 5 | filter: "evt.Line.Labels.type =='testlog'" 6 | leakspeed: "10s" 7 | capacity: 5 8 | cache_size: 3 9 | groupby: evt.Meta.source_ip 10 | labels: 11 | type: overflow_1 12 | 13 | -------------------------------------------------------------------------------- /pkg/leakybucket/tests/leaky-fixedqueue/scenarios.yaml: -------------------------------------------------------------------------------- 1 | - filename: {{.TestDirectory}}/bucket.yaml 2 | 3 | -------------------------------------------------------------------------------- /pkg/leakybucket/tests/leaky-scope-range-expression/bucket.yaml: -------------------------------------------------------------------------------- 1 | type: leaky 2 | debug: true 3 | name: test/leaky-scope-range-expression 4 | description: "Leaky with scope range-expression" 5 | filter: "evt.Line.Labels.type =='testlog'" 6 | leakspeed: "10s" 7 | capacity: 1 8 | groupby: evt.Meta.source_ip 9 | labels: 10 | type: overflow_1 11 | scope: 12 | type: Range 13 | expression: IpToRange(evt.Meta.source_ip, "/16") 14 | 15 | -------------------------------------------------------------------------------- /pkg/leakybucket/tests/leaky-scope-range-expression/scenarios.yaml: -------------------------------------------------------------------------------- 1 | - filename: {{.TestDirectory}}/bucket.yaml -------------------------------------------------------------------------------- /pkg/leakybucket/tests/multiple-bayesian-bucket/bucket.yaml: -------------------------------------------------------------------------------- 1 | type: bayesian 2 | name: test/multiple-bayesian 3 | debug: true 4 | description: "bayesian bucket" 5 | filter: "evt.Meta.log_type == 'http_access-log' || evt.Meta.log_type == 'ssh_access-log'" 6 | groupby: evt.Meta.source_ip 7 | bayesian_prior: 0.5 8 | bayesian_threshold: 0.8 9 | bayesian_conditions: 10 | - condition: evt.Meta.http_path == "/" 11 | prob_given_evil: 0.8 12 | prob_given_benign: 0.2 13 | guillotine : true 14 | - condition: evt.Meta.ssh_user == "admin" 15 | prob_given_evil: 0.9 16 | prob_given_benign: 0.5 17 | guillotine : true 18 | leakspeed: 30s 19 | capacity: -1 20 | labels: 21 | type: 
overflow_1 -------------------------------------------------------------------------------- /pkg/leakybucket/tests/multiple-bayesian-bucket/scenarios.yaml: -------------------------------------------------------------------------------- 1 | - filename: {{.TestDirectory}}/bucket.yaml -------------------------------------------------------------------------------- /pkg/leakybucket/tests/overflow-with-meta-and-information/bucket.yaml: -------------------------------------------------------------------------------- 1 | # ssh bruteforce 2 | version: 1.0 3 | type: trigger 4 | debug: true 5 | name: test/simple-trigger 6 | description: "Simple trigger" 7 | filter: "evt.Line.Labels.type =='testlog'" 8 | labels: 9 | type: overflow_1 10 | -------------------------------------------------------------------------------- /pkg/leakybucket/tests/overflow-with-meta-and-information/scenarios.yaml: -------------------------------------------------------------------------------- 1 | - filename: {{.TestDirectory}}/bucket.yaml 2 | 3 | -------------------------------------------------------------------------------- /pkg/leakybucket/tests/overflow-with-meta/bucket.yaml: -------------------------------------------------------------------------------- 1 | # ssh bruteforce 2 | type: trigger 3 | debug: true 4 | name: test/simple-trigger 5 | description: "Simple trigger" 6 | filter: "evt.Line.Labels.type =='testlog'" 7 | labels: 8 | type: overflow_1 9 | 10 | -------------------------------------------------------------------------------- /pkg/leakybucket/tests/overflow-with-meta/scenarios.yaml: -------------------------------------------------------------------------------- 1 | - filename: {{.TestDirectory}}/bucket.yaml 2 | 3 | -------------------------------------------------------------------------------- /pkg/leakybucket/tests/simple-bayesian-bucket/bucket.yaml: -------------------------------------------------------------------------------- 1 | type: bayesian 2 | name: test/simple-bayesian 3 | debug: true 4 | description: "bayesian bucket" 5 | filter: "evt.Meta.log_type == 'http_access-log' || evt.Meta.log_type == 'ssh_access-log'" 6 | groupby: evt.Meta.source_ip 7 | bayesian_prior: 0.5 8 | bayesian_threshold: 0.8 9 | bayesian_conditions: 10 | - condition: any(queue.Queue, {.Meta.http_path == "/"}) 11 | prob_given_evil: 0.8 12 | prob_given_benign: 0.2 13 | - condition: any(queue.Queue, {.Meta.ssh_user == "admin"}) 14 | prob_given_evil: 0.9 15 | prob_given_benign: 0.5 16 | leakspeed: 30s 17 | capacity: -1 18 | labels: 19 | type: overflow_1 -------------------------------------------------------------------------------- /pkg/leakybucket/tests/simple-bayesian-bucket/scenarios.yaml: -------------------------------------------------------------------------------- 1 | - filename: {{.TestDirectory}}/bucket.yaml -------------------------------------------------------------------------------- /pkg/leakybucket/tests/simple-counter-bh/bucket.yaml: -------------------------------------------------------------------------------- 1 | type: counter 2 | name: test/simple-trigger 3 | description: "Simple leaky" 4 | filter: "evt.Line.Labels.type =='testlog'" 5 | duration: 1s 6 | overflow_filter: any(queue.Queue, {.Meta.source_ip != '1.2.3.4'} ) 7 | capacity: -1 8 | groupby: evt.Meta.source_ip 9 | labels: 10 | type: overflow_1 11 | 12 | -------------------------------------------------------------------------------- /pkg/leakybucket/tests/simple-counter-bh/scenarios.yaml: 
-------------------------------------------------------------------------------- 1 | - filename: {{.TestDirectory}}/bucket.yaml 2 | 3 | -------------------------------------------------------------------------------- /pkg/leakybucket/tests/simple-counter-bh/test.json: -------------------------------------------------------------------------------- 1 | { 2 | "lines": [ 3 | { 4 | "Line": { 5 | "Labels": { 6 | "type": "testlog" 7 | }, 8 | "Raw": "xxheader VALUE1 trailing stuff" 9 | }, 10 | "MarshaledTime": "2020-01-01T10:00:00+00:00", 11 | "Meta": { 12 | "source_ip": "1.2.3.4" 13 | } 14 | }, 15 | { 16 | "Line": { 17 | "Labels": { 18 | "type": "testlog" 19 | }, 20 | "Raw": "xxheader VALUE1 trailing stuff" 21 | }, 22 | "MarshaledTime": "2020-01-01T10:00:00+00:00", 23 | "Meta": { 24 | "source_ip": "1.2.3.4" 25 | } 26 | } 27 | ], 28 | "results": [ 29 | { 30 | "Alert": { 31 | } 32 | } 33 | ] 34 | } 35 | 36 | -------------------------------------------------------------------------------- /pkg/leakybucket/tests/simple-counter-timeout/bucket.yaml: -------------------------------------------------------------------------------- 1 | type: counter 2 | name: test/simple-trigger 3 | description: "Simple leaky" 4 | filter: "evt.Line.Labels.type =='testlog'" 5 | duration: 10s 6 | capacity: -1 7 | groupby: evt.Meta.source_ip 8 | labels: 9 | type: overflow_1 10 | 11 | -------------------------------------------------------------------------------- /pkg/leakybucket/tests/simple-counter-timeout/scenarios.yaml: -------------------------------------------------------------------------------- 1 | - filename: {{.TestDirectory}}/bucket.yaml 2 | 3 | -------------------------------------------------------------------------------- /pkg/leakybucket/tests/simple-counter-timeout/test.json: -------------------------------------------------------------------------------- 1 | { 2 | "lines": [ 3 | { 4 | "Line": { 5 | "Labels": { 6 | "type": "testlog" 7 | }, 8 | "Raw": "xxheader VALUE1 trailing stuff" 9 | }, 10 | "MarshaledTime": "2020-01-01T10:00:00+00:00", 11 | "Meta": { 12 | "source_ip": "1.2.3.4" 13 | } 14 | }, 15 | { 16 | "Line": { 17 | "Labels": { 18 | "type": "testlog" 19 | }, 20 | "Raw": "xxheader VALUE1 trailing stuff" 21 | }, 22 | "MarshaledTime": "2020-01-01T10:00:00+00:00", 23 | "Meta": { 24 | "source_ip": "1.2.3.4" 25 | } 26 | } 27 | ], 28 | "results": null 29 | } 30 | 31 | -------------------------------------------------------------------------------- /pkg/leakybucket/tests/simple-counter/bucket.yaml: -------------------------------------------------------------------------------- 1 | type: counter 2 | name: test/simple-counter 3 | description: "Simple leaky" 4 | filter: "evt.Line.Labels.type =='testlog'" 5 | duration: 1s 6 | capacity: -1 7 | groupby: evt.Meta.source_ip 8 | labels: 9 | type: overflow_1 10 | 11 | -------------------------------------------------------------------------------- /pkg/leakybucket/tests/simple-counter/scenarios.yaml: -------------------------------------------------------------------------------- 1 | - filename: {{.TestDirectory}}/bucket.yaml 2 | 3 | -------------------------------------------------------------------------------- /pkg/leakybucket/tests/simple-leaky-blackhole/bucket.yaml: -------------------------------------------------------------------------------- 1 | # ssh bruteforce 2 | type: leaky 3 | debug: true 4 | name: test/simple-leaky 5 | description: "Simple leaky" 6 | filter: "evt.Line.Labels.type =='testlog'" 7 | leakspeed: "10s" 8 | capacity: 1 9 | blackhole: 1m 10 | 
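# note: blackhole keeps this bucket silent for 1m after it overflows, so repeated overflows for the same groupby key (source_ip) are discarded during that window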
groupby: evt.Meta.source_ip 11 | labels: 12 | type: overflow_1 13 | 14 | -------------------------------------------------------------------------------- /pkg/leakybucket/tests/simple-leaky-blackhole/scenarios.yaml: -------------------------------------------------------------------------------- 1 | - filename: {{.TestDirectory}}/bucket.yaml 2 | 3 | -------------------------------------------------------------------------------- /pkg/leakybucket/tests/simple-leaky-cancel_on/bucket.yaml: -------------------------------------------------------------------------------- 1 | type: leaky 2 | debug: true 3 | name: test/simple-leaky-cancel 4 | description: "Simple leaky" 5 | filter: "evt.Line.Labels.type =='testlog'" 6 | cancel_on: evt.Parsed.random_value == '42' 7 | leakspeed: "10s" 8 | blackhole: 1m 9 | capacity: 1 10 | groupby: evt.Meta.source_ip 11 | labels: 12 | type: overflow_1 13 | 14 | -------------------------------------------------------------------------------- /pkg/leakybucket/tests/simple-leaky-cancel_on/scenarios.yaml: -------------------------------------------------------------------------------- 1 | - filename: {{.TestDirectory}}/bucket.yaml 2 | 3 | -------------------------------------------------------------------------------- /pkg/leakybucket/tests/simple-leaky-overflow/bucket.yaml: -------------------------------------------------------------------------------- 1 | type: leaky 2 | debug: true 3 | name: test/simple-leaky 4 | description: "Simple leaky" 5 | filter: "evt.Line.Labels.type =='testlog'" 6 | leakspeed: "10s" 7 | capacity: 1 8 | groupby: evt.Meta.source_ip 9 | labels: 10 | type: overflow_1 11 | 12 | -------------------------------------------------------------------------------- /pkg/leakybucket/tests/simple-leaky-overflow/scenarios.yaml: -------------------------------------------------------------------------------- 1 | - filename: {{.TestDirectory}}/bucket.yaml 2 | 3 | -------------------------------------------------------------------------------- /pkg/leakybucket/tests/simple-leaky-ovflwfilter/bucket.yaml: -------------------------------------------------------------------------------- 1 | # ssh bruteforce 2 | type: leaky 3 | debug: true 4 | name: test/filter-discard 5 | description: "ko" 6 | filter: "evt.Line.Labels.type =='testlog'" 7 | leakspeed: "10s" 8 | capacity: 1 9 | overflow_filter: any(queue.Queue, { Atof(.Meta.specvalue) > 3}) 10 | #overflow_filter: Atof() 11 | groupby: evt.Meta.source_ip 12 | labels: 13 | type: overflow_1 14 | --- 15 | # ssh bruteforce 16 | type: leaky 17 | debug: true 18 | name: test/filter-ok 19 | description: "ok" 20 | filter: "evt.Line.Labels.type =='testlog'" 21 | leakspeed: "10s" 22 | capacity: 1 23 | overflow_filter: any(queue.Queue, { Atof(.Meta.specvalue) > 1}) 24 | #overflow_filter: Atof() 25 | groupby: evt.Meta.source_ip 26 | labels: 27 | type: overflow_2 28 | -------------------------------------------------------------------------------- /pkg/leakybucket/tests/simple-leaky-ovflwfilter/scenarios.yaml: -------------------------------------------------------------------------------- 1 | - filename: {{.TestDirectory}}/bucket.yaml 2 | 3 | -------------------------------------------------------------------------------- /pkg/leakybucket/tests/simple-leaky-underflow/bucket.yaml: -------------------------------------------------------------------------------- 1 | # ssh bruteforce 2 | type: leaky 3 | debug: true 4 | name: test/simple-leaky 5 | description: "Simple leaky" 6 | filter: "evt.Line.Labels.type =='testlog'" 7 | 
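# note: with a 0.5s leakspeed against a capacity of 2, a single poured event leaks out almost immediately, so this bucket is expected to underflow (drain empty) rather than overflow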
leakspeed: "0.5s" 8 | capacity: 2 9 | groupby: evt.Meta.source_ip 10 | labels: 11 | type: overflow_1 12 | 13 | -------------------------------------------------------------------------------- /pkg/leakybucket/tests/simple-leaky-underflow/scenarios.yaml: -------------------------------------------------------------------------------- 1 | - filename: {{.TestDirectory}}/bucket.yaml 2 | 3 | -------------------------------------------------------------------------------- /pkg/leakybucket/tests/simple-leaky-underflow/test.json: -------------------------------------------------------------------------------- 1 | { 2 | "lines": [ 3 | { 4 | "Line": { 5 | "Labels": { 6 | "type": "testlog" 7 | }, 8 | "Raw": "xxheader VALUE1 trailing stuff" 9 | }, 10 | "MarshaledTime": "2020-01-01T10:00:00+00:00", 11 | "Meta": { 12 | "source_ip": "1.2.3.4" 13 | } 14 | } 15 | ], 16 | "results": [ 17 | { 18 | "Alert": { 19 | } 20 | } 21 | ] 22 | } 23 | -------------------------------------------------------------------------------- /pkg/leakybucket/tests/simple-leaky-uniq-cachesize/bucket.yaml: -------------------------------------------------------------------------------- 1 | # ssh bruteforce 2 | type: leaky 3 | debug: true 4 | name: test/simple-leaky 5 | description: "Simple leaky" 6 | filter: "evt.Line.Labels.type =='testlog'" 7 | leakspeed: "20s" 8 | capacity: 3 9 | cache_size: 1 10 | distinct: evt.Meta.uniq_key 11 | groupby: evt.Meta.source_ip 12 | labels: 13 | type: overflow_1 14 | 15 | -------------------------------------------------------------------------------- /pkg/leakybucket/tests/simple-leaky-uniq-cachesize/scenarios.yaml: -------------------------------------------------------------------------------- 1 | - filename: {{.TestDirectory}}/bucket.yaml 2 | 3 | -------------------------------------------------------------------------------- /pkg/leakybucket/tests/simple-leaky-uniq-w-buckets_state/bucket.yaml: -------------------------------------------------------------------------------- 1 | # ssh bruteforce 2 | type: leaky 3 | debug: true 4 | name: test/simple-leaky 5 | description: "Simple leaky" 6 | filter: "evt.Line.Labels.type =='testlog'" 7 | leakspeed: "10s" 8 | capacity: 3 9 | distinct: evt.Meta.uniq_key 10 | groupby: evt.Meta.source_ip 11 | labels: 12 | type: overflow_1 13 | 14 | -------------------------------------------------------------------------------- /pkg/leakybucket/tests/simple-leaky-uniq-w-buckets_state/scenarios.yaml: -------------------------------------------------------------------------------- 1 | - filename: {{.TestDirectory}}/bucket.yaml 2 | 3 | -------------------------------------------------------------------------------- /pkg/leakybucket/tests/simple-leaky-uniq/bucket.yaml: -------------------------------------------------------------------------------- 1 | # ssh bruteforce 2 | type: leaky 3 | debug: true 4 | name: test/simple-leaky 5 | description: "Simple leaky" 6 | filter: "evt.Line.Labels.type =='testlog'" 7 | leakspeed: "10s" 8 | capacity: 1 9 | distinct: evt.Meta.uniq_key 10 | groupby: evt.Meta.source_ip 11 | labels: 12 | type: overflow_1 13 | 14 | -------------------------------------------------------------------------------- /pkg/leakybucket/tests/simple-leaky-uniq/scenarios.yaml: -------------------------------------------------------------------------------- 1 | - filename: {{.TestDirectory}}/bucket.yaml 2 | 3 | -------------------------------------------------------------------------------- /pkg/leakybucket/tests/simple-trigger-external-data/bucket.yaml: 
-------------------------------------------------------------------------------- 1 | type: trigger 2 | debug: true 3 | name: test/simple-trigger 4 | data: 5 | - source_url: https://invalid.com/test.list 6 | dest_file: simple-trigger-external-data/simple_patterns.txt 7 | type: string 8 | description: "Simple trigger with external data" 9 | filter: "evt.Line.Labels.type =='testlog' && evt.Parsed.tainted_data in File('simple-trigger-external-data/simple_patterns.txt')" 10 | groupby: evt.Meta.source_ip 11 | labels: 12 | type: overflow_1 13 | 14 | -------------------------------------------------------------------------------- /pkg/leakybucket/tests/simple-trigger-external-data/scenarios.yaml: -------------------------------------------------------------------------------- 1 | - filename: {{.TestDirectory}}/bucket.yaml 2 | 3 | -------------------------------------------------------------------------------- /pkg/leakybucket/tests/simple-trigger-external-data/simple_patterns.txt: -------------------------------------------------------------------------------- 1 | BBBBBBBBBBB11111XXX 2 | AAAABBBBBBB11111XXX 3 | CCCCCCCCCC11111XXX 4 | -------------------------------------------------------------------------------- /pkg/leakybucket/tests/simple-trigger-reprocess/bucket.yaml: -------------------------------------------------------------------------------- 1 | type: trigger 2 | debug: true 3 | name: test/simple-trigger-reprocess 4 | description: "Simple leaky" 5 | filter: "evt.Line.Labels.type =='testlog'" 6 | groupby: evt.Meta.source_ip 7 | reprocess: true 8 | labels: 9 | type: overflow_1 10 | 11 | -------------------------------------------------------------------------------- /pkg/leakybucket/tests/simple-trigger-reprocess/reprocess.yaml: -------------------------------------------------------------------------------- 1 | type: trigger 2 | debug: true 3 | name: test/simple-postoverflow-scenario 4 | description: "Simple post overflow" 5 | #filter: true 6 | filter: "evt.Overflow.Alert != nil && evt.Overflow.Alert.Scenario != nil" 7 | labels: 8 | type: overflow_2 9 | 10 | -------------------------------------------------------------------------------- /pkg/leakybucket/tests/simple-trigger-reprocess/scenarios.yaml: -------------------------------------------------------------------------------- 1 | - filename: {{.TestDirectory}}/bucket.yaml 2 | - filename: {{.TestDirectory}}/reprocess.yaml 3 | 4 | -------------------------------------------------------------------------------- /pkg/leakybucket/tests/simple-trigger/bucket.yaml: -------------------------------------------------------------------------------- 1 | type: trigger 2 | debug: true 3 | name: test/simple-trigger 4 | description: "Simple leaky" 5 | filter: "evt.Line.Labels.type =='testlog'" 6 | groupby: evt.Meta.source_ip 7 | labels: 8 | type: overflow_1 9 | 10 | -------------------------------------------------------------------------------- /pkg/leakybucket/tests/simple-trigger/scenarios.yaml: -------------------------------------------------------------------------------- 1 | - filename: {{.TestDirectory}}/bucket.yaml 2 | 3 | -------------------------------------------------------------------------------- /pkg/leakybucket/tests/simple-trigger/test.json: -------------------------------------------------------------------------------- 1 | { 2 | "lines": [ 3 | { 4 | "Line": { 5 | "Labels": { 6 | "type": "testlog" 7 | }, 8 | "Raw": "xxheader VALUE1 trailing stuff" 9 | }, 10 | "MarshaledTime": "2020-01-01T10:00:00+00:00", 11 | "Meta": { 12 | 
"source_ip": "1.2.3.4" 13 | } 14 | } 15 | ], 16 | "results": [ 17 | { 18 | "Alert": { 19 | "sources": { 20 | "1.2.3.4": { 21 | "scope": "Ip", 22 | "value": "1.2.3.4", 23 | 24 | "ip": "1.2.3.4" 25 | } 26 | }, 27 | "Alert" : { 28 | "scenario": "test/simple-trigger", 29 | "events_count": 1 30 | } 31 | } 32 | } 33 | ] 34 | } 35 | 36 | -------------------------------------------------------------------------------- /pkg/models/add_alerts_response.go: -------------------------------------------------------------------------------- 1 | // Code generated by go-swagger; DO NOT EDIT. 2 | 3 | package models 4 | 5 | // This file was generated by the swagger tool. 6 | // Editing this file might prove futile when you re-run the swagger generate command 7 | 8 | import ( 9 | "context" 10 | 11 | "github.com/go-openapi/strfmt" 12 | ) 13 | 14 | // AddAlertsResponse AddAlertsResponse 15 | // 16 | // swagger:model AddAlertsResponse 17 | type AddAlertsResponse []string 18 | 19 | // Validate validates this add alerts response 20 | func (m AddAlertsResponse) Validate(formats strfmt.Registry) error { 21 | return nil 22 | } 23 | 24 | // ContextValidate validates this add alerts response based on context it is used 25 | func (m AddAlertsResponse) ContextValidate(ctx context.Context, formats strfmt.Registry) error { 26 | return nil 27 | } 28 | -------------------------------------------------------------------------------- /pkg/models/console_options.go: -------------------------------------------------------------------------------- 1 | // Code generated by go-swagger; DO NOT EDIT. 2 | 3 | package models 4 | 5 | // This file was generated by the swagger tool. 6 | // Editing this file might prove futile when you re-run the swagger generate command 7 | 8 | import ( 9 | "context" 10 | 11 | "github.com/go-openapi/strfmt" 12 | ) 13 | 14 | // ConsoleOptions ConsoleOptions 15 | // 16 | // swagger:model ConsoleOptions 17 | type ConsoleOptions []string 18 | 19 | // Validate validates this console options 20 | func (m ConsoleOptions) Validate(formats strfmt.Registry) error { 21 | return nil 22 | } 23 | 24 | // ContextValidate validates this console options based on context it is used 25 | func (m ConsoleOptions) ContextValidate(ctx context.Context, formats strfmt.Registry) error { 26 | return nil 27 | } 28 | -------------------------------------------------------------------------------- /pkg/models/decisions_delete_request_item.go: -------------------------------------------------------------------------------- 1 | // Code generated by go-swagger; DO NOT EDIT. 2 | 3 | package models 4 | 5 | // This file was generated by the swagger tool. 
6 | // Editing this file might prove futile when you re-run the swagger generate command 7 | 8 | import ( 9 | "context" 10 | 11 | "github.com/go-openapi/strfmt" 12 | ) 13 | 14 | // DecisionsDeleteRequestItem decisionsIDs 15 | // 16 | // swagger:model DecisionsDeleteRequestItem 17 | type DecisionsDeleteRequestItem string 18 | 19 | // Validate validates this decisions delete request item 20 | func (m DecisionsDeleteRequestItem) Validate(formats strfmt.Registry) error { 21 | return nil 22 | } 23 | 24 | // ContextValidate validates this decisions delete request item based on context it is used 25 | func (m DecisionsDeleteRequestItem) ContextValidate(ctx context.Context, formats strfmt.Registry) error { 26 | return nil 27 | } 28 | -------------------------------------------------------------------------------- /pkg/models/generate.go: -------------------------------------------------------------------------------- 1 | package models 2 | 3 | //go:generate go run -mod=mod github.com/go-swagger/go-swagger/cmd/swagger@v0.31.0 generate model --spec=./localapi_swagger.yaml --target=../ 4 | 5 | -------------------------------------------------------------------------------- /pkg/modelscapi/decisions_delete_request_item.go: -------------------------------------------------------------------------------- 1 | // Code generated by go-swagger; DO NOT EDIT. 2 | 3 | package modelscapi 4 | 5 | // This file was generated by the swagger tool. 6 | // Editing this file might prove futile when you re-run the swagger generate command 7 | 8 | import ( 9 | "context" 10 | 11 | "github.com/go-openapi/strfmt" 12 | ) 13 | 14 | // DecisionsDeleteRequestItem decisionsIDs 15 | // 16 | // swagger:model DecisionsDeleteRequestItem 17 | type DecisionsDeleteRequestItem string 18 | 19 | // Validate validates this decisions delete request item 20 | func (m DecisionsDeleteRequestItem) Validate(formats strfmt.Registry) error { 21 | return nil 22 | } 23 | 24 | // ContextValidate validates this decisions delete request item based on context it is used 25 | func (m DecisionsDeleteRequestItem) ContextValidate(ctx context.Context, formats strfmt.Registry) error { 26 | return nil 27 | } 28 | -------------------------------------------------------------------------------- /pkg/modelscapi/generate.go: -------------------------------------------------------------------------------- 1 | package modelscapi 2 | 3 | //go:generate go run -mod=mod github.com/go-swagger/go-swagger/cmd/swagger@v0.31.0 generate model --spec=./centralapi_swagger.yaml --target=../ --model-package=modelscapi 4 | 5 | -------------------------------------------------------------------------------- /pkg/parser/enrich_dns.go: -------------------------------------------------------------------------------- 1 | package parser 2 | 3 | import ( 4 | "net" 5 | 6 | log "github.com/sirupsen/logrus" 7 | 8 | "github.com/crowdsecurity/crowdsec/pkg/types" 9 | ) 10 | 11 | /* All plugins must export a list of function pointers for exported symbols */ 12 | //var ExportedFuncs = []string{"reverse_dns"} 13 | 14 | func reverse_dns(field string, p *types.Event, plog *log.Entry) (map[string]string, error) { 15 | ret := make(map[string]string) 16 | if field == "" { 17 | return nil, nil 18 | } 19 | rets, err := net.LookupAddr(field) 20 | if err != nil { 21 | plog.Debugf("failed to resolve '%s'", field) 22 | return nil, nil //nolint:nilerr 23 | } 24 | //When using the host C library resolver, at most one result will be returned. To bypass the host resolver, use a custom Resolver. 
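// For instance, a minimal sketch of such a custom resolver (not used here) would be r := &net.Resolver{PreferGo: true}
// followed by rets, err := r.LookupAddr(context.Background(), field); that variant would also require importing "context".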
25 | ret["reverse_dns"] = rets[0] 26 | return ret, nil 27 | } 28 | -------------------------------------------------------------------------------- /pkg/parser/enrich_unmarshal.go: -------------------------------------------------------------------------------- 1 | package parser 2 | 3 | import ( 4 | "encoding/json" 5 | 6 | log "github.com/sirupsen/logrus" 7 | 8 | "github.com/crowdsecurity/crowdsec/pkg/types" 9 | ) 10 | 11 | func unmarshalJSON(field string, p *types.Event, plog *log.Entry) (map[string]string, error) { 12 | err := json.Unmarshal([]byte(p.Line.Raw), &p.Unmarshaled) 13 | if err != nil { 14 | plog.Errorf("could not parse JSON: %s", err) 15 | return nil, err 16 | } 17 | plog.Tracef("unmarshaled JSON: %+v", p.Unmarshaled) 18 | return nil, nil 19 | } 20 | -------------------------------------------------------------------------------- /pkg/parser/test_data/GeoLite2-ASN.mmdb: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/crowdsecurity/crowdsec/1cede239d8465b0549d10a3a44aab7b96a2cd743/pkg/parser/test_data/GeoLite2-ASN.mmdb -------------------------------------------------------------------------------- /pkg/parser/test_data/GeoLite2-City.mmdb: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/crowdsecurity/crowdsec/1cede239d8465b0549d10a3a44aab7b96a2cd743/pkg/parser/test_data/GeoLite2-City.mmdb -------------------------------------------------------------------------------- /pkg/parser/tests/base-grok-expression/base-grok.yaml: -------------------------------------------------------------------------------- 1 | filter: "evt.Line.Labels.type == 'testlog'" 2 | debug: true 3 | onsuccess: next_stage 4 | name: tests/base-grok 5 | pattern_syntax: 6 | MYCAP1: ".*" 7 | nodes: 8 | - grok: 9 | pattern: ^xxheader %{MYCAP1:extracted_value} trailing stuff$ 10 | expression: evt.Line.Raw 11 | statics: 12 | - meta: log_type 13 | value: parsed_testlog 14 | -------------------------------------------------------------------------------- /pkg/parser/tests/base-grok-expression/parsers.yaml: -------------------------------------------------------------------------------- 1 | - filename: {{.TestDirectory}}/base-grok.yaml 2 | stage: s00-raw 3 | -------------------------------------------------------------------------------- /pkg/parser/tests/base-grok-expression/test.yaml: -------------------------------------------------------------------------------- 1 | #these are the events we input into parser 2 | lines: 3 | - Line: 4 | Labels: 5 | #this one will be checked by a filter 6 | type: testlog 7 | Raw: xxheader VALUE1 trailing stuff 8 | - Line: 9 | #see tricky case : first one is nginx via syslog, the second one is local nginx :) 10 | Labels: 11 | #this one will be checked by a filter 12 | type: testlog 13 | Raw: xxheader VALUE2 trailing stuff 14 | #these are the results we expect from the parser 15 | results: 16 | - Meta: 17 | log_type: parsed_testlog 18 | Parsed: 19 | extracted_value: VALUE1 20 | Process: true 21 | Stage: s00-raw 22 | - Meta: 23 | log_type: parsed_testlog 24 | Parsed: 25 | extracted_value: VALUE2 26 | Process: true 27 | Stage: s00-raw 28 | 29 | -------------------------------------------------------------------------------- /pkg/parser/tests/base-grok-external-data/base-grok.yaml: -------------------------------------------------------------------------------- 1 | filter: "evt.Line.Labels.type == 'testlog'" 2 | debug: true 3 | onsuccess: next_stage 4 | 
name: tests/base-grok 5 | data: 6 | - source_url: https://invalid.com/test.list 7 | dest_file: ./sample_strings.txt 8 | type: string 9 | 10 | pattern_syntax: 11 | MYCAP_EXT: ".*" 12 | nodes: 13 | - grok: 14 | pattern: ^xxheader %{MYCAP_EXT:extracted_value} trailing stuff$ 15 | apply_on: Line.Raw 16 | statics: 17 | - meta: log_type 18 | value: parsed_testlog 19 | - meta: is_it_in_file 20 | expression: |- 21 | evt.Parsed.extracted_value in File("./sample_strings.txt") ? "true" : "false" 22 | 23 | 24 | -------------------------------------------------------------------------------- /pkg/parser/tests/base-grok-external-data/parsers.yaml: -------------------------------------------------------------------------------- 1 | - filename: {{.TestDirectory}}/base-grok.yaml 2 | stage: s00-raw 3 | -------------------------------------------------------------------------------- /pkg/parser/tests/base-grok-external-data/test.yaml: -------------------------------------------------------------------------------- 1 | #these are the events we input into parser 2 | lines: 3 | - Line: 4 | Labels: 5 | #this one will be checked by a filter 6 | type: testlog 7 | Raw: xxheader VALUE1 trailing stuff 8 | - Line: 9 | #see tricky case : first one is nginx via syslog, the second one is local nginx :) 10 | Labels: 11 | #this one will be checked by a filter 12 | type: testlog 13 | Raw: xxheader VALUE2 trailing stuff 14 | #these are the results we expect from the parser 15 | results: 16 | 17 | - Meta: 18 | log_type: parsed_testlog 19 | is_it_in_file: true 20 | Parsed: 21 | extracted_value: VALUE1 22 | 23 | Process: true 24 | Stage: s00-raw 25 | - Meta: 26 | log_type: parsed_testlog 27 | is_it_in_file: false 28 | Parsed: 29 | extracted_value: VALUE2 30 | Process: true 31 | Stage: s00-raw 32 | 33 | -------------------------------------------------------------------------------- /pkg/parser/tests/base-grok-import/base-grok.yaml: -------------------------------------------------------------------------------- 1 | filter: "evt.Line.Labels.type == 'testlog'" 2 | debug: true 3 | onsuccess: next_stage 4 | name: tests/base-grok 5 | nodes: 6 | - grok: 7 | #USERNAME is a pattern defined by the grokky library we are using 8 | name: SYSLOGFACILITY 9 | apply_on: Line.Raw 10 | statics: 11 | - enriched: subgrok_static_why_is_it_still_here 12 | value: because 13 | statics: 14 | - meta: log_type 15 | value: parsed_testlog 16 | 17 | -------------------------------------------------------------------------------- /pkg/parser/tests/base-grok-import/parsers.yaml: -------------------------------------------------------------------------------- 1 | - filename: {{.TestDirectory}}/base-grok.yaml 2 | stage: s00-raw 3 | -------------------------------------------------------------------------------- /pkg/parser/tests/base-grok-no-subnode/base-grok.yaml: -------------------------------------------------------------------------------- 1 | filter: "evt.Line.Labels.type == 'testlog'" 2 | debug: true 3 | onsuccess: next_stage 4 | name: tests/base-grok 5 | pattern_syntax: 6 | MYCAP2: ".*" 7 | grok: 8 | pattern: ^xxheader %{MYCAP2:extracted_value} trailing stuff$ 9 | apply_on: Line.Raw 10 | statics: 11 | - meta: log_type 12 | value: parsed_testlog 13 | 14 | -------------------------------------------------------------------------------- /pkg/parser/tests/base-grok-no-subnode/parsers.yaml: -------------------------------------------------------------------------------- 1 | - filename: {{.TestDirectory}}/base-grok.yaml 2 | stage: s00-raw 3 | 
-------------------------------------------------------------------------------- /pkg/parser/tests/base-grok-no-subnode/test.yaml: -------------------------------------------------------------------------------- 1 | #these are the events we input into parser 2 | lines: 3 | - Line: 4 | Labels: 5 | #this one will be checked by a filter 6 | type: testlog 7 | Raw: xxheader VALUE1 trailing stuff 8 | - Line: 9 | #see tricky case : first one is nginx via syslog, the second one is local nginx :) 10 | Labels: 11 | #this one will be checked by a filter 12 | type: testlog 13 | Raw: xxheader VALUE2 trailing stuff 14 | #these are the results we expect from the parser 15 | results: 16 | 17 | - Meta: 18 | log_type: parsed_testlog 19 | Parsed: 20 | extracted_value: VALUE1 21 | Process: true 22 | Stage: s00-raw 23 | - Meta: 24 | log_type: parsed_testlog 25 | Parsed: 26 | extracted_value: VALUE2 27 | Process: true 28 | Stage: s00-raw 29 | 30 | -------------------------------------------------------------------------------- /pkg/parser/tests/base-grok-stash/parsers.yaml: -------------------------------------------------------------------------------- 1 | - filename: {{.TestDirectory}}/base-grok-stash.yaml 2 | stage: s00-raw 3 | -------------------------------------------------------------------------------- /pkg/parser/tests/base-grok/base-grok.yaml: -------------------------------------------------------------------------------- 1 | filter: "evt.Line.Labels.type == 'testlog'" 2 | debug: true 3 | onsuccess: next_stage 4 | name: tests/base-grok 5 | pattern_syntax: 6 | MYCAP1: ".*" 7 | nodes: 8 | - grok: 9 | pattern: ^xxheader %{MYCAP1:extracted_value} trailing stuff$ 10 | apply_on: Line.Raw 11 | statics: 12 | - meta: log_type 13 | value: parsed_testlog 14 | 15 | -------------------------------------------------------------------------------- /pkg/parser/tests/base-grok/parsers.yaml: -------------------------------------------------------------------------------- 1 | - filename: {{.TestDirectory}}/base-grok.yaml 2 | stage: s00-raw 3 | -------------------------------------------------------------------------------- /pkg/parser/tests/base-grok/test.yaml: -------------------------------------------------------------------------------- 1 | #these are the events we input into parser 2 | lines: 3 | - Line: 4 | Labels: 5 | #this one will be checked by a filter 6 | type: testlog 7 | Raw: xxheader VALUE1 trailing stuff 8 | - Line: 9 | #see tricky case : first one is nginx via syslog, the second one is local nginx :) 10 | Labels: 11 | #this one will be checked by a filter 12 | type: testlog 13 | Raw: xxheader VALUE2 trailing stuff 14 | #these are the results we expect from the parser 15 | results: 16 | 17 | - Meta: 18 | log_type: parsed_testlog 19 | Parsed: 20 | extracted_value: VALUE1 21 | Process: true 22 | Stage: s00-raw 23 | - Meta: 24 | log_type: parsed_testlog 25 | Parsed: 26 | extracted_value: VALUE2 27 | Process: true 28 | Stage: s00-raw 29 | 30 | -------------------------------------------------------------------------------- /pkg/parser/tests/base-json-extract/base-grok.yaml: -------------------------------------------------------------------------------- 1 | filter: "evt.Line.Labels.type == 'json-1'" 2 | debug: true 3 | onsuccess: next_stage 4 | name: tests/base-json-extract 5 | statics: 6 | - parsed: message 7 | expression: JsonExtract(evt.Line.Raw, "log") 8 | - meta: other_field 9 | expression: JsonExtract(evt.Line.Raw, "testfield") 10 | - meta: program 11 | expression: evt.Line.Labels.progrname 12 | - 
parsed: extracted_array 13 | expression: JsonExtract(evt.Line.Raw, "nested_1.anarray") 14 | - parsed: extracted_array_field 15 | expression: JsonExtract(evt.Line.Raw, "nested_1.anarray[0]") 16 | 17 | 18 | -------------------------------------------------------------------------------- /pkg/parser/tests/base-json-extract/base-grok2.yaml: -------------------------------------------------------------------------------- 1 | filter: "evt.Meta.program == 'my_test_prog'" 2 | debug: true 3 | onsuccess: next_stage 4 | name: tests/base-grok 5 | pattern_syntax: 6 | MYCAP3: ".*" 7 | nodes: 8 | - grok: 9 | pattern: ^xxheader %{MYCAP3:extracted_value} trailing stuff$ 10 | apply_on: message 11 | statics: 12 | - meta: log_type 13 | value: parsed_testlog 14 | - parsed: extracted_arrayfield_from_object 15 | expression: JsonExtract(evt.Parsed.extracted_array, '[1]') 16 | 17 | -------------------------------------------------------------------------------- /pkg/parser/tests/base-json-extract/parsers.yaml: -------------------------------------------------------------------------------- 1 | - filename: {{.TestDirectory}}/base-grok.yaml 2 | stage: s00-raw 3 | - filename: {{.TestDirectory}}/base-grok2.yaml 4 | stage: s01-parse 5 | -------------------------------------------------------------------------------- /pkg/parser/tests/base-json-extract/test.yaml: -------------------------------------------------------------------------------- 1 | #these are the events we input into parser 2 | lines: 3 | - Line: 4 | Labels: 5 | type: json-1 6 | progrname: my_test_prog 7 | Raw: '{"testfield": "some stuff", "log": "xxheader VALUE1 trailing stuff", "nested_1" : {"anarray" : ["foo","bar","xx1"], "xxx" : "zzzz"}}' 8 | results: 9 | - Meta: 10 | other_field: some stuff 11 | program: my_test_prog 12 | Parsed: 13 | message: xxheader VALUE1 trailing stuff 14 | extracted_value: VALUE1 15 | extracted_array_field: foo 16 | extracted_array: '["foo","bar","xx1"]' 17 | extracted_arrayfield_from_object: bar 18 | Process: true 19 | 20 | -------------------------------------------------------------------------------- /pkg/parser/tests/base-tree/base-grok.yaml: -------------------------------------------------------------------------------- 1 | #Here we are testing the trees within the node 2 | filter: "evt.Line.Labels.type == 'type1'" 3 | debug: true 4 | name: tests/base-grok-root 5 | pattern_syntax: 6 | MYCAP4: ".*" 7 | grok: 8 | pattern: ^xxheader %{MYCAP4:extracted_value} trailing stuff$ 9 | apply_on: Line.Raw 10 | statics: 11 | - meta: state 12 | value: root-done 13 | - meta: state_sub 14 | expression: evt.Parsed.extracted_value 15 | --- 16 | filter: "evt.Line.Labels.type == 'type1' && evt.Meta.state == 'root-done'" 17 | debug: true 18 | onsuccess: next_stage 19 | name: tests/base-grok-leafs 20 | #the sub-nodes will process the result of the master node 21 | nodes: 22 | - filter: "evt.Parsed.extracted_value == 'VALUE1'" 23 | debug: true 24 | statics: 25 | - meta: final_state 26 | value: leaf1 27 | - filter: "evt.Parsed.extracted_value == 'VALUE2'" 28 | debug: true 29 | statics: 30 | - meta: final_state 31 | value: leaf2 32 | 33 | 34 | -------------------------------------------------------------------------------- /pkg/parser/tests/base-tree/parsers.yaml: -------------------------------------------------------------------------------- 1 | - filename: {{.TestDirectory}}/base-grok.yaml 2 | stage: s00-raw 3 | -------------------------------------------------------------------------------- /pkg/parser/tests/base-tree/test.yaml: 
-------------------------------------------------------------------------------- 1 | #these are the events we input into parser 2 | lines: 3 | - Line: 4 | Labels: 5 | #this one will be checked by a filter 6 | type: type1 7 | Raw: xxheader VALUE1 trailing stuff 8 | - Line: 9 | #see tricky case : first one is nginx via syslog, the second one is local nginx :) 10 | Labels: 11 | #this one will be checked by a filter 12 | type: type1 13 | Raw: xxheader VALUE2 trailing stuff 14 | #these are the results we expect from the parser 15 | results: 16 | - Meta: 17 | final_state: leaf1 18 | state_sub: VALUE1 19 | Parsed: 20 | extracted_value: VALUE1 21 | Process: true 22 | Stage: s00-raw 23 | - Meta: 24 | final_state: leaf2 25 | state_sub: VALUE2 26 | Parsed: 27 | extracted_value: VALUE2 28 | Process: true 29 | Stage: s00-raw 30 | 31 | -------------------------------------------------------------------------------- /pkg/parser/tests/dateparser-enrich/base-grok.yaml: -------------------------------------------------------------------------------- 1 | filter: "evt.StrTime != ''" 2 | name: test/dateparse 3 | debug: true 4 | #it's a hack lol 5 | statics: 6 | - method: ParseDate 7 | expression: evt.StrTime 8 | - target: MarshaledTime 9 | expression: evt.Enriched.MarshaledTime 10 | 11 | -------------------------------------------------------------------------------- /pkg/parser/tests/dateparser-enrich/parsers.yaml: -------------------------------------------------------------------------------- 1 | - filename: {{.TestDirectory}}/base-grok.yaml 2 | stage: s00-raw 3 | -------------------------------------------------------------------------------- /pkg/parser/tests/dateparser-enrich/test.yaml: -------------------------------------------------------------------------------- 1 | #these are the events we input into parser 2 | lines: 3 | - StrTime: 2012/11/01 4 | Parsed: 5 | test: format1 6 | - StrTime: 11/02/2012 13:37:05 7 | Parsed: 8 | test: format2 9 | #these are the results we expect from the parser 10 | results: 11 | - Parsed: 12 | test: format1 13 | Enriched: 14 | MarshaledTime: "2012-11-01T00:00:00Z" 15 | Process: true 16 | Stage: s00-raw 17 | - Parsed: 18 | test: format2 19 | Enriched: 20 | MarshaledTime: "2012-11-02T13:37:05Z" 21 | Process: true 22 | Stage: s00-raw 23 | -------------------------------------------------------------------------------- /pkg/parser/tests/geoip-enrich/base-grok.yaml: -------------------------------------------------------------------------------- 1 | filter: "'source_ip' in evt.Meta" 2 | name: tests/geoip-enrich 3 | debug: true 4 | description: "Populate event with geoloc info : as, country, coords, source range." 
5 | statics: 6 | - method: GeoIpCity 7 | expression: evt.Meta.source_ip 8 | - meta: IsoCode 9 | expression: evt.Enriched.IsoCode 10 | - meta: IsInEU 11 | expression: evt.Enriched.IsInEU 12 | - meta: GeoCoords 13 | expression: evt.Enriched.GeoCoords 14 | - method: GeoIpASN 15 | expression: evt.Meta.source_ip 16 | - meta: ASNNumber 17 | expression: evt.Enriched.ASNNumber 18 | - meta: ASNOrg 19 | expression: evt.Enriched.ASNOrg 20 | - method: IpToRange 21 | expression: evt.Meta.source_ip 22 | - meta: SourceRange 23 | expression: evt.Enriched.SourceRange 24 | -------------------------------------------------------------------------------- /pkg/parser/tests/geoip-enrich/parsers.yaml: -------------------------------------------------------------------------------- 1 | - filename: {{.TestDirectory}}/base-grok.yaml 2 | stage: s00-raw 3 | -------------------------------------------------------------------------------- /pkg/parser/tests/geoip-enrich/test.yaml: -------------------------------------------------------------------------------- 1 | #these are the events we input into parser 2 | lines: 3 | - Meta: 4 | test: test1 5 | source_ip: 1.0.0.1 6 | - Meta: 7 | test: test2 8 | source_ip: 192.168.0.1 9 | #these are the results we expect from the parser 10 | results: 11 | - Process: true 12 | Enriched: 13 | IsInEU: false 14 | ASNOrg: "Google Inc." 15 | Meta: 16 | source_ip: 1.0.0.1 17 | - Process: true 18 | Enriched: 19 | IsInEU: false 20 | IsoCode: 21 | ASNOrg: 22 | Meta: 23 | source_ip: 192.168.0.1 24 | 25 | 26 | 27 | -------------------------------------------------------------------------------- /pkg/parser/tests/json-unmarshal/base-parser.yaml: -------------------------------------------------------------------------------- 1 | #filter: "evt.Overflow.Labels.remediation == 'true'" 2 | name: tests/unmarshalJSON 3 | description: "unmarshal JSON" 4 | statics: 5 | - method: UnmarshalJSON 6 | expression: evt.Line.Raw 7 | -------------------------------------------------------------------------------- /pkg/parser/tests/json-unmarshal/parsers.yaml: -------------------------------------------------------------------------------- 1 | - filename: {{.TestDirectory}}/base-parser.yaml 2 | stage: s00-raw 3 | -------------------------------------------------------------------------------- /pkg/parser/tests/json-unmarshal/test.yaml: -------------------------------------------------------------------------------- 1 | #these are the events we input into parser 2 | lines: 3 | - Line: 4 | Raw: | 5 | {"foo": "bar", "pouet": 42} 6 | - Line: 7 | Raw: xxheader VALUE2 trailing stuff 8 | #these are the results we expect from the parser 9 | results: 10 | - Unmarshaled: 11 | JSON: 12 | foo: "bar" 13 | pouet: 42 14 | Process: true 15 | Stage: s00-raw 16 | - Unmarshaled: 17 | JSON: {} 18 | Process: true 19 | Stage: s00-raw 20 | 21 | 22 | -------------------------------------------------------------------------------- /pkg/parser/tests/multi-stage-grok/base-grok-s00.yaml: -------------------------------------------------------------------------------- 1 | filter: "evt.Line.Labels.type == 'testlog'" 2 | debug: true 3 | onsuccess: next_stage 4 | name: tests/base-grok 5 | nodes: 6 | - grok: 7 | pattern: ^xxheader %{GREEDYDATA:extracted_value} trailing stuff$ 8 | apply_on: Line.Raw 9 | statics: 10 | - meta: log_type 11 | value: parsed_testlog 12 | 13 | -------------------------------------------------------------------------------- /pkg/parser/tests/multi-stage-grok/base-grok-s01.yaml: 
-------------------------------------------------------------------------------- 1 | #only one of the events is going to throu filter 2 | filter: "evt.Parsed.extracted_value == 'VALUE1'" 3 | debug: true 4 | onsuccess: next_stage 5 | name: tests/second-stage-grok 6 | statics: 7 | - meta: did_second_stage 8 | value: yes 9 | - target: evt.Parsed.test_bis 10 | value: lolilol 11 | 12 | -------------------------------------------------------------------------------- /pkg/parser/tests/multi-stage-grok/parsers.yaml: -------------------------------------------------------------------------------- 1 | - filename: {{.TestDirectory}}/base-grok-s00.yaml 2 | stage: s00-raw 3 | - filename: {{.TestDirectory}}/base-grok-s01.yaml 4 | stage: s01-raw 5 | -------------------------------------------------------------------------------- /pkg/parser/tests/multi-stage-grok/test.yaml: -------------------------------------------------------------------------------- 1 | #these are the events we input into parser 2 | lines: 3 | - Line: 4 | Labels: 5 | #this one will be checked by a filter 6 | type: testlog 7 | Raw: xxheader VALUE1 trailing stuff 8 | - Line: 9 | #see tricky case : first one is nginx via syslog, the second one is local nginx :) 10 | Labels: 11 | #this one will be checked by a filter 12 | type: testlog 13 | Raw: xxheader VALUE2 trailing stuff 14 | #these are the results we expect from the parser 15 | results: 16 | - Meta: 17 | log_type: parsed_testlog 18 | Parsed: 19 | extracted_value: VALUE1 20 | test_bis: lolilol 21 | Process: true 22 | Stage: s01-raw 23 | #because of how our second stage parser is done, this one won't pass stage 24 | - Meta: 25 | log_type: parsed_testlog 26 | Parsed: 27 | extracted_value: VALUE2 28 | Process: false 29 | Stage: s01-raw 30 | -------------------------------------------------------------------------------- /pkg/parser/tests/reverse-dns-enrich/base-grok.yaml: -------------------------------------------------------------------------------- 1 | #filter: "evt.Overflow.Labels.remediation == 'true'" 2 | name: tests/rdns 3 | description: "Lookup the DNS assiocated to the source IP only for overflows" 4 | statics: 5 | - method: reverse_dns 6 | expression: evt.Enriched.IpToResolve 7 | - meta: did_dns_succeeded 8 | expression: 'evt.Enriched.reverse_dns == "" ? "no" : "yes"' 9 | -------------------------------------------------------------------------------- /pkg/parser/tests/reverse-dns-enrich/parsers.yaml: -------------------------------------------------------------------------------- 1 | - filename: {{.TestDirectory}}/base-grok.yaml 2 | stage: s00-raw 3 | -------------------------------------------------------------------------------- /pkg/parser/tests/reverse-dns-enrich/test.yaml: -------------------------------------------------------------------------------- 1 | #these are the events we input into parser 2 | lines: 3 | - Enriched: 4 | IpToResolve: 1.1.1.1 5 | - Enriched: 6 | IpToResolve: 1.2.3.4 7 | #these are the results we expect from the parser 8 | results: 9 | - Enriched: 10 | reverse_dns: one.one.one.one. 
11 | IpToResolve: 1.1.1.1 12 | Meta: 13 | did_dns_succeeded: yes 14 | Process: true 15 | Stage: s00-raw 16 | - Enriched: 17 | IpToResolve: 1.2.3.4 18 | Meta: 19 | did_dns_succeeded: no 20 | Process: true 21 | Stage: s00-raw 22 | -------------------------------------------------------------------------------- /pkg/parser/tests/sample_strings.txt: -------------------------------------------------------------------------------- 1 | VALUE1 2 | VALUE3 3 | RATATA 4 | -------------------------------------------------------------------------------- /pkg/parser/tests/whitelist-base/base-grok.yaml: -------------------------------------------------------------------------------- 1 | name: test/whitelists 2 | description: "Whitelist tests" 3 | debug: true 4 | whitelist: 5 | reason: "Whitelist tests" 6 | ip: 7 | - 1.1.1.1 8 | cidr: 9 | - "1.2.3.0/24" 10 | expression: 11 | - "'supertoken1234' == evt.Enriched.test_token" 12 | statics: 13 | - meta: statics 14 | value: success 15 | -------------------------------------------------------------------------------- /pkg/parser/tests/whitelist-base/parsers.yaml: -------------------------------------------------------------------------------- 1 | - filename: {{.TestDirectory}}/base-grok.yaml 2 | stage: s00-raw 3 | -------------------------------------------------------------------------------- /pkg/protobufs/README.md: -------------------------------------------------------------------------------- 1 | To generate go code for the `notifier.proto` files, run : 2 | 3 | ``` 4 | protoc --go_out=. --go_opt=paths=source_relative \ 5 | --go-grpc_out=. --go-grpc_opt=paths=source_relative \ 6 | proto/alert.proto` 7 | ``` 8 | 9 | -------------------------------------------------------------------------------- /pkg/protobufs/generate.go: -------------------------------------------------------------------------------- 1 | package protobufs 2 | 3 | // Dependencies: 4 | // 5 | // apt install protobuf-compiler 6 | // 7 | // keep this in sync with go.mod 8 | // go install google.golang.org/protobuf/cmd/protoc-gen-go@v1.34.2 9 | // 10 | // Not the same versions as google.golang.org/grpc 11 | // go list -m -versions google.golang.org/grpc/cmd/protoc-gen-go-grpc 12 | // go install google.golang.org/grpc/cmd/protoc-gen-go-grpc@v1.5.1 13 | 14 | //go:generate protoc --go_out=. --go_opt=paths=source_relative --go-grpc_out=. 
--go-grpc_opt=paths=source_relative notifier.proto 15 | -------------------------------------------------------------------------------- /pkg/protobufs/notifier.proto: -------------------------------------------------------------------------------- 1 | syntax = "proto3" ; 2 | package proto; 3 | option go_package = ".;protobufs"; 4 | 5 | message Notification { 6 | string text = 1 ; 7 | string name = 2 ; 8 | } 9 | 10 | message Config { 11 | bytes config = 2 ; 12 | } 13 | 14 | message Empty {} 15 | 16 | service Notifier { 17 | rpc Notify(Notification) returns (Empty); 18 | rpc Configure(Config) returns (Empty); 19 | } -------------------------------------------------------------------------------- /pkg/setup/export_test.go: -------------------------------------------------------------------------------- 1 | package setup 2 | 3 | var ( 4 | SystemdUnitList = systemdUnitList 5 | FilterWithRules = filterWithRules 6 | ApplyRules = applyRules 7 | 8 | // NormalizeVersion = normalizeVersion 9 | ) 10 | -------------------------------------------------------------------------------- /pkg/setup/units_test.go: -------------------------------------------------------------------------------- 1 | package setup_test 2 | 3 | import ( 4 | "os/exec" 5 | "testing" 6 | 7 | "github.com/stretchr/testify/require" 8 | 9 | "github.com/crowdsecurity/crowdsec/pkg/setup" 10 | ) 11 | 12 | func TestSystemdUnitList(t *testing.T) { 13 | require := require.New(t) 14 | setup.ExecCommand = fakeExecCommand 15 | 16 | defer func() { setup.ExecCommand = exec.Command }() 17 | 18 | units, err := setup.SystemdUnitList() //nolint:typecheck,nolintlint // exported only for tests 19 | require.NoError(err) 20 | 21 | require.Equal([]string{ 22 | "crowdsec-setup-detect.service", 23 | "apache2.service", 24 | "apparmor.service", 25 | "apport.service", 26 | "atop.service", 27 | "atopacct.service", 28 | "finalrd.service", 29 | "fwupd-refresh.service", 30 | "fwupd.service", 31 | }, units) 32 | } 33 | -------------------------------------------------------------------------------- /pkg/time/AUTHORS: -------------------------------------------------------------------------------- 1 | # This source code refers to The Go Authors for copyright purposes. 2 | # The master list of authors is in the main Go distribution, 3 | # visible at http://tip.golang.org/AUTHORS. 4 | -------------------------------------------------------------------------------- /pkg/time/CONTRIBUTORS: -------------------------------------------------------------------------------- 1 | # This source code was written by the Go contributors. 2 | # The master list of contributors is in the main Go distribution, 3 | # visible at http://tip.golang.org/CONTRIBUTORS. 4 | -------------------------------------------------------------------------------- /pkg/time/README.md: -------------------------------------------------------------------------------- 1 | # Go Time 2 | 3 | This repository provides supplementary Go time packages. 4 | 5 | ## Download/Install 6 | 7 | The easiest way to install is to run `go get -u golang.org/x/time`. You can 8 | also manually git clone the repository to `$GOPATH/src/golang.org/x/time`. 9 | 10 | ## Report Issues / Send Patches 11 | 12 | This repository uses Gerrit for code changes. To learn how to submit changes to 13 | this repository, see https://golang.org/doc/contribute.html. 14 | 15 | The main issue tracker for the time repository is located at 16 | https://github.com/golang/go/issues. 
Prefix your issue with "x/time:" in the 17 | subject line, so it is easy to find. 18 | -------------------------------------------------------------------------------- /pkg/types/datasource.go: -------------------------------------------------------------------------------- 1 | package types 2 | 3 | import ( 4 | "time" 5 | ) 6 | 7 | type DataSource struct { 8 | SourceURL string `yaml:"source_url"` 9 | DestPath string `yaml:"dest_file"` 10 | Type string `yaml:"type"` 11 | //Control cache strategy on expensive regexps 12 | Cache *bool `yaml:"cache"` 13 | Strategy *string `yaml:"strategy"` 14 | Size *int `yaml:"size"` 15 | TTL *time.Duration `yaml:"ttl"` 16 | } 17 | -------------------------------------------------------------------------------- /pkg/types/getfstype_freebsd.go: -------------------------------------------------------------------------------- 1 | //go:build freebsd 2 | 3 | package types 4 | 5 | import ( 6 | "fmt" 7 | "syscall" 8 | ) 9 | 10 | func GetFSType(path string) (string, error) { 11 | var fsStat syscall.Statfs_t 12 | 13 | if err := syscall.Statfs(path, &fsStat); err != nil { 14 | return "", fmt.Errorf("failed to get filesystem type: %w", err) 15 | } 16 | 17 | bs := fsStat.Fstypename 18 | 19 | b := make([]byte, len(bs)) 20 | for i, v := range bs { 21 | b[i] = byte(v) 22 | } 23 | 24 | return string(b), nil 25 | } 26 | -------------------------------------------------------------------------------- /pkg/types/getfstype_openbsd.go: -------------------------------------------------------------------------------- 1 | //go:build openbsd 2 | 3 | package types 4 | 5 | import ( 6 | "fmt" 7 | "syscall" 8 | ) 9 | 10 | func GetFSType(path string) (string, error) { 11 | var fsStat syscall.Statfs_t 12 | 13 | if err := syscall.Statfs(path, &fsStat); err != nil { 14 | return "", fmt.Errorf("failed to get filesystem type: %w", err) 15 | } 16 | 17 | bs := fsStat.F_fstypename 18 | 19 | b := make([]byte, len(bs)) 20 | for i, v := range bs { 21 | b[i] = byte(v) 22 | } 23 | 24 | return string(b), nil 25 | } 26 | -------------------------------------------------------------------------------- /pkg/types/line.go: -------------------------------------------------------------------------------- 1 | package types 2 | 3 | import "time" 4 | 5 | type Line struct { 6 | Raw string `yaml:"Raw,omitempty"` 7 | Src string `yaml:"Src,omitempty"` 8 | Time time.Time //acquis time 9 | Labels map[string]string `yaml:"Labels,omitempty"` 10 | Process bool 11 | Module string `yaml:"Module,omitempty"` 12 | } 13 | -------------------------------------------------------------------------------- /rpm/SOURCES/80-crowdsec.preset: -------------------------------------------------------------------------------- 1 | # This file is part of crowdsec 2 | 3 | enable crowdsec.service -------------------------------------------------------------------------------- /rpm/SOURCES/user.patch: -------------------------------------------------------------------------------- 1 | --- config/config.yaml-orig 2021-09-08 12:04:29.758785098 +0200 2 | +++ config/config.yaml 2021-09-08 12:04:39.866856057 +0200 3 | @@ -32,7 +32,7 @@ 4 | max_age: 7d 5 | plugin_config: 6 | user: nobody # plugin process would be ran on behalf of this user 7 | - group: nogroup # plugin process would be ran on behalf of this group 8 | + group: nobody # plugin process would be ran on behalf of this group 9 | api: 10 | client: 11 | insecure_skip_verify: false 12 | -------------------------------------------------------------------------------- /test/.gitignore: 
-------------------------------------------------------------------------------- 1 | /local/ 2 | /local-init/ 3 | /.environment.sh 4 | -------------------------------------------------------------------------------- /test/ansible/.gitignore: -------------------------------------------------------------------------------- 1 | .vagrant 2 | vagrant/*/*.out 3 | -------------------------------------------------------------------------------- /test/ansible/ansible.cfg: -------------------------------------------------------------------------------- 1 | [defaults] 2 | pipelining = True 3 | force_color = True 4 | nocows = True 5 | 6 | # inventory = inventory.yml 7 | callbacks_enabled = timer 8 | 9 | # more compact and readable output 10 | stdout_callback = debug 11 | display_skipped_hosts = False 12 | display_ok_hosts = True 13 | 14 | [ssh_connection] 15 | ssh_args = -o ControlMaster=auto -o ControlPersist=60s 16 | -------------------------------------------------------------------------------- /test/ansible/debug_tools.yml: -------------------------------------------------------------------------------- 1 | # vim: set ft=yaml.ansible: 2 | --- 3 | 4 | - name: "Install debug tools" 5 | hosts: all 6 | become: true 7 | tasks: 8 | - name: "Install debug tools" 9 | ansible.builtin.apt: 10 | pkg: 11 | - kitty 12 | - neovim 13 | - zsh 14 | - zsh-autosuggestions 15 | - zsh-syntax-highlighting 16 | - zsh-theme-powerlevel9k 17 | - silversearcher-ag 18 | when: 19 | - ansible_facts.os_family == "Debian" 20 | -------------------------------------------------------------------------------- /test/ansible/env/pkg-sqlite.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | DB_BACKEND=sqlite 4 | PACKAGE_TESTING=true 5 | TEST_PACKAGE_VERSION_DEB=1.4.1 6 | TEST_PACKAGE_VERSION_RPM=1.4.1-1 7 | 8 | export DB_BACKEND 9 | export PACKAGE_TESTING 10 | export TEST_SUITE_GIT 11 | export TEST_SUITE_VERSION 12 | export TEST_SUITE_ZIP 13 | export TEST_PACKAGE_VERSION_DEB 14 | export TEST_PACKAGE_VERSION_RPM 15 | export TEST_PACKAGE_FILE 16 | export TEST_PACKAGE_DIR 17 | export TEST_SKIP 18 | -------------------------------------------------------------------------------- /test/ansible/env/source-mysql.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | DB_BACKEND=mysql 4 | 5 | export DB_BACKEND 6 | export PACKAGE_TESTING 7 | export TEST_SUITE_GIT 8 | export TEST_SUITE_VERSION 9 | export TEST_SUITE_ZIP 10 | export TEST_PACKAGE_VERSION_DEB 11 | export TEST_PACKAGE_VERSION_RPM 12 | export TEST_PACKAGE_FILE 13 | export TEST_PACKAGE_DIR 14 | export TEST_SKIP 15 | -------------------------------------------------------------------------------- /test/ansible/env/source-pgx.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | DB_BACKEND=pgx 4 | 5 | export DB_BACKEND 6 | export PACKAGE_TESTING 7 | export TEST_SUITE_GIT 8 | export TEST_SUITE_VERSION 9 | export TEST_SUITE_ZIP 10 | export TEST_PACKAGE_VERSION_DEB 11 | export TEST_PACKAGE_VERSION_RPM 12 | export TEST_PACKAGE_FILE 13 | export TEST_PACKAGE_DIR 14 | export TEST_SKIP 15 | -------------------------------------------------------------------------------- /test/ansible/env/source-postgres.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | DB_BACKEND=postgres 4 | 5 | export DB_BACKEND 6 | export PACKAGE_TESTING 7 | export TEST_SUITE_GIT 8 | export TEST_SUITE_VERSION 9 
| export TEST_SUITE_ZIP 10 | export TEST_PACKAGE_VERSION_DEB 11 | export TEST_PACKAGE_VERSION_RPM 12 | export TEST_PACKAGE_FILE 13 | export TEST_PACKAGE_DIR 14 | export TEST_SKIP 15 | -------------------------------------------------------------------------------- /test/ansible/env/source-sqlite.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | DB_BACKEND=sqlite 4 | 5 | export DB_BACKEND 6 | export PACKAGE_TESTING 7 | export TEST_SUITE_GIT 8 | export TEST_SUITE_VERSION 9 | export TEST_SUITE_ZIP 10 | export TEST_PACKAGE_VERSION_DEB 11 | export TEST_PACKAGE_VERSION_RPM 12 | export TEST_PACKAGE_FILE 13 | export TEST_PACKAGE_DIR 14 | export TEST_SKIP 15 | -------------------------------------------------------------------------------- /test/ansible/prepare_tests.yml: -------------------------------------------------------------------------------- 1 | # vim: set ft=yaml.ansible: 2 | --- 3 | 4 | - name: "Prepare fixture for the functional tests" 5 | hosts: all 6 | gather_facts: true 7 | vars_files: 8 | - vars/go.yml 9 | - vars/mysql.yml 10 | - vars/postgres.yml 11 | roles: 12 | - name: make_fixture 13 | environment: 14 | PGHOST: 127.0.0.1 15 | PGPORT: 5432 16 | PGPASSWORD: "{{ postgresql_users[0].password }}" 17 | PGUSER: postgres 18 | MYSQL_HOST: localhost 19 | MYSQL_PORT: 3306 20 | MYSQL_PASSWORD: "{{ mysql_root_password }}" 21 | MYSQL_USER: "root" 22 | -------------------------------------------------------------------------------- /test/ansible/requirements.yml: -------------------------------------------------------------------------------- 1 | # vim: set ft=yaml.ansible: 2 | --- 3 | 4 | roles: 5 | - src: geerlingguy.mysql 6 | - src: https://github.com/crowdsecurity/ansible-role-postgresql 7 | version: crowdsec 8 | name: geerlingguy.postgresql 9 | # these should be included as dependencies of crowdsecurity.testing, but sometime are not 10 | - src: geerlingguy.repo-epel 11 | - src: gantsign.golang 12 | 13 | collections: 14 | - name: ansible.posix 15 | - name: https://github.com/crowdsecurity/ansible-collection-crowdsecurity.testing.git 16 | type: git 17 | version: v0.0.7 18 | 19 | # - name: crowdsecurity.testing 20 | # source: ../../../crowdsecurity.testing 21 | # type: dir 22 | -------------------------------------------------------------------------------- /test/ansible/roles/make_fixture/vars/main.yml: -------------------------------------------------------------------------------- 1 | # vim: set ft=yaml.ansible: 2 | --- 3 | package_testing: "{{ lookup('ansible.builtin.env', 'PACKAGE_TESTING') }}" 4 | -------------------------------------------------------------------------------- /test/ansible/roles/run_func_tests/vars/main.yml: -------------------------------------------------------------------------------- 1 | # vim: set ft=yaml.ansible: 2 | --- 3 | package_testing: "{{ lookup('ansible.builtin.env', 'PACKAGE_TESTING') }}" 4 | skip_tests: "{{ lookup('ansible.builtin.env', 'TEST_SKIP') }}" 5 | -------------------------------------------------------------------------------- /test/ansible/run_all.yml: -------------------------------------------------------------------------------- 1 | # vim: set ft=yaml.ansible: 2 | --- 3 | 4 | - import_playbook: provision_dependencies.yml 5 | - import_playbook: provision_test_suite.yml 6 | - import_playbook: install_binary_package.yml 7 | - import_playbook: prepare_tests.yml 8 | - import_playbook: run_tests.yml 9 | -------------------------------------------------------------------------------- 
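The env/*.sh files above only export variables (DB_BACKEND, PACKAGE_TESTING, TEST_PACKAGE_VERSION_*, ...), so they only take effect when sourced into the calling shell, and run_all.yml simply chains the provisioning, install, prepare and test playbooks. A minimal usage sketch from the test/ansible directory (the inventory file and the choice of env file here are assumptions, not shown above):

```
# sketch only: source (do not execute) an env file so DB_BACKEND etc. reach this shell
. ./env/source-sqlite.sh

# install the role/collection dependencies declared in requirements.yml
ansible-galaxy role install -r requirements.yml
ansible-galaxy collection install -r requirements.yml

# run the full chain defined in run_all.yml against an inventory of your own
ansible-playbook -i <your-inventory> run_all.yml
```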
/test/ansible/run_tests.yml: -------------------------------------------------------------------------------- 1 | # vim: set ft=yaml.ansible: 2 | --- 3 | 4 | - name: "Run functional tests" 5 | hosts: all 6 | gather_facts: true 7 | vars_files: 8 | - vars/mysql.yml 9 | - vars/postgres.yml 10 | roles: 11 | - name: run_func_tests 12 | environment: 13 | PGHOST: 127.0.0.1 14 | PGPORT: 5432 15 | PGPASSWORD: "{{ postgresql_users[0].password }}" 16 | PGUSER: postgres 17 | MYSQL_HOST: localhost 18 | MYSQL_PORT: 3306 19 | MYSQL_PASSWORD: "{{ mysql_root_password }}" 20 | MYSQL_USER: "root" 21 | -------------------------------------------------------------------------------- /test/ansible/run_wizard_tests.yml: -------------------------------------------------------------------------------- 1 | # vim: set ft=yaml.ansible: 2 | --- 3 | 4 | - name: "Run setup/wizard tests" 5 | hosts: all 6 | tasks: 7 | # - name: "Build release package" 8 | # become: false 9 | # ansible.builtin.command: 10 | # cmd: "make clean release" 11 | - name: "Run BATS: service detection tests" 12 | become: true 13 | ansible.builtin.command: 14 | chdir: "/home/{{ ansible_user }}" 15 | cmd: "./crowdsec/test/run-tests crowdsec/test/bats-detect" 16 | -------------------------------------------------------------------------------- /test/ansible/vagrant/alma-8/Vagrantfile: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | Vagrant.configure('2') do |config| 4 | config.vm.box = 'generic/alma8' 5 | config.vm.provision "shell", inline: <<-SHELL 6 | sudo dnf -y install dnf-plugins-core kitty-terminfo 7 | sudo dnf config-manager --set-enabled powertools 8 | SHELL 9 | end 10 | 11 | common = '../common' 12 | load common if File.exist?(common) 13 | -------------------------------------------------------------------------------- /test/ansible/vagrant/alma-9/Vagrantfile: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | Vagrant.configure('2') do |config| 4 | config.vm.box = 'generic/alma9' 5 | config.vm.provision "shell", inline: <<-SHELL 6 | sudo dnf -y install kitty-terminfo 7 | sudo dnf config-manager --set-enabled crb 8 | SHELL 9 | end 10 | 11 | common = '../common' 12 | load common if File.exist?(common) 13 | -------------------------------------------------------------------------------- /test/ansible/vagrant/centos-7/Vagrantfile: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | Vagrant.configure('2') do |config| 4 | config.vm.box = 'centos/7' 5 | config.vm.provision "shell", inline: <<-SHELL 6 | SHELL 7 | end 8 | 9 | common = '../common' 10 | load common if File.exist?(common) 11 | -------------------------------------------------------------------------------- /test/ansible/vagrant/centos-7/skip: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | die() { 4 | echo "$@" >&2 5 | exit 1 6 | } 7 | 8 | # postgres is too old on this distribution 9 | [ "${DB_BACKEND}" = "postgres" ] && die "skipping: postgres too old" 10 | [ "${DB_BACKEND}" = "pgx" ] && die "skipping: postgres too old" 11 | exit 0 12 | -------------------------------------------------------------------------------- /test/ansible/vagrant/centos-8/Vagrantfile: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | Vagrant.configure('2') do |config| 4 | 
config.vm.box = 'generic/centos8s' 5 | config.vm.provision "shell", inline: <<-SHELL 6 | sudo dnf -y install dnf-plugins-core kitty-terminfo 7 | sudo dnf config-manager --set-enabled powertools 8 | SHELL 9 | end 10 | 11 | common = '../common' 12 | load common if File.exist?(common) 13 | -------------------------------------------------------------------------------- /test/ansible/vagrant/centos-9/Vagrantfile: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | Vagrant.configure('2') do |config| 4 | config.vm.box = 'generic/centos9s' 5 | config.vm.provision "shell", inline: <<-SHELL 6 | sudo dnf -y install dnf-plugins-core 7 | sudo dnf config-manager --set-enabled crb 8 | SHELL 9 | end 10 | 11 | common = '../common' 12 | load common if File.exist?(common) 13 | -------------------------------------------------------------------------------- /test/ansible/vagrant/debian-10-buster/Vagrantfile: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | Vagrant.configure('2') do |config| 4 | config.vm.box = 'debian/buster64' 5 | config.vm.provision "shell", inline: <<-SHELL 6 | SHELL 7 | end 8 | 9 | common = '../common' 10 | load common if File.exist?(common) 11 | -------------------------------------------------------------------------------- /test/ansible/vagrant/debian-11-bullseye/Vagrantfile: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | Vagrant.configure('2') do |config| 4 | config.vm.box = 'debian/bullseye64' 5 | config.vm.provision "shell", inline: <<-SHELL 6 | SHELL 7 | end 8 | 9 | common = '../common' 10 | load common if File.exist?(common) 11 | -------------------------------------------------------------------------------- /test/ansible/vagrant/debian-12-bookworm/Vagrantfile: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | Vagrant.configure('2') do |config| 4 | config.vm.box = 'debian/bookworm64' 5 | config.vm.provision "shell", inline: <<-SHELL 6 | # sudo apt install -y kitty-terminfo 7 | SHELL 8 | end 9 | 10 | common = '../common' 11 | load common if File.exist?(common) 12 | -------------------------------------------------------------------------------- /test/ansible/vagrant/debian-9-stretch/Vagrantfile: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | Vagrant.configure('2') do |config| 4 | config.vm.box = 'debian/stretch64' 5 | config.vm.provision "shell", inline: <<-SHELL 6 | sudo sed -i s/httpredir.debian.org/archive.debian.org/g /etc/apt/sources.list 7 | SHELL 8 | end 9 | 10 | common = '../common' 11 | load common if File.exist?(common) 12 | -------------------------------------------------------------------------------- /test/ansible/vagrant/debian-9-stretch/skip: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | die() { 4 | echo "$@" >&2 5 | exit 1 6 | } 7 | 8 | # postgres is too old on this distribution 9 | [ "${DB_BACKEND}" = "postgres" ] && die "skipping: postgres too old" 10 | [ "${DB_BACKEND}" = "pgx" ] && die "skipping: postgres too old" 11 | exit 0 12 | -------------------------------------------------------------------------------- /test/ansible/vagrant/debian-testing/Vagrantfile: -------------------------------------------------------------------------------- 1 
| # frozen_string_literal: true 2 | 3 | Vagrant.configure('2') do |config| 4 | config.vm.box = 'debian/testing64' 5 | config.vm.provision "shell", inline: <<-SHELL 6 | SHELL 7 | end 8 | 9 | common = '../common' 10 | load common if File.exist?(common) 11 | -------------------------------------------------------------------------------- /test/ansible/vagrant/experimental/alpine-3.16/Vagrantfile: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | Vagrant.configure('2') do |config| 4 | config.vm.box = 'generic/alpine316' 5 | config.vm.define 'crowdsec' 6 | 7 | config.vm.provision 'shell', path: 'bootstrap' 8 | 9 | config.vm.provider :libvirt do |libvirt| 10 | libvirt.cpus = 1 11 | libvirt.memory = 1536 12 | end 13 | 14 | config.vm.synced_folder '.', '/vagrant', disabled: true 15 | 16 | config.vm.provision 'ansible' do |ansible| 17 | ansible.config_file = '../../../ansible.cfg' 18 | ansible.playbook = '../../../run_all.yml' 19 | end 20 | end 21 | -------------------------------------------------------------------------------- /test/ansible/vagrant/experimental/alpine-3.16/bootstrap: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | unset IFS 3 | set -euf 4 | 5 | # coreutils -> for timeout (busybox is not enough) 6 | sudo apk add python3 go tar procps coreutils 7 | 8 | -------------------------------------------------------------------------------- /test/ansible/vagrant/experimental/alpine-3.16/skip: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | die() { 4 | echo "$@" >&2 5 | exit 1 6 | } 7 | 8 | [ "${PACKAGE_TESTING}" = "true" ] && die "no package available for this distribution" 9 | exit 0 10 | -------------------------------------------------------------------------------- /test/ansible/vagrant/experimental/amazon-linux-2/Vagrantfile: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | Vagrant.configure('2') do |config| 4 | config.vm.box = 'cloudnatives/amazon-linux-2' 5 | config.vm.define 'crowdsec' 6 | 7 | config.vm.provider :libvirt do |libvirt| 8 | libvirt.cpus = 1 9 | libvirt.memory = 1536 10 | end 11 | 12 | config.vm.synced_folder '.', '/vagrant', disabled: true 13 | 14 | config.vm.provision 'ansible' do |ansible| 15 | ansible.config_file = '../../../ansible.cfg' 16 | ansible.playbook = '../../../run_all.yml' 17 | end 18 | end 19 | -------------------------------------------------------------------------------- /test/ansible/vagrant/experimental/amazon-linux-2/issues.txt: -------------------------------------------------------------------------------- 1 | 2 | The file 70_http_plugin.bats hangs forever when run from ansible on amzn2, but all tests pass when run from ssh. 
3 | 4 | -------------------------------------------------------------------------------- /test/ansible/vagrant/experimental/arch/Vagrantfile: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | Vagrant.configure('2') do |config| 4 | config.vm.box = 'generic/arch' 5 | config.vm.define 'crowdsec' 6 | 7 | config.vm.provider :libvirt do |libvirt| 8 | libvirt.cpus = 1 9 | libvirt.memory = 1536 10 | end 11 | 12 | config.vm.synced_folder '.', '/vagrant', disabled: true 13 | 14 | config.vm.provision 'ansible' do |ansible| 15 | ansible.config_file = '../../../ansible.cfg' 16 | ansible.playbook = '../../../run_all.yml' 17 | end 18 | end 19 | -------------------------------------------------------------------------------- /test/ansible/vagrant/experimental/devuan-3/Vagrantfile: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | Vagrant.configure('2') do |config| 4 | config.vm.box = 'generic/devuan3' 5 | config.vm.define 'crowdsec' 6 | 7 | config.vm.provider :libvirt do |libvirt| 8 | libvirt.cpus = 1 9 | libvirt.memory = 1536 10 | end 11 | 12 | config.vm.synced_folder '.', '/vagrant', disabled: true 13 | 14 | config.vm.provision 'ansible' do |ansible| 15 | ansible.config_file = '../../../ansible.cfg' 16 | ansible.playbook = '../../../run_all.yml' 17 | end 18 | end 19 | -------------------------------------------------------------------------------- /test/ansible/vagrant/experimental/devuan-3/skip: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | die() { 4 | echo "$@" >&2 5 | exit 1 6 | } 7 | 8 | [ "${PACKAGE_TESTING}" = "true" ] && die "no package available for this distribution" 9 | exit 0 10 | -------------------------------------------------------------------------------- /test/ansible/vagrant/experimental/dragonflybsd-6/Vagrantfile: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | Vagrant.configure('2') do |config| 4 | config.vm.box = 'generic/dragonflybsd6' 5 | config.vm.define 'crowdsec' 6 | 7 | config.vm.provider :libvirt do |libvirt| 8 | libvirt.cpus = 1 9 | libvirt.memory = 1536 10 | end 11 | 12 | config.vm.synced_folder '.', '/vagrant', disabled: true 13 | 14 | config.vm.provision 'ansible' do |ansible| 15 | ansible.config_file = '../../../ansible.cfg' 16 | ansible.playbook = '../../../run_all.yml' 17 | end 18 | end 19 | -------------------------------------------------------------------------------- /test/ansible/vagrant/experimental/gentoo/Vagrantfile: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | Vagrant.configure('2') do |config| 4 | config.vm.box = 'generic/gentoo' 5 | config.vm.define 'crowdsec' 6 | 7 | config.vm.provision 'shell', path: 'bootstrap' 8 | 9 | config.vm.provider :libvirt do |libvirt| 10 | libvirt.cpus = 1 11 | libvirt.memory = 1536 12 | end 13 | 14 | config.vm.synced_folder '.', '/vagrant', disabled: true 15 | 16 | config.vm.provision 'ansible' do |ansible| 17 | ansible.config_file = '../../../ansible.cfg' 18 | ansible.playbook = '../../../run_all.yml' 19 | end 20 | end 21 | -------------------------------------------------------------------------------- /test/ansible/vagrant/experimental/gentoo/bootstrap: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | sudo emerge --quiet 
app-portage/gentoolkit dev-vcs/git net-misc/curl app-misc/jq 4 | -------------------------------------------------------------------------------- /test/ansible/vagrant/experimental/hardenedbsd-13/Vagrantfile: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | Vagrant.configure('2') do |config| 4 | config.vm.box = 'generic/hardenedbsd13' 5 | config.vm.provision "shell", inline: <<-SHELL 6 | sudo pkg install python3 7 | SHELL 8 | end 9 | 10 | common = '../../common' 11 | load common if File.exist?(common) 12 | -------------------------------------------------------------------------------- /test/ansible/vagrant/experimental/hardenedbsd-13/skip: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | die() { 4 | echo "$@" >&2 5 | exit 1 6 | } 7 | 8 | [ "${PACKAGE_TESTING}" = "true" ] && die "no package available for this distribution" 9 | exit 0 10 | -------------------------------------------------------------------------------- /test/ansible/vagrant/experimental/netbsd-9/Vagrantfile: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | Vagrant.configure('2') do |config| 4 | config.vm.box = 'generic/netbsd9' 5 | config.vm.define 'crowdsec' 6 | 7 | # config.vm.provision 'shell', path: 'bootstrap' 8 | 9 | config.vm.provider :libvirt do |libvirt| 10 | libvirt.cpus = 1 11 | libvirt.memory = 1536 12 | end 13 | 14 | config.vm.synced_folder '.', '/vagrant', disabled: true 15 | 16 | config.vm.provision 'ansible' do |ansible| 17 | ansible.config_file = '../../../ansible.cfg' 18 | ansible.playbook = '../../../run_all.yml' 19 | end 20 | end 21 | -------------------------------------------------------------------------------- /test/ansible/vagrant/experimental/openbsd-6/Vagrantfile: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | Vagrant.configure('2') do |config| 4 | config.vm.box = 'generic/openbsd6' 5 | # config.vm.box_version = '4.2.16' 6 | config.vm.provision "shell", inline: <<-SHELL 7 | sudo pkg_add python py3-pip gcc openssl-1.0.2up3 gtar-1.34 8 | # sudo pkg_add -u 9 | # sudo pkg_add kitty 10 | SHELL 11 | end 12 | 13 | common = '../../common' 14 | load common if File.exist?(common) 15 | -------------------------------------------------------------------------------- /test/ansible/vagrant/experimental/openbsd-6/skip: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | die() { 4 | echo "$@" >&2 5 | exit 1 6 | } 7 | 8 | [ "${PACKAGE_TESTING}" = "true" ] && die "no package available for this distribution" 9 | exit 0 10 | -------------------------------------------------------------------------------- /test/ansible/vagrant/experimental/openbsd-7/Vagrantfile: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | Vagrant.configure('2') do |config| 4 | config.vm.box = 'generic/openbsd7' 5 | # config.vm.box_version = '4.2.16' 6 | config.vm.provision "shell", inline: <<-SHELL 7 | sudo pkg_add python-3.9.16 py3-pip gcc-11.2.0p3 openssl-3.0.8 gtar-1.34 8 | # sudo pkg_add -u 9 | # sudo pkg_add kitty 10 | SHELL 11 | end 12 | 13 | common = '../../common' 14 | load common if File.exist?(common) 15 | -------------------------------------------------------------------------------- /test/ansible/vagrant/experimental/openbsd-7/skip: 
-------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | die() { 4 | echo "$@" >&2 5 | exit 1 6 | } 7 | 8 | [ "${PACKAGE_TESTING}" = "true" ] && die "no package available for this distribution" 9 | exit 0 10 | -------------------------------------------------------------------------------- /test/ansible/vagrant/experimental/opensuse-15.6/Vagrantfile: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | Vagrant.configure('2') do |config| 4 | config.vm.box = 'opensuse/Leap-15.6.x86_64' 5 | config.vm.box_version = "15.6.13.280" 6 | config.vm.define 'crowdsec' 7 | 8 | config.vm.provision 'shell', path: 'bootstrap' 9 | 10 | config.vm.provider :libvirt do |libvirt| 11 | libvirt.cpus = 1 12 | libvirt.memory = 1536 13 | end 14 | 15 | config.vm.synced_folder '.', '/vagrant', disabled: true 16 | 17 | config.vm.provision 'ansible' do |ansible| 18 | ansible.config_file = '../../../ansible.cfg' 19 | ansible.playbook = '../../../run_all.yml' 20 | end 21 | end 22 | -------------------------------------------------------------------------------- /test/ansible/vagrant/experimental/opensuse-15.6/bootstrap: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | zypper install -y kitty-terminfo 4 | -------------------------------------------------------------------------------- /test/ansible/vagrant/experimental/ubuntu-14.04-trusty/Vagrantfile: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | Vagrant.configure('2') do |config| 4 | config.vm.box = 'peru/ubuntu-14.04-server-amd64' 5 | config.vm.define 'crowdsec' 6 | 7 | config.vm.box_version = '20190901.01' 8 | 9 | config.vm.provider :libvirt do |libvirt| 10 | libvirt.cpus = 1 11 | libvirt.memory = 1536 12 | end 13 | 14 | config.vm.synced_folder '.', '/vagrant', disabled: true 15 | 16 | config.vm.provision 'ansible' do |ansible| 17 | ansible.config_file = '../../../ansible.cfg' 18 | ansible.playbook = '../../../run_all.yml' 19 | end 20 | end 21 | -------------------------------------------------------------------------------- /test/ansible/vagrant/fedora-35/Vagrantfile: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | Vagrant.configure('2') do |config| 4 | # config.vm.box = 'fedora/35-cloud-base' 5 | config.vm.box = 'generic/fedora35' 6 | config.vm.provision "shell", inline: <<-SHELL 7 | SHELL 8 | end 9 | 10 | common = '../common' 11 | load common if File.exist?(common) 12 | -------------------------------------------------------------------------------- /test/ansible/vagrant/fedora-35/skip: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | die() { 4 | echo "$@" >&2 5 | exit 1 6 | } 7 | 8 | [ "${DB_BACKEND}" = "mysql" ] && die "mysql role does not support this distribution" 9 | exit 0 10 | -------------------------------------------------------------------------------- /test/ansible/vagrant/fedora-36/Vagrantfile: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | Vagrant.configure('2') do |config| 4 | # config.vm.box = "fedora/36-cloud-base" 5 | config.vm.box = 'generic/fedora36' 6 | config.vm.provision "shell", inline: <<-SHELL 7 | SHELL 8 | end 9 | 10 | common = '../common' 11 | load common if File.exist?(common) 12 | 
-------------------------------------------------------------------------------- /test/ansible/vagrant/fedora-36/skip: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | die() { 4 | echo "$@" >&2 5 | exit 1 6 | } 7 | 8 | [ "${DB_BACKEND}" = "mysql" ] && die "mysql role does not support this distribution" 9 | exit 0 10 | -------------------------------------------------------------------------------- /test/ansible/vagrant/fedora-37/Vagrantfile: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | Vagrant.configure('2') do |config| 4 | config.vm.box = 'generic/fedora37' 5 | config.vm.provision "shell", inline: <<-SHELL 6 | sudo dnf -y install kitty-terminfo 7 | SHELL 8 | end 9 | 10 | common = '../common' 11 | load common if File.exist?(common) 12 | -------------------------------------------------------------------------------- /test/ansible/vagrant/fedora-37/skip: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | die() { 4 | echo "$@" >&2 5 | exit 1 6 | } 7 | 8 | [ "${DB_BACKEND}" = "mysql" ] && die "mysql role does not support this distribution" 9 | exit 0 10 | -------------------------------------------------------------------------------- /test/ansible/vagrant/fedora-38/Vagrantfile: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | Vagrant.configure('2') do |config| 4 | config.vm.box = "fedora/38-cloud-base" 5 | config.vm.provision "shell", inline: <<-SHELL 6 | SHELL 7 | end 8 | 9 | common = '../common' 10 | load common if File.exist?(common) 11 | -------------------------------------------------------------------------------- /test/ansible/vagrant/fedora-38/skip: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | die() { 4 | echo "$@" >&2 5 | exit 1 6 | } 7 | 8 | [ "${DB_BACKEND}" = "mysql" ] && die "mysql role does not support this distribution" 9 | exit 0 10 | -------------------------------------------------------------------------------- /test/ansible/vagrant/fedora-39/Vagrantfile: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | Vagrant.configure('2') do |config| 4 | config.vm.box = "fedora/39-cloud-base" 5 | config.vm.provision "shell", inline: <<-SHELL 6 | SHELL 7 | end 8 | 9 | common = '../common' 10 | load common if File.exist?(common) 11 | -------------------------------------------------------------------------------- /test/ansible/vagrant/fedora-39/skip: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | die() { 4 | echo "$@" >&2 5 | exit 1 6 | } 7 | 8 | [ "${DB_BACKEND}" = "mysql" ] && die "mysql role does not support this distribution" 9 | exit 0 10 | -------------------------------------------------------------------------------- /test/ansible/vagrant/fedora-40/Vagrantfile: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | Vagrant.configure('2') do |config| 4 | config.vm.box = "fedora/40-cloud-base" 5 | config.vm.provision "shell", inline: <<-SHELL 6 | SHELL 7 | end 8 | 9 | common = '../common' 10 | load common if File.exist?(common) 11 | -------------------------------------------------------------------------------- /test/ansible/vagrant/fedora-40/skip: 
-------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | die() { 4 | echo "$@" >&2 5 | exit 1 6 | } 7 | 8 | [ "${DB_BACKEND}" = "mysql" ] && die "mysql role does not support this distribution" 9 | exit 0 10 | -------------------------------------------------------------------------------- /test/ansible/vagrant/fedora-41/Vagrantfile: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | Vagrant.configure('2') do |config| 4 | config.vm.box = "fedora/40-cloud-base" 5 | config.vm.provision "shell", inline: <<-SHELL 6 | SHELL 7 | config.vm.provision "shell" do |s| 8 | s.inline = "sudo dnf upgrade --refresh -y && sudo dnf install dnf-plugin-system-upgrade -y && sudo dnf system-upgrade download --releasever=41 -y && sudo dnf system-upgrade reboot -y" 9 | end 10 | end 11 | 12 | common = '../common' 13 | load common if File.exist?(common) 14 | -------------------------------------------------------------------------------- /test/ansible/vagrant/fedora-41/skip: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | die() { 4 | echo "$@" >&2 5 | exit 1 6 | } 7 | 8 | [ "${DB_BACKEND}" = "mysql" ] && die "mysql role does not support this distribution" 9 | exit 0 10 | -------------------------------------------------------------------------------- /test/ansible/vagrant/freebsd-12/Vagrantfile: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | Vagrant.configure('2') do |config| 4 | config.vm.box = 'generic/freebsd12' 5 | config.vm.provision "shell", inline: <<-SHELL 6 | pkg install -y gtar 7 | SHELL 8 | end 9 | 10 | common = '../common' 11 | load common if File.exist?(common) 12 | -------------------------------------------------------------------------------- /test/ansible/vagrant/freebsd-12/skip: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | die() { 4 | echo "$@" >&2 5 | exit 1 6 | } 7 | 8 | [ "${PACKAGE_TESTING}" = "true" ] && die "no package available for this distribution" 9 | [ "${DB_BACKEND}" = "postgres" ] && die "postgres role does not support this distribution" 10 | [ "${DB_BACKEND}" = "pgx" ] && die "postgres role does not support this distribution" 11 | [ "${DB_BACKEND}" = "mysql" ] && die "mysql role does not support this distribution" 12 | exit 0 13 | -------------------------------------------------------------------------------- /test/ansible/vagrant/freebsd-13/Vagrantfile: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | Vagrant.configure('2') do |config| 4 | config.vm.box = 'generic/freebsd13' 5 | config.vm.provision "shell", inline: <<-SHELL 6 | pkg install -y gtar 7 | SHELL 8 | end 9 | 10 | common = '../common' 11 | load common if File.exist?(common) 12 | -------------------------------------------------------------------------------- /test/ansible/vagrant/freebsd-13/skip: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | die() { 4 | echo "$@" >&2 5 | exit 1 6 | } 7 | 8 | [ "${PACKAGE_TESTING}" = "true" ] && die "no package available for this distribution" 9 | [ "${DB_BACKEND}" = "postgres" ] && die "postgres role does not support this distribution" 10 | [ "${DB_BACKEND}" = "pgx" ] && die "postgres role does not support this distribution" 11 | [ "${DB_BACKEND}" = "mysql" ] 
&& die "mysql role does not support this distribution" 12 | exit 0 13 | -------------------------------------------------------------------------------- /test/ansible/vagrant/opensuse-leap-15/Vagrantfile: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | Vagrant.configure('2') do |config| 4 | config.vm.box = "opensuse/Leap-15.6.x86_64" 5 | config.vm.provision "shell", inline: <<-SHELL 6 | SHELL 7 | end 8 | 9 | common = '../common' 10 | load common if File.exist?(common) 11 | -------------------------------------------------------------------------------- /test/ansible/vagrant/opensuse-leap-15/skip: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | die() { 4 | echo "$@" >&2 5 | exit 1 6 | } 7 | 8 | [ "${DB_BACKEND}" = "mysql" ] && die "mysql role does not support this distribution" 9 | exit 0 10 | -------------------------------------------------------------------------------- /test/ansible/vagrant/oracle-7/Vagrantfile: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | Vagrant.configure('2') do |config| 4 | config.vm.box = 'generic/oracle7' 5 | config.vm.provision "shell", inline: <<-SHELL 6 | sudo yum-config-manager --enable ol7_optional_latest 7 | SHELL 8 | end 9 | 10 | common = '../common' 11 | load common if File.exist?(common) 12 | -------------------------------------------------------------------------------- /test/ansible/vagrant/oracle-7/skip: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | die() { 4 | echo "$@" >&2 5 | exit 1 6 | } 7 | 8 | # postgres is too old on this distribution 9 | [ "${DB_BACKEND}" = "postgres" ] && die "skipping: postgres too old" 10 | [ "${DB_BACKEND}" = "pgx" ] && die "skipping: postgres too old" 11 | exit 0 12 | -------------------------------------------------------------------------------- /test/ansible/vagrant/oracle-8/Vagrantfile: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | Vagrant.configure('2') do |config| 4 | config.vm.box = 'generic/oracle8' 5 | config.vm.provision "shell", inline: <<-SHELL 6 | sudo dnf config-manager --set-enabled ol8_codeready_builder 7 | SHELL 8 | end 9 | 10 | common = '../common' 11 | load common if File.exist?(common) 12 | -------------------------------------------------------------------------------- /test/ansible/vagrant/oracle-9/Vagrantfile: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | Vagrant.configure('2') do |config| 4 | config.vm.box = 'generic/oracle9' 5 | config.vm.provision "shell", inline: <<-SHELL 6 | sudo dnf config-manager --set-enabled ol9_codeready_builder 7 | SHELL 8 | end 9 | 10 | common = '../common' 11 | load common if File.exist?(common) 12 | -------------------------------------------------------------------------------- /test/ansible/vagrant/rocky-8/Vagrantfile: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | Vagrant.configure('2') do |config| 4 | config.vm.box = 'generic/rocky8' 5 | config.vm.provision "shell", inline: <<-SHELL 6 | sudo dnf config-manager --set-enabled powertools 7 | sudo dnf -y install kitty-terminfo 8 | SHELL 9 | end 10 | 11 | common = '../common' 12 | load common if File.exist?(common) 13 | 
-------------------------------------------------------------------------------- /test/ansible/vagrant/rocky-9/Vagrantfile: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | Vagrant.configure('2') do |config| 4 | config.vm.box = 'generic/rocky9' 5 | config.vm.provision "shell", inline: <<-SHELL 6 | sudo dnf config-manager --set-enabled crb 7 | sudo dnf -y install kitty-terminfo 8 | SHELL 9 | end 10 | 11 | common = '../common' 12 | load common if File.exist?(common) 13 | -------------------------------------------------------------------------------- /test/ansible/vagrant/ubuntu-16.04-xenial/Vagrantfile: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | Vagrant.configure('2') do |config| 4 | config.vm.box = 'generic/ubuntu1604' 5 | config.vm.provision "shell", inline: <<-SHELL 6 | SHELL 7 | end 8 | 9 | common = '../common' 10 | load common if File.exist?(common) 11 | -------------------------------------------------------------------------------- /test/ansible/vagrant/ubuntu-16.04-xenial/skip: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | die() { 4 | echo "$@" >&2 5 | exit 1 6 | } 7 | 8 | # postgres is too old on this distribution 9 | [ "${DB_BACKEND}" = "postgres" ] && die "skipping: postgres too old" 10 | [ "${DB_BACKEND}" = "pgx" ] && die "skipping: postgres too old" 11 | exit 0 12 | -------------------------------------------------------------------------------- /test/ansible/vagrant/ubuntu-18.04-bionic/Vagrantfile: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | Vagrant.configure('2') do |config| 4 | # the official boxes only supports virtualbox 5 | config.vm.box = 'generic/ubuntu1804' 6 | config.vm.provision "shell", inline: <<-SHELL 7 | SHELL 8 | end 9 | 10 | common = '../common' 11 | load common if File.exist?(common) 12 | -------------------------------------------------------------------------------- /test/ansible/vagrant/ubuntu-20.04-focal/Vagrantfile: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | Vagrant.configure('2') do |config| 4 | config.vm.box = 'generic/ubuntu2004' 5 | config.vm.provision "shell", inline: <<-SHELL 6 | sudo apt install -y kitty-terminfo 7 | SHELL 8 | end 9 | 10 | common = '../common' 11 | load common if File.exist?(common) 12 | -------------------------------------------------------------------------------- /test/ansible/vagrant/ubuntu-22.04-jammy/Vagrantfile: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | Vagrant.configure('2') do |config| 4 | config.vm.box = 'generic/ubuntu2204' 5 | config.vm.provision "shell", inline: <<-SHELL 6 | sudo apt install -y kitty-terminfo 7 | SHELL 8 | end 9 | 10 | common = '../common' 11 | load common if File.exist?(common) 12 | -------------------------------------------------------------------------------- /test/ansible/vagrant/ubuntu-22.10-kinetic/Vagrantfile: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | Vagrant.configure('2') do |config| 4 | config.vm.box = 'generic/ubuntu2210' 5 | config.vm.provision "shell", inline: <<-SHELL 6 | sudo apt install -y kitty-terminfo 7 | SHELL 8 | end 9 | 10 | common = '../common' 11 | load 
common if File.exist?(common) 12 | -------------------------------------------------------------------------------- /test/ansible/vagrant/ubuntu-23.04-lunar/Vagrantfile: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | Vagrant.configure('2') do |config| 4 | config.vm.box = 'bento/ubuntu-23.04' 5 | config.vm.provision "shell", inline: <<-SHELL 6 | sudo apt install -y kitty-terminfo 7 | SHELL 8 | end 9 | 10 | common = '../common' 11 | load common if File.exist?(common) 12 | -------------------------------------------------------------------------------- /test/ansible/vagrant/ubuntu-24-04-noble/Vagrantfile: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | Vagrant.configure('2') do |config| 4 | config.vm.box = 'alvistack/ubuntu-24.04' 5 | config.vm.provision "shell", inline: <<-SHELL 6 | SHELL 7 | end 8 | 9 | common = '../common' 10 | load common if File.exist?(common) 11 | -------------------------------------------------------------------------------- /test/ansible/vagrant/wizard/centos-8/Vagrantfile: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | Vagrant.configure('2') do |config| 4 | config.vm.box = 'generic/centos8s' 5 | config.vm.provision "shell", inline: <<-SHELL 6 | sudo dnf -y install dnf-plugins-core kitty-terminfo 7 | dnf config-manager --set-enabled powertools 8 | # sudo dnf -y update 9 | SHELL 10 | end 11 | 12 | common = '../common' 13 | load common if File.exist?(common) 14 | -------------------------------------------------------------------------------- /test/ansible/vagrant/wizard/debian-10-buster/Vagrantfile: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | Vagrant.configure('2') do |config| 4 | config.vm.box = 'debian/buster64' 5 | config.vm.provision "shell", inline: <<-SHELL 6 | sudo apt update 7 | sudo apt install -y aptitude kitty-terminfo 8 | SHELL 9 | end 10 | 11 | common = '../common' 12 | load common if File.exist?(common) 13 | -------------------------------------------------------------------------------- /test/ansible/vagrant/wizard/debian-11-bullseye/Vagrantfile: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | Vagrant.configure('2') do |config| 4 | config.vm.box = 'debian/bullseye64' 5 | config.vm.provision "shell", inline: <<-SHELL 6 | sudo apt update 7 | sudo apt install -y aptitude kitty-terminfo 8 | SHELL 9 | end 10 | 11 | common = '../common' 12 | load common if File.exist?(common) 13 | -------------------------------------------------------------------------------- /test/ansible/vagrant/wizard/debian-12-bookworm/Vagrantfile: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | Vagrant.configure('2') do |config| 4 | config.vm.box = 'debian/bookworm64' 5 | config.vm.provision "shell", inline: <<-SHELL 6 | sudo apt update 7 | sudo apt install -y aptitude kitty-terminfo 8 | SHELL 9 | end 10 | 11 | common = '../common' 12 | load common if File.exist?(common) 13 | -------------------------------------------------------------------------------- /test/ansible/vagrant/wizard/fedora-36/Vagrantfile: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | 
Vagrant.configure('2') do |config| 4 | config.vm.box = 'fedora/36-cloud-base' 5 | config.vm.provision "shell", inline: <<-SHELL 6 | # sudo dnf -y update 7 | SHELL 8 | end 9 | 10 | common = '../common' 11 | load common if File.exist?(common) 12 | -------------------------------------------------------------------------------- /test/ansible/vagrant/wizard/ubuntu-22.04-jammy/Vagrantfile: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | Vagrant.configure('2') do |config| 4 | config.vm.box = 'generic/ubuntu2204' 5 | config.vm.provision "shell", inline: <<-SHELL 6 | sudo env DEBIAN_FRONTEND=noninteractive apt install -y aptitude kitty-terminfo 7 | SHELL 8 | end 9 | 10 | common = '../common' 11 | load common if File.exist?(common) 12 | -------------------------------------------------------------------------------- /test/ansible/vagrant/wizard/ubuntu-22.10-kinetic/Vagrantfile: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | Vagrant.configure('2') do |config| 4 | config.vm.box = 'generic/ubuntu2210' 5 | config.vm.provision "shell", inline: <<-SHELL 6 | sudo apt install -y aptitude kitty-terminfo 7 | SHELL 8 | end 9 | 10 | common = '../common' 11 | load common if File.exist?(common) 12 | -------------------------------------------------------------------------------- /test/ansible/vars/go.yml: -------------------------------------------------------------------------------- 1 | # vim: set ft=yaml.ansible: 2 | --- 3 | 4 | golang_version: "1.21.4" 5 | golang_install_dir: "/opt/go/{{ golang_version }}" 6 | -------------------------------------------------------------------------------- /test/ansible/vars/mysql.yml: -------------------------------------------------------------------------------- 1 | # vim: set ft=yaml.ansible: 2 | --- 3 | 4 | # The password is insecure since the db is ephemeral and only listens on localhost. 5 | mysql_root_password: password 6 | -------------------------------------------------------------------------------- /test/ansible/vars/postgres.yml: -------------------------------------------------------------------------------- 1 | # vim: set ft=yaml.ansible: 2 | --- 3 | 4 | # The password is insecure since the db is ephemeral and only listens on localhost. 5 | postgresql_users: 6 | - name: postgres 7 | password: postgres 8 | 9 | postgresql_hba_entries: 10 | - type: local 11 | database: all 12 | user: postgres 13 | auth_method: peer 14 | 15 | - type: local 16 | database: all 17 | user: all 18 | auth_method: peer 19 | 20 | - type: host 21 | database: all 22 | user: all 23 | address: "127.0.0.1/32" 24 | auth_method: md5 25 | 26 | - type: host 27 | database: all 28 | user: all 29 | address: "::1/128" 30 | auth_method: md5 31 | -------------------------------------------------------------------------------- /test/ansible/vars/python.yml: -------------------------------------------------------------------------------- 1 | python_version: "3.12.3" 2 | -------------------------------------------------------------------------------- /test/bats-detect/WARNING.md: -------------------------------------------------------------------------------- 1 | 2 | Running the tests in this directory WILL change the system configuration in 3 | unpredictable ways, remove packages and data (with a peculiar appetite for 4 | databases) and possibly bring the system to an unusable state. 5 | 6 | They are meant to be run, as root, on temporary VMs.
They are only intended to 7 | ease the development of configurations for "cscli setup detect". 8 | 9 | -------------------------------------------------------------------------------- /test/bats/12_notifications.bats: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bats 2 | 3 | set -u 4 | 5 | setup_file() { 6 | load "../lib/setup_file.sh" 7 | } 8 | 9 | teardown_file() { 10 | load "../lib/teardown_file.sh" 11 | } 12 | 13 | setup() { 14 | load "../lib/setup.sh" 15 | load "../lib/bats-file/load.bash" 16 | ./instance-data load 17 | ./instance-crowdsec start 18 | } 19 | 20 | teardown() { 21 | cd "$TEST_DIR" || exit 1 22 | ./instance-crowdsec stop 23 | } 24 | 25 | #---------- 26 | 27 | @test "cscli notifications list" { 28 | rune -0 cscli notifications list 29 | assert_output --partial "Name" 30 | assert_output --partial "Type" 31 | assert_output --partial "Profile name" 32 | } 33 | 34 | @test "cscli notifications must be run from lapi" { 35 | config_disable_lapi 36 | rune -1 cscli notifications list 37 | assert_stderr --partial "local API is disabled -- this command must be run on the local API machine" 38 | } 39 | -------------------------------------------------------------------------------- /test/bats/reformat: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | # from https://github.com/bats-core/bats-core/issues/192#issuecomment-528315083 4 | # thanks Sean Leather 5 | 6 | # Rewrite the Bats scripts in-place to look more like Bash scripts to shfmt 7 | perl -pi -e 's/^(\@test.*) \{$/$1\n{/' ./*.bats 8 | 9 | tmpfile=$(mktemp) 10 | for file in *bats; do 11 | shfmt -i 4 -ln bash -s "${file}" > "${tmpfile}" 12 | mv "${tmpfile}" "${file}" 13 | done 14 | rm -f "${tmpfile}" 15 | 16 | # Undo the changes to the Bats scripts in-place so that they work with Bats 17 | perl -pi -e 's/^\{\R//; s/(\@test.*$)/$1 {/' ./*.bats 18 | -------------------------------------------------------------------------------- /test/bats/sql.bats: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bats 2 | 3 | set -u 4 | 5 | setup_file() { 6 | load "../lib/setup_file.sh" 7 | } 8 | 9 | teardown_file() { 10 | load "../lib/teardown_file.sh" 11 | } 12 | 13 | setup() { 14 | load "../lib/setup.sh" 15 | load "../lib/bats-file/load.bash" 16 | ./instance-data load 17 | } 18 | 19 | #---------- 20 | 21 | @test "sql helper" { 22 | rune -0 ./instance-db exec_sql "SELECT 11235813" 23 | assert_output --partial '11235813' 24 | } 25 | -------------------------------------------------------------------------------- /test/bats/testdata/90_decisions/csv_decisions: -------------------------------------------------------------------------------- 1 | origin,scope,value,reason,type,duration 2 | cscli,ip,1.6.11.16,manual import from csv,ban,1h 3 | cscli,ip,2.7.12.17,manual import from csv,ban,1h 4 | cscli,ip,3.8.13.18,manual import from csv,ban,1h 5 | cscli,ip,4.9.14.19,manual import from csv,ban,1h 6 | cscli,ip,5.10.15.20,manual import from csv,ban,1h 7 | -------------------------------------------------------------------------------- /test/bats/testdata/90_decisions/decisions.csv: -------------------------------------------------------------------------------- 1 | origin,scope,value,reason,type,duration 2 | cscli,ip,1.6.11.16,manual import from csv,ban,1h 3 | cscli,ip,2.7.12.17,manual import from csv,ban,1h 4 | cscli,ip,3.8.13.18,manual import from csv,ban,1h 5 | 
cscli,ip,4.9.14.19,manual import from csv,ban,1h 6 | cscli,ip,5.10.15.20,manual import from csv,ban,1h 7 | -------------------------------------------------------------------------------- /test/bats/testdata/cfssl/agent.json: -------------------------------------------------------------------------------- 1 | { 2 | "CN": "localhost", 3 | "key": { 4 | "algo": "rsa", 5 | "size": 2048 6 | }, 7 | "names": [ 8 | { 9 | "C": "FR", 10 | "L": "Paris", 11 | "O": "Crowdsec", 12 | "OU": "agent-ou", 13 | "ST": "France" 14 | } 15 | ] 16 | } 17 | -------------------------------------------------------------------------------- /test/bats/testdata/cfssl/agent_invalid.json: -------------------------------------------------------------------------------- 1 | { 2 | "CN": "localhost", 3 | "key": { 4 | "algo": "rsa", 5 | "size": 2048 6 | }, 7 | "names": [ 8 | { 9 | "C": "FR", 10 | "L": "Paris", 11 | "O": "Crowdsec", 12 | "OU": "this-is-not-the-ou-youre-looking-for", 13 | "ST": "France" 14 | } 15 | ] 16 | } 17 | -------------------------------------------------------------------------------- /test/bats/testdata/cfssl/bouncer.json: -------------------------------------------------------------------------------- 1 | { 2 | "CN": "localhost", 3 | "key": { 4 | "algo": "rsa", 5 | "size": 2048 6 | }, 7 | "names": [ 8 | { 9 | "C": "FR", 10 | "L": "Paris", 11 | "O": "Crowdsec", 12 | "OU": "bouncer-ou", 13 | "ST": "France" 14 | } 15 | ] 16 | } 17 | -------------------------------------------------------------------------------- /test/bats/testdata/cfssl/bouncer_invalid.json: -------------------------------------------------------------------------------- 1 | { 2 | "CN": "localhost", 3 | "key": { 4 | "algo": "rsa", 5 | "size": 2048 6 | }, 7 | "names": [ 8 | { 9 | "C": "FR", 10 | "L": "Paris", 11 | "O": "Crowdsec", 12 | "OU": "this-is-not-the-ou-youre-looking-for", 13 | "ST": "France" 14 | } 15 | ] 16 | } 17 | -------------------------------------------------------------------------------- /test/bats/testdata/cfssl/ca_intermediate.json: -------------------------------------------------------------------------------- 1 | { 2 | "CN": "CrowdSec Test CA Intermediate", 3 | "key": { 4 | "algo": "rsa", 5 | "size": 2048 6 | }, 7 | "names": [ 8 | { 9 | "C": "FR", 10 | "L": "Paris", 11 | "O": "Crowdsec", 12 | "OU": "Crowdsec Intermediate", 13 | "ST": "France" 14 | } 15 | ], 16 | "ca": { 17 | "expiry": "42720h" 18 | } 19 | } 20 | -------------------------------------------------------------------------------- /test/bats/testdata/cfssl/ca_root.json: -------------------------------------------------------------------------------- 1 | { 2 | "CN": "CrowdSec Test CA", 3 | "key": { 4 | "algo": "rsa", 5 | "size": 2048 6 | }, 7 | "names": [ 8 | { 9 | "C": "FR", 10 | "L": "Paris", 11 | "O": "Crowdsec", 12 | "OU": "Crowdsec", 13 | "ST": "France" 14 | } 15 | ] 16 | } 17 | -------------------------------------------------------------------------------- /test/bats/testdata/cfssl/profiles.json: -------------------------------------------------------------------------------- 1 | { 2 | "signing": { 3 | "default": { 4 | "expiry": "8760h" 5 | }, 6 | "profiles": { 7 | "intermediate_ca": { 8 | "usages": [ 9 | "signing", 10 | "key encipherment", 11 | "cert sign", 12 | "crl sign", 13 | "server auth", 14 | "client auth" 15 | ], 16 | "expiry": "8760h", 17 | "ca_constraint": { 18 | "is_ca": true, 19 | "max_path_len": 0, 20 | "max_path_len_zero": true 21 | } 22 | }, 23 | "server": { 24 | "usages": [ 25 | "server auth" 26 | ], 27 | "expiry": "8760h" 28 | }, 29 
| "client": { 30 | "usages": [ 31 | "client auth" 32 | ], 33 | "expiry": "8760h" 34 | } 35 | } 36 | } 37 | } 38 | -------------------------------------------------------------------------------- /test/bats/testdata/cfssl/server.json: -------------------------------------------------------------------------------- 1 | { 2 | "CN": "localhost", 3 | "key": { 4 | "algo": "rsa", 5 | "size": 2048 6 | }, 7 | "names": [ 8 | { 9 | "C": "FR", 10 | "L": "Paris", 11 | "O": "Crowdsec", 12 | "OU": "Crowdsec Server", 13 | "ST": "France" 14 | } 15 | ], 16 | "hosts": [ 17 | "127.0.0.1", 18 | "localhost" 19 | ] 20 | } 21 | -------------------------------------------------------------------------------- /test/bats/testdata/explain/explain-log.txt: -------------------------------------------------------------------------------- 1 | line: Sep 19 18:33:22 scw-d95986 sshd[24347]: pam_unix(sshd:auth): authentication failure; logname= uid=0 euid=0 tty=ssh ruser= rhost=1.2.3.4 2 | ├ s00-raw 3 | | └ 🟢 crowdsecurity/syslog-logs (+12 ~9) 4 | ├ s01-parse 5 | | └ 🟢 crowdsecurity/sshd-logs (+8) 6 | ├-------- parser success 🟢 7 | ├ Scenarios 8 | ├ 🟢 crowdsecurity/ssh-bf 9 | ├ 🟢 crowdsecurity/ssh-bf_user-enum 10 | ├ 🟢 crowdsecurity/ssh-slow-bf 11 | └ 🟢 crowdsecurity/ssh-slow-bf_user-enum 12 | -------------------------------------------------------------------------------- /test/bin/assert-crowdsec-not-running: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | is_crowdsec_running() { 4 | case $(uname) in 5 | "Linux") 6 | # ignore processes in containers 7 | PIDS=$(pgrep --ns $$ -x 'crowdsec') 8 | ;; 9 | *) 10 | PIDS=$(pgrep -x 'crowdsec') 11 | ;; 12 | esac 13 | } 14 | 15 | # The process can be slow, especially on CI and during test coverage. 16 | # Give it some time, maybe it's quitting soon. 17 | for _i in {1..10}; do 18 | is_crowdsec_running || exit 0 19 | sleep .5 20 | done 21 | 22 | PIDS=$(echo "${PIDS}" | tr '\n' ' ') 23 | msg="CrowdSec is already running (PID ${PIDS}). Please terminate it and run the tests again." 24 | 25 | # Are we inside a setup() or @test? Is file descriptor 3 open? 26 | if { true >&3; } 2>/dev/null; then 27 | echo "${msg}" >&3 28 | else 29 | echo "${msg}" >&2 30 | fi 31 | 32 | # cause the calling setup() or @test to fail 33 | exit 1 34 | -------------------------------------------------------------------------------- /test/bin/remove-all-hub-items: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | set -eu 4 | 5 | # shellcheck disable=SC1007 6 | THIS_DIR=$(CDPATH= cd -- "$(dirname -- "$0")" && pwd) 7 | # shellcheck disable=SC1091 8 | . "${THIS_DIR}/../.environment.sh" 9 | 10 | # pre-download everything but don't install anything 11 | 12 | echo "Pre-downloading Hub content..." 13 | 14 | types=$("$CSCLI" hub types -o raw) 15 | 16 | for itemtype in $types; do 17 | "$CSCLI" "$itemtype" remove --all --force --purge 18 | done 19 | 20 | echo " done." 
21 | -------------------------------------------------------------------------------- /test/coverage/.do-not-remove: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/crowdsecurity/crowdsec/1cede239d8465b0549d10a3a44aab7b96a2cd743/test/coverage/.do-not-remove -------------------------------------------------------------------------------- /test/disable-capi: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # shellcheck disable=SC1007 4 | THIS_DIR=$(CDPATH= cd -- "$(dirname -- "$0")" && pwd) 5 | # shellcheck disable=SC1091 6 | . "${THIS_DIR}/.environment.sh" 7 | 8 | yq e 'del(.api.server.online_client)' -i "$CONFIG_YAML" 9 | -------------------------------------------------------------------------------- /test/enable-capi: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # shellcheck disable=SC1007 4 | THIS_DIR=$(CDPATH= cd -- "$(dirname -- "$0")" && pwd) 5 | # shellcheck disable=SC1091 6 | . "${THIS_DIR}/.environment.sh" 7 | 8 | online_api_credentials="$(dirname "$CONFIG_YAML")/online_api_credentials.yaml" 9 | export online_api_credentials 10 | 11 | yq e '.api.server.online_client.credentials_path=strenv(online_api_credentials)' -i "$CONFIG_YAML" 12 | -------------------------------------------------------------------------------- /test/instance-crowdsec: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | #shellcheck disable=SC1007 4 | THIS_DIR=$(CDPATH= cd -- "$(dirname -- "$0")" && pwd) 5 | cd "$THIS_DIR" || exit 1 6 | # shellcheck disable=SC1091 7 | . ./.environment.sh 8 | 9 | backend_script="./lib/init/crowdsec-${INIT_BACKEND}" 10 | 11 | if [[ ! -x "$backend_script" ]]; then 12 | echo "unknown init system '${INIT_BACKEND}'" >&2 13 | exit 1 14 | fi 15 | 16 | exec "$backend_script" "$@" 17 | -------------------------------------------------------------------------------- /test/instance-data: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | set -eu 4 | 5 | die() { 6 | echo >&2 "$@" 7 | exit 1 8 | } 9 | 10 | #shellcheck disable=SC1007 11 | THIS_DIR=$(CDPATH= cd -- "$(dirname -- "$0")" && pwd) 12 | cd "$THIS_DIR" || exit 1 13 | # shellcheck disable=SC1091 14 | . ./.environment.sh 15 | 16 | if [[ -f "$LOCAL_INIT_DIR/.lock" ]] && [[ "$1" != "unlock" ]]; then 17 | die "init data is locked: are you doing some manual test? if so, please finish what you are doing, run 'instance-data unlock' and retry" 18 | fi 19 | 20 | backend_script="./lib/config/config-${CONFIG_BACKEND}" 21 | 22 | if [[ ! -x "$backend_script" ]]; then 23 | die "unknown config backend '${CONFIG_BACKEND}'" 24 | fi 25 | 26 | exec "$backend_script" "$@" 27 | -------------------------------------------------------------------------------- /test/instance-db: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | #shellcheck disable=SC1007 4 | THIS_DIR=$(CDPATH= cd -- "$(dirname -- "$0")" && pwd) 5 | cd "$THIS_DIR" || exit 1 6 | # shellcheck disable=SC1091 7 | . ./.environment.sh 8 | 9 | ./bin/assert-crowdsec-not-running 10 | 11 | backend_script="./lib/db/instance-${DB_BACKEND}" 12 | 13 | if [[ ! 
-x "$backend_script" ]]; then 14 | echo "unknown database '${DB_BACKEND}'" >&2 15 | exit 1 16 | fi 17 | 18 | exec "$backend_script" "$@" 19 | -------------------------------------------------------------------------------- /test/lib/db/instance-pgx: -------------------------------------------------------------------------------- 1 | instance-postgres -------------------------------------------------------------------------------- /test/lib/setup.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | # these plugins are always available 4 | 5 | load "../lib/bats-support/load.bash" 6 | load "../lib/bats-assert/load.bash" 7 | #load "../lib/bats-file/load.bash" 8 | 9 | # mark the start of each test in the logs, beware crowdsec might be running 10 | # echo "time=\"$(date +"%d-%m-%Y %H:%M:%S")\" level=info msg=\"TEST: ${BATS_TEST_DESCRIPTION}\"" >> /var/log/crowdsec.log 11 | 12 | export CROWDSEC_FEATURE_DISABLE_HTTP_RETRY_BACKOFF=true -------------------------------------------------------------------------------- /test/lib/teardown_file.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | # any stdout, stderr from now on will go to &3 4 | eval "$(debug)" 5 | 6 | # ensure we don't leave crowdsec running if tests are broken or interrupted 7 | ./instance-crowdsec stop 8 | 9 | -------------------------------------------------------------------------------- /test/localstack/scripts/init_script.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Create Kinesis streams 4 | aws --endpoint-url=http://localstack:4566 --region us-east-1 kinesis create-stream --stream-name stream-1-shard --shard-count 1 5 | aws --endpoint-url=http://localstack:4566 --region us-east-1 kinesis create-stream --stream-name stream-2-shards --shard-count 2 6 | 7 | -------------------------------------------------------------------------------- /test/tools/.do-not-remove: -------------------------------------------------------------------------------- 1 | this directory is populated by test dependencies, and is not checked in git 2 | -------------------------------------------------------------------------------- /windows/Chocolatey/crowdsec/tools/VERIFICATION.txt: -------------------------------------------------------------------------------- 1 |  2 | 3 | VERIFICATION 4 | Verification is intended to assist the Chocolatey moderators and community 5 | in verifying that this package's contents are trustworthy. 6 | 7 | This package is published by CrowdSecurity itself. The MSI is identical to the one published in the github releases for the project. 8 | You can download the MSI from the latest release or pre-release here: https://github.com/crowdsecurity/crowdsec/releases 9 | The MSI is also digitally signed. 
-------------------------------------------------------------------------------- /windows/Chocolatey/crowdsec/tools/chocolateybeforemodify.ps1: -------------------------------------------------------------------------------- 1 | Stop-Service crowdsec -------------------------------------------------------------------------------- /windows/Chocolatey/crowdsec/tools/chocolateyinstall.ps1: -------------------------------------------------------------------------------- 1 | $ErrorActionPreference = 'Stop'; 2 | $toolsDir = "$(Split-Path -parent $MyInvocation.MyCommand.Definition)" 3 | $fileLocation = Join-Path $toolsDir 'crowdsec.msi' 4 | 5 | $silentArgs = "/qn /norestart /l*v `"$($env:TEMP)\$($packageName).$($env:chocolateyPackageVersion).MsiInstall.log`"" 6 | 7 | 8 | $pp = Get-PackageParameters 9 | 10 | if ($pp['AgentOnly']) { 11 | $silentArgs += " AGENT_ONLY=1" 12 | } 13 | 14 | 15 | $packageArgs = @{ 16 | packageName = $env:ChocolateyPackageName 17 | unzipLocation = $toolsDir 18 | fileType = 'msi' 19 | file64 = $fileLocation 20 | softwareName = 'Crowdsec' 21 | silentArgs = $silentArgs 22 | validExitCodes= @(0, 3010, 1641) 23 | } 24 | 25 | Install-ChocolateyInstallPackage @packageArgs 26 | 27 | if ($pp['AgentOnly']) { 28 | Write-Host "/AgentOnly was specified. LAPI is disabled, please register your agent manually and configure the service to start on boot." 29 | } -------------------------------------------------------------------------------- /windows/install_dev_windows.ps1: -------------------------------------------------------------------------------- 1 | #install choco 2 | Set-ExecutionPolicy Bypass -Scope Process -Force; [System.Net.ServicePointManager]::SecurityProtocol = [System.Net.ServicePointManager]::SecurityProtocol -bor 3072; iex ((New-Object System.Net.WebClient).DownloadString('https://community.chocolatey.org/install.ps1')) 3 | choco install -y golang 4 | choco install -y git 5 | choco install -y mingw 6 | refreshenv 7 | -------------------------------------------------------------------------------- /windows/install_installer_windows.ps1: -------------------------------------------------------------------------------- 1 | choco install -y wixtoolset 2 | $env:Path += ";C:\Program Files (x86)\WiX Toolset v3.11\bin" -------------------------------------------------------------------------------- /windows/installer/crowdsec_icon.ico: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/crowdsecurity/crowdsec/1cede239d8465b0549d10a3a44aab7b96a2cd743/windows/installer/crowdsec_icon.ico -------------------------------------------------------------------------------- /windows/installer/crowdsec_msi_top_banner.bmp: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/crowdsecurity/crowdsec/1cede239d8465b0549d10a3a44aab7b96a2cd743/windows/installer/crowdsec_msi_top_banner.bmp -------------------------------------------------------------------------------- /windows/installer/installer_dialog.bmp: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/crowdsecurity/crowdsec/1cede239d8465b0549d10a3a44aab7b96a2cd743/windows/installer/installer_dialog.bmp --------------------------------------------------------------------------------