├── .github ├── CONTRIBUTING.rst ├── ISSUE_TEMPLATE.md ├── PULL_REQUEST_TEMPLATE.md └── workflows │ └── ci.yml ├── .gitignore ├── .pylintrc ├── AUTHORS.rst ├── LICENSE ├── README.rst ├── Vagrantfile ├── conf ├── clusters │ └── prod.json ├── global.json ├── lambda.json ├── lookup_tables.json ├── normalized_types.json ├── outputs.json ├── scheduled_queries.json ├── schemas │ ├── aliyun.json │ ├── binaryalert.json │ ├── box.json │ ├── carbonblack.json │ ├── cloudtrail.json │ ├── cloudwatch.json │ ├── duo.json │ ├── fleet.json │ ├── ghe.json │ ├── gsuite.json │ ├── okta.json │ ├── onelogin.json │ ├── osquery.json │ ├── packetbeat.json │ ├── pan.json │ ├── slack.json │ ├── streamquery.json │ └── trendmicro.json └── threat_intel.json ├── constraints.txt ├── docs ├── Makefile ├── images │ ├── artifacts.png │ ├── athena-alerts-search.png │ ├── athena-data-search.png │ ├── cloudwatch_events.png │ ├── historical-search.png │ ├── join_search.png │ ├── normalization-arch.png │ ├── sa-banner.png │ ├── sa-complete-arch.png │ ├── sa-high-level-arch.png │ └── sa-square-logo-standalone.png ├── make.bat └── source │ ├── alternatives.rst │ ├── apps.rst │ ├── architecture.rst │ ├── conf-schemas-examples.rst │ ├── conf.py │ ├── config-clusters.rst │ ├── config-global.rst │ ├── config-schemas.rst │ ├── datasources.rst │ ├── datatypes.rst │ ├── deployment.rst │ ├── dynamic-outputs.rst │ ├── faq.rst │ ├── getting-started.rst │ ├── historical-search.rst │ ├── index.rst │ ├── lookup-tables.rst │ ├── metrics.rst │ ├── normalization.rst │ ├── outputs.rst │ ├── publishers.rst │ ├── rule-promotion.rst │ ├── rule-staging.rst │ ├── rules.rst │ ├── scheduled-queries.rst │ ├── testing.rst │ └── troubleshooting.rst ├── manage.py ├── matchers ├── __init__.py └── default.py ├── publishers ├── __init__.py ├── community │ ├── __init__.py │ ├── generic.py │ ├── pagerduty │ │ ├── __init__.py │ │ └── pagerduty_layout.py │ └── slack │ │ ├── __init__.py │ │ └── slack_layout.py └── sample │ ├── __init__.py │ 
└── sample_demisto.py ├── requirements-top-level.txt ├── requirements.txt ├── rules ├── __init__.py ├── classifier │ ├── aliyun │ │ └── aliyun_actiontrail.json │ ├── box │ │ └── box_admin_events.json │ ├── cloudtrail │ │ ├── cloudtrail_events.json │ │ └── cloudtrail_insights.json │ ├── cloudwatch │ │ ├── cloudwatch_cloudtrail.json │ │ ├── cloudwatch_control_message.json │ │ └── cloudwatch_rds_aurora.json │ ├── gsuite │ │ └── gsuite_reports.json │ ├── osquery │ │ └── osquery_snapshot.json │ ├── packbeat │ │ ├── packetbeat_dns.json │ │ └── packetbeat_flow.json │ └── slack │ │ ├── slack_access.json │ │ └── slack_integration.json ├── community │ ├── __init__.py │ ├── binaryalert │ │ ├── __init__.py │ │ ├── binaryalert_yara_match.json │ │ └── binaryalert_yara_match.py │ ├── cloudtrail │ │ ├── cloudtrail_aws_config.json │ │ └── cloudtrail_aws_config.py │ ├── cloudwatch_events │ │ ├── __init__.py │ │ ├── cloudtrail_critical_api_calls.json │ │ ├── cloudtrail_critical_api_calls.py │ │ ├── cloudtrail_ec2_image_creation.json │ │ ├── cloudtrail_ec2_image_creation.py │ │ ├── cloudtrail_mfa_policy_abuse_attempt.json │ │ ├── cloudtrail_mfa_policy_abuse_attempt.py │ │ ├── cloudtrail_network_acl_ingress_anywhere.json │ │ ├── cloudtrail_network_acl_ingress_anywhere.py │ │ ├── cloudtrail_public_resources.json │ │ ├── cloudtrail_public_resources.py │ │ ├── cloudtrail_put_bucket_acl.json │ │ ├── cloudtrail_put_bucket_acl.py │ │ ├── cloudtrail_put_object_acl_public.json │ │ ├── cloudtrail_put_object_acl_public.py │ │ ├── cloudtrail_put_object_acl_public_publisher_example.json │ │ ├── cloudtrail_put_object_acl_public_publisher_example.py │ │ ├── cloudtrail_root_account_usage.json │ │ ├── cloudtrail_root_account_usage.py │ │ ├── cloudtrail_security_group_ingress_anywhere.json │ │ ├── cloudtrail_security_group_ingress_anywhere.py │ │ ├── cloudtrail_snapshot_or_ami_made_public.json │ │ └── cloudtrail_snapshot_or_ami_made_public.py │ ├── duo_administrator │ │ ├── __init__.py │ │ ├── 
duo_bypass_code_create_non_auto_generated.json │ │ ├── duo_bypass_code_create_non_auto_generated.py │ │ ├── duo_bypass_code_create_non_expiring.json │ │ ├── duo_bypass_code_create_non_expiring.py │ │ ├── duo_bypass_code_create_unlimited_use.json │ │ └── duo_bypass_code_create_unlimited_use.py │ ├── duo_authentication │ │ ├── __init__.py │ │ ├── duo_anonymous_ip_failure.json │ │ ├── duo_anonymous_ip_failure.py │ │ ├── duo_fraud.json │ │ ├── duo_fraud.py │ │ ├── duo_lookup_tables_example.json │ │ └── duo_lookup_tables_example.py │ ├── fleet │ │ ├── __init__.py │ │ ├── fleet_bad_action.json │ │ └── fleet_bad_action.py │ ├── github │ │ ├── __init__.py │ │ ├── github_disable_dismiss_stale_pull_request_approvals.json │ │ ├── github_disable_dismiss_stale_pull_request_approvals.py │ │ ├── github_disable_protect_this_branch.json │ │ ├── github_disable_protect_this_branch.py │ │ ├── github_disable_required_pull_request_reviews.json │ │ ├── github_disable_required_pull_request_reviews.py │ │ ├── github_disable_required_status_checks.json │ │ ├── github_disable_required_status_checks.py │ │ ├── github_disable_two_factor_requirement_org.json │ │ ├── github_disable_two_factor_requirement_org.py │ │ ├── github_disable_two_factor_requirement_user.json │ │ ├── github_disable_two_factor_requirement_user.py │ │ ├── github_oauth_application_create.json │ │ ├── github_oauth_application_create.py │ │ ├── github_site_admin_action.json │ │ ├── github_site_admin_action.py │ │ ├── github_site_admin_user_promotion.json │ │ └── github_site_admin_user_promotion.py │ ├── guardduty │ │ ├── __init__.py │ │ ├── guard_duty_all.json │ │ └── guard_duty_all.py │ ├── mitre_attack │ │ ├── __init__.py │ │ └── defense_evasion │ │ │ ├── __init__.py │ │ │ └── multi │ │ │ ├── __init__.py │ │ │ └── obfuscated_files_or_information │ │ │ ├── __init__.py │ │ │ ├── right_to_left_character.json │ │ │ └── right_to_left_character.py │ ├── okta │ │ ├── __init__.py │ │ └── okta_new_login.json │ ├── onelogin │ │ ├── 
__init__.py │ │ ├── onelogin_events_assumed_role.json │ │ ├── onelogin_events_assumed_role.py │ │ ├── onelogin_events_threat_intel_example.json │ │ └── onelogin_events_threat_intel_example.py │ ├── osquery │ │ ├── __init__.py │ │ ├── ssh_login_activity.json │ │ └── ssh_login_activity.py │ ├── packetbeat │ │ ├── __init__.py │ │ ├── packetbeat_blacklisted_domain.json │ │ ├── packetbeat_blacklisted_domain.py │ │ ├── packetbeat_blacklisted_ip.json │ │ ├── packetbeat_blacklisted_ip.py │ │ ├── packetbeat_dns_lookup.json │ │ └── packetbeat_dns_lookup.py │ └── trendmicro │ │ ├── __init__.py │ │ ├── trendmicro_malware_event.json │ │ └── trendmicro_malware_event.py ├── default │ └── __init__.py ├── helpers │ ├── __init__.py │ └── base.py └── sample │ ├── __init__.py │ ├── sample_demisto.json │ └── sample_demisto.py ├── scheduled_queries ├── __init__.py └── sample │ ├── __init__.py │ └── athena.py ├── setup.cfg ├── streamalert ├── __init__.py ├── alert_merger │ ├── __init__.py │ └── main.py ├── alert_processor │ ├── __init__.py │ ├── helpers.py │ ├── main.py │ └── outputs │ │ ├── __init__.py │ │ ├── aws.py │ │ ├── carbonblack.py │ │ ├── credentials │ │ ├── __init__.py │ │ └── provider.py │ │ ├── demisto.py │ │ ├── github.py │ │ ├── jira.py │ │ ├── jira_v2.py │ │ ├── komand.py │ │ ├── output_base.py │ │ ├── pagerduty.py │ │ ├── phantom.py │ │ ├── slack.py │ │ ├── teams.py │ │ └── victorops.py ├── apps │ ├── __init__.py │ ├── _apps │ │ ├── README.rst │ │ ├── __init__.py │ │ ├── aliyun.py │ │ ├── box.py │ │ ├── duo.py │ │ ├── gsuite.py │ │ ├── intercom.py │ │ ├── onelogin.py │ │ ├── salesforce.py │ │ └── slack.py │ ├── app_base.py │ ├── batcher.py │ ├── config.py │ ├── exceptions.py │ └── main.py ├── athena_partitioner │ ├── __init__.py │ └── main.py ├── classifier │ ├── __init__.py │ ├── classifier.py │ ├── clients │ │ ├── __init__.py │ │ └── sqs.py │ ├── main.py │ ├── parsers.py │ └── payload │ │ ├── __init__.py │ │ ├── apps.py │ │ ├── kinesis.py │ │ ├── payload_base.py │ │ 
├── s3.py │ │ └── sns.py ├── rule_promotion │ ├── __init__.py │ ├── main.py │ ├── promoter.py │ ├── publisher.py │ └── statistic.py ├── rules_engine │ ├── __init__.py │ ├── alert_forwarder.py │ ├── main.py │ ├── rules_engine.py │ └── threat_intel.py ├── scheduled_queries │ ├── __init__.py │ ├── command │ │ ├── __init__.py │ │ ├── application.py │ │ └── processor.py │ ├── config │ │ ├── __init__.py │ │ ├── lambda_conf.py │ │ └── services.py │ ├── container │ │ ├── __init__.py │ │ └── container.py │ ├── handlers │ │ ├── __init__.py │ │ └── athena.py │ ├── main.py │ ├── query_packs │ │ ├── __init__.py │ │ ├── configuration.py │ │ ├── manager.py │ │ └── parameters.py │ ├── state │ │ ├── __init__.py │ │ └── state_manager.py │ ├── streamalert │ │ ├── __init__.py │ │ └── kinesis.py │ └── support │ │ ├── __init__.py │ │ └── clock.py ├── shared │ ├── __init__.py │ ├── alert.py │ ├── alert_table.py │ ├── artifact_extractor.py │ ├── athena.py │ ├── backoff_handlers.py │ ├── config.py │ ├── description.py │ ├── exceptions.py │ ├── firehose.py │ ├── helpers │ │ ├── __init__.py │ │ ├── aws_api_client.py │ │ ├── boto.py │ │ └── dynamodb.py │ ├── importer.py │ ├── logger.py │ ├── lookup_tables │ │ ├── __init__.py │ │ ├── cache.py │ │ ├── configuration.py │ │ ├── core.py │ │ ├── driver_dynamodb.py │ │ ├── driver_s3.py │ │ ├── drivers.py │ │ ├── drivers_factory.py │ │ ├── errors.py │ │ ├── table.py │ │ └── utils.py │ ├── metrics.py │ ├── normalize.py │ ├── publisher.py │ ├── resources.py │ ├── rule.py │ ├── rule_table.py │ ├── stats.py │ └── utils.py └── threat_intel_downloader │ ├── __init__.py │ ├── exceptions.py │ └── main.py ├── streamalert_cli ├── __init__.py ├── _infrastructure │ ├── _include.tf │ ├── _providers.tf │ ├── _variables.tf │ └── modules │ │ ├── tf_alert_merger_iam │ │ ├── README.md │ │ ├── main.tf │ │ └── variables.tf │ │ ├── tf_alert_processor_iam │ │ ├── README.md │ │ ├── main.tf │ │ └── variables.tf │ │ ├── tf_app_iam │ │ ├── README.md │ │ ├── main.tf │ │ └── 
variables.tf │ │ ├── tf_artifact_extractor │ │ ├── iam.tf │ │ ├── main.tf │ │ └── variables.tf │ │ ├── tf_athena │ │ ├── README.md │ │ ├── iam.tf │ │ ├── kms.tf │ │ ├── main.tf │ │ ├── outputs.tf │ │ └── variables.tf │ │ ├── tf_classifier │ │ ├── README.md │ │ ├── firehose.tf │ │ ├── iam.tf │ │ ├── sns.tf │ │ └── variables.tf │ │ ├── tf_cloudtrail │ │ ├── README.md │ │ ├── main.tf │ │ ├── modules │ │ │ └── tf_cloudtrail_cloudwatch │ │ │ │ ├── README.md │ │ │ │ ├── main.tf │ │ │ │ ├── output.tf │ │ │ │ └── variables.tf │ │ └── variables.tf │ │ ├── tf_cloudwatch_events │ │ ├── README.md │ │ ├── cross_account │ │ │ ├── README.md │ │ │ ├── main.tf │ │ │ └── variables.tf │ │ ├── main.tf │ │ └── variables.tf │ │ ├── tf_cloudwatch_logs_destination │ │ ├── README.md │ │ ├── iam.tf │ │ ├── modules │ │ │ └── destination │ │ │ │ ├── iam.tf │ │ │ │ ├── main.tf │ │ │ │ ├── output.tf │ │ │ │ └── variables.tf │ │ ├── output.tf │ │ └── variables.tf │ │ ├── tf_flow_logs │ │ ├── README.md │ │ ├── iam.tf │ │ ├── main.tf │ │ ├── output.tf │ │ └── variables.tf │ │ ├── tf_globals │ │ ├── README.md │ │ ├── alerts_firehose │ │ │ ├── iam.tf │ │ │ ├── main.tf │ │ │ └── variables.tf │ │ ├── classifier_queue │ │ │ ├── iam.tf │ │ │ ├── kms.tf │ │ │ ├── output.tf │ │ │ ├── sqs.tf │ │ │ └── variables.tf │ │ ├── lambda_layers │ │ │ ├── README.rst │ │ │ ├── aliyun-python-sdk-actiontrail==2.0.0_dependencies.zip │ │ │ └── boxsdk[jwt]==2.9.0_dependencies.zip │ │ ├── main.tf │ │ ├── output.tf │ │ └── variables.tf │ │ ├── tf_kinesis_events │ │ ├── README.md │ │ ├── main.tf │ │ └── variables.tf │ │ ├── tf_kinesis_firehose_delivery_stream │ │ ├── README.md │ │ ├── main.tf │ │ ├── outputs.tf │ │ └── variables.tf │ │ ├── tf_kinesis_firehose_setup │ │ ├── README.md │ │ ├── iam.tf │ │ ├── main.tf │ │ ├── outputs.tf │ │ └── variables.tf │ │ ├── tf_kinesis_streams │ │ ├── README.md │ │ ├── iam.tf │ │ ├── main.tf │ │ ├── outputs.tf │ │ └── variables.tf │ │ ├── tf_lambda │ │ ├── README.md │ │ ├── cloudwatch.tf │ 
│ ├── iam.tf │ │ ├── main.tf │ │ ├── output.tf │ │ └── variables.tf │ │ ├── tf_lookup_tables_dynamodb │ │ ├── README.md │ │ ├── main.tf │ │ └── variables.tf │ │ ├── tf_lookup_tables_policy │ │ ├── README.md │ │ ├── main.tf │ │ └── variables.tf │ │ ├── tf_lookup_tables_s3 │ │ ├── README.md │ │ ├── main.tf │ │ └── variables.tf │ │ ├── tf_metric_alarms │ │ ├── README.md │ │ ├── main.tf │ │ └── variables.tf │ │ ├── tf_metric_filters │ │ ├── README.md │ │ ├── main.tf │ │ └── variables.tf │ │ ├── tf_monitoring │ │ ├── main.tf │ │ └── variables.tf │ │ ├── tf_rule_promotion_iam │ │ ├── README.md │ │ ├── main.tf │ │ └── variables.tf │ │ ├── tf_rules_engine │ │ ├── README.md │ │ ├── iam.tf │ │ ├── lambda.tf │ │ └── variables.tf │ │ ├── tf_s3_events │ │ ├── main.tf │ │ └── variables.tf │ │ ├── tf_scheduled_queries │ │ ├── cloudwatch_schedule.tf │ │ ├── iam_roles.tf │ │ ├── lambda.tf │ │ ├── outputs.tf │ │ ├── step_function.tf │ │ └── variables.tf │ │ └── tf_threat_intel_downloader │ │ ├── README.md │ │ ├── dynamodb.tf │ │ ├── iam.tf │ │ └── variables.tf ├── apps │ ├── __init__.py │ ├── handler.py │ └── helpers.py ├── athena │ ├── __init__.py │ ├── handler.py │ └── helpers.py ├── config.py ├── configure │ ├── __init__.py │ └── handler.py ├── helpers.py ├── kinesis │ ├── __init__.py │ └── handler.py ├── logger.py ├── lookup_tables │ ├── __init__.py │ └── handler.py ├── manage_lambda │ ├── __init__.py │ ├── deploy.py │ ├── package.py │ └── rollback.py ├── metrics_alarms │ ├── __init__.py │ └── handler.py ├── outputs │ ├── __init__.py │ ├── handler.py │ └── helpers.py ├── rule_table.py ├── runner.py ├── status │ ├── __init__.py │ └── handler.py ├── terraform │ ├── __init__.py │ ├── alert_merger.py │ ├── alert_processor.py │ ├── apps.py │ ├── artifact_extractor.py │ ├── athena.py │ ├── classifier.py │ ├── cloudtrail.py │ ├── cloudwatch_destinations.py │ ├── cloudwatch_events.py │ ├── common.py │ ├── firehose.py │ ├── flow_logs.py │ ├── generate.py │ ├── handlers.py │ ├── 
helpers.py │ ├── kinesis_events.py │ ├── kinesis_streams.py │ ├── lambda_module.py │ ├── metrics.py │ ├── monitoring.py │ ├── rule_promotion.py │ ├── rules_engine.py │ ├── s3_events.py │ ├── scheduled_queries.py │ └── threat_intel_downloader.py ├── test │ ├── __init__.py │ ├── event.py │ ├── event_file.py │ ├── format.py │ ├── handler.py │ ├── mocks.py │ └── results.py ├── threat_intel │ ├── __init__.py │ └── handler.py ├── threat_intel_downloader │ ├── __init__.py │ └── handler.py └── utils.py ├── tests ├── __init__.py ├── scripts │ ├── autoflake.sh │ ├── autopep8.sh │ ├── covreport.sh │ ├── pylint.sh │ ├── rule_test.sh │ ├── sort_configs.py │ ├── test_the_docs.sh │ ├── unit_tests.sh │ └── update_reqs.sh └── unit │ ├── __init__.py │ ├── conf │ ├── clusters │ │ ├── advanced.json │ │ ├── test.json │ │ └── trusted.json │ ├── global.json │ ├── lambda.json │ ├── logs.json │ ├── lookup_tables.json │ ├── normalized_types.json │ ├── outputs.json │ ├── scheduled_queries.json │ └── threat_intel.json │ ├── conf_athena │ ├── clusters │ │ └── test.json │ ├── global.json │ ├── lambda.json │ └── schemas │ │ └── unit_test_schemas.json │ ├── helpers │ ├── __init__.py │ ├── aws_mocks.py │ ├── base.py │ ├── config.py │ └── mocks.py │ ├── streamalert │ ├── __init__.py │ ├── alert_merger │ │ ├── __init__.py │ │ └── test_main.py │ ├── alert_processor │ │ ├── __init__.py │ │ ├── helpers.py │ │ ├── outputs │ │ │ ├── __init__.py │ │ │ ├── credentials │ │ │ │ ├── __init__.py │ │ │ │ └── test_provider.py │ │ │ ├── test_aws.py │ │ │ ├── test_carbonblack.py │ │ │ ├── test_demisto.py │ │ │ ├── test_github.py │ │ │ ├── test_jira.py │ │ │ ├── test_jira_v2.py │ │ │ ├── test_komand.py │ │ │ ├── test_output_base.py │ │ │ ├── test_pagerduty.py │ │ │ ├── test_phantom.py │ │ │ ├── test_slack.py │ │ │ └── test_teams.py │ │ ├── test_helpers.py │ │ └── test_main.py │ ├── apps │ │ ├── __init__.py │ │ ├── test_apps │ │ │ ├── __init__.py │ │ │ ├── test_aliyun.py │ │ │ ├── test_app_base.py │ │ │ ├── 
test_box.py │ │ │ ├── test_duo.py │ │ │ ├── test_gsuite.py │ │ │ ├── test_intercom.py │ │ │ ├── test_onelogin.py │ │ │ ├── test_salesforce.py │ │ │ └── test_slack.py │ │ ├── test_batcher.py │ │ ├── test_config.py │ │ ├── test_helpers.py │ │ └── test_main.py │ ├── athena_partitioner │ │ ├── __init__.py │ │ └── test_main.py │ ├── classifier │ │ ├── __init__.py │ │ ├── clients │ │ │ ├── __init__.py │ │ │ └── test_sqs.py │ │ ├── payload │ │ │ ├── __init__.py │ │ │ ├── test_payload_apps.py │ │ │ ├── test_payload_base.py │ │ │ ├── test_payload_kinesis.py │ │ │ ├── test_payload_record.py │ │ │ ├── test_payload_s3.py │ │ │ └── test_payload_sns.py │ │ ├── test_classifier.py │ │ ├── test_parsers_base.py │ │ ├── test_parsers_csv.py │ │ ├── test_parsers_json.py │ │ ├── test_parsers_kv.py │ │ └── test_parsers_syslog.py │ ├── rule_promotion │ │ ├── __init__.py │ │ ├── test_promoter.py │ │ ├── test_publisher.py │ │ └── test_statistic.py │ ├── rules_engine │ │ ├── __init__.py │ │ ├── test_alerter.py │ │ ├── test_rules_engine.py │ │ └── test_threat_intel.py │ ├── scheduled_queries │ │ ├── __init__.py │ │ ├── command │ │ │ ├── __init__.py │ │ │ └── test_processor.py │ │ ├── container │ │ │ ├── __init__.py │ │ │ └── test_container.py │ │ ├── handlers │ │ │ ├── __init__.py │ │ │ └── test_athena.py │ │ ├── query_packs │ │ │ ├── __init__.py │ │ │ ├── test_configuration.py │ │ │ └── test_manager.py │ │ ├── state │ │ │ ├── .gitignore │ │ │ ├── __init__.py │ │ │ └── test_state_manager.py │ │ └── streamalert │ │ │ ├── __init__.py │ │ │ └── test_kinesis.py │ ├── shared │ │ ├── __init__.py │ │ ├── lookup_tables │ │ │ ├── __init__.py │ │ │ ├── test_cache.py │ │ │ ├── test_configuration.py │ │ │ ├── test_core.py │ │ │ ├── test_driver.py │ │ │ ├── test_driver_dynamodb.py │ │ │ ├── test_driver_s3.py │ │ │ └── test_table.py │ │ ├── test_alert.py │ │ ├── test_alert_table.py │ │ ├── test_artifact_extractor.py │ │ ├── test_athena.py │ │ ├── test_aws_api_client.py │ │ ├── test_backoff_handlers.py │ │ 
├── test_config.py │ │ ├── test_description.py │ │ ├── test_firehose.py │ │ ├── test_importer.py │ │ ├── test_logger.py │ │ ├── test_metrics.py │ │ ├── test_normalizer.py │ │ ├── test_publisher.py │ │ ├── test_resources.py │ │ ├── test_rule.py │ │ ├── test_rule_table.py │ │ ├── test_stats.py │ │ └── test_utils.py │ └── threat_intel_downloader │ │ ├── __init__.py │ │ ├── test_helpers.py │ │ └── test_main.py │ └── streamalert_cli │ ├── __init__.py │ ├── athena │ ├── __init__.py │ ├── test_handler.py │ └── test_helpers.py │ ├── manage_lambda │ ├── __init__.py │ ├── test_deploy.py │ ├── test_package.py │ └── test_rollback.py │ ├── terraform │ ├── __init__.py │ ├── test_alert_processor.py │ ├── test_artifact_extractor.py │ ├── test_athena.py │ ├── test_firehose.py │ ├── test_generate.py │ ├── test_generate_classifier.py │ ├── test_generate_rules_engine.py │ ├── test_handlers.py │ ├── test_kinesis_events.py │ ├── test_kinesis_streams.py │ ├── test_monitoring.py │ ├── test_rule_promotion.py │ ├── test_s3_events.py │ └── test_scheduled_queries.py │ ├── test │ ├── __init__.py │ ├── helpers.py │ ├── test_event.py │ ├── test_event_file.py │ └── test_handler.py │ ├── test_cli_config.py │ └── test_helpers.py └── vagrant └── cli ├── python-virtualenvwrapper ├── configure.sh └── install.sh ├── streamalert ├── configure.sh ├── install.sh └── sshd_config └── terraform └── install.sh /.github/ISSUE_TEMPLATE.md: -------------------------------------------------------------------------------- 1 | ## Background 2 | 3 | NOTE: Before filing this issue, please consider the following: 4 | 5 | Have you tried pinging us on Slack? 6 | https://streamalert.herokuapp.com/ 7 | 8 | Are you on the latest version of StreamAlert? 9 | 10 | ### Description 11 | 12 | ### Steps to Reproduce 13 | 14 | ## Desired Change 15 | 16 | High level overview of the desired change or outcome. 
17 | -------------------------------------------------------------------------------- /.github/PULL_REQUEST_TEMPLATE.md: -------------------------------------------------------------------------------- 1 | to: 2 | cc: @airbnb/streamalert-maintainers 3 | related to: 4 | resolves: 5 | 6 | ## Background 7 | 8 | Reason for the change 9 | 10 | ## Changes 11 | 12 | * Summary of changes 13 | * ... 14 | 15 | ## Testing 16 | 17 | Steps for how this change was tested and verified 18 | -------------------------------------------------------------------------------- /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | name: Actions CI 2 | on: 3 | pull_request: {} 4 | push: 5 | branches: 6 | - master 7 | - release-3-2-0 8 | tags: 9 | - 'v*.*.*' 10 | 11 | jobs: 12 | testing: 13 | runs-on: ubuntu-latest 14 | strategy: 15 | matrix: 16 | python: 17 | - 3.7 18 | task: 19 | - name: Lint 20 | command: | 21 | ./tests/scripts/pylint.sh 22 | - name: Test 23 | command: | 24 | ./tests/scripts/unit_tests.sh 25 | ./manage.py test rules 26 | ./manage.py test classifier 27 | - name: Docs 28 | command: | 29 | sphinx-build -W docs/source docs/build 30 | - name: Bandit 31 | command: | 32 | bandit --ini setup.cfg -r . 
33 | name: "Python ${{ matrix.python }}/${{ matrix.task.name }}" 34 | steps: 35 | - uses: "actions/checkout@v2" 36 | - uses: "actions/setup-python@v1" 37 | with: 38 | python-version: ${{ matrix.python }} 39 | - name: Install requirements 40 | run: pip install -r requirements.txt 41 | - name: ${{ matrix.task.name }} 42 | run: ${{ matrix.task.command }} 43 | - name: Submit Coverage 44 | run: ([ -z "$COVERALLS_REPO_TOKEN" ] && echo "coveralls is skipped in forked repo tests" || coveralls) 45 | if: matrix.task.name == 'Test' 46 | env: 47 | COVERALLS_REPO_TOKEN: ${{ secrets.COVERALLS_REPO_TOKEN }} 48 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Sphinx build directory 2 | docs/build 3 | 4 | # Compiled Python files 5 | *.pyc 6 | __pycache__ 7 | 8 | # Terraform files 9 | streamalert_cli/_infrastructure/.terraform/ 10 | streamalert_cli/_infrastructure/terraform.tfvars 11 | streamalert_cli/_infrastructure/terraform.tfstate* 12 | streamalert_cli/_infrastructure/*.zip 13 | streamalert_cli/_infrastructure/*.tf.json 14 | 15 | # Coveralls repo token 16 | .coveralls.yml 17 | 18 | # nose coverage file 19 | .coverage 20 | 21 | # virtualenv files in root of repo 22 | /venv 23 | 24 | # Vagrant VM artifacts 25 | ubuntu*.log 26 | /.vagrant 27 | 28 | # Coverage artifacts 29 | /htmlcov 30 | -------------------------------------------------------------------------------- /AUTHORS.rst: -------------------------------------------------------------------------------- 1 | Current Maintainers: 2 | 3 | * `Ryan Deivert `_ 4 | * `Chunyong Lin `_ 5 | * `Blake Motl `_ 6 | * `Derek Wang `_ 7 | 8 | Former Maintainers: 9 | 10 | * `Jamie Brim `_ 11 | * `Austin Byers `_ 12 | * `Mime Frame `_ 13 | * `Jack Naglieri `_ 14 | 15 | Contributors, in no particular order: 16 | 17 | * `javuto `_ 18 | * `javefang `_ 19 | * `0xdabbad00 `_ 20 | * `stoggi `_ 21 | * `armtash `_ 22 | 
* `patrickod `_ 23 | * `fusionrace `_ 24 | * `jbussing `_ 25 | * `GarretReece `_ 26 | * `ljharb `_ 27 | * `securityclippy `_ 28 | * `jack1902 `_ 29 | * `gavinelder `_ 30 | * `geremyCohen `_ 31 | * `catpham `_ 32 | * `ricardf-cmp `_ 33 | * `scoders `_ 34 | * `stoggi `_ 35 | -------------------------------------------------------------------------------- /conf/global.json: -------------------------------------------------------------------------------- 1 | { 2 | "account": { 3 | "aws_account_id": "AWS_ACCOUNT_ID_GOES_HERE", 4 | "prefix": "PREFIX_GOES_HERE", 5 | "region": "us-east-1" 6 | }, 7 | "general": { 8 | "terraform_files": [], 9 | "matcher_locations": [ 10 | "matchers" 11 | ], 12 | "rule_locations": [ 13 | "rules" 14 | ], 15 | "scheduled_query_locations": [ 16 | "scheduled_queries" 17 | ], 18 | "publisher_locations": [ 19 | "publishers" 20 | ], 21 | "third_party_libraries": [ 22 | "pathlib2==2.3.5", 23 | "policyuniverse==1.3.2.1" 24 | ] 25 | }, 26 | "infrastructure": { 27 | "alerts_table": { 28 | "read_capacity": 5, 29 | "write_capacity": 5 30 | }, 31 | "artifact_extractor": { 32 | "enabled": false, 33 | "firehose_buffer_size": 128, 34 | "firehose_buffer_interval": 900 35 | }, 36 | "firehose": { 37 | "use_prefix": true, 38 | "buffer_interval": 900, 39 | "buffer_size": 128, 40 | "enabled": false, 41 | "enabled_logs": {} 42 | }, 43 | "monitoring": {}, 44 | "rule_staging": { 45 | "cache_refresh_minutes": 10, 46 | "enabled": false, 47 | "table": { 48 | "read_capacity": 20, 49 | "write_capacity": 5 50 | } 51 | }, 52 | "classifier_sqs": { 53 | "use_prefix": true 54 | } 55 | } 56 | } -------------------------------------------------------------------------------- /conf/lookup_tables.json: -------------------------------------------------------------------------------- 1 | { 2 | "enabled": false, 3 | "tables": { 4 | "dynamo-backed-table": { 5 | "driver": "dynamodb", 6 | "table": "dynamodb-table-name", 7 | "partition_key": "partition-key", 8 | "value_key": 
"value-column", 9 | "cache_maximum_key_count": 3, 10 | "cache_refresh_minutes": 3, 11 | "consistent_read": false 12 | }, 13 | "s3-backed-table": { 14 | "driver": "s3", 15 | "bucket": "s3-bucket-name", 16 | "key": "file.json", 17 | "compression": false, 18 | "cache_refresh_minutes": 10 19 | } 20 | } 21 | } -------------------------------------------------------------------------------- /conf/outputs.json: -------------------------------------------------------------------------------- 1 | { 2 | "aws-lambda": { 3 | "sample-lambda": "function-name:qualifier" 4 | }, 5 | "aws-lambda-v2": [ 6 | "sample-lambda" 7 | ], 8 | "aws-s3": { 9 | "bucket": "aws-s3-bucket" 10 | }, 11 | "aws-ses": [ 12 | "sample-integration" 13 | ], 14 | "aws-sns": { 15 | "sample-topic": "sample-topic-name" 16 | }, 17 | "aws-sqs": { 18 | "sample-queue": "sample-queue-name" 19 | }, 20 | "demisto": [ 21 | "sample-integration" 22 | ], 23 | "komand": [ 24 | "sample-integration" 25 | ], 26 | "pagerduty": [ 27 | "sample-integration" 28 | ], 29 | "phantom": [ 30 | "sample-integration" 31 | ], 32 | "slack": [ 33 | "sample-channel" 34 | ], 35 | "teams": [ 36 | "sample-webhook" 37 | ] 38 | } -------------------------------------------------------------------------------- /conf/scheduled_queries.json: -------------------------------------------------------------------------------- 1 | { 2 | "enabled": false, 3 | "config": { 4 | "destination_kinesis_stream": "KINESIS_STREAM", 5 | "sfn_timeout_secs": 300, 6 | "sfn_wait_secs": 30 7 | }, 8 | "packs": { 9 | "sample": { 10 | "description": "Runs sample queries once per hour", 11 | "schedule_expression": "rate(1 hour)" 12 | } 13 | }, 14 | "lambda_config": { 15 | "log_level": "info", 16 | "log_retention_days": 14, 17 | "memory": 128, 18 | "timeout": 60, 19 | "alarms_enabled": false, 20 | "error_threshold": 1, 21 | "error_period_secs": 3600, 22 | "error_evaluation_periods": 2 23 | } 24 | } -------------------------------------------------------------------------------- 
/conf/schemas/aliyun.json: -------------------------------------------------------------------------------- 1 | { 2 | "aliyun:actiontrail": { 3 | "schema": { 4 | "eventId": "string", 5 | "eventVersion": "string", 6 | "acsRegion": "string", 7 | "requestParameters": {}, 8 | "referencedResources": {}, 9 | "recipientAccountId": "string", 10 | "eventType": "string", 11 | "eventTime": "string", 12 | "apiVersion": "string", 13 | "eventName": "string", 14 | "userIdentity": {}, 15 | "userAgent": "string", 16 | "eventSource": "string", 17 | "requestId": "string", 18 | "serviceName": "string", 19 | "sourceIpAddress": "string", 20 | "errorCode": "string", 21 | "errorMessage": "string", 22 | "additionalEventData": {} 23 | }, 24 | "parser": "json", 25 | "configuration": { 26 | "optional_top_level_keys": [ 27 | "apiVersion", 28 | "errorCode", 29 | "errorMessage", 30 | "acsRegion", 31 | "referencedResources", 32 | "recipientAccountId", 33 | "userAgent", 34 | "additionalEventData" 35 | ] 36 | } 37 | } 38 | } -------------------------------------------------------------------------------- /conf/schemas/binaryalert.json: -------------------------------------------------------------------------------- 1 | { 2 | "binaryalert": { 3 | "schema": { 4 | "FileInfo": {}, 5 | "MatchedRules": {}, 6 | "NumMatchedRules": "integer" 7 | }, 8 | "parser": "json" 9 | } 10 | } -------------------------------------------------------------------------------- /conf/schemas/box.json: -------------------------------------------------------------------------------- 1 | { 2 | "box:admin_events": { 3 | "schema": { 4 | "accessible_by": {}, 5 | "additional_details": "string", 6 | "created_at": "string", 7 | "created_by": {}, 8 | "event_id": "string", 9 | "event_type": "string", 10 | "ip_address": "string", 11 | "session_id": "string", 12 | "source": {}, 13 | "type": "string" 14 | }, 15 | "parser": "json", 16 | "configuration": { 17 | "optional_top_level_keys": [ 18 | "accessible_by" 19 | ] 20 | } 21 | } 22 | } 
-------------------------------------------------------------------------------- /conf/schemas/duo.json: -------------------------------------------------------------------------------- 1 | { 2 | "duo:administrator": { 3 | "schema": { 4 | "action": "string", 5 | "description": "string", 6 | "object": "string", 7 | "timestamp": "integer", 8 | "username": "string" 9 | }, 10 | "parser": "json" 11 | }, 12 | "duo:authentication": { 13 | "schema": { 14 | "access_device": {}, 15 | "alias": "string", 16 | "device": "string", 17 | "factor": "string", 18 | "integration": "string", 19 | "ip": "string", 20 | "isotimestamp": "string", 21 | "location": {}, 22 | "new_enrollment": "boolean", 23 | "reason": "string", 24 | "result": "string", 25 | "timestamp": "integer", 26 | "username": "string" 27 | }, 28 | "parser": "json" 29 | } 30 | } -------------------------------------------------------------------------------- /conf/schemas/fleet.json: -------------------------------------------------------------------------------- 1 | { 2 | "fleet:results": { 3 | "schema": { 4 | "name": "string", 5 | "hostIdentifier": "string", 6 | "calendarTime": "string", 7 | "unixTime": "integer", 8 | "epoch": "integer", 9 | "counter": "integer", 10 | "decorations": {}, 11 | "columns": {}, 12 | "action": "string" 13 | }, 14 | "parser": "json" 15 | } 16 | } -------------------------------------------------------------------------------- /conf/schemas/gsuite.json: -------------------------------------------------------------------------------- 1 | { 2 | "gsuite:reports": { 3 | "schema": { 4 | "actor": {}, 5 | "etag": "string", 6 | "events": [], 7 | "id": { 8 | "applicationName": "string", 9 | "customerId": "string", 10 | "time": "string", 11 | "uniqueQualifier": "integer" 12 | }, 13 | "ipAddress": "string", 14 | "kind": "string", 15 | "ownerDomain": "string" 16 | }, 17 | "parser": "json", 18 | "configuration": { 19 | "optional_top_level_keys": [ 20 | "etag", 21 | "ipAddress", 22 | "ownerDomain" 23 | ] 24 
| } 25 | } 26 | } -------------------------------------------------------------------------------- /conf/schemas/okta.json: -------------------------------------------------------------------------------- 1 | { 2 | "okta:logevents": { 3 | "schema": { 4 | "uuid": "string", 5 | "published": "string", 6 | "eventType": "string", 7 | "version": "string", 8 | "severity": "string", 9 | "legacyEventType": "string", 10 | "displayMessage": "string", 11 | "actor": {}, 12 | "client": {}, 13 | "outcome": {}, 14 | "target": [], 15 | "transaction": {}, 16 | "debugContext": {}, 17 | "authenticationContext": {}, 18 | "securityContext": {}, 19 | "request": {} 20 | }, 21 | "parser": "json", 22 | "configuration": { 23 | "optional_top_level_keys": [ 24 | "legacyEventType", 25 | "displayMessage", 26 | "client", 27 | "outcome", 28 | "transaction", 29 | "debugContext", 30 | "authenticationContext", 31 | "request" 32 | ] 33 | } 34 | } 35 | } 36 | -------------------------------------------------------------------------------- /conf/schemas/onelogin.json: -------------------------------------------------------------------------------- 1 | { 2 | "onelogin:events": { 3 | "schema": { 4 | "account_id": "integer", 5 | "actor_system": "string", 6 | "actor_user_id": "string", 7 | "actor_user_name": "string", 8 | "app_id": "string", 9 | "app_name": "string", 10 | "assuming_acting_user_id": "string", 11 | "client_id": "string", 12 | "created_at": "string", 13 | "custom_message": "string", 14 | "directory_id": "string", 15 | "directory_sync_run_id": "string", 16 | "error_description": "string", 17 | "event_type_id": "integer", 18 | "group_id": "string", 19 | "group_name": "string", 20 | "id": "integer", 21 | "ipaddr": "string", 22 | "notes": "string", 23 | "operation_name": "string", 24 | "otp_device_id": "string", 25 | "otp_device_name": "string", 26 | "policy_id": "string", 27 | "policy_name": "string", 28 | "proxy_ip": "string", 29 | "resolution": "string", 30 | "resource_type_id": "string", 31 | 
"role_id": "string", 32 | "role_name": "string", 33 | "user_id": "string", 34 | "user_name": "string" 35 | }, 36 | "parser": "json" 37 | } 38 | } -------------------------------------------------------------------------------- /conf/schemas/packetbeat.json: -------------------------------------------------------------------------------- 1 | { 2 | "packetbeat:dns": { 3 | "schema": { 4 | "@timestamp": "string", 5 | "client_ip": "string", 6 | "type": "string", 7 | "transport": "string", 8 | "bytes_in": "integer", 9 | "bytes_out": "integer", 10 | "dns": {} 11 | }, 12 | "parser": "json", 13 | "configuration": { 14 | "optional_top_level_keys": [ 15 | "bytes_in", 16 | "bytes_out" 17 | ] 18 | } 19 | }, 20 | "packetbeat:flow": { 21 | "schema": { 22 | "@timestamp": "string", 23 | "start_time": "string", 24 | "last_time": "string", 25 | "type": "string", 26 | "final": "boolean", 27 | "dest": {}, 28 | "source": {} 29 | }, 30 | "parser": "json" 31 | } 32 | } 33 | -------------------------------------------------------------------------------- /conf/schemas/slack.json: -------------------------------------------------------------------------------- 1 | { 2 | "slack:access": { 3 | "schema": { 4 | "user_id": "string", 5 | "username": "string", 6 | "date_first": "integer", 7 | "date_last": "integer", 8 | "count": "integer", 9 | "ip": "string", 10 | "user_agent": "string", 11 | "isp": "string", 12 | "country": "string", 13 | "region": "string" 14 | }, 15 | "parser": "json" 16 | }, 17 | "slack:integration": { 18 | "schema": { 19 | "channel": "string", 20 | "user_id": "string", 21 | "user_name": "string", 22 | "date": "string", 23 | "change_type": "string", 24 | "app_type": "string", 25 | "app_id": "string", 26 | "service_type": "string", 27 | "service_id": "string", 28 | "reason": "string", 29 | "scope": "string", 30 | "rss_feed": "string", 31 | "rss_feed_change_type": "string", 32 | "rss_feed_title": "string", 33 | "rss_feed_url": "string" 34 | }, 35 | "parser": "json", 36 | 
"configuration": { 37 | "optional_top_level_keys": [ 38 | "channel", 39 | "app_type", 40 | "app_id", 41 | "service_type", 42 | "service_id", 43 | "rss_feed", 44 | "rss_feed_change_type", 45 | "rss_feed_title", 46 | "rss_feed_url", 47 | "reason" 48 | ] 49 | } 50 | } 51 | } -------------------------------------------------------------------------------- /conf/schemas/streamquery.json: -------------------------------------------------------------------------------- 1 | { 2 | "streamquery:version1": { 3 | "parser": "json", 4 | "schema": { 5 | "streamquery_schema_version": "string", 6 | "execution": {}, 7 | "data": {} 8 | }, 9 | "configuration": { 10 | "log_patterns": { 11 | "streamquery_schema_version": [ 12 | "1.*.*" 13 | ] 14 | } 15 | } 16 | } 17 | } -------------------------------------------------------------------------------- /conf/threat_intel.json: -------------------------------------------------------------------------------- 1 | { 2 | "dynamodb_table_name": "table_name", 3 | "enabled": false, 4 | "excluded_iocs": { 5 | "ip": { 6 | "10.0.0.0/8": { 7 | "comment": "RFC1918" 8 | } 9 | } 10 | }, 11 | "normalized_ioc_types": { 12 | "domain": [ 13 | "destinationDomain" 14 | ], 15 | "ip": [ 16 | "sourceAddress", 17 | "destinationAddress", 18 | "deviceAddress" 19 | ], 20 | "md5": [ 21 | "fileHash" 22 | ] 23 | } 24 | } -------------------------------------------------------------------------------- /constraints.txt: -------------------------------------------------------------------------------- 1 | # botocore requires a version of docutils < 0.16, but sphinx-rtd-theme's requirement of >=0.12 breaks this 2 | docutils<0.16 3 | -------------------------------------------------------------------------------- /docs/Makefile: -------------------------------------------------------------------------------- 1 | # Minimal makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line. 
5 | SPHINXOPTS = 6 | SPHINXBUILD = sphinx-build 7 | SPHINXPROJ = streamalert 8 | SOURCEDIR = source 9 | BUILDDIR = build 10 | 11 | # Put it first so that "make" without argument is like "make help". 12 | help: 13 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 14 | 15 | .PHONY: help Makefile 16 | 17 | # Catch-all target: route all unknown targets to Sphinx using the new 18 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). 19 | %: Makefile 20 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) -------------------------------------------------------------------------------- /docs/images/artifacts.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/airbnb/streamalert/75ba140d2e1aa6e903313d88326920adcb8bff45/docs/images/artifacts.png -------------------------------------------------------------------------------- /docs/images/athena-alerts-search.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/airbnb/streamalert/75ba140d2e1aa6e903313d88326920adcb8bff45/docs/images/athena-alerts-search.png -------------------------------------------------------------------------------- /docs/images/athena-data-search.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/airbnb/streamalert/75ba140d2e1aa6e903313d88326920adcb8bff45/docs/images/athena-data-search.png -------------------------------------------------------------------------------- /docs/images/cloudwatch_events.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/airbnb/streamalert/75ba140d2e1aa6e903313d88326920adcb8bff45/docs/images/cloudwatch_events.png -------------------------------------------------------------------------------- /docs/images/historical-search.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/airbnb/streamalert/75ba140d2e1aa6e903313d88326920adcb8bff45/docs/images/historical-search.png -------------------------------------------------------------------------------- /docs/images/join_search.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/airbnb/streamalert/75ba140d2e1aa6e903313d88326920adcb8bff45/docs/images/join_search.png -------------------------------------------------------------------------------- /docs/images/normalization-arch.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/airbnb/streamalert/75ba140d2e1aa6e903313d88326920adcb8bff45/docs/images/normalization-arch.png -------------------------------------------------------------------------------- /docs/images/sa-banner.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/airbnb/streamalert/75ba140d2e1aa6e903313d88326920adcb8bff45/docs/images/sa-banner.png -------------------------------------------------------------------------------- /docs/images/sa-complete-arch.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/airbnb/streamalert/75ba140d2e1aa6e903313d88326920adcb8bff45/docs/images/sa-complete-arch.png -------------------------------------------------------------------------------- /docs/images/sa-high-level-arch.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/airbnb/streamalert/75ba140d2e1aa6e903313d88326920adcb8bff45/docs/images/sa-high-level-arch.png -------------------------------------------------------------------------------- /docs/images/sa-square-logo-standalone.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/airbnb/streamalert/75ba140d2e1aa6e903313d88326920adcb8bff45/docs/images/sa-square-logo-standalone.png -------------------------------------------------------------------------------- /docs/make.bat: -------------------------------------------------------------------------------- 1 | @ECHO OFF 2 | 3 | pushd %~dp0 4 | 5 | REM Command file for Sphinx documentation 6 | 7 | if "%SPHINXBUILD%" == "" ( 8 | set SPHINXBUILD=sphinx-build 9 | ) 10 | set SOURCEDIR=source 11 | set BUILDDIR=build 12 | set SPHINXPROJ=streamalert 13 | 14 | if "%1" == "" goto help 15 | 16 | %SPHINXBUILD% >NUL 2>NUL 17 | if errorlevel 9009 ( 18 | echo. 19 | echo.The 'sphinx-build' command was not found. Make sure you have Sphinx 20 | echo.installed, then set the SPHINXBUILD environment variable to point 21 | echo.to the full path of the 'sphinx-build' executable. Alternatively you 22 | echo.may add the Sphinx directory to PATH. 23 | echo. 24 | echo.If you don't have Sphinx installed, grab it from 25 | echo.http://sphinx-doc.org/ 26 | exit /b 1 27 | ) 28 | 29 | %SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% 30 | goto end 31 | 32 | :help 33 | %SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% 34 | 35 | :end 36 | popd 37 | -------------------------------------------------------------------------------- /docs/source/datatypes.rst: -------------------------------------------------------------------------------- 1 | ######### 2 | Datatypes 3 | ######### 4 | 5 | StreamAlert supports the following datatypes: 6 | 7 | 8 | **** 9 | JSON 10 | **** 11 | .. code-block:: json 12 | 13 | {"type": "json"} 14 | 15 | 16 | *** 17 | CSV 18 | *** 19 | .. code-block:: 20 | 21 | csv,data,example 22 | 23 | 24 | ********* 25 | Key-Value 26 | ********* 27 | .. code-block:: 28 | 29 | type=kv data=example 30 | 31 | 32 | ****** 33 | Syslog 34 | ****** 35 | .. 
code-block:: 36 | 37 | Jun 15 00:00:40 host1.mydomain.io application[pid] syslog message. 38 | -------------------------------------------------------------------------------- /matchers/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/airbnb/streamalert/75ba140d2e1aa6e903313d88326920adcb8bff45/matchers/__init__.py -------------------------------------------------------------------------------- /publishers/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/airbnb/streamalert/75ba140d2e1aa6e903313d88326920adcb8bff45/publishers/__init__.py -------------------------------------------------------------------------------- /publishers/community/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/airbnb/streamalert/75ba140d2e1aa6e903313d88326920adcb8bff45/publishers/community/__init__.py -------------------------------------------------------------------------------- /publishers/community/pagerduty/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/airbnb/streamalert/75ba140d2e1aa6e903313d88326920adcb8bff45/publishers/community/pagerduty/__init__.py -------------------------------------------------------------------------------- /publishers/community/slack/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/airbnb/streamalert/75ba140d2e1aa6e903313d88326920adcb8bff45/publishers/community/slack/__init__.py -------------------------------------------------------------------------------- /publishers/sample/__init__.py: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/airbnb/streamalert/75ba140d2e1aa6e903313d88326920adcb8bff45/publishers/sample/__init__.py -------------------------------------------------------------------------------- /requirements-top-level.txt: -------------------------------------------------------------------------------- 1 | -c constraints.txt # remove if using pip freeze 2 | aliyun-python-sdk-core==2.13.5 3 | aliyun-python-sdk-actiontrail==2.0.0 4 | autoflake 5 | autopep8 6 | backoff 7 | bandit 8 | boto3 9 | boxsdk[jwt] 10 | cbapi 11 | coverage 12 | coveralls 13 | google-api-python-client 14 | jmespath 15 | jsonlines 16 | mock 17 | moto 18 | netaddr 19 | nose 20 | nose-timer 21 | pathlib2 22 | policyuniverse 23 | pyfakefs 24 | pylint==2.3.1 25 | pymsteams 26 | requests 27 | Sphinx 28 | sphinx-rtd-theme 29 | yapf 30 | -------------------------------------------------------------------------------- /rules/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/airbnb/streamalert/75ba140d2e1aa6e903313d88326920adcb8bff45/rules/__init__.py -------------------------------------------------------------------------------- /rules/classifier/box/box_admin_events.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "data": { 4 | "additional_details": null, 5 | "created_at": "2017-10-27T12:31:22-07:00", 6 | "created_by": { 7 | "id": "2810219233", 8 | "login": "testemail@email.com", 9 | "name": "User Name", 10 | "type": "user" 11 | }, 12 | "event_id": "0e0b8122-17ed-42ee-8a9d-d9a57bf8dd83", 13 | "event_type": "ADD_LOGIN_ACTIVITY_DEVICE", 14 | "ip_address": "1.1.1.1", 15 | "session_id": null, 16 | "source": { 17 | "id": "2810219233", 18 | "login": "testemail@email.com", 19 | "name": "User Name", 20 | "type": "user" 21 | }, 22 | "type": "event" 23 | }, 24 | "description": "Box admin event log example (validation only)", 25 | "log": "box:admin_events", 26 | "service": 
"streamalert_app", 27 | "source": "prefix_cluster_box_admin_events_sm-app-name_app", 28 | "classify_only": true 29 | } 30 | ] -------------------------------------------------------------------------------- /rules/classifier/cloudtrail/cloudtrail_events.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "data": { 4 | "Records": [ 5 | { 6 | "eventVersion": "1.05", 7 | "userIdentity": { 8 | "arn": "arn", 9 | "accountId": "accountId", 10 | "userName": "userName", 11 | "type": "type" 12 | }, 13 | "eventTime": "eventTime", 14 | "eventSource": "quicksight.amazonaws.com", 15 | "eventName": "QueryDatabase", 16 | "awsRegion": "awsRegion", 17 | "requestParameters": null, 18 | "responseElements": null, 19 | "eventID": "eventID", 20 | "readOnly": true, 21 | "eventType": "AwsServiceEvent", 22 | "recipientAccountId": "recipientAccountId", 23 | "serviceEventDetails": { 24 | "eventRequestDetails": { 25 | "dataSourceId": "dataSourceId", 26 | "queryId": "queryId", 27 | "resourceId": "resourceId", 28 | "dataSetId": "dataSetId", 29 | "dataSetMode": "dataSetMode" 30 | } 31 | } 32 | } 33 | ] 34 | }, 35 | "description": "quicksight event via cloudtrail", 36 | "log": "cloudtrail:events", 37 | "service": "s3", 38 | "source": "prefix.cluster.sample.bucket", 39 | "classify_only": true 40 | } 41 | ] -------------------------------------------------------------------------------- /rules/classifier/cloudwatch/cloudwatch_cloudtrail.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "data": { 4 | "messageType": "DATA_MESSAGE", 5 | "owner": "123456789012", 6 | "logGroup": "CloudTrail/DefaultLogGroup", 7 | "logStream": "123456789012_CloudTrail_us-east-1", 8 | "subscriptionFilters": [ 9 | "FooBarSubscription" 10 | ], 11 | "logEvents": [ 12 | { 13 | "id": "44056647182143267075860006634052172261824828947338793472", 14 | "timestamp": 1526951139360, 15 | "message": "{\"eventVersion\": \"foo\", 
\"eventID\": \"bar\", \"eventTime\": \"foo\", \"sharedEventID\": \"bar\", \"additionalEventData\": {}, \"requestParameters\": {}, \"eventType\": \"foo\", \"responseElements\": {}, \"awsRegion\": \"foo\", \"eventName\": \"bar\", \"readOnly\": true, \"userIdentity\": {}, \"eventSource\": \"foo\", \"requestID\": \"bar\", \"userAgent\": \"foo\", \"sourceIPAddress\": \"bar\", \"resources\": [], \"recipientAccountId\": \"bar\"}" 16 | } 17 | ] 18 | }, 19 | "description": "CloudTrail logs via CloudWatch logs DATA_MESSAGE (validation only)", 20 | "log": "cloudwatch:cloudtrail", 21 | "service": "kinesis", 22 | "source": "prefix_cluster1_streamalert", 23 | "classify_only": true 24 | } 25 | ] -------------------------------------------------------------------------------- /rules/classifier/cloudwatch/cloudwatch_control_message.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "override_record": { 4 | "messageType": "CONTROL_MESSAGE", 5 | "owner": "CloudwatchLogs", 6 | "logEvents": [ 7 | { 8 | "id": "100", 9 | "timestamp": 1512601480749, 10 | "message": "CWL CONTROL MESSAGE: Checking health of destination Kinesis stream." 
11 | } 12 | ] 13 | }, 14 | "description": "CloudWatch Control Message (validation only)", 15 | "log": "cloudwatch:control_message", 16 | "service": "kinesis", 17 | "source": "prefix_cluster1_streamalert", 18 | "classify_only": true 19 | } 20 | ] -------------------------------------------------------------------------------- /rules/classifier/gsuite/gsuite_reports.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "data": { 4 | "actor": { 5 | "callerType": "USER", 6 | "email": "liz@example.com", 7 | "key": "consumer key of requestor in OAuth 2LO requests", 8 | "profileId": "user's unique G Suite profile ID" 9 | }, 10 | "events": [ 11 | { 12 | "name": "CHANGE_GROUP_SETTING", 13 | "parameters": [ 14 | { 15 | "boolValue": "boolean value of parameter", 16 | "intValue": "integer value of parameter", 17 | "name": "SETTING_NAME", 18 | "value": "WHO_CAN_JOIN" 19 | } 20 | ], 21 | "type": "GROUP_SETTINGS" 22 | } 23 | ], 24 | "id": { 25 | "applicationName": "admin", 26 | "customerId": "C03az79cb", 27 | "time": "2011-06-17T15:39:18.460Z", 28 | "uniqueQualifier": "-1234567890987654321" 29 | }, 30 | "ipAddress": "user's IP address", 31 | "kind": "audit#activity", 32 | "ownerDomain": "example.com" 33 | }, 34 | "description": "G Suite Admin Report Log example (validation only)", 35 | "log": "gsuite:reports", 36 | "service": "streamalert_app", 37 | "source": "prefix_cluster_gsuite_admin_sm-app-name_app", 38 | "classify_only": true 39 | } 40 | ] -------------------------------------------------------------------------------- /rules/classifier/osquery/osquery_snapshot.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "data": { 4 | "numerics": false, 5 | "name": "pack/windows-hardening/Disallowed", 6 | "calendarTime": "Thu Feb 27 14:34:21 2020 UTC", 7 | "counter": 0, 8 | "epoch": 0, 9 | "snapshot": [], 10 | "decorations": { 11 | "hostname": "foo-hostname", 12 | "hardware_serial": 
"8Q394Y2", 13 | "uuid": "4C4C4544-0051-XXXX-YYYY-ZZZZZZZZZZZZ" 14 | }, 15 | "unixTime": 1582814061, 16 | "action": "snapshot", 17 | "hostIdentifier": "4C4C4544-0051-XXXX-YYYY-ZZZZZZZZZZZZ" 18 | }, 19 | "log": "osquery:snapshot", 20 | "description": "OSQuery event.", 21 | "classify_only": true, 22 | "source": "prefix_cluster1_streamalert", 23 | "service": "kinesis" 24 | } 25 | ] 26 | -------------------------------------------------------------------------------- /rules/classifier/packbeat/packetbeat_dns.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "data": { 4 | "@timestamp": "2018-02-06T07:22:59.991Z", 5 | "bytes_in": 39, 6 | "bytes_out": 71, 7 | "client_ip": "172.16.1.10", 8 | "dns": { 9 | "answers_count": 2, 10 | "opt": { "udp_size": 512 }, 11 | "question": { "name": "evil.com.", "type": "A" }, 12 | "response_code": "NOERROR" 13 | }, 14 | "transport": "udp", 15 | "type": "dns" 16 | }, 17 | "description": "basic schema validation check for packetbeat:dns", 18 | "log": "packetbeat:dns", 19 | "source": "prefix.cluster.sample.bucket", 20 | "service": "s3", 21 | "classify_only": true 22 | } 23 | ] 24 | -------------------------------------------------------------------------------- /rules/classifier/packbeat/packetbeat_flow.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "data": { 4 | "@timestamp": "2018-02-06T07:23:00.010Z", 5 | "dest": { 6 | "ip": "172.16.2.3", 7 | "port": 50717 8 | }, 9 | "final": false, 10 | "last_time": "2018-02-06T07:22:34.933Z", 11 | "source": { 12 | "ip": "35.195.65.23", 13 | "mac": "00:08:a2:09:e4:6a", 14 | "port": 443 15 | }, 16 | "start_time": "2018-02-06T07:22:34.933Z", 17 | "type": "flow" 18 | }, 19 | "description": "basic schema validation check for packetbeat:flow", 20 | "log": "packetbeat:flow", 21 | "source": "prefix.cluster.sample.bucket", 22 | "service": "s3", 23 | "classify_only": true 24 | } 25 | ] 26 | 
-------------------------------------------------------------------------------- /rules/classifier/slack/slack_access.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "data": { 4 | "user_id": "U12345", 5 | "username": "bob", 6 | "date_first": 1422922864, 7 | "date_last": 1422922864, 8 | "count": 1, 9 | "ip": "127.0.0.1", 10 | "user_agent": "SlackWeb Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2272.35 Safari/537.36", 11 | "isp": "BigCo ISP", 12 | "country": "US", 13 | "region": "CA" 14 | }, 15 | "description": "basic schema validation check for slack:access", 16 | "log": "slack:access", 17 | "service": "streamalert_app", 18 | "source": "prefix_cluster_slack_access_sm-app-name_app", 19 | "classify_only": true 20 | } 21 | ] -------------------------------------------------------------------------------- /rules/classifier/slack/slack_integration.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "data": { 4 | "service_id": "1234567890", 5 | "service_type": "Google Calendar", 6 | "user_id": "U1234ABCD", 7 | "user_name": "Johnny", 8 | "channel": "C1234567890", 9 | "date": "1392163200", 10 | "change_type": "enabled", 11 | "scope": "incoming-webhook" 12 | }, 13 | "description": "schema validation check for slack:integration with service log type", 14 | "log": "slack:integration", 15 | "service": "streamalert_app", 16 | "source": "prefix_cluster_slack_integration_sm-app-name_app", 17 | "classify_only": true 18 | }, 19 | { 20 | "data": { 21 | "app_id": "2345678901", 22 | "app_type": "Johnny App", 23 | "user_id": "U2345BCDE", 24 | "user_name": "Billy", 25 | "date": "1392163201", 26 | "change_type": "added", 27 | "scope": "chat:write:user,channels:read" 28 | }, 29 | "description": "schema validation check for slack:integration with app log type", 30 | "log": "slack:integration", 31 | "service": "streamalert_app", 32 | 
"source": "prefix_cluster_slack_integration_sm-app-name_app", 33 | "classify_only": true 34 | } 35 | ] -------------------------------------------------------------------------------- /rules/community/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/airbnb/streamalert/75ba140d2e1aa6e903313d88326920adcb8bff45/rules/community/__init__.py -------------------------------------------------------------------------------- /rules/community/binaryalert/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/airbnb/streamalert/75ba140d2e1aa6e903313d88326920adcb8bff45/rules/community/binaryalert/__init__.py -------------------------------------------------------------------------------- /rules/community/binaryalert/binaryalert_yara_match.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "data": { 4 | "FileInfo": { 5 | "MD5": "...", 6 | "S3LastModified": "...", 7 | "S3Location": "...", 8 | "S3Metadata": {}, 9 | "SHA256": "..." 
10 | }, 11 | "MatchedRules": { 12 | "Rule1": { 13 | "MatchedStrings": [ 14 | "$eicar_regex" 15 | ], 16 | "Meta": { 17 | "author": "Austin Byers (Airbnb CSIRT)", 18 | "description": "This is a standard AV test, intended to check whether BinaryAlert is working correctly.", 19 | "reference": "http://www.eicar.org/86-0-Intended-use.html" 20 | }, 21 | "RuleFile": "eicar.yar", 22 | "RuleName": "eicar_av_test", 23 | "RuleTags": [] 24 | } 25 | }, 26 | "NumMatchedRules": "1" 27 | }, 28 | "description": "All YARA matches from BinaryAlert trigger an alert", 29 | "log": "binaryalert", 30 | "service": "sns", 31 | "source": "prefix_cluster_sample_topic", 32 | "trigger_rules": [ 33 | "binaryalert_yara_match" 34 | ] 35 | }, 36 | { 37 | "data": { 38 | "FileInfo": {}, 39 | "MatchedRules": {}, 40 | "NumMatchedRules": "0" 41 | }, 42 | "description": "No alerts triggered if no YARA rules were matched", 43 | "log": "binaryalert", 44 | "service": "sns", 45 | "source": "prefix_cluster_sample_topic", 46 | "trigger_rules": [] 47 | } 48 | ] -------------------------------------------------------------------------------- /rules/community/binaryalert/binaryalert_yara_match.py: -------------------------------------------------------------------------------- 1 | """Alert on BinaryAlert YARA matches""" 2 | from streamalert.shared.rule import rule 3 | 4 | 5 | @rule(logs=['binaryalert']) 6 | def binaryalert_yara_match(rec): 7 | """ 8 | author: Austin Byers (Airbnb CSIRT) 9 | description: BinaryAlert found a binary matching a YARA rule 10 | reference: https://binaryalert.io 11 | """ 12 | return rec['NumMatchedRules'] > 0 13 | -------------------------------------------------------------------------------- /rules/community/cloudwatch_events/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/airbnb/streamalert/75ba140d2e1aa6e903313d88326920adcb8bff45/rules/community/cloudwatch_events/__init__.py 
-------------------------------------------------------------------------------- /rules/community/cloudwatch_events/cloudtrail_network_acl_ingress_anywhere.py: -------------------------------------------------------------------------------- 1 | """Alert on AWS Network ACLs that allow ingress from anywhere.""" 2 | from streamalert.shared.rule import rule 3 | 4 | 5 | @rule( 6 | logs=['cloudwatch:events'], 7 | req_subkeys={ 8 | 'detail': ['eventName', 'requestParameters'] 9 | }) 10 | def cloudtrail_network_acl_ingress_anywhere(rec): 11 | """ 12 | author: @mimeframe 13 | description: Alert on AWS Network ACLs that allow ingress from anywhere. 14 | reference_1: http://docs.aws.amazon.com/AmazonVPC/latest/UserGuide/VPC_ACLs.html 15 | reference_2: http://docs.aws.amazon.com/AWSEC2/ 16 | latest/APIReference/API_CreateNetworkAclEntry.html 17 | """ 18 | if rec['detail']['eventName'] != 'CreateNetworkAclEntry': 19 | return False 20 | 21 | req_params = rec['detail']['requestParameters'] 22 | 23 | return (req_params['cidrBlock'] == '0.0.0.0/0' 24 | and req_params['ruleAction'] == 'allow' 25 | and req_params['egress'] is False) 26 | -------------------------------------------------------------------------------- /rules/community/cloudwatch_events/cloudtrail_put_object_acl_public.py: -------------------------------------------------------------------------------- 1 | """Identifies new S3 object ACLs that grant access to the public.""" 2 | from rules.helpers.base import data_has_value_from_substring_list 3 | from streamalert.shared.rule import rule 4 | 5 | 6 | _PUBLIC_ACLS = { 7 | 'http://acs.amazonaws.com/groups/global/AuthenticatedUsers', 8 | 'http://acs.amazonaws.com/groups/global/AllUsers' 9 | } 10 | 11 | # s3 buckets that are expected to have public objects 12 | _PUBLIC_BUCKETS = {'example-bucket-to-ignore'} 13 | 14 | 15 | @rule( 16 | logs=['cloudwatch:events'], 17 | req_subkeys={ 18 | 'detail': ['requestParameters'] 19 | }) 20 | def cloudtrail_put_object_acl_public(rec): 21 
| """ 22 | author: @mimeframe 23 | description: Identifies a change to an S3 object ACL that grants access 24 | to AllUsers (anyone on the internet) or AuthenticatedUsers 25 | (any user with an AWS account). 26 | reference: http://amzn.to/2yfRxzp 27 | playbook: (a) Verify if the object should be publicly accessible 28 | (b) If not, modify the object ACL 29 | """ 30 | request_params = rec['detail']['requestParameters'] 31 | return ( 32 | rec['detail']['eventName'] == 'PutObjectAcl' and 33 | # note: substring is used because it can exist as: 34 | # "http://acs.amazonaws.com/groups/global/AllUsers" or 35 | # "uri=http://acs.amazonaws.com/groups/global/AllUsers" 36 | data_has_value_from_substring_list(request_params, _PUBLIC_ACLS) 37 | and request_params.get('bucketName') not in _PUBLIC_BUCKETS) 38 | -------------------------------------------------------------------------------- /rules/community/cloudwatch_events/cloudtrail_root_account_usage.py: -------------------------------------------------------------------------------- 1 | """Alert when root AWS credentials are used.""" 2 | from streamalert.shared.rule import rule 3 | 4 | 5 | @rule( 6 | logs=['cloudwatch:events'], 7 | req_subkeys={ 8 | 'detail': ['userIdentity', 'eventType'] 9 | }) 10 | def cloudtrail_root_account_usage(rec): 11 | """ 12 | author: airbnb_csirt 13 | description: Root AWS credentials are being used; 14 | This is against best practice and may be an attacker 15 | reference_1: https://aws.amazon.com/premiumsupport/knowledge-center/ 16 | cloudtrail-root-action-logs/ 17 | reference_2: http://docs.aws.amazon.com/IAM/latest/UserGuide/id_root-user.html 18 | playbook: (a) identify who is using the Root account 19 | (b) ping the individual to determine if intentional and/or legitimate 20 | """ 21 | # reference_1 contains details on logic below 22 | return (rec['detail']['userIdentity']['type'] == 'Root' 23 | and rec['detail']['userIdentity'].get('invokedBy') is None 24 | and rec['detail']['eventType'] != 
'AwsServiceEvent') 25 | -------------------------------------------------------------------------------- /rules/community/cloudwatch_events/cloudtrail_security_group_ingress_anywhere.py: -------------------------------------------------------------------------------- 1 | """Alert on AWS Security Groups that allow ingress from anywhere.""" 2 | from rules.helpers.base import get_keys 3 | from streamalert.shared.rule import rule 4 | 5 | 6 | @rule( 7 | logs=['cloudwatch:events'], 8 | req_subkeys={ 9 | 'detail': ['eventName', 'requestParameters'] 10 | }) 11 | def cloudtrail_security_group_ingress_anywhere(rec): 12 | """ 13 | author: @mimeframe, @ryandeivert 14 | description: Alert on AWS Security Groups that allow ingress from anywhere. 15 | This rule accounts for both IPv4 and IPv6. 16 | reference: http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/ 17 | using-network-security.html#creating-security-group 18 | """ 19 | if rec['detail']['eventName'] != 'AuthorizeSecurityGroupIngress': 20 | return False 21 | 22 | ipv4_cidrs = get_keys(rec['detail']['requestParameters'], 'cidrIp') 23 | ipv6_cidrs = get_keys(rec['detail']['requestParameters'], 'cidrIpv6') 24 | 25 | if '0.0.0.0/0' in ipv4_cidrs: 26 | return True 27 | 28 | if '::/0' in ipv6_cidrs: 29 | return True 30 | 31 | return False 32 | -------------------------------------------------------------------------------- /rules/community/duo_administrator/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/airbnb/streamalert/75ba140d2e1aa6e903313d88326920adcb8bff45/rules/community/duo_administrator/__init__.py -------------------------------------------------------------------------------- /rules/community/duo_administrator/duo_bypass_code_create_non_auto_generated.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "data": { 4 | "action": "bypass_create", 5 | "description": "{\"bypass\": \"\", 
\"count\": 1, \"auto_generated\": false, \"valid_secs\": 10, \"remaining_uses\": 100, \"user_id\": \"...\"}", 6 | "object": "...", 7 | "timestamp": "1234567890", 8 | "username": "..." 9 | }, 10 | "description": "A DUO bypass code that was hand crafted should create an alert.", 11 | "log": "duo:administrator", 12 | "service": "streamalert_app", 13 | "source": "prefix_cluster_duo_admin_sm-app-name_app", 14 | "trigger_rules": [ 15 | "duo_bypass_code_create_non_auto_generated" 16 | ] 17 | }, 18 | { 19 | "data": { 20 | "action": "bypass_create", 21 | "description": "{\"bypass\": \"\", \"count\": 1, \"auto_generated\": true, \"valid_secs\": 10, \"remaining_uses\": 100, \"user_id\": \"...\"}", 22 | "object": "...", 23 | "timestamp": "1234567890", 24 | "username": "..." 25 | }, 26 | "description": "A DUO bypass code that was auto generated should not create an alert.", 27 | "log": "duo:administrator", 28 | "service": "streamalert_app", 29 | "source": "prefix_cluster_duo_admin_sm-app-name_app", 30 | "trigger_rules": [] 31 | } 32 | ] -------------------------------------------------------------------------------- /rules/community/duo_administrator/duo_bypass_code_create_non_auto_generated.py: -------------------------------------------------------------------------------- 1 | """Alert when a DUO bypass code is artisanally crafted and not auto-generated.""" 2 | from rules.helpers.base import safe_json_loads 3 | from streamalert.shared.rule import rule 4 | 5 | 6 | @rule(logs=['duo:administrator']) 7 | def duo_bypass_code_create_non_auto_generated(rec): 8 | """ 9 | author: @mimeframe 10 | description: Alert when a DUO bypass code is artisanly crafted and not auto-generated. 
11 | reference: https://duo.com/docs/administration-users#generating-a-bypass-code 12 | """ 13 | return (rec['action'] == 'bypass_create' 14 | and safe_json_loads(rec['description']).get('auto_generated') is False) 15 | -------------------------------------------------------------------------------- /rules/community/duo_administrator/duo_bypass_code_create_non_expiring.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "data": { 4 | "action": "bypass_create", 5 | "description": "{\"bypass\": \"\", \"count\": 1, \"auto_generated\": true, \"valid_secs\": null, \"remaining_uses\": 1, \"user_id\": \"...\"}", 6 | "object": "...", 7 | "timestamp": "1234567890", 8 | "username": "..." 9 | }, 10 | "description": "A DUO bypass code that has no expiration should create an alert.", 11 | "log": "duo:administrator", 12 | "service": "streamalert_app", 13 | "source": "prefix_cluster_duo_admin_sm-app-name_app", 14 | "trigger_rules": [ 15 | "duo_bypass_code_create_non_expiring" 16 | ] 17 | }, 18 | { 19 | "data": { 20 | "action": "bypass_create", 21 | "description": "{\"bypass\": \"\", \"count\": 1, \"auto_generated\": true, \"valid_secs\": 60, \"remaining_uses\": 1, \"user_id\": \"...\"}", 22 | "object": "...", 23 | "timestamp": "1234567890", 24 | "username": "..." 
25 | }, 26 | "description": "A DUO bypass code that has an expiration should not create an alert.", 27 | "log": "duo:administrator", 28 | "service": "streamalert_app", 29 | "source": "prefix_cluster_duo_admin_sm-app-name_app", 30 | "trigger_rules": [] 31 | } 32 | ] -------------------------------------------------------------------------------- /rules/community/duo_administrator/duo_bypass_code_create_non_expiring.py: -------------------------------------------------------------------------------- 1 | """Alert when a DUO bypass code is created that is non-expiring.""" 2 | from rules.helpers.base import safe_json_loads 3 | from streamalert.shared.rule import rule 4 | 5 | 6 | @rule(logs=['duo:administrator']) 7 | def duo_bypass_code_create_non_expiring(rec): 8 | """ 9 | author: @mimeframe 10 | description: Alert when a DUO bypass code is created that is non-expiring. 11 | reference: https://duo.com/docs/administration-users#generating-a-bypass-code 12 | """ 13 | return (rec['action'] == 'bypass_create' 14 | and safe_json_loads(rec['description']).get('valid_secs') is None) 15 | -------------------------------------------------------------------------------- /rules/community/duo_administrator/duo_bypass_code_create_unlimited_use.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "data": { 4 | "action": "bypass_create", 5 | "description": "{\"bypass\": \"\", \"count\": 1, \"auto_generated\": true, \"valid_secs\": 10, \"remaining_uses\": null, \"user_id\": \"...\"}", 6 | "object": "...", 7 | "timestamp": "1234567890", 8 | "username": "..." 
9 | }, 10 | "description": "A DUO bypass code that has unlimited use should create an alert.", 11 | "log": "duo:administrator", 12 | "service": "streamalert_app", 13 | "source": "prefix_cluster_duo_admin_sm-app-name_app", 14 | "trigger_rules": [ 15 | "duo_bypass_code_create_unlimited_use" 16 | ] 17 | }, 18 | { 19 | "data": { 20 | "action": "bypass_create", 21 | "description": "{\"bypass\": \"\", \"count\": 1, \"auto_generated\": true, \"valid_secs\": 60, \"remaining_uses\": 1, \"user_id\": \"...\"}", 22 | "object": "...", 23 | "timestamp": "1234567890", 24 | "username": "..." 25 | }, 26 | "description": "A DUO bypass code that has finite remaining uses should not create an alert.", 27 | "log": "duo:administrator", 28 | "service": "streamalert_app", 29 | "source": "prefix_cluster_duo_admin_sm-app-name_app", 30 | "trigger_rules": [] 31 | } 32 | ] -------------------------------------------------------------------------------- /rules/community/duo_administrator/duo_bypass_code_create_unlimited_use.py: -------------------------------------------------------------------------------- 1 | """Alert when a DUO bypass code is created that has unlimited use.""" 2 | from rules.helpers.base import safe_json_loads 3 | from streamalert.shared.rule import rule 4 | 5 | 6 | @rule(logs=['duo:administrator']) 7 | def duo_bypass_code_create_unlimited_use(rec): 8 | """ 9 | author: @mimeframe 10 | description: Alert when a DUO bypass code is created that has unlimited use. 
11 | reference: https://duo.com/docs/administration-users#generating-a-bypass-code 12 | """ 13 | return (rec['action'] == 'bypass_create' 14 | and safe_json_loads(rec['description']).get('remaining_uses') is None) 15 | -------------------------------------------------------------------------------- /rules/community/duo_authentication/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/airbnb/streamalert/75ba140d2e1aa6e903313d88326920adcb8bff45/rules/community/duo_authentication/__init__.py -------------------------------------------------------------------------------- /rules/community/duo_authentication/duo_anonymous_ip_failure.py: -------------------------------------------------------------------------------- 1 | """Alert on any Duo auth logs marked as a failure due to an Anonymous IP.""" 2 | from streamalert.shared.rule import rule 3 | 4 | 5 | @rule(logs=['duo:authentication']) 6 | def duo_anonymous_ip_failure(rec): 7 | """ 8 | author: airbnb_csirt 9 | description: Alert on Duo auth logs marked as a failure due to an Anonymous IP. 10 | reference: https://duo.com/docs/policy#anonymous-networks 11 | playbook: N/A 12 | """ 13 | return rec['result'] == 'FAILURE' and rec['reason'] == 'Anonymous IP' 14 | -------------------------------------------------------------------------------- /rules/community/duo_authentication/duo_fraud.py: -------------------------------------------------------------------------------- 1 | """Alert on any Duo auth logs marked as fraud.""" 2 | from streamalert.shared.rule import rule 3 | 4 | 5 | @rule(logs=['duo:authentication']) 6 | def duo_fraud(rec): 7 | """ 8 | author: airbnb_csirt 9 | description: Alert on any Duo authentication logs marked as fraud. 
10 | reference: https://duo.com/docs/adminapi#authentication-logs 11 | playbook: N/A 12 | """ 13 | return rec['result'] == 'FRAUD' 14 | -------------------------------------------------------------------------------- /rules/community/duo_authentication/duo_lookup_tables_example.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "data": { 4 | "access_device": { 5 | "browser": "Netscape", 6 | "browser_version": "60.0.0000.80", 7 | "flash_version": "27.0.0.0", 8 | "java_version": "uninstalled", 9 | "os": "Mac OS X", 10 | "os_version": "10.12.6", 11 | "trusted_endpoint_status": "not trusted" 12 | }, 13 | "alias": "", 14 | "device": "555-123-4567", 15 | "factor": "Duo Push", 16 | "integration": "Test Integration", 17 | "ip": "12.123.123.12", 18 | "isotimestamp": "2017-09-13T15:28:19.000Z", 19 | "location": { 20 | "city": "Place", 21 | "country": "US", 22 | "state": "State" 23 | }, 24 | "new_enrollment": false, 25 | "reason": "", 26 | "result": "SUCCESS", 27 | "timestamp": 1505316499, 28 | "username": "user.name@email.com" 29 | }, 30 | "description": "Duo authentication log marked as failure as a result of 'Anonymous IP' that will create an alert", 31 | "log": "duo:authentication", 32 | "service": "streamalert_app", 33 | "source": "prefix_cluster_duo_auth_sm-app-name_app", 34 | "trigger_rules": [ 35 | "duo_lookup_tables_example" 36 | ], 37 | "test_fixtures": { 38 | "lookup_tables": { 39 | "dynamo-backed-table": { 40 | "duo_blacklisted_browsers": [ 41 | "Netscape" 42 | ] 43 | } 44 | } 45 | } 46 | } 47 | ] -------------------------------------------------------------------------------- /rules/community/duo_authentication/duo_lookup_tables_example.py: -------------------------------------------------------------------------------- 1 | """Alert on any Duo auth logs marked as a failure due to an Anonymous IP.""" 2 | from streamalert.shared.rule import rule 3 | from streamalert.shared.lookup_tables.core import LookupTables 4 
| 5 | 6 | @rule(logs=['duo:authentication']) 7 | def duo_lookup_tables_example(rec): 8 | """ 9 | description: Alert on Duo auth logs from blacklisted browsers, as defined by a lookup table 10 | note: This is purely for example purposes in testing, and is not meant to be used as-is 11 | """ 12 | # The 'global' fixture file at rules/test_fixtures/lookup_tables/dynamo-backed-table.json 13 | # creates the 'dynamo-backed-table' containing the 'duo_blacklisted_browsers' value 14 | blacklisted_browsers = LookupTables.get('dynamo-backed-table', 'duo_blacklisted_browsers', []) 15 | 16 | # The test event contains a browser of 'Netscape', which is 17 | # included in the lookup table blacklist 18 | return rec['access_device'].get('browser') in set(blacklisted_browsers) 19 | -------------------------------------------------------------------------------- /rules/community/fleet/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/airbnb/streamalert/75ba140d2e1aa6e903313d88326920adcb8bff45/rules/community/fleet/__init__.py -------------------------------------------------------------------------------- /rules/community/fleet/fleet_bad_action.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "data": { 4 | "name": "Bad action query", 5 | "hostIdentifier": "pc-name", 6 | "calendarTime": "Wed May 8 15:28:02 2019 UTC", 7 | "unixTime": 1557329282, 8 | "epoch": 0, 9 | "counter": 19, 10 | "decorations": { 11 | "host_uuid": "ABCDEFG-XXXX-YYYY-ZZZZZZZZ", 12 | "hostname": "pc-name" 13 | }, 14 | "columns": { 15 | "mtime": "1557323732", 16 | "bad_action": "0" 17 | }, 18 | "action": "added" 19 | }, 20 | "log": "fleet:results", 21 | "description": "no bad actions have occoured", 22 | "trigger_rules": [], 23 | "source": "prefix.cluster.sample.bucket", 24 | "service": "s3" 25 | }, 26 | { 27 | "data": { 28 | "name": "Bad action query", 29 | "hostIdentifier": "pc-name", 30 
| "calendarTime": "Wed May 8 15:28:02 2019 UTC", 31 | "unixTime": 1557329282, 32 | "epoch": 0, 33 | "counter": 19, 34 | "decorations": { 35 | "host_uuid": "ABCDEFG-XXXX-YYYY-ZZZZZZZZ", 36 | "hostname": "pc-name" 37 | }, 38 | "columns": { 39 | "mtime": "1557323732", 40 | "bad_action": "1" 41 | }, 42 | "action": "added" 43 | }, 44 | "log": "fleet:results", 45 | "description": "bad action", 46 | "trigger_rules": [ 47 | "fleet_bad_action" 48 | ], 49 | "source": "prefix.cluster.sample.bucket", 50 | "service": "s3" 51 | } 52 | ] -------------------------------------------------------------------------------- /rules/community/fleet/fleet_bad_action.py: -------------------------------------------------------------------------------- 1 | from streamalert.shared.rule import rule 2 | # Remove disable import if no rules are disabled using the @disable decorator. 3 | from streamalert.shared.rule import disable 4 | 5 | # Remove the @disable decorator to use this rule in a live deployment 6 | @disable 7 | @rule( 8 | logs=['fleet:results'] 9 | ) 10 | def fleet_bad_action(rec): 11 | """ 12 | author: gavinelder 13 | description: Alert when a user carries out a bad action. 14 | reference: N/A 15 | playbook: (a) Reach out to the user who made the modification and confirm intent. 16 | (b) Link appropriate Jira ticket. 
17 | """ 18 | return ( 19 | rec['columns'].get('bad_action', '1') == '1' 20 | ) 21 | -------------------------------------------------------------------------------- /rules/community/github/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/airbnb/streamalert/75ba140d2e1aa6e903313d88326920adcb8bff45/rules/community/github/__init__.py -------------------------------------------------------------------------------- /rules/community/github/github_disable_dismiss_stale_pull_request_approvals.py: -------------------------------------------------------------------------------- 1 | """Github setting 'Dismiss stale pull request approvals' was disabled for a repo.""" 2 | from streamalert.shared.rule import rule 3 | 4 | 5 | @rule(logs=['ghe:general']) 6 | def github_disable_dismiss_stale_pull_request_approvals(rec): 7 | """ 8 | author: @mimeframe 9 | description: Setting 'Dismiss stale pull request approvals when new commits are pushed' 10 | was disabled. As a result, commits occurring after approval will not 11 | require approval. 12 | repro_steps: (a) Visit ///settings/branches/ 13 | (b) Uncheck 'Dismiss stale pull request approvals when new commits are pushed' 14 | (c) Click 'Save Changes' 15 | reference: https://help.github.com/articles/configuring-protected-branches/ 16 | """ 17 | return rec['action'] == 'protected_branch.dismiss_stale_reviews' 18 | -------------------------------------------------------------------------------- /rules/community/github/github_disable_protect_this_branch.py: -------------------------------------------------------------------------------- 1 | """Github setting 'Protect this branch' was disabled for a repo.""" 2 | from streamalert.shared.rule import rule 3 | 4 | 5 | @rule(logs=['ghe:general']) 6 | def github_disable_protect_this_branch(rec): 7 | """ 8 | author: @mimeframe 9 | description: Github setting 'Protect this branch' was disabled for a repo. 
10 | When unchecking this top-level option, it also disables 11 | 'Require pull request reviews before merging', 12 | 'Require review from Code Owners', and all other branch protections 13 | like status checks. 14 | repro_steps: (a) Visit ///settings/branches/ 15 | (b) Uncheck 'Protect this branch' 16 | (c) Click 'Save Changes' 17 | reference: https://help.github.com/articles/configuring-protected-branches/ 18 | """ 19 | return rec['action'] == 'protected_branch.destroy' 20 | -------------------------------------------------------------------------------- /rules/community/github/github_disable_required_pull_request_reviews.py: -------------------------------------------------------------------------------- 1 | """Github 'Require pull request reviews before merging' was disabled for a repo.""" 2 | from streamalert.shared.rule import rule 3 | 4 | 5 | @rule(logs=['ghe:general']) 6 | def github_disable_required_pull_request_reviews(rec): 7 | """ 8 | author: @mimeframe 9 | description: Setting 'Require pull request reviews before merging' was disabled. 10 | When enabled, all commits must be made to a non-protected branch 11 | and submitted via a pull request with at least one approved review 12 | and no changes requested before it can be merged into master. 
13 | repro_steps: (a) Visit ///settings/branches/ 14 | (b) Uncheck 'Require pull request reviews before merging' 15 | (c) Click 'Save Changes' 16 | reference: https://help.github.com/articles/enabling-required-reviews-for-pull-requests/ 17 | """ 18 | actor_ignorelist = {} 19 | return (rec['action'] == 'protected_branch.dismissal_restricted_users_teams' 20 | and rec['data'].get('authorized_actors_only') is True 21 | and rec['actor'] not in actor_ignorelist) 22 | -------------------------------------------------------------------------------- /rules/community/github/github_disable_required_status_checks.py: -------------------------------------------------------------------------------- 1 | """Github 'required status checks' was disabled for a repo.""" 2 | from streamalert.shared.rule import rule 3 | 4 | 5 | @rule(logs=['ghe:general']) 6 | def github_disable_required_status_checks(rec): 7 | """ 8 | author: @mimeframe 9 | description: The 'required status checks' feature was disabled for a repository. 
10 | Settings -> Branches -> Protected Branches -> 11 | repro_steps: (a) Choose a repository 12 | (b) Click Settings -> Branches -> Protected Branches -> 13 | (c) Uncheck 'Require status checks to pass before merging' 14 | reference: https://help.github.com/articles/enabling-required-status-checks/ 15 | """ 16 | return ( 17 | rec['action'] == 'protected_branch.update_required_status_checks_enforcement_level' and 18 | # 0 => unchecked 19 | # 1 => enabled for users 20 | # 2 => enabled for users and admins ('Include administrators') 21 | rec['data'].get('required_status_checks_enforcement_level') == 0) 22 | -------------------------------------------------------------------------------- /rules/community/github/github_disable_two_factor_requirement_org.py: -------------------------------------------------------------------------------- 1 | """Github two-factor authentication requirement was disabled for an org.""" 2 | from streamalert.shared.rule import rule 3 | 4 | 5 | @rule(logs=['ghe:general']) 6 | def github_disable_two_factor_requirement_org(rec): 7 | """ 8 | author: @mimeframe 9 | description: Two-factor authentication requirement was disabled for an org. 10 | repro_steps: (a) Visit /organizations//settings/security 11 | (b) Uncheck 'Require two-factor authentication...' 
12 | (c) Click 'Save' 13 | reference: https://help.github.com/ 14 | articles/requiring-two-factor-authentication-in-your-organization/ 15 | """ 16 | return rec['action'] == 'org.disable_two_factor_requirement' 17 | -------------------------------------------------------------------------------- /rules/community/github/github_disable_two_factor_requirement_user.py: -------------------------------------------------------------------------------- 1 | """Github two-factor authentication requirement was disabled for a user.""" 2 | from streamalert.shared.rule import rule 3 | 4 | 5 | @rule(logs=['ghe:general']) 6 | def github_disable_two_factor_requirement_user(rec): 7 | """ 8 | author: @mimeframe 9 | description: Two-factor authentication requirement was disabled for a user. 10 | repro_steps: (a) Visit /settings/two_factor_authentication/configure 11 | reference: https://help.github.com/enterprise/2.11/admin/articles/audited-actions/ 12 | """ 13 | return rec['action'] == 'two_factor_authentication.disabled' 14 | -------------------------------------------------------------------------------- /rules/community/github/github_oauth_application_create.py: -------------------------------------------------------------------------------- 1 | """An OAuth application was registered within Github.""" 2 | from streamalert.shared.rule import rule 3 | 4 | 5 | @rule(logs=['ghe:general']) 6 | def github_oauth_application_create(rec): 7 | """ 8 | author: @mimeframe 9 | description: An OAuth application was registered within Github. 
10 | reference: https://developer.github.com 11 | /apps/building-integrations/setting-up-and-registering-oauth-apps/ 12 | """ 13 | return rec['action'] == 'oauth_application.create' 14 | -------------------------------------------------------------------------------- /rules/community/github/github_site_admin_action.py: -------------------------------------------------------------------------------- 1 | """A Github site admin tool/action was used.""" 2 | from streamalert.shared.rule import rule 3 | 4 | 5 | @rule(logs=['ghe:general']) 6 | def github_site_admin_action(rec): 7 | """ 8 | author: @mimeframe 9 | description: A Github site admin tool/action was used. 10 | Example: 'staff.fake_login' 11 | "A site admin signed into GitHub Enterprise as another user."" 12 | reference: https://help.github.com/enterprise/2.11/admin/articles/audited-actions/ 13 | """ 14 | return rec['action'].startswith('staff.') 15 | -------------------------------------------------------------------------------- /rules/community/github/github_site_admin_user_promotion.py: -------------------------------------------------------------------------------- 1 | """A Github Enterprise user account was promoted to a site admin.""" 2 | from streamalert.shared.rule import rule 3 | 4 | 5 | @rule(logs=['ghe:general']) 6 | def github_site_admin_user_promotion(rec): 7 | """ 8 | author: @fusionrace, @mimeframe 9 | description: Alert when a Github Enterprise user account is promoted to a 10 | Site Administrator (privileged account) 11 | reference: https://help.github.com/enterprise/2.11/admin/guides/ 12 | user-management/promoting-or-demoting-a-site-administrator/ 13 | """ 14 | return rec['action'] == 'user.promote' 15 | -------------------------------------------------------------------------------- /rules/community/guardduty/__init__.py: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/airbnb/streamalert/75ba140d2e1aa6e903313d88326920adcb8bff45/rules/community/guardduty/__init__.py -------------------------------------------------------------------------------- /rules/community/guardduty/guard_duty_all.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "data": { 4 | "account": "111111111111", 5 | "detail": {}, 6 | "detail-type": "GuardDuty Finding", 7 | "id": "00000000-0000-0000-0000-000000000000", 8 | "region": "us-east-1", 9 | "resources": [], 10 | "source": "aws.guardduty", 11 | "time": "2018-02-13T18:25:01Z", 12 | "version": "0" 13 | }, 14 | "description": "GuardDuty", 15 | "log": "cloudwatch:events", 16 | "service": "kinesis", 17 | "source": "prefix_cluster1_streamalert", 18 | "trigger_rules": [ 19 | "guard_duty_all" 20 | ] 21 | } 22 | ] -------------------------------------------------------------------------------- /rules/community/guardduty/guard_duty_all.py: -------------------------------------------------------------------------------- 1 | """Alert on GuardDuty""" 2 | from matchers.default import AwsGuardDutyMatcher 3 | from streamalert.shared.rule import rule 4 | 5 | 6 | @rule(logs=['cloudwatch:events'], matchers=[AwsGuardDutyMatcher.guard_duty]) 7 | def guard_duty_all(*_): 8 | """ 9 | author: spiper 10 | description: Alert on GuardDuty events 11 | playbook: (a) identify the AWS account in the log 12 | (b) identify what resource(s) are impacted 13 | (c) contact the point-of-contact for the account 14 | testing: From the GuardDuty AWS page (https://console.aws.amazon.com/guardduty/home) 15 | click the button to "Generate Sample Findings" 16 | """ 17 | return True 18 | -------------------------------------------------------------------------------- /rules/community/mitre_attack/__init__.py: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/airbnb/streamalert/75ba140d2e1aa6e903313d88326920adcb8bff45/rules/community/mitre_attack/__init__.py -------------------------------------------------------------------------------- /rules/community/mitre_attack/defense_evasion/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/airbnb/streamalert/75ba140d2e1aa6e903313d88326920adcb8bff45/rules/community/mitre_attack/defense_evasion/__init__.py -------------------------------------------------------------------------------- /rules/community/mitre_attack/defense_evasion/multi/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/airbnb/streamalert/75ba140d2e1aa6e903313d88326920adcb8bff45/rules/community/mitre_attack/defense_evasion/multi/__init__.py -------------------------------------------------------------------------------- /rules/community/mitre_attack/defense_evasion/multi/obfuscated_files_or_information/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/airbnb/streamalert/75ba140d2e1aa6e903313d88326920adcb8bff45/rules/community/mitre_attack/defense_evasion/multi/obfuscated_files_or_information/__init__.py -------------------------------------------------------------------------------- /rules/community/okta/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/airbnb/streamalert/75ba140d2e1aa6e903313d88326920adcb8bff45/rules/community/okta/__init__.py -------------------------------------------------------------------------------- /rules/community/onelogin/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/airbnb/streamalert/75ba140d2e1aa6e903313d88326920adcb8bff45/rules/community/onelogin/__init__.py 
-------------------------------------------------------------------------------- /rules/community/onelogin/onelogin_events_assumed_role.py: -------------------------------------------------------------------------------- 1 | """Alert on the OneLogin event that a user has assumed the role of someone else.""" 2 | from streamalert.shared.rule import rule 3 | 4 | 5 | @rule(logs=['onelogin:events']) 6 | def onelogin_events_assumed_role(rec): 7 | """ 8 | author: @javutin 9 | description: Alert on OneLogin users assuming a different role. 10 | reference_1: https://support.onelogin.com/hc/en-us/articles/202123164-Assuming-Users 11 | reference_2: https://developers.onelogin.com/api-docs/1/events/event-types 12 | """ 13 | return rec['event_type_id'] == 3 14 | -------------------------------------------------------------------------------- /rules/community/onelogin/onelogin_events_threat_intel_example.py: -------------------------------------------------------------------------------- 1 | """Alert on the OneLogin event that a user has assumed the role of someone else.""" 2 | from streamalert.shared.rule import disable, rule 3 | from streamalert.rules_engine.threat_intel import ThreatIntel 4 | 5 | 6 | # This example is disabled because it requires the threat_intel feature to be 7 | # enabled in the following locations: 8 | # https://github.com/airbnb/streamalert/blob/ 9 | # 791abf892983eedbaf30ff5aeb1f55e46e20d82a/conf/threat_intel.json#L3 10 | # and 11 | # https://github.com/airbnb/streamalert/blob/ 12 | # 791abf892983eedbaf30ff5aeb1f55e46e20d82a/conf/clusters/prod.json#L80 13 | @disable 14 | @rule(logs=['onelogin:events']) 15 | def onelogin_events_threat_intel_example(rec): 16 | """ 17 | description: Alert on OneLogin activity from a malicious IP address using threat intel 18 | note: This is purely for example purposes in testing, and is not meant to be used as-is 19 | """ 20 | # The 'local' fixture file at rules/community/onelogin/test_fixtures/threat_intel/example.json 21 
| # mocks out the threat intel values used by this rule 22 | 23 | # In this case, the rec['ipaddr'] value is a "known" malicious IP, so this will alert 24 | return ThreatIntel.IOC_KEY in rec and 'ip' in rec[ThreatIntel.IOC_KEY] 25 | -------------------------------------------------------------------------------- /rules/community/osquery/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/airbnb/streamalert/75ba140d2e1aa6e903313d88326920adcb8bff45/rules/community/osquery/__init__.py -------------------------------------------------------------------------------- /rules/community/osquery/ssh_login_activity.py: -------------------------------------------------------------------------------- 1 | """Detect ssh login activity based on osquery last table""" 2 | from matchers.default import OsqueryMatcher 3 | from streamalert.shared.rule import rule 4 | 5 | 6 | @rule(logs=['osquery:differential'], 7 | matchers=[OsqueryMatcher.added, OsqueryMatcher.user_login]) 8 | def ssh_login_activity(_): 9 | """ 10 | author: chunyong-lin 11 | description: Detect on ssh login activity to the linux host based on osquery 12 | last table. This rule assumes we use default osquery pack 13 | shipped with osquery package located at 14 | /usr/share/osquery/packs/incident-response.conf on the linux 15 | host. Update the pack name in rules/matchers/matchers.py if different. 
16 | reference: https://osquery.io/schema/4.1.2#last 17 | """ 18 | return True 19 | -------------------------------------------------------------------------------- /rules/community/packetbeat/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/airbnb/streamalert/75ba140d2e1aa6e903313d88326920adcb8bff45/rules/community/packetbeat/__init__.py -------------------------------------------------------------------------------- /rules/community/packetbeat/packetbeat_blacklisted_domain.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "data": { 4 | "@timestamp": "2018-02-06T07:23:54.827Z", 5 | "bytes_in": 32, 6 | "bytes_out": 64, 7 | "client_ip": "172.16.3.33", 8 | "dns": { 9 | "answers_count": 2, 10 | "question": { "name": "evil.com.", "type": "A" }, 11 | "response_code": "NOERROR" 12 | }, 13 | "transport": "udp", 14 | "type": "dns" 15 | }, 16 | "description": "basic schema validation check for packetbeat:flow", 17 | "log": "packetbeat:dns", 18 | "source": "prefix.cluster.sample.bucket", 19 | "service": "s3", 20 | "trigger_rules": [ 21 | "packetbeat_blacklisted_domain" 22 | ] 23 | } 24 | ] 25 | -------------------------------------------------------------------------------- /rules/community/packetbeat/packetbeat_blacklisted_domain.py: -------------------------------------------------------------------------------- 1 | """Alert on PacketBeat events""" 2 | 3 | from streamalert.shared.rule import rule 4 | 5 | 6 | DNS_BLACKLIST = [ 7 | 'evil.com.' 8 | ] 9 | 10 | @rule(logs=['packetbeat:dns']) 11 | def packetbeat_blacklisted_domain(rec): 12 | """ 13 | author: gavin (gavinelder) 14 | description: Lookup for BlackListed DNS (CnC). 15 | testing: (a) Review traffic logs for machine in question. 
16 | reference: https://www.elastic.co/guide/en/beats/packetbeat/master/packetbeat-overview.html 17 | """ 18 | return rec['dns']['question']['name'] in DNS_BLACKLIST 19 | -------------------------------------------------------------------------------- /rules/community/packetbeat/packetbeat_blacklisted_ip.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "data": { 4 | "@timestamp": "2018-02-06T07:24:00.006Z", 5 | "dest": { 6 | "ip": "222.173.190.239", 7 | "port": 44172 8 | }, 9 | "final": true, 10 | "last_time": "2018-02-06T07:23:00.620Z", 11 | "source": { 12 | "ip": "192.30.253.125", 13 | "mac": "00:08:a2:09:e4:6a", 14 | "port": 443 15 | }, 16 | "start_time": "2018-02-06T07:23:00.620Z", 17 | "type": "flow" 18 | }, 19 | "description": "packetbeat:flow showing outbound connection to bad domain", 20 | "log": "packetbeat:flow", 21 | "source": "prefix.cluster.sample.bucket", 22 | "service": "s3", 23 | "trigger_rules": [ 24 | "packetbeat_blacklisted_ip" 25 | ] 26 | } 27 | ] 28 | -------------------------------------------------------------------------------- /rules/community/packetbeat/packetbeat_blacklisted_ip.py: -------------------------------------------------------------------------------- 1 | """Alert on PacketBeat events""" 2 | import ipaddress 3 | from streamalert.shared.rule import rule 4 | 5 | IP_BLACKLIST = [ 6 | '222.173.190.239', 7 | ] 8 | 9 | 10 | @rule(logs=['packetbeat:flow']) 11 | def packetbeat_blacklisted_ip(rec): 12 | """ 13 | author: gavin (gavinelder) 14 | description: Network connection to blacklisted IP. 15 | testing: (a) Review traffic logs for machine in question. 
16 | reference: https://www.elastic.co/guide/en/beats/packetbeat/master/packetbeat-overview.html 17 | """ 18 | return ipaddress.IPv4Address(rec['source']['ip']) and rec['dest']['ip'] in IP_BLACKLIST 19 | -------------------------------------------------------------------------------- /rules/community/packetbeat/packetbeat_dns_lookup.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "data": { 4 | "@timestamp": "2018-02-06T07:24:03.251Z", 5 | "bytes_in": 54, 6 | "bytes_out": 148, 7 | "client_ip": "172.16.2.97", 8 | "dns": { 9 | "answers_count": 0, 10 | "question": { 11 | "name": "foo.evil.com.", 12 | "type": "A" 13 | }, 14 | "response_code": "NXDOMAIN" 15 | }, 16 | "transport": "udp", 17 | "type": "dns" 18 | }, 19 | "description": "packetbeat:dns showing lookup to bad domain", 20 | "log": "packetbeat:dns", 21 | "source": "prefix.cluster.sample.bucket", 22 | "service": "s3", 23 | "trigger_rules": [ 24 | "packetbeat_dns_lookup" 25 | ] 26 | } 27 | ] 28 | -------------------------------------------------------------------------------- /rules/community/packetbeat/packetbeat_dns_lookup.py: -------------------------------------------------------------------------------- 1 | """Alert on PacketBeat events""" 2 | from streamalert.shared.rule import rule 3 | 4 | 5 | @rule(logs=['packetbeat:dns']) 6 | def packetbeat_dns_lookup(rec): 7 | """ 8 | author: gavin (gavinelder) 9 | description: Alert on DNS lookup for Blacklisted domain 10 | testing: (a) Review traffic logs for machine in question. 
@rule(logs=['trendmicro:malwareevent'])
def trendmicro_malware_event(_):
    """
    author: jack (jack1902)
    description: Trend Micro identified malware on an agent
    testing: (a) Log on to a machine where Trend Agent is active
             (b) Upload EICAR Test File:
             http://docs.trendmicro.com/all/ent/de/v1.5/en-us/de_1.5_olh/ctm_ag/ctm1_ag_ch8/t_test_eicar_file.htm
    """
    # Fire on every record of this log type; the record content itself is not
    # inspected (the log source is already scoped to malware events)
    return True
"""
Example for writing a Demisto rule
"""
from publishers.sample.sample_demisto import demisto_classification
from streamalert.shared.rule import rule


@rule(
    logs=['osquery:differential'],
    outputs=['demisto:sample-integration'],
    publishers=[demisto_classification],
    context={
        'demisto': {
            'incident_type': 'My sample type',
            'playbook': 'A Playbook',
            'severity': 'informational'
        },
    }
)
def sample_demisto(record, _):
    """
    author: Derek Wang
    description: An example of how to write a Demisto alert using publishers to classify
    """
    # Match only the synthetic test record; missing key defaults to '' (no match)
    host_identifier = record.get('hostIdentifier', '')
    return host_identifier == 'sample_demisto'
"""Initialize logging for the alert processor."""
import importlib
import os

# Dynamically import every module in this package that may define an
# OutputDispatcher subclass, so subclasses register themselves on import.
for _output_module in os.listdir(os.path.dirname(__file__)):
    # Skip this __init__, the shared base-class module, and non-Python files
    if _output_module.startswith(('__init__', 'output_base')) or not _output_module.endswith('.py'):
        continue

    _module_name = os.path.splitext(_output_module)[0]
    importlib.import_module(
        '.'.join(['streamalert', 'alert_processor', 'outputs', _module_name])
    )
19 | -------------------------------------------------------------------------------- /streamalert/alert_processor/outputs/credentials/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/airbnb/streamalert/75ba140d2e1aa6e903313d88326920adcb8bff45/streamalert/alert_processor/outputs/credentials/__init__.py -------------------------------------------------------------------------------- /streamalert/apps/_apps/__init__.py: -------------------------------------------------------------------------------- 1 | """Import some package level items to make implementing subclasses a bit nicer""" 2 | from streamalert.apps import StreamAlertApp 3 | from streamalert.apps.app_base import AppIntegration, safe_timeout 4 | from streamalert.shared.logger import get_logger 5 | -------------------------------------------------------------------------------- /streamalert/apps/exceptions.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright 2017-present Airbnb, Inc. 3 | 4 | Licensed under the Apache License, Version 2.0 (the "License"); 5 | you may not use this file except in compliance with the License. 6 | You may obtain a copy of the License at 7 | 8 | http://www.apache.org/licenses/LICENSE-2.0 9 | 10 | Unless required by applicable law or agreed to in writing, software 11 | distributed under the License is distributed on an "AS IS" BASIS, 12 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | See the License for the specific language governing permissions and 14 | limitations under the License. 
class AppException(Exception):
    """Base exception class for StreamAlert apps; catch this for any app error"""


class AppConfigError(AppException):
    """Class for config related errors"""


class AppAuthError(AppException):
    """Class for auth related errors"""


class AppStateError(AppException):
    """Class for any errors when loading the state"""
An example of this is the 'invocation_type' key 26 | that is used as an override to allow for successive invocations (and in 27 | the future, support for historical invocations) 28 | context (LambdaContxt): AWS LambdaContext object 29 | """ 30 | StreamAlertApp.get_app(event['app_type'])(event, context).gather() 31 | -------------------------------------------------------------------------------- /streamalert/athena_partitioner/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/airbnb/streamalert/75ba140d2e1aa6e903313d88326920adcb8bff45/streamalert/athena_partitioner/__init__.py -------------------------------------------------------------------------------- /streamalert/classifier/__init__.py: -------------------------------------------------------------------------------- 1 | """Imports from submodules to make higher up imports easier""" 2 | from streamalert.classifier.classifier import Classifier 3 | -------------------------------------------------------------------------------- /streamalert/classifier/clients/__init__.py: -------------------------------------------------------------------------------- 1 | """Imports from submodules to make higher up imports easier""" 2 | from .sqs import SQSClient 3 | -------------------------------------------------------------------------------- /streamalert/classifier/main.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright 2017-present Airbnb, Inc. 3 | 4 | Licensed under the Apache License, Version 2.0 (the "License"); 5 | you may not use this file except in compliance with the License. 
def handler(event, _):
    """Main Lambda handler function

    Args:
        event (dict): Lambda invocation event; the 'Records' list (SQS/Kinesis
            style) is handed to the Classifier, defaulting to an empty list
    """
    try:
        Classifier().run(event.get('Records', []))
    except Exception:
        # Log the full invocation event for debugging, then re-raise so the
        # Lambda invocation is still marked failed (enables retries / DLQ)
        logger.get_logger(__name__).exception('Invocation event: %s', json.dumps(event))
        raise
-------------------------------------------------------------------------------- /streamalert/rule_promotion/main.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright 2017-present Airbnb, Inc. 3 | 4 | Licensed under the Apache License, Version 2.0 (the "License"); 5 | you may not use this file except in compliance with the License. 6 | You may obtain a copy of the License at 7 | 8 | http://www.apache.org/licenses/LICENSE-2.0 9 | 10 | Unless required by applicable law or agreed to in writing, software 11 | distributed under the License is distributed on an "AS IS" BASIS, 12 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | See the License for the specific language governing permissions and 14 | limitations under the License. 15 | """ 16 | 17 | from streamalert.rule_promotion.promoter import RulePromoter 18 | 19 | 20 | def handler(event, _): 21 | RulePromoter().run(event.get('send_digest', False)) 22 | -------------------------------------------------------------------------------- /streamalert/rules_engine/__init__.py: -------------------------------------------------------------------------------- 1 | """Imports from submodules to make higher up imports easier""" 2 | from streamalert.rules_engine.rules_engine import RulesEngine 3 | -------------------------------------------------------------------------------- /streamalert/rules_engine/main.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright 2017-present Airbnb, Inc. 3 | 4 | Licensed under the Apache License, Version 2.0 (the "License"); 5 | you may not use this file except in compliance with the License. 
def handler(event, _):
    """Main Lambda handler function

    Flattens each SQS record body (which may be a single record or a batch)
    into one list and hands it to the RulesEngine. On any failure the full
    invocation event is logged before re-raising so the invocation fails.
    """
    try:
        records = []
        for sqs_record in event.get('Records', []):
            payload = json.loads(sqs_record['body'])
            # Normalize: a body may be a list of records or a single record
            records.extend(payload if isinstance(payload, list) else [payload])
        RulesEngine().run(records)
    except Exception:
        logger.get_logger(__name__).exception('Invocation event: %s', json.dumps(event))
        raise
15 | 16 | 17 | StreamQuery base module 18 | """ 19 | -------------------------------------------------------------------------------- /streamalert/scheduled_queries/command/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/airbnb/streamalert/75ba140d2e1aa6e903313d88326920adcb8bff45/streamalert/scheduled_queries/command/__init__.py -------------------------------------------------------------------------------- /streamalert/scheduled_queries/config/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/airbnb/streamalert/75ba140d2e1aa6e903313d88326920adcb8bff45/streamalert/scheduled_queries/config/__init__.py -------------------------------------------------------------------------------- /streamalert/scheduled_queries/config/lambda_conf.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright 2017-present, Airbnb Inc. 3 | 4 | Licensed under the Apache License, Version 2.0 (the "License"); 5 | you may not use this file except in compliance with the License. 6 | You may obtain a copy of the License at 7 | 8 | http://www.apache.org/licenses/LICENSE-2.0 9 | 10 | Unless required by applicable law or agreed to in writing, software 11 | distributed under the License is distributed on an "AS IS" BASIS, 12 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | See the License for the specific language governing permissions and 14 | limitations under the License. 
def get_streamquery_env_vars():
    """Returns environment variables pertinent to StreamQuery

    Returns:
        dict: StreamQuery configuration assembled from Lambda environment
            variables; raises KeyError if any required variable is unset
    """
    # Map each config key to the environment variable that supplies it
    env_var_names = {
        'aws_region': 'REGION',
        'log_level': 'LOGGER_LEVEL',
        'athena_database': 'ATHENA_DATABASE',
        'athena_results_bucket': 'ATHENA_RESULTS_BUCKET',
        'kinesis_stream': 'KINESIS_STREAM',
    }
    config = {key: os.environ[name] for key, name in env_var_names.items()}
    config['command_name'] = 'StreamQuery'
    return config
def handler(event, _):
    """AWS Lambda entry point for StreamQuery scheduled queries.

    Args:
        event (dict): Lambda invocation event, passed straight through to the
            ScheduledQueries application

    Returns:
        The result of ScheduledQueries().run(event)
    """
    return ScheduledQueries().run(event)
class Clock:
    """A service that provides time and time-manipulation methods"""

    def __init__(self):
        # Snapshot wall-clock time once at construction; it never advances on
        # its own and only changes via time_machine()
        self._internal_time = datetime.utcnow()

    @property
    def now(self):
        """Returns the clock's current time as a datetime object.

        (!) EXTREMELY IMPORTANT DETAIL: this is a modification-safe copy; the
        internal clock itself stays frozen at "_internal_time" and is only
        ever moved by time_machine().

        Returns:
            datetime
        """
        frozen_time = self._internal_time
        return copy(frozen_time)

    def time_machine(self, new_time):
        """Re-points the Clock's internal time.

        Args:
            new_time (datetime): The value subsequent reads of `now` reflect
        """
        self._internal_time = new_time
class StreamAlertError(Exception):
    """Base StreamAlert exception for inheritance"""


class ConfigError(StreamAlertError, ValueError):
    """Exception to be used for config related errors

    Also subclasses ValueError so callers may catch either exception type.
    """
def ignore_conditional_failure(func):
    """Decorator which ignores ClientErrors due to ConditionalCheckFailed.

    Conditional checks prevent Dynamo updates from finishing if the existing state
    doesn't match expectations. For example, if an Alert no longer exists, we don't
    want to send any other updates.

    Args:
        func (function): Function with a conditional Dynamo update call.

    Returns:
        function: Wrapped function which ignores failures due to conditional checks.
    """
    import functools  # local import keeps this fix self-contained

    # functools.wraps preserves func's __name__/__doc__ on the wrapper, which
    # the original implementation lost (hurting logging and debugging)
    @functools.wraps(func)
    def inner(*args, **kwargs):
        """Ignore ConditionalCheckFailedException"""
        try:
            func(*args, **kwargs)
        except ClientError as error:
            # Any other client error is unexpected and must propagate
            if error.response['Error']['Code'] != 'ConditionalCheckFailedException':
                raise

    return inner
class LookupTable:
    """
    A single LookupTable instance.

    LookupTables offer a standardized interface, backed by the PersistenceDriver
    system in the background. The underlying driver is initialized lazily, on
    the first key lookup.
    """

    def __init__(self, table_name, driver, table_configuration):
        self._table_name = table_name
        self._table_configuration = table_configuration
        self._driver = driver  # type: PersistenceDriver
        self._initialized = False

    @property
    def table_name(self):
        return self._table_name

    @property
    def driver_id(self):
        return self._driver.id

    @property
    def driver_type(self):
        return self._driver.driver_type

    def get(self, key, default=None):
        """
        Retrieves the value of a key in the current LookupTable

        Args:
            key (str)
            default (mixed)

        Returns:
            mixed
        """
        self._initialize_if_necessary()
        return self._driver.get(key, default)

    def _initialize_if_necessary(self):
        """
        Initializes the LookupTable's underlying driver, if it has not yet been
        initialized. Subsequent calls are no-ops.
        """
        if not self._initialized:
            self._initialized = True
            self._driver.initialize()
6 | You may obtain a copy of the License at 7 | 8 | http://www.apache.org/licenses/LICENSE-2.0 9 | 10 | Unless required by applicable law or agreed to in writing, software 11 | distributed under the License is distributed on an "AS IS" BASIS, 12 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | See the License for the specific language governing permissions and 14 | limitations under the License. 15 | """ 16 | 17 | 18 | class ThreatStreamException(Exception): 19 | """Base exception class ThreatStream Error""" 20 | 21 | 22 | class ThreatStreamCredsError(ThreatStreamException): 23 | """Class for API Credential errors""" 24 | 25 | 26 | class ThreatStreamLambdaInvokeError(ThreatStreamException): 27 | """Class for Lambda Invoke Error""" 28 | 29 | 30 | class ThreatStreamRequestsError(ThreatStreamException): 31 | """Class for requests return code errors""" 32 | -------------------------------------------------------------------------------- /streamalert_cli/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright 2017-present Airbnb, Inc. 3 | 4 | Licensed under the Apache License, Version 2.0 (the "License"); 5 | you may not use this file except in compliance with the License. 6 | You may obtain a copy of the License at 7 | 8 | http://www.apache.org/licenses/LICENSE-2.0 9 | 10 | Unless required by applicable law or agreed to in writing, software 11 | distributed under the License is distributed on an "AS IS" BASIS, 12 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | See the License for the specific language governing permissions and 14 | limitations under the License.
15 | """ 16 | import os 17 | 18 | STREAMALERT_CLI_ROOT = os.path.dirname(os.path.abspath(__file__)) 19 | -------------------------------------------------------------------------------- /streamalert_cli/_infrastructure/_include.tf: -------------------------------------------------------------------------------- 1 | terraform { 2 | required_version = "~> 0.13.0" 3 | 4 | required_providers { 5 | aws = { 6 | source = "hashicorp/aws" 7 | version = ">= 3.60.0, < 4.0.0" 8 | } 9 | } 10 | } 11 | -------------------------------------------------------------------------------- /streamalert_cli/_infrastructure/_variables.tf: -------------------------------------------------------------------------------- 1 | variable "region" { 2 | type = string 3 | } 4 | -------------------------------------------------------------------------------- /streamalert_cli/_infrastructure/modules/tf_alert_merger_iam/README.md: -------------------------------------------------------------------------------- 1 | # Alert Merger Permissions 2 | This module adds IAM permissions specific to the alert merger: 3 | * Managing the alerts table 4 | * Invoking the alert processor 5 | -------------------------------------------------------------------------------- /streamalert_cli/_infrastructure/modules/tf_alert_merger_iam/main.tf: -------------------------------------------------------------------------------- 1 | // Allow the Alert Merger to query and update the alerts table 2 | resource "aws_iam_role_policy" "manage_alerts_table" { 3 | name = "ManageAlertsTable" 4 | role = var.role_id 5 | policy = data.aws_iam_policy_document.manage_alerts_table.json 6 | } 7 | 8 | data "aws_iam_policy_document" "manage_alerts_table" { 9 | statement { 10 | effect = "Allow" 11 | 12 | actions = [ 13 | "dynamodb:BatchWriteItem", 14 | "dynamodb:DeleteItem", 15 | "dynamodb:Query", 16 | "dynamodb:Scan", 17 | "dynamodb:UpdateItem", 18 | ] 19 | 20 | resources = 
["arn:aws:dynamodb:${var.region}:${var.account_id}:table/${var.prefix}_streamalert_alerts"] 21 | } 22 | } 23 | 24 | // Allow the Alert Merger to invoke the Alert Processor 25 | resource "aws_iam_role_policy" "invoke_alert_processor" { 26 | name = "InvokeAlertProcessor" 27 | role = var.role_id 28 | policy = data.aws_iam_policy_document.invoke_alert_processor.json 29 | } 30 | 31 | data "aws_iam_policy_document" "invoke_alert_processor" { 32 | statement { 33 | effect = "Allow" 34 | actions = ["lambda:InvokeFunction"] 35 | resources = ["arn:aws:lambda:${var.region}:${var.account_id}:function:${var.prefix}_streamalert_alert_processor"] 36 | } 37 | } 38 | -------------------------------------------------------------------------------- /streamalert_cli/_infrastructure/modules/tf_alert_merger_iam/variables.tf: -------------------------------------------------------------------------------- 1 | variable "account_id" { 2 | description = "12-digit AWS Account ID" 3 | } 4 | 5 | variable "region" { 6 | description = "AWS region identifier" 7 | } 8 | 9 | variable "prefix" { 10 | description = "Prefix for resource names" 11 | } 12 | 13 | variable "role_id" { 14 | description = "Alert processor IAM Role ID" 15 | } 16 | -------------------------------------------------------------------------------- /streamalert_cli/_infrastructure/modules/tf_alert_processor_iam/README.md: -------------------------------------------------------------------------------- 1 | # Alert Processor Permissions 2 | This module adds IAM permissions specific to the alert processor: 3 | * Reading and decrypting output secrets 4 | * Sending to outputs (Lambda, S3, SNS, SQS) 5 | -------------------------------------------------------------------------------- /streamalert_cli/_infrastructure/modules/tf_alert_processor_iam/variables.tf: -------------------------------------------------------------------------------- 1 | variable "account_id" { 2 | description = "12-digit AWS Account ID" 3 | } 4 | 5 | variable 
"region" { 6 | description = "AWS region identifier" 7 | } 8 | 9 | variable "prefix" { 10 | description = "Prefix for resource names" 11 | } 12 | 13 | variable "role_id" { 14 | description = "Alert processor IAM Role ID" 15 | } 16 | 17 | variable "kms_key_arn" { 18 | description = "KMS key ARN used for (client-side) encrypting output secrets" 19 | } 20 | 21 | variable "sse_kms_key_arn" { 22 | description = "KMS key ARN for server-side encryption of the secrets bucket" 23 | } 24 | 25 | variable "output_lambda_functions" { 26 | type = list(string) 27 | default = [] 28 | description = "Optional list of configured Lambda outputs (function names)" 29 | } 30 | 31 | variable "output_s3_buckets" { 32 | type = list(string) 33 | default = [] 34 | description = "Optional list of configured S3 bucket outputs (bucket names)" 35 | } 36 | 37 | variable "output_sns_topics" { 38 | type = list(string) 39 | default = [] 40 | description = "Optional list of configured SNS outputs (topic names)" 41 | } 42 | 43 | variable "output_sqs_queues" { 44 | type = list(string) 45 | default = [] 46 | description = "Optional list of configured SQS outputs (queue names)" 47 | } 48 | -------------------------------------------------------------------------------- /streamalert_cli/_infrastructure/modules/tf_app_iam/variables.tf: -------------------------------------------------------------------------------- 1 | variable "account_id" { 2 | type = string 3 | } 4 | 5 | variable "region" { 6 | type = string 7 | } 8 | 9 | variable "function_name" { 10 | type = string 11 | } 12 | 13 | variable "function_role_id" { 14 | type = string 15 | } 16 | 17 | variable "destination_function_name" { 18 | type = string 19 | } 20 | -------------------------------------------------------------------------------- /streamalert_cli/_infrastructure/modules/tf_artifact_extractor/variables.tf: -------------------------------------------------------------------------------- 1 | variable "account_id" { 2 | type = string 3 | } 4 
| 5 | variable "region" { 6 | type = string 7 | } 8 | 9 | variable "prefix" { 10 | type = string 11 | } 12 | 13 | variable "glue_catalog_db_name" { 14 | type = string 15 | description = "Athena Database name" 16 | } 17 | 18 | variable "glue_catalog_table_name" { 19 | type = string 20 | description = "Athena table name for Artifacts" 21 | } 22 | 23 | variable "s3_bucket_name" { 24 | type = string 25 | description = "StreamAlert data bucket name" 26 | } 27 | 28 | variable "stream_name" { 29 | type = string 30 | description = "Fully qualified name to use for delivery stream" 31 | } 32 | 33 | variable "buffer_size" { 34 | default = 5 35 | } 36 | 37 | variable "buffer_interval" { 38 | default = 300 39 | } 40 | 41 | variable "kms_key_arn" { 42 | type = string 43 | } 44 | 45 | variable "schema" { 46 | type = list(tuple([string, string])) 47 | } 48 | -------------------------------------------------------------------------------- /streamalert_cli/_infrastructure/modules/tf_athena/README.md: -------------------------------------------------------------------------------- 1 | # Athena Partitioner Permissions 2 | This module adds IAM permissions and other specific resources needed in the Athena partitioner function: 3 | * Athena Database for querying alerts and historical data 4 | * S3 Bucket for storing the results of Athena queries 5 | * SQS Queue for receiving event notifications from S3 buckets 6 | * S3 Event Notifications for sending messages to SQS Queue when objects are created 7 | * KMS Key and Alias for encrypting/decrypting messages on SQS Queue 8 | * Permissions for sending data to SQS Queue and reading/writing data in S3 9 | -------------------------------------------------------------------------------- /streamalert_cli/_infrastructure/modules/tf_athena/kms.tf: -------------------------------------------------------------------------------- 1 | // KMS key: Server-Side Encryption for SQS 2 | resource "aws_kms_key" "sse" { 3 | description = "Athena SQS server-side 
encryption" 4 | enable_key_rotation = true 5 | 6 | policy = data.aws_iam_policy_document.kms_sse.json 7 | 8 | tags = { 9 | Name = "StreamAlert" 10 | Subcomponent = "AthenaPartitioner" 11 | } 12 | } 13 | 14 | resource "aws_kms_alias" "sse" { 15 | name = "alias/${var.prefix}_streamalert_sqs_sse" 16 | target_key_id = aws_kms_key.sse.key_id 17 | } 18 | 19 | // Allow S3 to use the SSE key when publishing events to SQS 20 | data "aws_iam_policy_document" "kms_sse" { 21 | statement { 22 | sid = "Enable IAM User Permissions" 23 | effect = "Allow" 24 | 25 | principals { 26 | type = "AWS" 27 | identifiers = ["arn:aws:iam::${var.account_id}:root"] 28 | } 29 | 30 | actions = ["kms:*"] 31 | resources = ["*"] 32 | } 33 | 34 | statement { 35 | sid = "AllowS3ToUseKey" 36 | effect = "Allow" 37 | 38 | principals { 39 | type = "Service" 40 | identifiers = ["s3.amazonaws.com"] 41 | } 42 | 43 | actions = [ 44 | "kms:Decrypt", 45 | "kms:GenerateDataKey", 46 | ] 47 | 48 | resources = ["*"] 49 | } 50 | } 51 | -------------------------------------------------------------------------------- /streamalert_cli/_infrastructure/modules/tf_athena/outputs.tf: -------------------------------------------------------------------------------- 1 | output "results_bucket_arn" { 2 | value = aws_s3_bucket.athena_results_bucket.arn 3 | } 4 | -------------------------------------------------------------------------------- /streamalert_cli/_infrastructure/modules/tf_athena/variables.tf: -------------------------------------------------------------------------------- 1 | variable "account_id" { 2 | type = string 3 | } 4 | 5 | variable "prefix" { 6 | type = string 7 | } 8 | 9 | variable "function_role_id" { 10 | description = "Athena Partitioner function IAM Role ID, exported from the tf_lambda module" 11 | } 12 | 13 | variable "function_alias_arn" { 14 | description = "Athena Partitioner function alias arn, exported from the tf_lambda module" 15 | } 16 | 17 | variable "function_name" { 18 | description = 
"Athena Partitioner function name, exported from the tf_lambda module" 19 | } 20 | 21 | variable "athena_data_buckets" { 22 | type = list(string) 23 | } 24 | 25 | variable "results_bucket" { 26 | type = string 27 | } 28 | 29 | variable "kms_key_id" { 30 | type = string 31 | } 32 | 33 | variable "s3_logging_bucket" { 34 | type = string 35 | } 36 | 37 | variable "database_name" { 38 | type = string 39 | } 40 | 41 | variable "queue_name" { 42 | type = string 43 | } 44 | 45 | variable "lambda_timeout" { 46 | type = number 47 | } 48 | -------------------------------------------------------------------------------- /streamalert_cli/_infrastructure/modules/tf_classifier/README.md: -------------------------------------------------------------------------------- 1 | # Classifier Permissions 2 | This module adds IAM permissions and other specific resources needed in the classifier function: 3 | * Permissions for sending data to StreamAlert Data Firehoses 4 | * SQS Queue that the Rules Engine function reads from 5 | * Permissions for sending messages to the above SQS Queue 6 | * SNS topic subscription(s) for SNS topics that should be able to invoke the Classifier 7 | -------------------------------------------------------------------------------- /streamalert_cli/_infrastructure/modules/tf_classifier/firehose.tf: -------------------------------------------------------------------------------- 1 | // IAM Role Policy: Allow the Classifier to send data to Firehose 2 | resource "aws_iam_role_policy" "classifier_firehose" { 3 | name = "FirehoseWriteData" 4 | role = var.function_role_id 5 | policy = data.aws_iam_policy_document.classifier_firehose.json 6 | } 7 | 8 | locals { 9 | stream_prefix = "${var.firehose_use_prefix ? 
"${var.prefix}_" : ""}streamalert_" 10 | } 11 | 12 | // IAM Policy Doc: Allow the Classifier to PutRecord* on any StreamAlert Data Firehose 13 | data "aws_iam_policy_document" "classifier_firehose" { 14 | statement { 15 | effect = "Allow" 16 | 17 | actions = [ 18 | "firehose:PutRecord*", 19 | "firehose:DescribeDeliveryStream", 20 | "firehose:ListDeliveryStreams", 21 | ] 22 | 23 | resources = [ 24 | "arn:aws:firehose:${var.region}:${var.account_id}:deliverystream/${local.stream_prefix}*", 25 | ] 26 | } 27 | } 28 | -------------------------------------------------------------------------------- /streamalert_cli/_infrastructure/modules/tf_classifier/iam.tf: -------------------------------------------------------------------------------- 1 | resource "aws_iam_role_policy" "classifier_policy" { 2 | name = "WriteAndEncryptSQS" 3 | role = var.function_role_id 4 | policy = data.aws_iam_policy_document.classifier_policy.json 5 | } 6 | 7 | # Sending messages to the classifier SQS queue 8 | data "aws_iam_policy_document" "classifier_policy" { 9 | statement { 10 | sid = "AllowSSE" 11 | 12 | actions = [ 13 | "kms:Decrypt", 14 | "kms:GenerateDataKey", 15 | ] 16 | 17 | resources = [var.classifier_sqs_sse_kms_key_arn] 18 | } 19 | 20 | statement { 21 | sid = "AllowPublishToQueue" 22 | actions = ["sqs:SendMessage*"] 23 | resources = [var.classifier_sqs_queue_arn] 24 | } 25 | } 26 | -------------------------------------------------------------------------------- /streamalert_cli/_infrastructure/modules/tf_classifier/sns.tf: -------------------------------------------------------------------------------- 1 | // SNS Topic Subscription: Subscribe the Classifier to configured SNS topics 2 | resource "aws_sns_topic_subscription" "input_topic_subscriptions" { 3 | count = length(var.input_sns_topics) 4 | topic_arn = element(var.input_sns_topics, count.index) 5 | endpoint = var.function_alias_arn 6 | protocol = "lambda" 7 | } 8 | 
-------------------------------------------------------------------------------- /streamalert_cli/_infrastructure/modules/tf_classifier/variables.tf: -------------------------------------------------------------------------------- 1 | variable "account_id" { 2 | type = string 3 | } 4 | 5 | variable "region" { 6 | type = string 7 | } 8 | 9 | variable "prefix" { 10 | type = string 11 | } 12 | 13 | variable "function_role_id" { 14 | description = "Classifier function IAM Role ID, exported from the tf_lambda module" 15 | } 16 | 17 | variable "function_alias_arn" { 18 | description = "Classifier function alias arn, exported from the tf_lambda module" 19 | } 20 | 21 | variable "function_name" { 22 | description = "Classifier function name, exported from the tf_lambda module" 23 | } 24 | 25 | variable "input_sns_topics" { 26 | description = "SNS topics to which the classifier function should subscribe" 27 | type = list(string) 28 | default = [] 29 | } 30 | 31 | variable "classifier_sqs_queue_arn" { 32 | description = "ARN of the SQS queue to which classified logs should be sent" 33 | } 34 | 35 | variable "classifier_sqs_sse_kms_key_arn" { 36 | description = "ARN of the KMS key that handles server-side-encryption of classifier SQS frames" 37 | } 38 | 39 | variable "firehose_use_prefix" { 40 | description = "When true, prepends the StreamAlert prefix to the AWS Firehose permissions" 41 | } 42 | -------------------------------------------------------------------------------- /streamalert_cli/_infrastructure/modules/tf_cloudtrail/modules/tf_cloudtrail_cloudwatch/output.tf: -------------------------------------------------------------------------------- 1 | output "cloudtrail_to_cloudwatch_logs_role" { 2 | value = aws_iam_role.cloudtrail_to_cloudwatch_role.arn 3 | } 4 | 5 | // CloudTrail requires the log stream wildcard here 6 | output "cloudwatch_logs_group_arn" { 7 | value = "${aws_cloudwatch_log_group.cloudtrail_logging.arn}:*" 8 | } 9 | 
-------------------------------------------------------------------------------- /streamalert_cli/_infrastructure/modules/tf_cloudtrail/modules/tf_cloudtrail_cloudwatch/variables.tf: -------------------------------------------------------------------------------- 1 | variable "region" { 2 | type = string 3 | description = "AWS region where the CloudWatch Logs resources should be created" 4 | } 5 | 6 | variable "prefix" { 7 | type = string 8 | description = "Resource prefix namespace" 9 | } 10 | 11 | variable "cluster" { 12 | type = string 13 | description = "Name of the cluster" 14 | } 15 | 16 | variable "cloudwatch_destination_arn" { 17 | type = string 18 | description = "ARN of the CloudWatch Destination to forward logs to that are sent to a CloudWatch Logs Group" 19 | } 20 | 21 | variable "retention_in_days" { 22 | default = 1 23 | description = "Days for which to retain logs in the CloudWatch Logs Group" 24 | } 25 | 26 | variable "exclude_home_region_events" { 27 | default = false 28 | description = "Set to `true` to omit CloudTrail events logged in the 'home' region. This is useful when global CloudTrail is desired, and a CloudWatch Logs Group is used, but home events are already collected (e.g. via another CloudTrail)" 29 | } 30 | -------------------------------------------------------------------------------- /streamalert_cli/_infrastructure/modules/tf_cloudwatch_events/README.md: -------------------------------------------------------------------------------- 1 | # StreamAlert CloudWatch Events Terraform Module 2 | Configure the necessary resources to deliver all events published to CloudWatch Events to AWS Kinesis. 3 | 4 | ## Components 5 | * Configures a CloudWatch Event to log all API calls to Kinesis. 6 | * Creates an IAM Role/Policy to allow CloudWatch Events to deliver to Kinesis. 
7 | 8 | ## Example 9 | ```hcl 10 | module "cloudwatch_events" { 11 | source = "./modules/tf_cloudwatch_events" 12 | prefix = "company" 13 | cluster = "prod" 14 | event_pattern = "{"accountId": ["123456789012"]}" 15 | kinesis_arn = "arn:aws:kinesis:us-east-1:123456789012:stream/company_prod_streamalert" // Output from another module 16 | } 17 | ``` 18 | 19 | ## Inputs 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | 42 | 43 | 44 | 45 | 46 |
PropertyDescriptionDefault (None=Required)
prefixResource prefix namespaceNone
clusterName of the clusterNone
event_patternEvent pattern used to filter events. See: https://docs.aws.amazon.com/AmazonCloudWatch/latest/events/CloudWatchEventsandEventPatterns.html#CloudWatchEventsPatternsnull (not required)
kinesis_arnThe ARN of the Kinesis Stream to deliver CloudTrail logsNone
47 | -------------------------------------------------------------------------------- /streamalert_cli/_infrastructure/modules/tf_cloudwatch_events/cross_account/README.md: -------------------------------------------------------------------------------- 1 | # StreamAlert CloudWatch Events Cross Account Terraform Module 2 | Configure the necessary resources to allow for cross account CloudWatch Events via EventBridge Events Bus 3 | 4 | ## Components 5 | * Configures CloudWatch Event Permissions to allow external accounts or organizations to send events to the main account 6 | 7 | ## Example 8 | ```hcl 9 | module "cloudwatch_events_cross_account" { 10 | source = "./modules/tf_cloudwatch_events/cross_account" 11 | accounts = ["123456789012"] 12 | organizations = ["o-aabbccddee"] 13 | region = "us-east-1" 14 | } 15 | ``` 16 | 17 | ## Inputs 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | 37 | 38 | 39 |
PropertyDescriptionDefault (None=Required)
accountsAWS Account IDs for which to enable cross account CloudWatch EventsNone
organizationsAWS Organization IDs for which to enable cross account CloudWatch EventsNone
regionAWS region in which this permission is being addedNone
40 | -------------------------------------------------------------------------------- /streamalert_cli/_infrastructure/modules/tf_cloudwatch_events/cross_account/main.tf: -------------------------------------------------------------------------------- 1 | // CloudWatch Event Permission for Individual AWS Accounts 2 | resource "aws_cloudwatch_event_permission" "account_access" { 3 | count = length(var.accounts) 4 | principal = element(var.accounts, count.index) 5 | statement_id = "account_${element(var.accounts, count.index)}_${var.region}" 6 | } 7 | 8 | // CloudWatch Event Permission for AWS Orgs 9 | resource "aws_cloudwatch_event_permission" "organization_access" { 10 | count = length(var.organizations) 11 | principal = "*" 12 | statement_id = "organization_${element(var.organizations, count.index)}_${var.region}" 13 | 14 | condition { 15 | key = "aws:PrincipalOrgID" 16 | type = "StringEquals" 17 | value = element(var.organizations, count.index) 18 | } 19 | } 20 | -------------------------------------------------------------------------------- /streamalert_cli/_infrastructure/modules/tf_cloudwatch_events/cross_account/variables.tf: -------------------------------------------------------------------------------- 1 | variable "accounts" { 2 | type = list(string) 3 | } 4 | 5 | variable "organizations" { 6 | type = list(string) 7 | } 8 | 9 | variable "region" { 10 | type = string 11 | } 12 | -------------------------------------------------------------------------------- /streamalert_cli/_infrastructure/modules/tf_cloudwatch_events/variables.tf: -------------------------------------------------------------------------------- 1 | variable "prefix" { 2 | type = string 3 | } 4 | 5 | variable "cluster" { 6 | type = string 7 | } 8 | 9 | variable "event_pattern" { 10 | type = string 11 | default = null 12 | } 13 | 14 | variable "kinesis_arn" { 15 | type = string 16 | } 17 | -------------------------------------------------------------------------------- 
/streamalert_cli/_infrastructure/modules/tf_cloudwatch_logs_destination/modules/destination/iam.tf: -------------------------------------------------------------------------------- 1 | // IAM Policy Doc: Allow Cross Account CloudWatch Logging 2 | data "aws_iam_policy_document" "cloudwatch_logs_destination_policy" { 3 | statement { 4 | sid = "DestinationPolicy" 5 | effect = "Allow" 6 | 7 | principals { 8 | type = "AWS" 9 | 10 | identifiers = var.account_ids 11 | } 12 | 13 | actions = [ 14 | "logs:PutSubscriptionFilter", 15 | ] 16 | 17 | resources = [ 18 | aws_cloudwatch_log_destination.cloudwatch_to_kinesis.arn, 19 | ] 20 | } 21 | } 22 | -------------------------------------------------------------------------------- /streamalert_cli/_infrastructure/modules/tf_cloudwatch_logs_destination/modules/destination/main.tf: -------------------------------------------------------------------------------- 1 | // Note: When creating cross-account log destinations, the destination must 2 | // be in the same AWS region as the log group that is sending it data. 3 | // However, the AWS resource that the destination points to can be 4 | // located in a different region. 
5 | // Source: http://amzn.to/2zF7CS0 6 | resource "aws_cloudwatch_log_destination" "cloudwatch_to_kinesis" { 7 | name = "${var.prefix}_${var.cluster}_streamalert_log_destination" 8 | role_arn = var.cloudwatch_logs_subscription_role_arn 9 | target_arn = var.destination_kinesis_stream_arn 10 | } 11 | 12 | resource "aws_cloudwatch_log_destination_policy" "cloudwatch_to_kinesis" { 13 | destination_name = aws_cloudwatch_log_destination.cloudwatch_to_kinesis.name 14 | access_policy = data.aws_iam_policy_document.cloudwatch_logs_destination_policy.json 15 | } 16 | -------------------------------------------------------------------------------- /streamalert_cli/_infrastructure/modules/tf_cloudwatch_logs_destination/modules/destination/output.tf: -------------------------------------------------------------------------------- 1 | output "cloudwatch_logs_destination_arn" { 2 | value = aws_cloudwatch_log_destination.cloudwatch_to_kinesis.arn 3 | } 4 | -------------------------------------------------------------------------------- /streamalert_cli/_infrastructure/modules/tf_cloudwatch_logs_destination/modules/destination/variables.tf: -------------------------------------------------------------------------------- 1 | variable "prefix" { 2 | type = string 3 | } 4 | 5 | variable "cluster" { 6 | type = string 7 | } 8 | 9 | variable "destination_kinesis_stream_arn" { 10 | type = string 11 | } 12 | 13 | variable "account_ids" { 14 | type = list(string) 15 | } 16 | 17 | // This is an output from the parent module 18 | variable "cloudwatch_logs_subscription_role_arn" { 19 | type = string 20 | } 21 | -------------------------------------------------------------------------------- /streamalert_cli/_infrastructure/modules/tf_cloudwatch_logs_destination/output.tf: -------------------------------------------------------------------------------- 1 | output "cloudwatch_logs_subscription_role_arn" { 2 | value = aws_iam_role.subscription_role.arn 3 | } 4 | 
-------------------------------------------------------------------------------- /streamalert_cli/_infrastructure/modules/tf_cloudwatch_logs_destination/variables.tf: -------------------------------------------------------------------------------- 1 | variable "prefix" { 2 | type = string 3 | } 4 | 5 | variable "cluster" { 6 | type = string 7 | } 8 | 9 | variable "destination_kinesis_stream_arn" { 10 | type = string 11 | } 12 | 13 | variable "regions" { 14 | type = list(string) 15 | } 16 | -------------------------------------------------------------------------------- /streamalert_cli/_infrastructure/modules/tf_flow_logs/iam.tf: -------------------------------------------------------------------------------- 1 | // IAM Policy Doc: AssumeRole for VPC Flow Logs 2 | data "aws_iam_policy_document" "flow_log_assume_role_policy" { 3 | statement { 4 | effect = "Allow" 5 | actions = ["sts:AssumeRole"] 6 | 7 | principals { 8 | type = "Service" 9 | identifiers = ["vpc-flow-logs.amazonaws.com"] 10 | } 11 | } 12 | } 13 | 14 | // Allow flow logs to write to CloudWatch 15 | // http://docs.aws.amazon.com/AmazonVPC/latest/UserGuide/flow-logs.html#flow-logs-iam 16 | // IAM Role: Clustered VPC Flow Log 17 | resource "aws_iam_role" "flow_log_role" { 18 | name = "${var.prefix}_${var.cluster}_flow_log_role" 19 | path = "/streamalert/" 20 | assume_role_policy = data.aws_iam_policy_document.flow_log_assume_role_policy.json 21 | 22 | tags = { 23 | Name = "StreamAlert" 24 | Cluster = var.cluster 25 | } 26 | } 27 | 28 | // IAM Policy: CloudWatch Put Events 29 | resource "aws_iam_role_policy" "flow_logs_write_to_cloudwatch_logs" { 30 | name = "CloudWatchPutEvents" 31 | role = aws_iam_role.flow_log_role.id 32 | policy = data.aws_iam_policy_document.flow_logs_write_to_cloudwatch_logs.json 33 | } 34 | 35 | // IAM Policy Doc: CloudWatch Put Events 36 | data "aws_iam_policy_document" "flow_logs_write_to_cloudwatch_logs" { 37 | statement { 38 | effect = "Allow" 39 | 40 | actions = [ 41 | 
"logs:CreateLogGroup", 42 | "logs:CreateLogStream", 43 | "logs:DescribeLogGroups", 44 | "logs:DescribeLogStreams", 45 | "logs:PutLogEvents", 46 | ] 47 | 48 | resources = [ 49 | "*", 50 | ] 51 | } 52 | } 53 | 54 | -------------------------------------------------------------------------------- /streamalert_cli/_infrastructure/modules/tf_flow_logs/output.tf: -------------------------------------------------------------------------------- 1 | output "cloudwatch_log_group" { 2 | value = aws_cloudwatch_log_group.flow_log_group.arn 3 | } 4 | 5 | -------------------------------------------------------------------------------- /streamalert_cli/_infrastructure/modules/tf_flow_logs/variables.tf: -------------------------------------------------------------------------------- 1 | variable "prefix" { 2 | type = string 3 | } 4 | 5 | variable "cluster" { 6 | type = string 7 | } 8 | 9 | variable "cloudwatch_logs_destination_arn" { 10 | type = string 11 | } 12 | 13 | variable "flow_log_filter" { 14 | default = "[version, account, eni, source, destination, srcport, destport, protocol, packets, bytes, windowstart, windowend, action, flowlogstatus]" 15 | } 16 | 17 | variable "log_retention" { 18 | default = 7 19 | } 20 | 21 | variable "enis" { 22 | type = list(string) 23 | default = [] 24 | } 25 | 26 | variable "subnets" { 27 | type = list(string) 28 | default = [] 29 | } 30 | 31 | variable "vpcs" { 32 | type = list(string) 33 | default = [] 34 | } 35 | -------------------------------------------------------------------------------- /streamalert_cli/_infrastructure/modules/tf_globals/README.md: -------------------------------------------------------------------------------- 1 | # StreamAlert Globals 2 | * This Terraform module creates various global infrastructure components 3 | 4 | ## Components 5 | * Kinesis Firehose Delivery Stream for Putting Alerts on S3 6 | 7 | ## Example 8 | ``` 9 | module "globals" { 10 | source = "../modules/tf_globals" 11 | account_id = "112233445566" 12 | 
region = "us-east-1" 13 | prefix = "mycompany" 14 | } 15 | ``` 16 | 17 | ## Inputs 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | 42 | 43 |
PropertyDescriptionDefaultRequired
account_idYour AWS Account IDNoneTrue
regionThe AWS region for your streamNoneTrue
prefixThe resource prefix, normally an organizational name or descriptorNoneTrue
44 | 45 | ## Outputs 46 | 47 | 48 | 49 | 50 | 51 |
PropertyDescription
52 | -------------------------------------------------------------------------------- /streamalert_cli/_infrastructure/modules/tf_globals/alerts_firehose/variables.tf: -------------------------------------------------------------------------------- 1 | variable "account_id" { 2 | type = string 3 | } 4 | 5 | variable "region" { 6 | type = string 7 | } 8 | 9 | variable "prefix" { 10 | type = string 11 | } 12 | 13 | variable "bucket_name" { 14 | type = string 15 | } 16 | 17 | variable "buffer_size" { 18 | type = number 19 | } 20 | 21 | variable "buffer_interval" { 22 | type = number 23 | } 24 | 25 | variable "cloudwatch_log_retention" { 26 | type = number 27 | } 28 | 29 | variable "file_format" { 30 | type = string 31 | } 32 | 33 | variable "kms_key_arn" { 34 | type = string 35 | } 36 | 37 | variable "alerts_db_name" { 38 | type = string 39 | } 40 | 41 | variable "alerts_schema" { 42 | type = list(tuple([string, string])) 43 | } 44 | -------------------------------------------------------------------------------- /streamalert_cli/_infrastructure/modules/tf_globals/classifier_queue/iam.tf: -------------------------------------------------------------------------------- 1 | // Allow Lambda to use the SSE key when publishing events to SQS 2 | data "aws_iam_policy_document" "kms_sse_allow" { 3 | statement { 4 | sid = "Enable IAM User Permissions" 5 | effect = "Allow" 6 | 7 | principals { 8 | type = "AWS" 9 | identifiers = ["arn:aws:iam::${var.account_id}:root"] 10 | } 11 | 12 | actions = ["kms:*"] 13 | resources = ["*"] 14 | } 15 | } 16 | 17 | -------------------------------------------------------------------------------- /streamalert_cli/_infrastructure/modules/tf_globals/classifier_queue/kms.tf: -------------------------------------------------------------------------------- 1 | // KMS key: Server-Side Encryption for Classifier SQS 2 | resource "aws_kms_key" "sqs_sse" { 3 | description = "Classifier SQS server-side encryption" 4 | enable_key_rotation = true 5 | 6 | 
policy = data.aws_iam_policy_document.kms_sse_allow.json 7 | 8 | tags = { 9 | Name = "StreamAlert" 10 | } 11 | } 12 | 13 | resource "aws_kms_alias" "sqs_sse" { 14 | name = "alias/${var.prefix}_streamalert_classifier_sqs_sse" 15 | target_key_id = aws_kms_key.sqs_sse.key_id 16 | } 17 | 18 | -------------------------------------------------------------------------------- /streamalert_cli/_infrastructure/modules/tf_globals/classifier_queue/output.tf: -------------------------------------------------------------------------------- 1 | # Using a list concat since terraform destroy throws errors if this does not exist 2 | output "sqs_queue_url" { 3 | value = element(concat(aws_sqs_queue.classifier_queue.*.id, [""]), 0) 4 | } 5 | 6 | # Using a list concat since terraform destroy throws errors if this does not exist 7 | output "sqs_queue_arn" { 8 | value = element(concat(aws_sqs_queue.classifier_queue.*.arn, [""]), 0) 9 | } 10 | 11 | # Using a list concat since terraform destroy throws errors if this does not exist 12 | output "sqs_sse_kms_key_arn" { 13 | value = element(concat(aws_kms_key.sqs_sse.*.arn, [""]), 0) 14 | } 15 | 16 | -------------------------------------------------------------------------------- /streamalert_cli/_infrastructure/modules/tf_globals/classifier_queue/sqs.tf: -------------------------------------------------------------------------------- 1 | // SQS Queue: Send logs from the Classifier to the SQS queue 2 | resource "aws_sqs_queue" "classifier_queue" { 3 | name = "${var.use_prefix ? 
"${var.prefix}_" : ""}streamalert_classified_logs" 4 | 5 | # The amount of time messages are hidden after being received from a consumer 6 | # Default this to 2 seconds longer than the maximum AWS Lambda duration 7 | visibility_timeout_seconds = var.rules_engine_timeout + 2 8 | 9 | # Enable queue encryption of messages in the queue 10 | kms_master_key_id = aws_kms_key.sqs_sse.arn 11 | 12 | tags = { 13 | Name = "StreamAlert" 14 | } 15 | } 16 | 17 | // SQS Queue Policy: Allow the Classifiers to send messages to SQS 18 | resource "aws_sqs_queue_policy" "classifier_queue" { 19 | queue_url = aws_sqs_queue.classifier_queue.id 20 | policy = data.aws_iam_policy_document.classifier_queue.json 21 | } 22 | 23 | // IAM Policy Doc: Allow Classifiers to send messages to SQS 24 | data "aws_iam_policy_document" "classifier_queue" { 25 | statement { 26 | effect = "Allow" 27 | sid = "AllowPublishToQueue" 28 | 29 | principals { 30 | type = "AWS" 31 | identifiers = ["*"] 32 | } 33 | 34 | actions = ["sqs:SendMessage"] 35 | resources = [aws_sqs_queue.classifier_queue.arn] 36 | 37 | condition { 38 | test = "ArnLike" 39 | variable = "aws:SourceArn" 40 | 41 | values = [ 42 | "arn:aws:lambda:${var.region}:${var.account_id}:function:${var.prefix}_*_streamalert_classifier", 43 | ] 44 | } 45 | } 46 | } 47 | 48 | -------------------------------------------------------------------------------- /streamalert_cli/_infrastructure/modules/tf_globals/classifier_queue/variables.tf: -------------------------------------------------------------------------------- 1 | variable "account_id" { 2 | type = string 3 | } 4 | 5 | variable "region" { 6 | type = string 7 | } 8 | 9 | variable "prefix" { 10 | type = string 11 | } 12 | 13 | variable "rules_engine_timeout" { 14 | } 15 | 16 | variable "use_prefix" { 17 | description = "When true, prepends the StreamAlert prefix to SQS resource name." 
18 | } 19 | 20 | -------------------------------------------------------------------------------- /streamalert_cli/_infrastructure/modules/tf_globals/lambda_layers/aliyun-python-sdk-actiontrail==2.0.0_dependencies.zip: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/airbnb/streamalert/75ba140d2e1aa6e903313d88326920adcb8bff45/streamalert_cli/_infrastructure/modules/tf_globals/lambda_layers/aliyun-python-sdk-actiontrail==2.0.0_dependencies.zip -------------------------------------------------------------------------------- /streamalert_cli/_infrastructure/modules/tf_globals/lambda_layers/boxsdk[jwt]==2.9.0_dependencies.zip: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/airbnb/streamalert/75ba140d2e1aa6e903313d88326920adcb8bff45/streamalert_cli/_infrastructure/modules/tf_globals/lambda_layers/boxsdk[jwt]==2.9.0_dependencies.zip -------------------------------------------------------------------------------- /streamalert_cli/_infrastructure/modules/tf_globals/output.tf: -------------------------------------------------------------------------------- 1 | output "rules_table_arn" { 2 | value = element(concat(aws_dynamodb_table.rules_table.*.arn, [""]), 0) 3 | } 4 | 5 | output "classifier_sqs_queue_url" { 6 | value = module.classifier_queue.sqs_queue_url 7 | } 8 | 9 | output "classifier_sqs_queue_arn" { 10 | value = module.classifier_queue.sqs_queue_arn 11 | } 12 | 13 | output "classifier_sqs_sse_kms_key_arn" { 14 | value = module.classifier_queue.sqs_sse_kms_key_arn 15 | } 16 | 17 | output "lamdba_layer_arns" { 18 | value = [ 19 | aws_lambda_layer_version.aliyun_dependencies.arn, 20 | aws_lambda_layer_version.box_dependencies.arn, 21 | ] 22 | } 23 | -------------------------------------------------------------------------------- /streamalert_cli/_infrastructure/modules/tf_globals/variables.tf: 
-------------------------------------------------------------------------------- 1 | variable "account_id" { 2 | type = string 3 | } 4 | 5 | variable "region" { 6 | type = string 7 | } 8 | 9 | variable "prefix" { 10 | type = string 11 | } 12 | 13 | variable "kms_key_arn" { 14 | type = string 15 | } 16 | 17 | variable "alerts_firehose_bucket_name" { 18 | type = string 19 | default = "" 20 | } 21 | 22 | variable "alerts_firehose_buffer_size" { 23 | type = number 24 | default = 128 25 | } 26 | 27 | variable "alerts_firehose_buffer_interval" { 28 | type = number 29 | default = 300 30 | } 31 | 32 | variable "alerts_firehose_cloudwatch_log_retention" { 33 | type = number 34 | default = 14 35 | } 36 | 37 | variable "alerts_table_read_capacity" { 38 | type = number 39 | default = 5 40 | } 41 | 42 | variable "alerts_table_write_capacity" { 43 | type = number 44 | default = 5 45 | } 46 | 47 | variable "alerts_db_name" {} 48 | 49 | variable "alerts_file_format" { 50 | type = string 51 | description = "Either parquet or json" 52 | } 53 | 54 | variable "alerts_schema" { 55 | type = list(tuple([string, string])) 56 | description = "Schema used to create Athena alerts table in terraform" 57 | } 58 | 59 | variable "enable_rule_staging" { 60 | default = false 61 | } 62 | 63 | variable "rules_table_read_capacity" { 64 | type = number 65 | default = 5 66 | } 67 | 68 | variable "rules_table_write_capacity" { 69 | type = number 70 | default = 5 71 | } 72 | 73 | variable "rules_engine_timeout" { 74 | default = 300 75 | } 76 | 77 | variable "sqs_use_prefix" { 78 | default = false 79 | } 80 | -------------------------------------------------------------------------------- /streamalert_cli/_infrastructure/modules/tf_kinesis_events/README.md: -------------------------------------------------------------------------------- 1 | # StreamAlert Kinesis Event Terraform Module 2 | 3 | * This Terraform module configures an AWS Lambda function to read events from a specific Kinesis Stream. 
4 | 5 | ## Inputs 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | 42 | 43 |
PropertyDescriptionDefaultRequired
batch_sizeThe number of records fetched from Kinesis on a single Lambda invocation100False
kinesis_stream_arnThe ARN of the Kinesis StreamNoneTrue
lambda_role_idThe AWS IAM Role ID attached to the Lambda functionNoneTrue
lambda_production_enabledEnable/Disable this event source mappingNoneTrue
lambda_function_arnThe ARN of the Lambda function to read from KinesisNoneTrue
44 | -------------------------------------------------------------------------------- /streamalert_cli/_infrastructure/modules/tf_kinesis_events/main.tf: -------------------------------------------------------------------------------- 1 | // AWS Lambda Function Policy 2 | resource "aws_iam_role_policy" "streamalert_lambda_kinesis" { 3 | name = "KinesisGetRecords" 4 | role = var.lambda_role_id 5 | policy = data.aws_iam_policy_document.kinesis_read.json 6 | } 7 | 8 | // IAM Policy Doc: List and Get records from Kinesis 9 | data "aws_iam_policy_document" "kinesis_read" { 10 | statement { 11 | effect = "Allow" 12 | 13 | actions = [ 14 | "kinesis:ListStreams", 15 | ] 16 | 17 | resources = [ 18 | "*", 19 | ] 20 | } 21 | 22 | statement { 23 | effect = "Allow" 24 | 25 | actions = [ 26 | "kinesis:DescribeStream", 27 | "kinesis:GetRecords", 28 | "kinesis:GetShardIterator", 29 | ] 30 | 31 | resources = [var.kinesis_stream_arn] 32 | } 33 | } 34 | 35 | resource "aws_lambda_event_source_mapping" "streamalert_kinesis_production_event_mapping" { 36 | enabled = var.lambda_production_enabled 37 | batch_size = var.batch_size 38 | event_source_arn = var.kinesis_stream_arn 39 | function_name = var.lambda_function_alias_arn 40 | starting_position = "TRIM_HORIZON" 41 | } 42 | -------------------------------------------------------------------------------- /streamalert_cli/_infrastructure/modules/tf_kinesis_events/variables.tf: -------------------------------------------------------------------------------- 1 | variable "batch_size" { 2 | default = 100 3 | } 4 | 5 | variable "kinesis_stream_arn" { 6 | type = string 7 | } 8 | 9 | variable "lambda_role_id" { 10 | type = string 11 | } 12 | 13 | variable "lambda_production_enabled" { 14 | } 15 | 16 | variable "lambda_function_alias_arn" { 17 | type = string 18 | } 19 | -------------------------------------------------------------------------------- /streamalert_cli/_infrastructure/modules/tf_kinesis_firehose_delivery_stream/outputs.tf: 
-------------------------------------------------------------------------------- 1 | output "firehose_arn" { 2 | value = aws_kinesis_firehose_delivery_stream.streamalert_data.arn 3 | } 4 | -------------------------------------------------------------------------------- /streamalert_cli/_infrastructure/modules/tf_kinesis_firehose_setup/main.tf: -------------------------------------------------------------------------------- 1 | // Policy for S3 bucket 2 | data "aws_iam_policy_document" "streamalert_data" { 3 | # Force SSL access only 4 | statement { 5 | sid = "ForceSSLOnlyAccess" 6 | 7 | effect = "Deny" 8 | 9 | principals { 10 | type = "AWS" 11 | identifiers = ["*"] 12 | } 13 | 14 | actions = ["s3:*"] 15 | 16 | resources = [ 17 | "arn:aws:s3:::${var.s3_bucket_name}", 18 | "arn:aws:s3:::${var.s3_bucket_name}/*", 19 | ] 20 | 21 | condition { 22 | test = "Bool" 23 | variable = "aws:SecureTransport" 24 | values = ["false"] 25 | } 26 | } 27 | } 28 | 29 | resource "aws_s3_bucket" "streamalert_data" { 30 | bucket = var.s3_bucket_name 31 | acl = "private" 32 | policy = data.aws_iam_policy_document.streamalert_data.json 33 | force_destroy = false 34 | 35 | versioning { 36 | enabled = true 37 | } 38 | 39 | logging { 40 | target_bucket = var.s3_logging_bucket 41 | target_prefix = "${var.s3_bucket_name}/" 42 | } 43 | 44 | server_side_encryption_configuration { 45 | rule { 46 | apply_server_side_encryption_by_default { 47 | sse_algorithm = "aws:kms" 48 | kms_master_key_id = var.kms_key_id 49 | } 50 | } 51 | } 52 | 53 | tags = { 54 | Name = "StreamAlert" 55 | } 56 | } 57 | -------------------------------------------------------------------------------- /streamalert_cli/_infrastructure/modules/tf_kinesis_firehose_setup/outputs.tf: -------------------------------------------------------------------------------- 1 | output "data_bucket_arn" { 2 | value = aws_s3_bucket.streamalert_data.arn 3 | } 4 | 5 | output "data_bucket_name" { 6 | value = aws_s3_bucket.streamalert_data.bucket 7 | 
} 8 | 9 | output "firehose_role_arn" { 10 | value = aws_iam_role.streamalert_kinesis_firehose.arn 11 | } 12 | -------------------------------------------------------------------------------- /streamalert_cli/_infrastructure/modules/tf_kinesis_firehose_setup/variables.tf: -------------------------------------------------------------------------------- 1 | variable "account_id" { 2 | type = string 3 | } 4 | 5 | variable "region" { 6 | type = string 7 | } 8 | 9 | variable "prefix" { 10 | type = string 11 | } 12 | 13 | variable "cloudwatch_log_group" { 14 | type = string 15 | default = "/aws/kinesisfirehose/streamalert" 16 | } 17 | 18 | variable "s3_bucket_name" { 19 | type = string 20 | } 21 | 22 | variable "s3_logging_bucket" { 23 | type = string 24 | } 25 | 26 | variable "kms_key_id" { 27 | type = string 28 | } 29 | -------------------------------------------------------------------------------- /streamalert_cli/_infrastructure/modules/tf_kinesis_streams/main.tf: -------------------------------------------------------------------------------- 1 | // AWS Kinesis Stream 2 | resource "aws_kinesis_stream" "streamalert_stream" { 3 | name = var.stream_name 4 | shard_count = var.shards 5 | retention_period = var.retention 6 | 7 | shard_level_metrics = var.shard_level_metrics 8 | 9 | tags = { 10 | Name = "StreamAlert" 11 | Cluster = var.cluster 12 | } 13 | } 14 | -------------------------------------------------------------------------------- /streamalert_cli/_infrastructure/modules/tf_kinesis_streams/outputs.tf: -------------------------------------------------------------------------------- 1 | output "arn" { 2 | value = aws_kinesis_stream.streamalert_stream.arn 3 | } 4 | 5 | output "stream_name" { 6 | value = "${aws_kinesis_stream.streamalert_stream.name}" 7 | } 8 | 9 | output "username" { 10 | value = aws_iam_user.streamalert.*.name 11 | } 12 | 13 | output "user_arn" { 14 | value = aws_iam_user.streamalert.*.arn 15 | } 16 | 17 | output "access_key_id" { 18 | value = 
aws_iam_access_key.streamalert.*.id 19 | } 20 | 21 | output "secret_key" { 22 | value = aws_iam_access_key.streamalert.*.secret 23 | } 24 | -------------------------------------------------------------------------------- /streamalert_cli/_infrastructure/modules/tf_kinesis_streams/variables.tf: -------------------------------------------------------------------------------- 1 | variable "account_id" { 2 | type = string 3 | } 4 | 5 | variable "region" { 6 | type = string 7 | } 8 | 9 | variable "prefix" { 10 | type = string 11 | } 12 | 13 | variable "cluster" { 14 | type = string 15 | } 16 | 17 | variable "access_key_count" { 18 | default = 1 19 | } 20 | 21 | variable "create_user" { 22 | default = true 23 | } 24 | 25 | variable "trusted_accounts" { 26 | default = [] 27 | } 28 | 29 | variable "retention" { 30 | default = 24 31 | } 32 | 33 | variable "stream_name" { 34 | type = string 35 | } 36 | 37 | variable "shards" { 38 | default = 1 39 | } 40 | 41 | // Default values for shard_level_metrics 42 | variable "shard_level_metrics" { 43 | type = list(string) 44 | default = [] 45 | } 46 | -------------------------------------------------------------------------------- /streamalert_cli/_infrastructure/modules/tf_lambda/iam.tf: -------------------------------------------------------------------------------- 1 | data "aws_iam_policy_document" "lambda_execution_policy" { 2 | statement { 3 | effect = "Allow" 4 | actions = ["sts:AssumeRole"] 5 | 6 | principals { 7 | type = "Service" 8 | identifiers = ["lambda.amazonaws.com"] 9 | } 10 | } 11 | } 12 | 13 | // Create the execution role for the Lambda function. 
14 | resource "aws_iam_role" "role" { 15 | name = "${var.function_name}_role" 16 | path = "/streamalert/" 17 | assume_role_policy = data.aws_iam_policy_document.lambda_execution_policy.json 18 | 19 | tags = local.tags 20 | } 21 | 22 | // Attach basic Lambda permissions 23 | resource "aws_iam_role_policy_attachment" "lambda_basic_policy" { 24 | role = aws_iam_role.role.id 25 | policy_arn = "arn:aws:iam::aws:policy/service-role/AWSLambdaBasicExecutionRole" 26 | } 27 | 28 | // Attach VPC policy (if applicable) 29 | resource "aws_iam_role_policy_attachment" "vpc_access" { 30 | count = local.vpc_enabled ? 1 : 0 31 | role = aws_iam_role.role.id 32 | policy_arn = "arn:aws:iam::aws:policy/service-role/AWSLambdaVPCAccessExecutionRole" 33 | } 34 | -------------------------------------------------------------------------------- /streamalert_cli/_infrastructure/modules/tf_lambda/output.tf: -------------------------------------------------------------------------------- 1 | output "function_arn" { 2 | value = aws_lambda_function.function.arn 3 | } 4 | 5 | output "role_arn" { 6 | value = aws_iam_role.role.arn 7 | } 8 | 9 | output "role_id" { 10 | value = aws_iam_role.role.id 11 | } 12 | 13 | output "function_alias" { 14 | value = aws_lambda_alias.alias.name 15 | } 16 | 17 | output "function_name" { 18 | value = aws_lambda_function.function.function_name 19 | } 20 | 21 | output "function_alias_arn" { 22 | value = aws_lambda_alias.alias.arn 23 | } 24 | 25 | output "log_group_name" { 26 | value = aws_cloudwatch_log_group.lambda_log_group.name 27 | } 28 | -------------------------------------------------------------------------------- /streamalert_cli/_infrastructure/modules/tf_lookup_tables_dynamodb/README.md: -------------------------------------------------------------------------------- 1 | # Lookup Tables Terraform for DynamoDB 2 | This module adds IAM permissions to read from LookupTables's DynamoDB resources. It grants these 3 | permissions to all Lambdas of the given roles. 
4 | -------------------------------------------------------------------------------- /streamalert_cli/_infrastructure/modules/tf_lookup_tables_dynamodb/main.tf: -------------------------------------------------------------------------------- 1 | data "aws_iam_policy_document" "streamalert_read_items_from_lookup_tables_dynamodb" { 2 | statement { 3 | actions = [ 4 | "dynamodb:GetItem", 5 | "dynamodb:DescribeTable", 6 | ] 7 | 8 | resources = local.dynamodb_table_arns 9 | } 10 | } 11 | 12 | module "aws_iam_policy_module" { 13 | source = "../tf_lookup_tables_policy" 14 | 15 | policy_json = data.aws_iam_policy_document.streamalert_read_items_from_lookup_tables_dynamodb.json 16 | roles = var.roles 17 | role_count = var.role_count 18 | type = "dynamodb" 19 | prefix = var.prefix 20 | } 21 | 22 | locals { 23 | // use the list of dynamodb table names to generate a list of ARNs 24 | dynamodb_table_arns = formatlist( 25 | "arn:aws:dynamodb:%s:%s:table/%s", 26 | var.region, 27 | var.account_id, 28 | var.dynamodb_tables, 29 | ) 30 | } 31 | -------------------------------------------------------------------------------- /streamalert_cli/_infrastructure/modules/tf_lookup_tables_dynamodb/variables.tf: -------------------------------------------------------------------------------- 1 | variable "account_id" { 2 | description = "AWS Account Id that the DynamoDB tables reside in" 3 | type = string 4 | } 5 | 6 | variable "region" { 7 | description = "AWS Region that the DynamoDB tables reside in" 8 | type = string 9 | } 10 | 11 | variable "prefix" { 12 | description = "StreamAlert prefix" 13 | type = string 14 | } 15 | 16 | variable "dynamodb_tables" { 17 | description = "List of DynamoDB table names to grant LookupTable access to; Cannot be empty!" 
18 | type = list(string) 19 | } 20 | 21 | variable "roles" { 22 | description = "List of role ids to grant LookupTable access to" 23 | type = list(string) 24 | } 25 | 26 | // The below is only necessary because of: 27 | // https://github.com/hashicorp/terraform/issues/10857 28 | // Fixed here: https://github.com/hashicorp/terraform/issues/12570#issuecomment-512621787 29 | variable "role_count" { 30 | description = "Count of role ids to grant LookupTable access to. Note: this is a workaround until terraform v0.12.0 is supported" 31 | } 32 | -------------------------------------------------------------------------------- /streamalert_cli/_infrastructure/modules/tf_lookup_tables_policy/README.md: -------------------------------------------------------------------------------- 1 | # Lookup Tables Terraform Policies 2 | This module is a reusable component that generates IAM Policies and Policy Attachments to attach to 3 | Lambda functions. 4 | 5 | This module is not meant to be used on its own; it is exclusively used by the other tf_lookup_tables_* 6 | modules as reusable code. 
-------------------------------------------------------------------------------- /streamalert_cli/_infrastructure/modules/tf_lookup_tables_policy/main.tf: -------------------------------------------------------------------------------- 1 | resource "aws_iam_policy" "streamalert_read_from_lookup_tables" { 2 | name = "${var.prefix}_StreamAlertReadFromLookupTablesPolicy_${var.type}" 3 | policy = var.policy_json 4 | } 5 | 6 | resource "aws_iam_role_policy_attachment" "streamalert_read_from_lookup_tables" { 7 | count = var.role_count 8 | role = element(var.roles, count.index) 9 | policy_arn = aws_iam_policy.streamalert_read_from_lookup_tables.arn 10 | } 11 | -------------------------------------------------------------------------------- /streamalert_cli/_infrastructure/modules/tf_lookup_tables_policy/variables.tf: -------------------------------------------------------------------------------- 1 | variable "prefix" { 2 | description = "StreamAlert prefix" 3 | type = string 4 | } 5 | 6 | variable "roles" { 7 | description = "A list of role ids to grant LookupTable access to" 8 | type = list(string) 9 | } 10 | 11 | variable "policy_json" { 12 | description = "Full json document of the policy document" 13 | type = string 14 | } 15 | 16 | variable "type" { 17 | description = "Type of access (e.g. s3 or dynamodb); used to suffix the policy name" 18 | type = string 19 | } 20 | 21 | // The below is only necessary because of: 22 | // https://github.com/hashicorp/terraform/issues/10857 23 | // Fixed here: https://github.com/hashicorp/terraform/issues/12570#issuecomment-512621787 24 | variable "role_count" { 25 | description = "Count of role ids to grant LookupTable access to. 
Note: this is a workaround until terraform v0.12.0 is supported" 26 | } 27 | -------------------------------------------------------------------------------- /streamalert_cli/_infrastructure/modules/tf_lookup_tables_s3/README.md: -------------------------------------------------------------------------------- 1 | # Lookup Tables Terraform for S3 2 | This module adds IAM permissions to read from LookupTables's S3 resources. It grants these 3 | permissions to all Lambdas of the given roles. 4 | -------------------------------------------------------------------------------- /streamalert_cli/_infrastructure/modules/tf_lookup_tables_s3/main.tf: -------------------------------------------------------------------------------- 1 | data "aws_iam_policy_document" "streamalert_read_items_from_lookup_tables_s3" { 2 | statement { 3 | actions = ["s3:List*"] 4 | resources = local.s3_bucket_arns 5 | } 6 | 7 | statement { 8 | actions = ["s3:Get*"] 9 | resources = local.s3_bucket_arn_star 10 | } 11 | } 12 | 13 | module "aws_iam_policy_module" { 14 | source = "../tf_lookup_tables_policy" 15 | policy_json = data.aws_iam_policy_document.streamalert_read_items_from_lookup_tables_s3.json 16 | roles = var.roles 17 | role_count = var.role_count 18 | type = "s3" 19 | prefix = var.prefix 20 | } 21 | 22 | locals { 23 | # Generate a list of S3 bucket ARNs 24 | s3_bucket_arns = formatlist("arn:aws:s3:::%s", var.s3_buckets) 25 | 26 | # Generate a list of S3 bucket ARNs, plus asterisk at the end to match any object in the bucket 27 | s3_bucket_arn_star = formatlist("arn:aws:s3:::%s/*", var.s3_buckets) 28 | } 29 | -------------------------------------------------------------------------------- /streamalert_cli/_infrastructure/modules/tf_lookup_tables_s3/variables.tf: -------------------------------------------------------------------------------- 1 | variable "prefix" { 2 | description = "StreamAlert prefix" 3 | type = string 4 | } 5 | 6 | variable "s3_buckets" { 7 | description = "A list of S3 
bucket names to grant LookupTables access to. Cannot be empty!" 8 | type = list(string) 9 | } 10 | 11 | variable "roles" { 12 | description = "List of role ids to grant LookupTable access to" 13 | type = list(string) 14 | } 15 | 16 | // The below is only necessary because of: 17 | // https://github.com/hashicorp/terraform/issues/10857 18 | // Fixed here: https://github.com/hashicorp/terraform/issues/12570#issuecomment-512621787 19 | variable "role_count" { 20 | description = "Count of role ids to grant LookupTable access to. Note: this is a workaround until terraform v0.12.0 is supported" 21 | } 22 | -------------------------------------------------------------------------------- /streamalert_cli/_infrastructure/modules/tf_metric_alarms/README.md: -------------------------------------------------------------------------------- 1 | # Custom Metric Alarms Terraform Module 2 | This Terraform module creates metric alarms for custom metrics. 3 | 4 | Creates `aws_cloudwatch_metric_alarm` resources for custom metrics. This is in its own module 5 | due to Terraform interpolation restrictions. 
6 | -------------------------------------------------------------------------------- /streamalert_cli/_infrastructure/modules/tf_metric_alarms/main.tf: -------------------------------------------------------------------------------- 1 | // CloudWatch metric alarm for a given metric 2 | resource "aws_cloudwatch_metric_alarm" "cloudwatch_metric_alarms" { 3 | alarm_name = var.alarm_name 4 | alarm_description = var.alarm_description 5 | comparison_operator = var.comparison_operator 6 | evaluation_periods = var.evaluation_periods 7 | metric_name = var.metric_name 8 | period = var.period 9 | statistic = var.statistic 10 | threshold = var.threshold 11 | alarm_actions = [var.sns_topic_arn] 12 | 13 | namespace = "StreamAlert" 14 | 15 | tags = { 16 | Name = "StreamAlert" 17 | } 18 | } 19 | -------------------------------------------------------------------------------- /streamalert_cli/_infrastructure/modules/tf_metric_alarms/variables.tf: -------------------------------------------------------------------------------- 1 | variable "alarm_name" { 2 | description = "Name for the alarm being created" 3 | type = string 4 | } 5 | 6 | variable "alarm_description" { 7 | description = "Description for the alarm being created" 8 | type = string 9 | } 10 | 11 | variable "comparison_operator" { 12 | description = "Comparison operator to use for this alarm. 
Choices are: GreaterThanOrEqualToThreshold, GreaterThanThreshold, LessThanThreshold, or LessThanOrEqualToThreshold" 13 | type = string 14 | } 15 | 16 | variable "evaluation_periods" { 17 | description = "Consecutive periods the metric threshold must be breached before triggering an alarm" 18 | type = string 19 | } 20 | 21 | variable "metric_name" { 22 | description = "Name of the metric being evaluated for this alarm" 23 | type = string 24 | } 25 | 26 | variable "period" { 27 | description = "Period over which to count the occurrences of this metric" 28 | type = string 29 | } 30 | 31 | variable "statistic" { 32 | description = "CloudWatch metric statistic to use when evaluating this metric. Choices are: SampleCount, Average, Sum, Minimum, or Maximum" 33 | type = string 34 | } 35 | 36 | variable "threshold" { 37 | description = "Alarm if number of occurrences of this metric exceed this value in the specified period(s)" 38 | type = string 39 | } 40 | 41 | variable "sns_topic_arn" { 42 | description = "SNS topic arn to use for alarm actions" 43 | type = string 44 | } 45 | -------------------------------------------------------------------------------- /streamalert_cli/_infrastructure/modules/tf_metric_filters/README.md: -------------------------------------------------------------------------------- 1 | # Metric Filters Terraform Module 2 | This Terraform module creates metric filters that are applied to CloudWatch Log Group(s). 3 | 4 | Creates `aws_cloudwatch_log_metric_filter` resources for custom metrics. This is in its own module 5 | due to Terraform interpolation restrictions. 
6 | -------------------------------------------------------------------------------- /streamalert_cli/_infrastructure/modules/tf_metric_filters/main.tf: -------------------------------------------------------------------------------- 1 | // CloudWatch metric filters for the specified log group 2 | resource "aws_cloudwatch_log_metric_filter" "cloudwatch_metric_filters" { 3 | name = var.metric_name 4 | pattern = var.metric_pattern 5 | log_group_name = var.log_group_name 6 | 7 | metric_transformation { 8 | name = var.metric_name 9 | value = var.metric_value 10 | default_value = var.metric_default_value 11 | 12 | namespace = "StreamAlert" 13 | } 14 | } 15 | -------------------------------------------------------------------------------- /streamalert_cli/_infrastructure/modules/tf_metric_filters/variables.tf: -------------------------------------------------------------------------------- 1 | variable "metric_name" { 2 | description = "Name to assign to the custom metric being created" 3 | type = string 4 | } 5 | 6 | variable "metric_value" { 7 | description = "The value that should be published to the metric" 8 | type = string 9 | } 10 | 11 | variable "metric_default_value" { 12 | description = "The value to emit when a filter pattern does not match a log event." 
13 | default = 0 14 | } 15 | 16 | // See: https://docs.aws.amazon.com/AmazonCloudWatch/latest/DeveloperGuide/FilterAndPatternSyntax.html 17 | variable "metric_pattern" { 18 | description = "A valid CloudWatch Logs filter pattern for extracting metric data out of ingested log events" 19 | type = string 20 | } 21 | 22 | variable "log_group_name" { 23 | description = "CloudWatch Log Group name to which the filter should be applied" 24 | type = string 25 | } 26 | -------------------------------------------------------------------------------- /streamalert_cli/_infrastructure/modules/tf_rule_promotion_iam/README.md: -------------------------------------------------------------------------------- 1 | # Rule Promotion Permissions 2 | This module adds IAM permissions specific to the rule promotion function: 3 | * Querying Athena for data 4 | * Reading the Rules DynamoDB Table 5 | * Sending message to SNS topic 6 | -------------------------------------------------------------------------------- /streamalert_cli/_infrastructure/modules/tf_rule_promotion_iam/variables.tf: -------------------------------------------------------------------------------- 1 | variable "digest_sns_topic" { 2 | description = "SNS topic name to use for alert statistics digests" 3 | } 4 | 5 | variable "rules_table_arn" { 6 | description = "Rules DynamoDB Table arn, exported from the tf_globals module" 7 | } 8 | 9 | variable "role_id" { 10 | description = "Rule Promotion IAM Role ID, exported from the tf_lambda module" 11 | } 12 | 13 | variable "function_alias_arn" { 14 | description = "Rule Promotion function alias arn, exported from the tf_lambda module" 15 | } 16 | 17 | variable "function_name" { 18 | description = "Rule Promotion function name, exported from the tf_lambda module" 19 | } 20 | 21 | variable "send_digest_schedule_expression" { 22 | description = "Cron or rate expression to be used for scheduling the sending of the rule staging digest" 23 | } 24 | 25 | variable 
"athena_results_bucket_arn" { 26 | description = "S3 bucket arn to use for Athena search results" 27 | } 28 | 29 | variable "alerts_bucket" { 30 | description = "Name of S3 bucket where alerts are stored and queryable by Athena" 31 | type = string 32 | } 33 | 34 | variable "s3_kms_key_arn" { 35 | description = "KMS key ARN used for server-side encryption" 36 | } 37 | -------------------------------------------------------------------------------- /streamalert_cli/_infrastructure/modules/tf_rules_engine/README.md: -------------------------------------------------------------------------------- 1 | # Rules Engine Permissions 2 | This module adds IAM permissions and other specific resources needed in the rules engine function: 3 | * Permissions for reading, decrypting, and deleting messages from classifier SQS Queue 4 | * Permissions for reading the threat intel DynamoDB table 5 | * Permissions for reading the rules DynamoDB table 6 | -------------------------------------------------------------------------------- /streamalert_cli/_infrastructure/modules/tf_rules_engine/lambda.tf: -------------------------------------------------------------------------------- 1 | // Invoke rules engine Lambda from downloader SQS queue 2 | resource "aws_lambda_event_source_mapping" "invoke_via_sqs" { 3 | batch_size = var.sqs_record_batch_size 4 | event_source_arn = var.classifier_sqs_queue_arn 5 | function_name = var.function_alias_arn 6 | } 7 | -------------------------------------------------------------------------------- /streamalert_cli/_infrastructure/modules/tf_rules_engine/variables.tf: -------------------------------------------------------------------------------- 1 | variable "account_id" { 2 | type = string 3 | } 4 | 5 | variable "region" { 6 | type = string 7 | } 8 | 9 | variable "prefix" { 10 | type = string 11 | } 12 | 13 | variable "function_role_id" { 14 | description = "Classifier function IAM Role ID, exported from the tf_lambda module" 15 | } 16 | 17 | variable 
"function_alias_arn" {
  description = "Classifier function alias arn, exported from the tf_lambda module"
}

variable "function_name" {
  description = "Classifier function name, exported from the tf_lambda module"
}

variable "threat_intel_enabled" {
  default = false
}

variable "dynamodb_table_name" {
  default = "streamalert_threat_intel_ioc_table"
}

variable "enable_rule_staging" {
  description = "Deploy rule staging resources if enabled"
  default     = false
}

variable "rules_table_arn" {
  description = "ARN of the rules table for reading rule staging information"
}

variable "classifier_sqs_queue_arn" {
  description = "ARN of the SQS queue to which classified logs should be sent"
}

variable "classifier_sqs_sse_kms_key_arn" {
  // Fixed copy-pasted description: this variable holds the KMS key ARN used
  // for SSE on the classifier queue, not an SQS queue URL
  description = "ARN of the KMS key used for server-side encryption of the classifier SQS queue"
}

variable "sqs_record_batch_size" {
  description = "Number of records the Lambda function should read from the SQS queue each time (max=10)"
}
-------------------------------------------------------------------------------- 1 | module "scheduled_queries_lambda" { 2 | source = "../tf_lambda" 3 | 4 | function_name = "${var.prefix}_streamalert_scheduled_queries_runner" 5 | description = "Lambda function that powers StreamQuery, StreamAlert's scheduled query service" 6 | runtime = "python3.7" 7 | handler = var.lambda_handler 8 | 9 | memory_size_mb = var.lambda_memory 10 | timeout_sec = var.lambda_timeout 11 | 12 | concurrency_limit = var.lambda_concurrency_limit 13 | 14 | environment_variables = { 15 | REGION = var.region 16 | ATHENA_DATABASE = var.athena_database 17 | ATHENA_RESULTS_BUCKET = var.athena_results_bucket 18 | KINESIS_STREAM = var.destination_kinesis_stream 19 | LOGGER_LEVEL = var.lambda_log_level 20 | } 21 | 22 | tags = { 23 | Subcomponent = "StreamQuery" 24 | } 25 | 26 | auto_publish_versions = true 27 | 28 | log_retention_days = var.lambda_log_retention_days 29 | alarm_actions = var.lambda_alarm_actions 30 | 31 | errors_alarm_enabled = var.lambda_alarms_enabled 32 | errors_alarm_evaluation_periods = var.lambda_error_evaluation_periods 33 | errors_alarm_period_secs = var.lambda_error_period_secs 34 | errors_alarm_threshold = var.lambda_error_threshold 35 | } 36 | -------------------------------------------------------------------------------- /streamalert_cli/_infrastructure/modules/tf_scheduled_queries/outputs.tf: -------------------------------------------------------------------------------- 1 | # Role id of the lambda function that runs scheduled queries 2 | output "lambda_function_role_id" { 3 | value = module.scheduled_queries_lambda.role_id 4 | } -------------------------------------------------------------------------------- /streamalert_cli/_infrastructure/modules/tf_threat_intel_downloader/README.md: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/airbnb/streamalert/75ba140d2e1aa6e903313d88326920adcb8bff45/streamalert_cli/_infrastructure/modules/tf_threat_intel_downloader/README.md -------------------------------------------------------------------------------- /streamalert_cli/_infrastructure/modules/tf_threat_intel_downloader/variables.tf: -------------------------------------------------------------------------------- 1 | variable "account_id" { 2 | type = string 3 | } 4 | 5 | variable "region" { 6 | type = string 7 | } 8 | 9 | variable "prefix" { 10 | type = string 11 | } 12 | 13 | variable "function_role_id" { 14 | description = "Threat Intel Downloader function IAM Role ID, exported from the tf_lambda module" 15 | } 16 | 17 | variable "function_alias_arn" { 18 | description = "Threat Intel Downloader function alias arn, exported from the tf_lambda module" 19 | } 20 | 21 | variable "function_cloudwatch_log_group_name" { 22 | description = "Threat Intel Downloader function cloudwatch log group name, exported from the tf_lambda module" 23 | } 24 | 25 | variable "parameter_name" { 26 | default = "threat_intel_downloader_api_creds" 27 | type = string 28 | } 29 | 30 | variable "monitoring_sns_topic" { 31 | } 32 | 33 | // ***** DynamoDB Table configuration ***** 34 | 35 | variable "table_rcu" { 36 | default = 10 37 | } 38 | 39 | variable "table_wcu" { 40 | default = 10 41 | } 42 | 43 | variable "max_read_capacity" { 44 | default = 5 45 | } 46 | 47 | variable "min_read_capacity" { 48 | default = 5 49 | } 50 | 51 | variable "target_utilization" { 52 | default = 70 53 | } 54 | -------------------------------------------------------------------------------- /streamalert_cli/apps/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/airbnb/streamalert/75ba140d2e1aa6e903313d88326920adcb8bff45/streamalert_cli/apps/__init__.py -------------------------------------------------------------------------------- 
/streamalert_cli/athena/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/airbnb/streamalert/75ba140d2e1aa6e903313d88326920adcb8bff45/streamalert_cli/athena/__init__.py -------------------------------------------------------------------------------- /streamalert_cli/configure/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/airbnb/streamalert/75ba140d2e1aa6e903313d88326920adcb8bff45/streamalert_cli/configure/__init__.py -------------------------------------------------------------------------------- /streamalert_cli/kinesis/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/airbnb/streamalert/75ba140d2e1aa6e903313d88326920adcb8bff45/streamalert_cli/kinesis/__init__.py -------------------------------------------------------------------------------- /streamalert_cli/logger.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright 2017-present Airbnb, Inc. 3 | 4 | Licensed under the Apache License, Version 2.0 (the "License"); 5 | you may not use this file except in compliance with the License. 6 | You may obtain a copy of the License at 7 | 8 | http://www.apache.org/licenses/LICENSE-2.0 9 | 10 | Unless required by applicable law or agreed to in writing, software 11 | distributed under the License is distributed on an "AS IS" BASIS, 12 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | See the License for the specific language governing permissions and 14 | limitations under the License. 
import logging
import logging.handlers


def set_logger_levels(debug=False):
    """Set the levels for all registered loggers.

    Loggers whose names start with 'streamalert' are set to DEBUG when debug
    logging is requested, loggers belonging to this package are set to INFO,
    and every other registered logger is disabled.

    Args:
        debug (bool): True to enable debug logging, False otherwise
    """
    for name, logger in logging.Logger.manager.loggerDict.items():
        # PlaceHolder entries are namespace stubs, not configurable loggers
        if isinstance(logger, logging.PlaceHolder):
            continue

        if debug and name.startswith('streamalert'):
            # debug is always True on this branch, so the original
            # `logging.DEBUG if debug else logging.INFO` ternary had an
            # unreachable else-arm; set DEBUG directly
            logger.setLevel(logging.DEBUG)
        elif name.startswith(__package__):
            logger.setLevel(logging.INFO)
        else:
            # NOTE(review): when debug is False, 'streamalert.*' loggers that
            # are not under __package__ fall through to here and get disabled
            # -- confirm this is intended
            logger.disabled = True  # disable this logger if it's not one of ours
https://raw.githubusercontent.com/airbnb/streamalert/75ba140d2e1aa6e903313d88326920adcb8bff45/streamalert_cli/outputs/__init__.py -------------------------------------------------------------------------------- /streamalert_cli/status/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/airbnb/streamalert/75ba140d2e1aa6e903313d88326920adcb8bff45/streamalert_cli/status/__init__.py -------------------------------------------------------------------------------- /streamalert_cli/terraform/__init__.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | from streamalert_cli import STREAMALERT_CLI_ROOT 4 | 5 | TERRAFORM_FILES_PATH = os.path.join(STREAMALERT_CLI_ROOT, '_infrastructure') 6 | -------------------------------------------------------------------------------- /streamalert_cli/test/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/airbnb/streamalert/75ba140d2e1aa6e903313d88326920adcb8bff45/streamalert_cli/test/__init__.py -------------------------------------------------------------------------------- /streamalert_cli/test/format.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright 2017-present Airbnb, Inc. 3 | 4 | Licensed under the Apache License, Version 2.0 (the "License"); 5 | you may not use this file except in compliance with the License. 6 | You may obtain a copy of the License at 7 | 8 | http://www.apache.org/licenses/LICENSE-2.0 9 | 10 | Unless required by applicable law or agreed to in writing, software 11 | distributed under the License is distributed on an "AS IS" BASIS, 12 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | See the License for the specific language governing permissions and 14 | limitations under the License. 
def format_green(value):
    """Return value wrapped in the ANSI escape sequence for bold green text."""
    return f'\033[0;32;1m{value}\033[0m'


def format_red(value):
    """Return value wrapped in the ANSI escape sequence for bold red text."""
    return f'\033[0;31;1m{value}\033[0m'


def format_underline(value):
    """Return value wrapped in the ANSI escape sequence for underlined text."""
    return f'\033[4m{value}\033[0m'


def format_yellow(value):
    """Return value wrapped in the ANSI escape sequence for bold yellow text."""
    return f'\033[0;33;1m{value}\033[0m'
/bin/bash 2 | autopep8 --in-place --max-line-length 100 --aggressive --aggressive $1 -------------------------------------------------------------------------------- /tests/scripts/covreport.sh: -------------------------------------------------------------------------------- 1 | #! /bin/bash 2 | if [ -f ".coverage" ]; then 3 | echo "Removing previous coverage file" 4 | rm .coverage 5 | fi 6 | 7 | if [ -d "htmlcov" ]; then 8 | echo "Removing previously generated html coverage folder" 9 | rm -rf htmlcov 10 | fi 11 | 12 | tests/scripts/unit_tests.sh 13 | coverage html 14 | open htmlcov/index.html 15 | -------------------------------------------------------------------------------- /tests/scripts/pylint.sh: -------------------------------------------------------------------------------- 1 | #! /bin/bash 2 | echo 'Starting pylint script' 3 | pylint *.py rules streamalert streamalert_cli tests 4 | -------------------------------------------------------------------------------- /tests/scripts/rule_test.sh: -------------------------------------------------------------------------------- 1 | #! /bin/bash 2 | ./manage.py test rules $@ 3 | -------------------------------------------------------------------------------- /tests/scripts/sort_configs.py: -------------------------------------------------------------------------------- 1 | #! 
"""Script to sort conf/logs.json schema file"""
import json
import os
import logging

from collections import OrderedDict

# Path to the schema file this script sorts when run directly
CONF_LOGS_FILE = os.path.join(os.path.dirname(__file__), '../../conf/logs.json')


class JsonFileSorter:
    """Sorts a JSON schema file alphabetically by its top-level keys while
    preserving the ordering of all nested keys.

    (The original docstring claimed this class "tests" that the file is
    formatted properly, but the code only sorts and rewrites it.)
    """

    def __init__(self):
        # Root logger; NOTE(review): nothing here configures a handler or
        # level, so the info() calls below are silent by default -- confirm
        # whether output is expected when run as a script
        self._logger = logging.getLogger()

    def sort_json_file(self, file_path):
        """Sort the JSON document at file_path by top-level key, in place.

        Args:
            file_path (str): Path to the JSON file that should be sorted
        """
        self._logger.info('Sorting file: {}...'.format(file_path))

        with open(file_path, 'r') as infile:
            original_text = infile.read().strip()

        # Load the JSON document using OrderedDict, as it allows us to preserve
        # the ordering of the internal json keys. This is important for certain
        # schemas, such as ones parsed via csv, as the ordering of the keys
        # does matter.
        schema = json.loads(original_text, object_pairs_hook=OrderedDict)

        # Sort the loaded schema by top-level key only; nested key ordering is
        # preserved (the redundant list() around items() was removed -- sorted()
        # accepts any iterable)
        ordered_schema = OrderedDict(sorted(schema.items(), key=lambda k: k[0]))

        with open(file_path, 'w') as outfile:
            json.dump(ordered_schema, outfile, indent=2, separators=(',', ': '))
            outfile.write('\n')  # trailing newline so the file ends cleanly

        self._logger.info('Sorting completed.')


if __name__ == "__main__":
    sorter = JsonFileSorter()
    sorter.sort_json_file(CONF_LOGS_FILE)
/bin/bash 2 | sphinx-build -W docs/source docs/build && open docs/build/index.html -------------------------------------------------------------------------------- /tests/scripts/unit_tests.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Export fake creds to keep moto from complaining 4 | export AWS_ACCESS_KEY_ID=foobar_key 5 | export AWS_SECRET_ACCESS_KEY=foobar_secret 6 | export AWS_SESSION_TOKEN=foobar_session_token 7 | export AWS_DEFAULT_REGION=us-east-1 8 | 9 | nosetests tests/unit \ 10 | --with-coverage \ 11 | --cover-erase \ 12 | --cover-package=streamalert \ 13 | --cover-package=streamalert_cli \ 14 | --cover-min-percentage=80 \ 15 | --cover-html \ 16 | --cover-html-dir=htmlcov \ 17 | --with-timer \ 18 | --timer-top-n=10 19 | -------------------------------------------------------------------------------- /tests/scripts/update_reqs.sh: -------------------------------------------------------------------------------- 1 | #! 
/bin/bash 2 | pip install -r requirements-top-level.txt --upgrade --force-reinstall --no-cache-dir 3 | pip freeze -r requirements-top-level.txt > requirements.txt 4 | 5 | echo "Please also update library versions in streamalert_cli/manage_lambda/package.py" 6 | -------------------------------------------------------------------------------- /tests/unit/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/airbnb/streamalert/75ba140d2e1aa6e903313d88326920adcb8bff45/tests/unit/__init__.py -------------------------------------------------------------------------------- /tests/unit/conf/clusters/trusted.json: -------------------------------------------------------------------------------- 1 | { 2 | "data_sources": {}, 3 | "id": "trusted", 4 | "classifier_config": { 5 | "inputs": { 6 | "aws-sns": [] 7 | }, 8 | "log_level": "info", 9 | "log_retention_days": 14, 10 | "memory": 128, 11 | "metric_alarms": { 12 | "errors": { 13 | "enabled": true, 14 | "evaluation_periods": 1, 15 | "period_secs": 120, 16 | "threshold": 0 17 | }, 18 | "throttles": { 19 | "enabled": true, 20 | "evaluation_periods": 1, 21 | "period_secs": 120, 22 | "threshold": 0 23 | } 24 | }, 25 | "timeout": 60, 26 | "vpc_config": { 27 | "security_group_ids": [], 28 | "subnet_ids": [] 29 | } 30 | }, 31 | "enable_threat_intel": false, 32 | "modules": { 33 | "cloudwatch_monitoring": { 34 | "enabled": false 35 | }, 36 | "kinesis": { 37 | "streams": { 38 | "retention": 24, 39 | "shards": 1, 40 | "trusted_accounts": [ 41 | "98765432100" 42 | ], 43 | "terraform_outputs": [ 44 | "username", 45 | "access_key_id", 46 | "secret_key" 47 | ] 48 | } 49 | }, 50 | "kinesis_events": { 51 | "enabled": true 52 | }, 53 | "s3_events": { 54 | "unit-test-bucket": [] 55 | } 56 | } 57 | } -------------------------------------------------------------------------------- /tests/unit/conf/normalized_types.json: 
-------------------------------------------------------------------------------- 1 | { 2 | "test_cloudtrail": { 3 | "sourceAddress": [ 4 | "sourceIPAddress" 5 | ] 6 | }, 7 | "test_cloudwatch": { 8 | "destinationAddress": [ 9 | "destination" 10 | ] 11 | } 12 | } -------------------------------------------------------------------------------- /tests/unit/conf/outputs.json: -------------------------------------------------------------------------------- 1 | { 2 | "aws-cloudwatch-log": [ 3 | "unit_test_default" 4 | ], 5 | "aws-firehose": { 6 | "unit_test_delivery_stream": "unit_test_delivery_stream" 7 | }, 8 | "aws-lambda": { 9 | "unit_test_lambda": "unit_test_function", 10 | "unit_test_lambda_qual": "unit_test_qualified_function:production" 11 | }, 12 | "aws-s3": { 13 | "unit_test_bucket": "unit.test.bucket.name" 14 | }, 15 | "aws-sns": { 16 | "unit_test_topic": "unit_test_topic_name" 17 | }, 18 | "aws-sqs": { 19 | "unit_test_queue": "unit_test_queue_name" 20 | }, 21 | "demisto": [ 22 | "unit_test_demisto" 23 | ], 24 | "pagerduty": [ 25 | "unit_test_pagerduty" 26 | ], 27 | "phantom": [ 28 | "unit_test_phantom" 29 | ], 30 | "slack": [ 31 | "unit_test_channel" 32 | ], 33 | "jira-v2": [ 34 | "unit_test_channel" 35 | ] 36 | } -------------------------------------------------------------------------------- /tests/unit/conf/scheduled_queries.json: -------------------------------------------------------------------------------- 1 | { 2 | "enabled": true, 3 | "config": { 4 | "destination_kinesis_stream": "unit-test_stream", 5 | "sfn_timeout_secs": 3600, 6 | "sfn_wait_secs": 30 7 | }, 8 | "packs": { 9 | "hourly": { 10 | "description": "Runs all hourly queries. Once per day on :05", 11 | "schedule_expression": "cron(5 * * * ? 
*)" 12 | } 13 | }, 14 | "lambda_config": { 15 | "log_level": "info", 16 | "log_retention_days": 14, 17 | "memory": 128, 18 | "timeout": 60, 19 | "alarms_enabled": true, 20 | "error_threshold": 1, 21 | "error_period_secs": 3600, 22 | "error_evaluation_periods": 2 23 | } 24 | } -------------------------------------------------------------------------------- /tests/unit/conf/threat_intel.json: -------------------------------------------------------------------------------- 1 | { 2 | "dynamodb_table_name": "table_name", 3 | "enabled": false, 4 | "excluded_iocs": { 5 | "ip": { 6 | "10.0.0.0/8": { 7 | "comment": "RFC1918" 8 | } 9 | } 10 | }, 11 | "normalized_ioc_types": { 12 | "ip": [ 13 | "sourceAddress", 14 | "destinationAddress" 15 | ] 16 | } 17 | } -------------------------------------------------------------------------------- /tests/unit/conf_athena/clusters/test.json: -------------------------------------------------------------------------------- 1 | { 2 | "classifier_config": {}, 3 | "data_sources": {}, 4 | "id": "test" 5 | } -------------------------------------------------------------------------------- /tests/unit/conf_athena/global.json: -------------------------------------------------------------------------------- 1 | { 2 | "account": { 3 | "aws_account_id": "12345678910", 4 | "kms_key_alias": "alternate-alias", 5 | "prefix": "unit-test", 6 | "region": "us-west-1" 7 | }, 8 | "infrastructure": { 9 | "alerts_table": { 10 | "read_capacity": 5, 11 | "write_capacity": 5 12 | }, 13 | "firehose": { 14 | "buffer_interval": 900, 15 | "buffer_size": 128, 16 | "enabled": true, 17 | "enabled_logs": {}, 18 | "use_prefix": true 19 | } 20 | } 21 | } -------------------------------------------------------------------------------- /tests/unit/conf_athena/lambda.json: -------------------------------------------------------------------------------- 1 | { 2 | "athena_partitioner_config": { 3 | "file_format": "json", 4 | "memory": "128", 5 | "timeout": "60" 6 | } 7 | } 
-------------------------------------------------------------------------------- /tests/unit/conf_athena/schemas/unit_test_schemas.json: -------------------------------------------------------------------------------- 1 | { 2 | "json:embedded": { 3 | "schema": { 4 | "nested_key_01": "string", 5 | "nested_key_02": "string" 6 | }, 7 | "parser": "json", 8 | "configuration": { 9 | "embedded_json": true, 10 | "envelope_keys": { 11 | "env_key_01": "string", 12 | "env_key_02": "string" 13 | }, 14 | "json_path": "test_list[*].message" 15 | } 16 | } 17 | } -------------------------------------------------------------------------------- /tests/unit/helpers/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/airbnb/streamalert/75ba140d2e1aa6e903313d88326920adcb8bff45/tests/unit/helpers/__init__.py -------------------------------------------------------------------------------- /tests/unit/streamalert/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/airbnb/streamalert/75ba140d2e1aa6e903313d88326920adcb8bff45/tests/unit/streamalert/__init__.py -------------------------------------------------------------------------------- /tests/unit/streamalert/alert_merger/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/airbnb/streamalert/75ba140d2e1aa6e903313d88326920adcb8bff45/tests/unit/streamalert/alert_merger/__init__.py -------------------------------------------------------------------------------- /tests/unit/streamalert/alert_processor/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright 2017-present Airbnb, Inc. 3 | Licensed under the Apache License, Version 2.0 (the "License"); 4 | you may not use this file except in compliance with the License. 
5 | You may obtain a copy of the License at 6 | http://www.apache.org/licenses/LICENSE-2.0 7 | Unless required by applicable law or agreed to in writing, software 8 | distributed under the License is distributed on an "AS IS" BASIS, 9 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 10 | See the License for the specific language governing permissions and 11 | limitations under the License. 12 | """ 13 | import json 14 | 15 | from streamalert.shared import resources 16 | from streamalert.shared.config import load_config 17 | 18 | REGION = 'us-east-1' 19 | ACCOUNT_ID = '123456789012' 20 | PREFIX = 'prefix' 21 | FUNCTION_NAME = '{}_streamalert_alert_processor'.format(PREFIX) 22 | 23 | base_config = load_config('tests/unit/conf/', include={'outputs.json'})['outputs'] 24 | CONFIG = resources.merge_required_outputs(base_config, PREFIX) 25 | 26 | ALERTS_TABLE = '{}_streamalert_alerts'.format(PREFIX) 27 | KMS_ALIAS = 'alias/streamalert_secrets_test' 28 | 29 | MOCK_ENV = { 30 | 'AWS_ACCOUNT_ID': ACCOUNT_ID, 31 | 'STREAMALERT_PREFIX': PREFIX, 32 | 'AWS_DEFAULT_REGION': REGION 33 | } 34 | -------------------------------------------------------------------------------- /tests/unit/streamalert/alert_processor/outputs/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/airbnb/streamalert/75ba140d2e1aa6e903313d88326920adcb8bff45/tests/unit/streamalert/alert_processor/outputs/__init__.py -------------------------------------------------------------------------------- /tests/unit/streamalert/alert_processor/outputs/credentials/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/airbnb/streamalert/75ba140d2e1aa6e903313d88326920adcb8bff45/tests/unit/streamalert/alert_processor/outputs/credentials/__init__.py -------------------------------------------------------------------------------- 
/tests/unit/streamalert/alert_processor/test_helpers.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright 2017-present Airbnb, Inc. 3 | 4 | Licensed under the Apache License, Version 2.0 (the "License"); 5 | you may not use this file except in compliance with the License. 6 | You may obtain a copy of the License at 7 | 8 | http://www.apache.org/licenses/LICENSE-2.0 9 | 10 | Unless required by applicable law or agreed to in writing, software 11 | distributed under the License is distributed on an "AS IS" BASIS, 12 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | See the License for the specific language governing permissions and 14 | limitations under the License. 15 | """ 16 | from nose.tools import assert_equal 17 | 18 | from streamalert.alert_processor.helpers import elide_string_middle 19 | 20 | 21 | def test_elide_string_middle(): 22 | """Alert Processor - Helpers - String Truncation""" 23 | alphabet = 'abcdefghijklmnopqrstuvwxyz' 24 | 25 | # String shortened 26 | assert_equal('ab ... yz', elide_string_middle(alphabet, 10)) 27 | assert_equal('abcde ... vwxyz', elide_string_middle(alphabet, 15)) 28 | assert_equal('abcdefg ... tuvwxyz', elide_string_middle(alphabet, 20)) 29 | assert_equal('abcdefghij ... 
qrstuvwxyz', elide_string_middle(alphabet, 25)) 30 | 31 | # String unchanged 32 | assert_equal(alphabet, elide_string_middle(alphabet, 26)) 33 | assert_equal(alphabet, elide_string_middle(alphabet, 50)) 34 | -------------------------------------------------------------------------------- /tests/unit/streamalert/apps/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/airbnb/streamalert/75ba140d2e1aa6e903313d88326920adcb8bff45/tests/unit/streamalert/apps/__init__.py -------------------------------------------------------------------------------- /tests/unit/streamalert/apps/test_apps/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/airbnb/streamalert/75ba140d2e1aa6e903313d88326920adcb8bff45/tests/unit/streamalert/apps/test_apps/__init__.py -------------------------------------------------------------------------------- /tests/unit/streamalert/apps/test_main.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright 2017-present Airbnb, Inc. 3 | 4 | Licensed under the Apache License, Version 2.0 (the "License"); 5 | you may not use this file except in compliance with the License. 6 | You may obtain a copy of the License at 7 | 8 | http://www.apache.org/licenses/LICENSE-2.0 9 | 10 | Unless required by applicable law or agreed to in writing, software 11 | distributed under the License is distributed on an "AS IS" BASIS, 12 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | See the License for the specific language governing permissions and 14 | limitations under the License. 
15 | """ 16 | import os 17 | 18 | from mock import patch 19 | from moto import mock_ssm 20 | 21 | from streamalert.apps.main import handler 22 | from tests.unit.streamalert.apps.test_helpers import get_event, put_mock_params 23 | from tests.unit.streamalert.shared.test_config import get_mock_lambda_context 24 | 25 | 26 | @mock_ssm 27 | @patch.dict(os.environ, {'AWS_DEFAULT_REGION': 'us-east-1'}) 28 | @patch('streamalert.apps.app_base.AppIntegration.gather') 29 | def test_handler(gather_mock): 30 | """StreamAlertApp Lambda - Test Handler""" 31 | app_type = 'duo_auth' 32 | event = get_event(app_type) 33 | put_mock_params(app_type) 34 | handler(event, get_mock_lambda_context(app_type)) 35 | gather_mock.assert_called_once() 36 | -------------------------------------------------------------------------------- /tests/unit/streamalert/athena_partitioner/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/airbnb/streamalert/75ba140d2e1aa6e903313d88326920adcb8bff45/tests/unit/streamalert/athena_partitioner/__init__.py -------------------------------------------------------------------------------- /tests/unit/streamalert/classifier/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/airbnb/streamalert/75ba140d2e1aa6e903313d88326920adcb8bff45/tests/unit/streamalert/classifier/__init__.py -------------------------------------------------------------------------------- /tests/unit/streamalert/classifier/clients/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/airbnb/streamalert/75ba140d2e1aa6e903313d88326920adcb8bff45/tests/unit/streamalert/classifier/clients/__init__.py -------------------------------------------------------------------------------- /tests/unit/streamalert/classifier/payload/__init__.py: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/airbnb/streamalert/75ba140d2e1aa6e903313d88326920adcb8bff45/tests/unit/streamalert/classifier/payload/__init__.py -------------------------------------------------------------------------------- /tests/unit/streamalert/classifier/payload/test_payload_apps.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright 2017-present Airbnb, Inc. 3 | 4 | Licensed under the Apache License, Version 2.0 (the "License"); 5 | you may not use this file except in compliance with the License. 6 | You may obtain a copy of the License at 7 | 8 | http://www.apache.org/licenses/LICENSE-2.0 9 | 10 | Unless required by applicable law or agreed to in writing, software 11 | distributed under the License is distributed on an "AS IS" BASIS, 12 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | See the License for the specific language governing permissions and 14 | limitations under the License. 
15 | """ 16 | from nose.tools import assert_equal 17 | 18 | from streamalert.classifier.payload.apps import AppPayload 19 | 20 | 21 | class TestAppPayload: 22 | """AppPayload tests""" 23 | # pylint: disable=no-self-use 24 | 25 | def test_pre_parse(self): 26 | """AppPayload - Pre Parse""" 27 | # pylint: disable=protected-access 28 | expected_result = [ 29 | { 30 | 'record_01': 'value' 31 | }, 32 | { 33 | 'record_02': 'value' 34 | } 35 | ] 36 | record = { 37 | 'logs': expected_result 38 | } 39 | 40 | payload = AppPayload(None, record) 41 | result = [rec._record_data for rec in list(payload.pre_parse())] 42 | assert_equal(result, expected_result) 43 | -------------------------------------------------------------------------------- /tests/unit/streamalert/classifier/payload/test_payload_sns.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright 2017-present Airbnb, Inc. 3 | 4 | Licensed under the Apache License, Version 2.0 (the "License"); 5 | you may not use this file except in compliance with the License. 6 | You may obtain a copy of the License at 7 | 8 | http://www.apache.org/licenses/LICENSE-2.0 9 | 10 | Unless required by applicable law or agreed to in writing, software 11 | distributed under the License is distributed on an "AS IS" BASIS, 12 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | See the License for the specific language governing permissions and 14 | limitations under the License. 
15 | """ 16 | from nose.tools import assert_equal 17 | 18 | from streamalert.classifier.payload.sns import SnsPayload 19 | 20 | 21 | class TestSnsPayload: 22 | """SnsPayload tests""" 23 | # pylint: disable=no-self-use,protected-access 24 | 25 | def test_pre_parse(self): 26 | """SnsPayload - Pre Parse""" 27 | # pylint: disable=protected-access 28 | expected_result = ['foobarbaz'] 29 | record = { 30 | 'Sns': { 31 | 'MessageId': 'db42ca0e-215c-5f63-9e92-9e2e953c4e6c', 32 | 'Message': expected_result[0] 33 | }, 34 | 'EventSubscriptionArn': ( 35 | 'arn:aws:sns:us-east-1:123456789012:foobar:44dbbe73-3aca-4bb1-863b-b82f058c0b19' 36 | ) 37 | } 38 | 39 | payload = SnsPayload(None, record) 40 | result = [rec._record_data for rec in list(payload.pre_parse())] 41 | assert_equal(result, expected_result) 42 | -------------------------------------------------------------------------------- /tests/unit/streamalert/rule_promotion/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/airbnb/streamalert/75ba140d2e1aa6e903313d88326920adcb8bff45/tests/unit/streamalert/rule_promotion/__init__.py -------------------------------------------------------------------------------- /tests/unit/streamalert/rules_engine/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/airbnb/streamalert/75ba140d2e1aa6e903313d88326920adcb8bff45/tests/unit/streamalert/rules_engine/__init__.py -------------------------------------------------------------------------------- /tests/unit/streamalert/scheduled_queries/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/airbnb/streamalert/75ba140d2e1aa6e903313d88326920adcb8bff45/tests/unit/streamalert/scheduled_queries/__init__.py -------------------------------------------------------------------------------- 
/tests/unit/streamalert/scheduled_queries/command/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/airbnb/streamalert/75ba140d2e1aa6e903313d88326920adcb8bff45/tests/unit/streamalert/scheduled_queries/command/__init__.py -------------------------------------------------------------------------------- /tests/unit/streamalert/scheduled_queries/container/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/airbnb/streamalert/75ba140d2e1aa6e903313d88326920adcb8bff45/tests/unit/streamalert/scheduled_queries/container/__init__.py -------------------------------------------------------------------------------- /tests/unit/streamalert/scheduled_queries/handlers/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/airbnb/streamalert/75ba140d2e1aa6e903313d88326920adcb8bff45/tests/unit/streamalert/scheduled_queries/handlers/__init__.py -------------------------------------------------------------------------------- /tests/unit/streamalert/scheduled_queries/query_packs/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/airbnb/streamalert/75ba140d2e1aa6e903313d88326920adcb8bff45/tests/unit/streamalert/scheduled_queries/query_packs/__init__.py -------------------------------------------------------------------------------- /tests/unit/streamalert/scheduled_queries/state/.gitignore: -------------------------------------------------------------------------------- 1 | testfile.json 2 | -------------------------------------------------------------------------------- /tests/unit/streamalert/scheduled_queries/state/__init__.py: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/airbnb/streamalert/75ba140d2e1aa6e903313d88326920adcb8bff45/tests/unit/streamalert/scheduled_queries/state/__init__.py -------------------------------------------------------------------------------- /tests/unit/streamalert/scheduled_queries/streamalert/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/airbnb/streamalert/75ba140d2e1aa6e903313d88326920adcb8bff45/tests/unit/streamalert/scheduled_queries/streamalert/__init__.py -------------------------------------------------------------------------------- /tests/unit/streamalert/shared/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/airbnb/streamalert/75ba140d2e1aa6e903313d88326920adcb8bff45/tests/unit/streamalert/shared/__init__.py -------------------------------------------------------------------------------- /tests/unit/streamalert/shared/lookup_tables/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/airbnb/streamalert/75ba140d2e1aa6e903313d88326920adcb8bff45/tests/unit/streamalert/shared/lookup_tables/__init__.py -------------------------------------------------------------------------------- /tests/unit/streamalert/threat_intel_downloader/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/airbnb/streamalert/75ba140d2e1aa6e903313d88326920adcb8bff45/tests/unit/streamalert/threat_intel_downloader/__init__.py -------------------------------------------------------------------------------- /tests/unit/streamalert/threat_intel_downloader/test_helpers.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright 2017-present Airbnb, Inc. 
3 | 4 | Licensed under the Apache License, Version 2.0 (the "License"); 5 | you may not use this file except in compliance with the License. 6 | You may obtain a copy of the License at 7 | 8 | http://www.apache.org/licenses/LICENSE-2.0 9 | 10 | Unless required by applicable law or agreed to in writing, software 11 | distributed under the License is distributed on an "AS IS" BASIS, 12 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | See the License for the specific language governing permissions and 14 | limitations under the License. 15 | """ 16 | import json 17 | 18 | import boto3 19 | 20 | 21 | def put_mock_params(key, value): 22 | """Helper function to put mock parameters in parameter store""" 23 | ssm_client = boto3.client('ssm') 24 | ssm_client.put_parameter( 25 | Name=key, 26 | Value=json.dumps(value), 27 | Type='SecureString', 28 | Overwrite=True 29 | ) 30 | -------------------------------------------------------------------------------- /tests/unit/streamalert_cli/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/airbnb/streamalert/75ba140d2e1aa6e903313d88326920adcb8bff45/tests/unit/streamalert_cli/__init__.py -------------------------------------------------------------------------------- /tests/unit/streamalert_cli/athena/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/airbnb/streamalert/75ba140d2e1aa6e903313d88326920adcb8bff45/tests/unit/streamalert_cli/athena/__init__.py -------------------------------------------------------------------------------- /tests/unit/streamalert_cli/manage_lambda/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/airbnb/streamalert/75ba140d2e1aa6e903313d88326920adcb8bff45/tests/unit/streamalert_cli/manage_lambda/__init__.py 
-------------------------------------------------------------------------------- /tests/unit/streamalert_cli/terraform/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/airbnb/streamalert/75ba140d2e1aa6e903313d88326920adcb8bff45/tests/unit/streamalert_cli/terraform/__init__.py -------------------------------------------------------------------------------- /tests/unit/streamalert_cli/test/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/airbnb/streamalert/75ba140d2e1aa6e903313d88326920adcb8bff45/tests/unit/streamalert_cli/test/__init__.py -------------------------------------------------------------------------------- /tests/unit/streamalert_cli/test/helpers.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright 2017-present Airbnb, Inc. 3 | 4 | Licensed under the Apache License, Version 2.0 (the 'License'); 5 | you may not use this file except in compliance with the License. 6 | You may obtain a copy of the License at 7 | 8 | http://www.apache.org/licenses/LICENSE-2.0 9 | 10 | Unless required by applicable law or agreed to in writing, software 11 | distributed under the License is distributed on an 'AS IS' BASIS, 12 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | See the License for the specific language governing permissions and 14 | limitations under the License. 
15 | """ 16 | import json 17 | 18 | from nose.tools import nottest 19 | 20 | 21 | @nottest 22 | def basic_test_file_json(**kwargs): 23 | return json.dumps([basic_test_event_data(**kwargs)]) 24 | 25 | 26 | @nottest 27 | def basic_test_event_data( 28 | log='misc_log_type', 29 | service='unit-test-service', 30 | source='unit-test-source', 31 | override_data=None): 32 | result = { 33 | 'data': { 34 | 'key': 'value' 35 | }, 36 | 'description': 'Integration test event for unit testing', 37 | 'log': log, 38 | 'service': service, 39 | 'source': source, 40 | 'trigger_rules': [ 41 | 'misc_rule' 42 | ] 43 | } 44 | 45 | if override_data: 46 | del result['data'] 47 | result['override_record'] = override_data 48 | result['log'] = 'override_log_type' 49 | 50 | return result 51 | -------------------------------------------------------------------------------- /vagrant/cli/python-virtualenvwrapper/configure.sh: -------------------------------------------------------------------------------- 1 | # Set up the virtual environment 2 | source /usr/share/virtualenvwrapper/virtualenvwrapper.sh 3 | mkvirtualenv --python=/usr/bin/$PYTHON_VERSION streamalert 4 | 5 | # Add virtualenvwrapper to the bashrc 6 | echo "source /usr/share/virtualenvwrapper/virtualenvwrapper.sh" >> ~/.bashrc 7 | echo "workon streamalert" >> ~/.bashrc 8 | -------------------------------------------------------------------------------- /vagrant/cli/python-virtualenvwrapper/install.sh: -------------------------------------------------------------------------------- 1 | # Install python dependencies 2 | apt-get install python-pip python3.7-venv virtualenvwrapper -y 3 | 4 | # Install Python with the version specified from the deadsnakes ppa 5 | apt-get install software-properties-common -y 6 | add-apt-repository ppa:deadsnakes/ppa -y 7 | apt-get update 8 | apt-get install ${PYTHON_VERSION} -y 9 | 10 | # Install the dev headers for extensions support 11 | apt-get install ${PYTHON_VERSION}-dev -y 12 | 
-------------------------------------------------------------------------------- /vagrant/cli/streamalert/configure.sh: -------------------------------------------------------------------------------- 1 | # Move into the source tree shared directory 2 | cd /vagrant 3 | 4 | # Enable the `workon` command 5 | source /usr/share/virtualenvwrapper/virtualenvwrapper.sh 6 | workon streamalert 7 | 8 | # Install the requirements.txt into the streamalert virtualenv 9 | pip install -r requirements.txt 10 | -------------------------------------------------------------------------------- /vagrant/cli/streamalert/install.sh: -------------------------------------------------------------------------------- 1 | # Install git so we can clone the streamalert repository 2 | apt-get install git awscli -y 3 | 4 | # Configure ssh to allow env variables AWS_* to be passed through 5 | cp /vagrant/vagrant/cli/streamalert/sshd_config /etc/ssh/sshd_config && \ 6 | /etc/init.d/ssh restart 7 | 8 | -------------------------------------------------------------------------------- /vagrant/cli/terraform/install.sh: -------------------------------------------------------------------------------- 1 | # Install the unzip utility 2 | apt-get install unzip -y 3 | 4 | # Pull down the version of Terraform that we want from the remote 5 | wget https://releases.hashicorp.com/terraform/${TERRAFORM_VERSION}/terraform_${TERRAFORM_VERSION}_linux_amd64.zip 6 | 7 | # Unzip the Terraform binary, give it executable permissions, and put it in $PATH 8 | unzip terraform_${TERRAFORM_VERSION}_linux_amd64.zip 9 | chmod +x terraform 10 | mv terraform /usr/bin/terraform 11 | --------------------------------------------------------------------------------